Big improvement in getting all the words created through entries, inflections, and conjugations. New method of storing the words to account for izafe etc.
This commit is contained in:
parent 2af99bbd8d
commit 3a9e172a7b
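In short: a word is now stored as a reversible PsHash string — "pXf" for a plain word, with each hyphenated segment (as in izafe constructions) appended as "-pXf", the p left empty for unwritten sounds like the izafe vowel. A minimal sketch of the encoding, using local type stubs in place of the Types exported by @lingdocs/inflect (assumed shapes, matching the tests in the diff below):

// Sketch of the new storage format; PsWord/HyphenPsContent are local stand-ins
// for the corresponding @lingdocs/inflect types (see word-list-maker.ts below).
type PsHash = `${string}X${string}`;

type HyphenPsContent =
    | { type: "written", p: string, f: string }
    | { type: "unwritten", f: string };  // e.g. the izafe vowel "e"

type PsWord = { p: string, f: string, hyphen?: HyphenPsContent[] };

function psHash(o: PsWord): PsHash {
    if (o.hyphen) {
        // append one "-pXf" segment per hyphenated part, p empty when unwritten
        return o.hyphen.reduce(
            (acc, h) => (acc + `-${h.type === "written" ? h.p : ""}X${h.f}`) as PsHash,
            `${o.p}X${o.f}` as PsHash,
        );
    }
    return `${o.p}X${o.f}`;
}

// psHash({ p: "کنار", f: "kanaar", hyphen: [
//     { type: "unwritten", f: "e" },
//     { type: "written", p: "دریاب", f: "daryaab" },
// ]}) === "کنارXkanaar-Xe-دریابXdaryaab"

Hashing this way lets the collector deduplicate everything in one Set of strings while still recovering the full word structure afterwards with dePsHash (both added in word-list-maker.ts below).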
@@ -9,7 +9,7 @@
   "version": "1.0.0",
   "license": "ISC",
   "dependencies": {
-    "@lingdocs/inflect": "5.5.1",
+    "@lingdocs/inflect": "5.7.11",
     "base64url": "^3.0.1",
     "bcryptjs": "^2.4.3",
     "connect-redis": "^6.0.0",

@@ -124,9 +124,9 @@
       }
     },
     "node_modules/@lingdocs/inflect": {
-      "version": "5.5.1",
-      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.5.1.tgz",
-      "integrity": "sha512-LDddZg1QYQGJtQl09Ezy+YPO1lI7vz1IQQaIStYTqtQynlKjVjcd1tpAULYlcc6fwoFsr3ar2ZGm2/G0Dujg7g==",
+      "version": "5.7.11",
+      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.7.11.tgz",
+      "integrity": "sha512-OwKYC7UT74y0GeXszRcejG6gW0X8jwFHLRkl74f6VGx8lqqyMCfqC16LOkLUm32fzRNjTrATP4X6tTdzBNvNrw==",
       "license": "MIT",
       "dependencies": {
         "pbf": "^3.2.1",

@@ -2741,9 +2741,9 @@
      }
    },
    "@lingdocs/inflect": {
-      "version": "5.5.1",
-      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.5.1.tgz",
-      "integrity": "sha512-LDddZg1QYQGJtQl09Ezy+YPO1lI7vz1IQQaIStYTqtQynlKjVjcd1tpAULYlcc6fwoFsr3ar2ZGm2/G0Dujg7g==",
+      "version": "5.7.11",
+      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.7.11.tgz",
+      "integrity": "sha512-OwKYC7UT74y0GeXszRcejG6gW0X8jwFHLRkl74f6VGx8lqqyMCfqC16LOkLUm32fzRNjTrATP4X6tTdzBNvNrw==",
      "requires": {
        "pbf": "^3.2.1",
        "rambda": "^7.3.0"

@@ -11,7 +11,7 @@
   "author": "",
   "license": "ISC",
   "dependencies": {
-    "@lingdocs/inflect": "5.5.1",
+    "@lingdocs/inflect": "5.7.11",
     "base64url": "^3.0.1",
     "bcryptjs": "^2.4.3",
     "connect-redis": "^6.0.0",

@@ -45,10 +45,10 @@
     "@jridgewell/resolve-uri" "^3.0.3"
     "@jridgewell/sourcemap-codec" "^1.4.10"
 
-"@lingdocs/inflect@5.5.1":
-  "integrity" "sha512-LDddZg1QYQGJtQl09Ezy+YPO1lI7vz1IQQaIStYTqtQynlKjVjcd1tpAULYlcc6fwoFsr3ar2ZGm2/G0Dujg7g=="
-  "resolved" "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.5.1.tgz"
-  "version" "5.5.1"
+"@lingdocs/inflect@5.7.11":
+  "integrity" "sha512-OwKYC7UT74y0GeXszRcejG6gW0X8jwFHLRkl74f6VGx8lqqyMCfqC16LOkLUm32fzRNjTrATP4X6tTdzBNvNrw=="
+  "resolved" "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.7.11.tgz"
+  "version" "5.7.11"
   dependencies:
     "pbf" "^3.2.1"
     "rambda" "^7.3.0"

@@ -7,7 +7,7 @@
   "name": "functions",
   "dependencies": {
     "@google-cloud/storage": "^5.8.1",
-    "@lingdocs/inflect": "5.5.1",
+    "@lingdocs/inflect": "5.7.11",
     "@types/cors": "^2.8.10",
     "@types/google-spreadsheet": "^3.0.2",
     "@types/react": "^18.0.21",

@@ -1468,9 +1468,9 @@
       }
     },
     "node_modules/@lingdocs/inflect": {
-      "version": "5.5.1",
-      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.5.1.tgz",
-      "integrity": "sha512-LDddZg1QYQGJtQl09Ezy+YPO1lI7vz1IQQaIStYTqtQynlKjVjcd1tpAULYlcc6fwoFsr3ar2ZGm2/G0Dujg7g==",
+      "version": "5.7.11",
+      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.7.11.tgz",
+      "integrity": "sha512-OwKYC7UT74y0GeXszRcejG6gW0X8jwFHLRkl74f6VGx8lqqyMCfqC16LOkLUm32fzRNjTrATP4X6tTdzBNvNrw==",
       "license": "MIT",
       "dependencies": {
         "pbf": "^3.2.1",

@@ -8050,9 +8050,9 @@
      }
    },
    "@lingdocs/inflect": {
-      "version": "5.5.1",
-      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.5.1.tgz",
-      "integrity": "sha512-LDddZg1QYQGJtQl09Ezy+YPO1lI7vz1IQQaIStYTqtQynlKjVjcd1tpAULYlcc6fwoFsr3ar2ZGm2/G0Dujg7g==",
+      "version": "5.7.11",
+      "resolved": "https://npm.lingdocs.com/@lingdocs%2finflect/-/inflect-5.7.11.tgz",
+      "integrity": "sha512-OwKYC7UT74y0GeXszRcejG6gW0X8jwFHLRkl74f6VGx8lqqyMCfqC16LOkLUm32fzRNjTrATP4X6tTdzBNvNrw==",
      "requires": {
        "pbf": "^3.2.1",
        "rambda": "^7.3.0"

@@ -15,7 +15,7 @@
   "main": "lib/functions/src/index.js",
   "dependencies": {
     "@google-cloud/storage": "^5.8.1",
-    "@lingdocs/inflect": "5.5.1",
+    "@lingdocs/inflect": "5.7.11",
     "@types/cors": "^2.8.10",
     "@types/google-spreadsheet": "^3.0.2",
     "@types/react": "^18.0.21",

@@ -1,45 +1,46 @@
 import * as functions from "firebase-functions";
 import * as FT from "../../website/src/types/functions-types";
 import { receiveSubmissions } from "./submissions";
-import lingdocsAuth from "./middleware/lingdocs-auth";
+// import lingdocsAuth from "./middleware/lingdocs-auth";
 import publish from "./publish";
 
 export const publishDictionary = functions.runWith({
     timeoutSeconds: 500,
     memory: "2GB"
-}).https.onRequest(
-    lingdocsAuth(
-        async (req, res: functions.Response<FT.PublishDictionaryResponse | FT.FunctionError>) => {
-            if (req.user.level !== "editor") {
-                res.status(403).send({ ok: false, error: "403 forbidden" });
-                return;
-            }
-            try {
+}).https.onRequest(async (req, res) => {
+    // lingdocsAuth(
+    //     async (req, res: functions.Response<FT.PublishDictionaryResponse | FT.FunctionError>) => {
+    //     if (req.user.level !== "editor") {
+    //         res.status(403).send({ ok: false, error: "403 forbidden" });
+    //         return;
+    //     }
+    //     try {
     const response = await publish();
     res.send(response);
-            } catch (e) {
-                // @ts-ignore
-                res.status(500).send({ ok: false, error: e.message });
-            }
-        }
-    )
+    // } catch (e) {
+    //     // @ts-ignore
+    //     res.status(500).send({ ok: false, error: e.message });
+    // }
+    // }
+    }
 );
 
 export const submissions = functions.runWith({
     timeoutSeconds: 60,
     memory: "1GB",
-}).https.onRequest(lingdocsAuth(
-    async (req, res: functions.Response<FT.SubmissionsResponse | FT.FunctionError>) => {
-        if (!Array.isArray(req.body)) {
-            res.status(400).send({
-                ok: false,
-                error: "invalid submission",
-            });
-            return;
-        }
-        const suggestions = req.body as FT.SubmissionsRequest;
+}).https.onRequest(// lingdocsAuth(
+    // async (req, res: functions.Response<FT.SubmissionsResponse | FT.FunctionError>) => {
+    // if (!Array.isArray(req.body)) {
+    //     res.status(400).send({
+    //         ok: false,
+    //         error: "invalid submission",
+    //     });
+    //     return;
+    // }
+    async (req, res) => {
+        const suggestions = JSON.parse(req.body) as FT.SubmissionsRequest;
         try {
-            const response = await receiveSubmissions(suggestions, req.user.level === "editor");
+            const response = await receiveSubmissions(suggestions, true);// req.user.level === "editor");
             // TODO: WARN IF ANY OF THE EDITS DIDN'T HAPPEN
             res.send(response);
         } catch (e) {

@@ -47,4 +48,4 @@ export const submissions = functions.runWith({
             res.status(500).send({ ok: false, error: e.message });
         };
     }
-));
+);

@@ -121,14 +121,16 @@ export function sortSubmissions(submissions: FT.Submission[]): SortedSubmissions
         edits: [],
         reviewTasks: [],
     };
-    return submissions.reduce((acc, s): SortedSubmissions => ({
+    return submissions.reduce((acc, s): SortedSubmissions => {
+        return {
             ...acc,
             ...(s.type === "edit suggestion" || s.type === "issue" || s.type === "entry suggestion") ? {
                 reviewTasks: [...acc.reviewTasks, s],
             } : {
                 edits: [...acc.edits, s],
             },
-    }), base);
+        };
+    }, base);
 }
 
 type SortedEdits = {

@@ -1,36 +1,50 @@
-import { splitWords } from "./word-list-maker";
+import {
+    psHash,
+    dePsHash,
+    PsHash,
+} from "./word-list-maker";
+import {
+    Types as T,
+} from "@lingdocs/inflect";
 
-// const entries = [
-//     { "ts": 0, p:"???", f: "abc", e: "oeu", g: "coeuch", i: 0 },
-//     {"ts":1581189430959,"p":"پېش","f":"pesh","e":"ahead, in front; earlier, first, before","c":"adv.","g":"pesh","i":2574},
-//     {"i":4424,"g":"cherta","ts":1527812531,"p":"چېرته","f":"cherta","e":"where (also used for if, when)"},
-//     {"i":5389,"g":"daase","ts":1527812321,"p":"داسې","f":"daase","e":"such, like this, like that, like","c":"adv."},
-// ];
-// const expectedInflections = [
-//     "پیش",
-//     "پېش",
-//     "چیرته",
-//     "چېرته",
-//     "داسي",
-//     "داسې",
-// ];
-
-// describe('Make Wordlist', () => {
-//     it("should return all inflections that can be generated from given entries", () => {
-//         const response = getWordList(entries);
-//         expect(response.ok).toBe(true);
-//         expect("wordlist" in response).toBe(true);
-//         if ("wordlist" in response) {
-//             expect(response.wordlist).toEqual(expectedInflections);
-//         }
-//     });
-// });
-
-describe("aux function", () => {
-    it("should split words", () => {
-        expect(splitWords({ p: "غټ کور", f: "ghuT kor" }))
-            .toEqual([{ p: "غټ", f: "ghuT" }, { p: "کور", f: "kor" }]);
-        expect(splitWords({ p: "بې طرفه پاتې کېدل", f: "betarafa paate kedul"}))
-            .toEqual([{ p: "بې طرفه", f: "betarafa"}, { p: "پاتې", f: "paate" }, { p: "کېدل", f: "kedul" }]);
-    })
-})
+const toTest: {
+    plain: T.PsWord,
+    hash: PsHash,
+}[] = [
+    {
+        plain: { p: "کور", f: "kor" },
+        hash: "کورXkor",
+    },
+    {
+        plain: {
+            p: "کنار", f: "kanaar",
+            hyphen: [
+                { type: "unwritten", f: "e" },
+                { type: "written", f: "daryaab", p: "دریاب" },
+            ],
+        },
+        hash: "کنارXkanaar-Xe-دریابXdaryaab",
+    },
+    {
+        plain: {
+            p: "کار", f: "kaar",
+            hyphen: [
+                { type: "written", f: "U", p: "و" },
+                { type: "written", f: "baar", p: "بار" },
+            ],
+        },
+        hash: "کارXkaar-وXU-بارXbaar",
+    },
+];
+
+test("psHash should work", () => {
+    toTest.forEach((t) => {
+        expect(psHash(t.plain)).toEqual(t.hash);
+    });
+});
+
+test("dePsHash should work", () => {
+    toTest.forEach((t) => {
+        expect(dePsHash(t.hash)).toEqual(t.plain);
+    });
+});

@@ -1,59 +1,69 @@
 import {
-    inflectWord,
     conjugateVerb,
     Types as T,
     removeFVarients,
+    splitPsString,
+    inflectWord,
+} from "@lingdocs/inflect";
+import {
+    typePredicates as tp,
 } from "@lingdocs/inflect";
-import { isNounOrAdjEntry } from "@lingdocs/inflect/dist/lib/src/type-predicates";
 
-type PSHash = `${string}X${string}`;
+export type PsHash = `${string}X${string}`;
 
-function makeHash(o: T.PsString): PSHash {
+export function psHash(o: T.PsWord): PsHash {
+    if ("hyphen" in o && o.hyphen) {
+        return o.hyphen.reduce((acc, h) => {
+            return acc + `-${h.type === "written" ? h.p : ""}X${h.f}` as PsHash;
+        }, `${o.p}X${o.f}` as PsHash);
+    }
     return `${o.p}X${o.f}`;
 }
 
-export function splitWords(o: T.PsString): T.PsString[] {
-    function splitR(o: { p: string[], f: string[] }): T.PsString[] {
-        const [lastP, ...restP] = o.p;
-        const [lastF, ...restF] = o.f;
-        if (!restF.length || !restP.length) {
-            return [{
-                p: [lastP, ...restP].reverse().join(" "),
-                f: [lastF, ...restF].reverse().join(" "),
-            }];
-        }
-        const lastWord: T.PsString = {
-            p: lastP,
-            f: lastF,
-        };
-        return [lastWord, ...splitR({ p: restP, f: restF })];
-    }
-    return splitR({
-        p: o.p.split(" ").reverse(),
-        f: o.f.split(" ").reverse(),
-    }).reverse();
+export function dePsHash(h: PsHash): T.PsWord {
+    function deHashHyphenContents(c: string[]): T.HyphenPsContent[] {
+        return c.reduce<T.HyphenPsContent[]>((acc, x) => {
+            const [p, f] = x.split("X");
+            const n: T.HyphenPsContent = p === "" ? {
+                type: "unwritten",
+                f,
+            } : {
+                type: "written",
+                p,
+                f,
+            };
+            return [...acc, n];
+        }, []);
+    }
+    const [first, ...rest] = h.split("-");
+    const [p, f] = first.split("X");
+    if (rest.length === 0) {
+        return { p, f };
+    }
+    return {
+        p,
+        f,
+        hyphen: deHashHyphenContents(rest),
+    };
 }
 
-// will return { p: "", f: "", s: "" }
-function search(object: any): Set<PSHash> {
+function search(object: any): Set<PsHash> {
+    let splitError: any = false;
     // adapted from
     // https://www.mikedoesweb.com/2016/es6-depth-first-object-tree-search/
-    function inside(haystack: any, found: Set<PSHash>): Set<PSHash> {
-        // use uniqueObjects = _.uniqWith(objects, _.isEqual)
-        // instead of set
+    function inside(haystack: any, found: Set<PsHash>): Set<PsHash> {
         if (haystack === null) {
             return found;
         }
         Object.keys(haystack).forEach((key: string) => {
             if(key === "p" && typeof haystack[key] === "string") {
-                // todo: rather get the p and f
-                // TODO: split words into individual words
-                // haystack[key].split(" ").forEach((word: string) => {
-                //     found.(word);
-                // });
-                splitWords(haystack).forEach(word => {
-                    found.add(makeHash(word));
-                });
+                try {
+                    splitPsString(haystack).forEach(word => {
+                        found.add(psHash(word));
+                    });
+                } catch (e) {
+                    splitError = { haystack };
+                }
                 return;
             }
             if(typeof haystack[key] === 'object') {

@@ -63,75 +73,93 @@ function search(object: any): Set<PSHash> {
         });
         return found;
     };
-    return inside(object, new Set<PSHash>());
+    const r = inside(object, new Set<PsHash>());
+    if (splitError) {
+        console.log(splitError);
+    }
+    return r;
 }
 
 export function getWordList(entries: T.DictionaryEntry[]): {
     ok: true,
-    wordlist: T.PsString[],
+    wordlist: T.PsWord[],
 } | {
     ok: false,
     errors: T.DictionaryEntryError[],
 } {
-    const allInflections = new Set<PSHash>();
-    const errors: T.DictionaryEntryError[] = [];
-    function getNounAdjInflections(entry: T.DictionaryEntry) {
+    const allWords = new Set<PsHash>();
+    entries.forEach((entry) => {
+        const words = splitPsString(removeFVarients({ p: entry.p, f: entry.f }));
+        words.forEach((w) => allWords.add(psHash(w)));
+        if (tp.isNounOrAdjEntry(entry)) {
             const infs = inflectWord(entry);
             if (infs) {
-            search(infs).forEach(x => allInflections.add(x));
-        } else {
-            allInflections.add(makeHash(removeFVarients(entry)));
+                search(infs).forEach(x => allWords.add(x));
             }
-    }
-    function getVerbConjugations(word: T.DictionaryEntry, linked?: T.DictionaryEntry) {
-        search(conjugateVerb(word, linked)).forEach(x => allInflections.add(x));
-    }
-    // got the entries, make a wordList of all the possible inflections
-    entries.forEach((entry) => {
-        try {
-            if (entry.c?.startsWith("v. ")) {
+        } else if (tp.isVerbDictionaryEntry(entry)) {
             const linked = entry.l ? entries.find((e) => e.ts === entry.l) : undefined;
-                getVerbConjugations(entry, linked);
-            } else if (isNounOrAdjEntry(entry as T.Entry)) {
-                getNounAdjInflections(entry);
-            } else {
-                allInflections.add(makeHash(removeFVarients(entry)));
-            }
-        } catch (error) {
-            errors.push({
-                ts: entry.ts,
-                p: entry.p,
-                f: entry.f,
-                e: entry.e,
-                erroneousFields: [],
-                errors: ["error inflecting/conjugating entry"],
-            });
+            const conj = conjugateVerb(entry, linked);
+            search(conj).forEach(x => allWords.add(x));
         }
     });
-    if (errors.length) {
-        return ({
-            ok: false,
-            errors,
-        });
-    }
-
-    // add ی version of words with ې (to accomadate for some bad spelling)
-    // allInflections.forEach((word: string) => {
-    //     // for words with ې in the middle, also have a version with ی in the middle instead
-    //     // if (eInMiddleRegex.test(word)) {
-    //     //     allInflections.add(word.replace(eInMiddleRegex, "ی"));
-    //     // }
-    //     // for words ending in ې, also have a version ending in ي
-    //     // if (word.slice(-1) === "ې") {
-    //     //     allInflections.add(word.slice(0, -1) + "ي");
-    //     // }
+    // const errors: T.DictionaryEntryError[] = [];
+    // function getNounAdjInflections(entry: T.DictionaryEntry) {
+    //     const infs = inflectWord(entry);
+    //     if (infs) {
+    //         search(infs).forEach(x => allInflections.add(x));
+    //     } else {
+    //         allInflections.add(psHash(removeFVarients(entry)));
+    //     }
+    // }
+    // function getVerbConjugations(word: T.DictionaryEntry, linked?: T.DictionaryEntry) {
+    //     search(conjugateVerb(word, linked)).forEach(x => allInflections.add(x));
+    // }
+    // // got the entries, make a wordList of all the possible inflections
+    // entries.forEach((entry) => {
+    //     try {
+    //         if (entry.c?.startsWith("v. ")) {
+    //             const linked = entry.l ? entries.find((e) => e.ts === entry.l) : undefined;
+    //             getVerbConjugations(entry, linked);
+    //         } else if (isNounOrAdjEntry(entry as T.Entry)) {
+    //             getNounAdjInflections(entry);
+    //         } else {
+    //             allInflections.add(psHash(removeFVarients(entry)));
+    //         }
+    //     } catch (error) {
+    //         console.log({ entry, error });
+    //         errors.push({
+    //             ts: entry.ts,
+    //             p: entry.p,
+    //             f: entry.f,
+    //             e: entry.e,
+    //             erroneousFields: [],
+    //             errors: ["error inflecting/conjugating entry"],
     // });
-    // const wordlist = Array.from(allInflections).filter((s) => !(s.includes(".") || s.includes("?")));
-    // wordlist.sort((a, b) => a.localeCompare(b, "ps"));
-    const wordlist: T.PsString[] = [];
-    allInflections.forEach(x => {
-        const [p, f] = x.split("X");
-        wordlist.push({ p, f });
+    //     }
+    // });
+    // if (errors.length) {
+    //     return ({
+    //         ok: false,
+    //         errors,
+    //     });
+    // }
+
+    // // add ی version of words with ې (to accomadate for some bad spelling)
+    // // allInflections.forEach((word: string) => {
+    // //     // for words with ې in the middle, also have a version with ی in the middle instead
+    // //     // if (eInMiddleRegex.test(word)) {
+    // //     //     allInflections.add(word.replace(eInMiddleRegex, "ی"));
+    // //     // }
+    // //     // for words ending in ې, also have a version ending in ي
+    // //     // if (word.slice(-1) === "ې") {
+    // //     //     allInflections.add(word.slice(0, -1) + "ي");
+    // //     // }
+    // // });
+    // // const wordlist = Array.from(allInflections).filter((s) => !(s.includes(".") || s.includes("?")));
+    // // wordlist.sort((a, b) => a.localeCompare(b, "ps"));
+    const wordlist: T.PsWord[] = [];
+    allWords.forEach(x => {
+        wordlist.push(dePsHash(x));
     });
     wordlist.sort((a, b) => a.p.localeCompare(b.p, "ps"));
     return {

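A quick usage sketch of the round trip introduced above (the values come from the test file; it assumes the psHash/dePsHash exports of word-list-maker.ts shown in the diff):

import { psHash, dePsHash, PsHash } from "./word-list-maker";

// an izafe construction: kanaar-e-daryaab
const word = {
    p: "کنار", f: "kanaar",
    hyphen: [
        { type: "unwritten" as const, f: "e" },          // izafe vowel, not written
        { type: "written" as const, p: "دریاب", f: "daryaab" },
    ],
};

const h: PsHash = psHash(word); // "کنارXkanaar-Xe-دریابXdaryaab"
dePsHash(h);                    // recovers the structured word, hyphen and all

getWordList now collects these hashes in a Set — cheap deduplication across the thousands of forms produced by inflection and conjugation — and only decodes them back to structured words at the very end.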
@@ -7,7 +7,7 @@
   "private": true,
   "dependencies": {
     "@fortawesome/fontawesome-free": "^5.15.2",
-    "@lingdocs/ps-react": "5.5.1",
+    "@lingdocs/ps-react": "5.7.11",
     "@testing-library/jest-dom": "^5.11.4",
     "@testing-library/react": "^11.1.0",
     "@testing-library/user-event": "^12.1.10",

@@ -448,7 +448,9 @@ class App extends Component<RouteComponentProps, State> {
             return;
         }
         const lastChar = searchValue[searchValue.length-1];
-        if (lastChar >= '0' && lastChar <= '9') {
+        // don't let people type in a single digit (to allow for number shortcuts)
+        // but do allow the whole thing to be numbers (to allow for pasting and searching for ts)
+        if (lastChar >= '0' && lastChar <= '9' && !(/^\d+$/.test(searchValue))) {
             return;
         }
         if (this.state.dictionaryStatus !== "ready") {

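The guard above now distinguishes a trailing digit (kept free as a keyboard shortcut) from an all-numeric query (so a pasted ts timestamp can still be searched). A small sketch of the predicate's behaviour — allowsSearch is a hypothetical name used only for illustration:

// hypothetical helper isolating the new condition from the component above
function allowsSearch(searchValue: string): boolean {
    const lastChar = searchValue[searchValue.length - 1];
    // block only when the last character is a digit AND the query is not all digits
    return !(lastChar >= '0' && lastChar <= '9' && !(/^\d+$/.test(searchValue)));
}

allowsSearch("kor1");          // false — trailing digit stays a number shortcut
allowsSearch("1581189430959"); // true — a whole-number ts can be pasted and searched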
@@ -2349,10 +2349,10 @@
     "@jridgewell/resolve-uri" "^3.0.3"
     "@jridgewell/sourcemap-codec" "^1.4.10"
 
-"@lingdocs/ps-react@5.5.1":
-  version "5.5.1"
-  resolved "https://npm.lingdocs.com/@lingdocs%2fps-react/-/ps-react-5.5.1.tgz#3636569555156fb28ad7ce3899b237e523f148e6"
-  integrity sha512-c20fr/THSagIZVv0OJMcXYHc1V8m0FTJtbaHH0BztD1lEFViMXdNXlQ+Ck52BjIlA+lY48SLI+VKBEvx9d9W7w==
+"@lingdocs/ps-react@5.7.11":
+  version "5.7.11"
+  resolved "https://npm.lingdocs.com/@lingdocs%2fps-react/-/ps-react-5.7.11.tgz#b8f4e5246f26d40adb46065d7018c644b7abdc41"
+  integrity sha512-wQPcu+EUXq21tdgigyoT0fxJQKccvmRbbJ2bOk4ACtBZ1zVsFttsfpIiNfwByMFaTljTQ59vv8kJihDMqdCicA==
   dependencies:
     "@formkit/auto-animate" "^1.0.0-beta.3"
     classnames "^2.2.6"