endsWith function
parent ac3f930906
commit e7e773c825

@@ -1,6 +1,6 @@
 {
   "name": "@lingdocs/pashto-inflector",
-  "version": "1.1.9",
+  "version": "1.2.0",
   "author": "lingdocs.com",
   "description": "A Pashto inflection and verb conjugation engine, inculding React components for displaying Pashto text, inflections, and conjugations",
   "homepage": "https://verbs.lingdocs.com",

@@ -25,6 +25,7 @@ import {
     removeFVarients,
     endsInShwa,
     splitPsByVarients,
+    endsWith,
 } from "./p-text-helpers";
 import * as T from "../types";
 import {

@@ -1121,4 +1122,20 @@ test("splitPsByVarients", () => {
         .toEqual([{ p: "حوادث", f: "hawáadis" }, { p: "حادثات", f: "haadisáat" }]);
     expect(splitPsByVarients({ p: "کور", f: "kor" }))
         .toEqual([{ p: "کور", f: "kor" }]);
-})
+});
+
+test("endsWith", () => {
+    expect(endsWith({ p: "سړی", f: "saRey" }, { p: "ی", f: "ey" }))
+        .toBe(true);
+    // f variations should be removed in case of using DictionaryEntry
+    expect(endsWith({ p: "سړی", f: "saRey, saRaayyy" }, { p: "ی", f: "ey" }))
+        .toBe(true);
+    expect(endsWith({ p: "سړی", f: "saRey" }, { p: "ي", f: "ee" }))
+        .toBe(false);
+    expect(endsWith({ p: "ویده", f: "weedú" }, { p: "ه", f: "u" }, true))
+        .toBe(false);
+    expect(endsWith({ p: "ویده", f: "weedú" }, { p: "ه", f: "u" }))
+        .toBe(true);
+    expect(endsWith({ p: "چای", f: "chaay" }, [{ p: "وی", f: "ooy" }, { p: "ای", f: "aay" }]))
+        .toBe(true);
+});

@@ -955,4 +955,25 @@ export function isPluralInflections(inf: T.PluralInflections | T.Inflections): i
         return inf.masc.length === 2;
     }
     return inf.fem.length === 2;
+}
+
+/**
+ * determines if ps ends with a given ending, or one of an array of given endings
+ * (can be accent sensitive or not)
+ *
+ * @param ps - the PsString in question
+ * @param ending - an ending (or array of possible endings) to check for
+ * @param matchAccent - true if you want it to be accent-sensitive
+ * @returns
+ */
+export function endsWith(ps: T.PsString, ending: T.PsString | T.PsString[], matchAccent?: boolean): boolean {
+    if (Array.isArray(ending)) {
+        return ending.some(e => endsWith(ps, e));
+    }
+    const f = removeFVarients(ps.f);
+    return (
+        ps.p.slice(-ending.p.length) === ending.p
+        &&
+        (matchAccent ? f.slice(-ending.f.length) : removeAccents(f.slice(-ending.f.length))) === ending.f
+    );
 }
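
For reference, a minimal usage sketch of the new helper (not part of the commit itself). The relative import paths are assumptions based on the imports shown elsewhere in this diff, and the sample words and expected results are taken from the test cases above:

    import { endsWith } from "./lib/p-text-helpers";
    import * as T from "./types";

    const word: T.PsString = { p: "سړی", f: "saRey" };

    // accent-insensitive by default
    endsWith(word, { p: "ی", f: "ey" });                      // true
    // an array of endings matches if any one of them matches
    endsWith({ p: "چای", f: "chaay" }, [{ p: "وی", f: "ooy" }, { p: "ای", f: "aay" }]); // true
    // pass true as the third argument to make the check accent-sensitive
    endsWith({ p: "ویده", f: "weedú" }, { p: "ه", f: "u" }, true); // false: "ú" is accented, "u" is not
    endsWith({ p: "ویده", f: "weedú" }, { p: "ه", f: "u" });       // true once accents are ignored
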
@@ -36,6 +36,7 @@ import {
     isUnisexSet,
     isInflectionSet,
     addEnglish,
+    endsWith,
 } from "./lib/p-text-helpers";
 import {
     getEnglishWord,

@@ -132,6 +133,7 @@ export {
     personGender,
     addEnglish,
     parseEc,
+    endsWith,
     // protobuf helpers
     readDictionary,
     writeDictionary,
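
With the export above in place, the helper should be importable straight from the package entry point; a one-line sketch, assuming the package name from the package.json hunk at the top of this diff:

    import { endsWith } from "@lingdocs/pashto-inflector";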