This commit is contained in:
adueck 2023-08-02 14:55:22 +04:00
parent 6eb5e081f0
commit 4cc81c8b10
7 changed files with 135 additions and 121 deletions

View File

@ -31,7 +31,11 @@ function ParserDemo({ opts }: { opts: T.TextOptions }) {
<input
dir="rtl"
className={`form-control ${
text && errors.length ? "is-invalid" : text ? "is-valid" : ""
text && (errors.length || !result.length)
? "is-invalid"
: result.length
? "is-valid"
: ""
}`}
type="text"
value={text}
@ -41,7 +45,9 @@ function ParserDemo({ opts }: { opts: T.TextOptions }) {
{errors.length > 0 && (
<>
<div className="alert alert-danger" role="alert">
<div>{errors[0]}</div>
{errors.map((e) => (
<div>{e}</div>
))}
</div>
<div className="text-center">Did you mean:</div>
</>

View File

@ -46,11 +46,11 @@ export function fmapParseResult<A extends object, B extends object>(
f: (x: A) => B,
x: T.ParseResult<A>[]
): T.ParseResult<B>[] {
return x.map<T.ParseResult<B>>(([tokens, result, errors]) => [
tokens,
f(result),
errors,
]);
return x.map<T.ParseResult<B>>((xi) => ({
tokens: xi.tokens,
body: f(xi.body),
errors: xi.errors,
}));
}
export function fmapSingleOrLengthOpts<A extends object, B extends object>(

View File

@ -24,16 +24,16 @@ export function parseAdjective(
const matches = wideMatches.filter(deets.predicate);
matches.forEach((m) => {
const selection = makeAdjectiveSelection(m);
w.push([
rest,
{
w.push({
tokens: rest,
body: {
selection,
inflection: deets.inflection,
gender: deets.gender,
given: first.s,
},
[],
]);
errors: [],
});
});
});
});

View File

@ -30,10 +30,11 @@ export function parseNoun(
if (possesor) {
const runsAfterPossesor: T.ParseResult<NounResult | undefined>[] = possesor
? possesor
: [[tokens, undefined, []]];
: [{ tokens, body: undefined, errors: [] }];
// could be a case for a monad ??
return removeUnneccesaryFailing(
runsAfterPossesor.flatMap(([tokens, possesor, errors]) =>
runsAfterPossesor.flatMap(
({ tokens, body: possesor, errors }) =>
parseNoun(
tokens,
lookup,
@ -54,11 +55,16 @@ export function parseNoun(
},
}
: undefined
).map<T.ParseResult<NounResult>>(([t, r, errs]) => [
t,
r,
[...errs, ...errors],
])
)
// .map<T.ParseResult<NounResult>>(([t, r, errs]) => [
// t,
// r,
// // TODO: should the errors from the runsAfterPossesor be thrown out?
// // or ...errors should be kept?
// // to show an error like د غتو ماشومان نومونه
// // adj error غټ should be first inflection (seems confusing)
// [...errs, ...errors],
// ])
)
);
} else {
@ -73,20 +79,20 @@ function removeUnneccesaryFailing(
): T.ParseResult<NounResult>[] {
// group by identical results
const groups = groupWith(
(a, b) => equals(a[1].selection, b[1].selection),
(a, b) => equals(a.body.selection, b.body.selection),
results
);
// if there's a group of identical results with some success in it
// remove any erroneous results
const stage1 = groups.flatMap((group) => {
if (group.find((x) => x[2].length === 0)) {
return group.filter((x) => x[2].length === 0);
if (group.find((x) => x.errors.length === 0)) {
return group.filter((x) => x.errors.length === 0);
}
return group;
});
// finally, if there's any success anywhere, remove any of the errors
if (stage1.find((x) => x[2].length === 0)) {
return stage1.filter((x) => x[2].length === 0);
if (stage1.find((x) => x.errors.length === 0)) {
return stage1.filter((x) => x.errors.length === 0);
} else {
return stage1;
}
@ -111,7 +117,7 @@ function parseNounAfterPossesor(
}
// TODO: add recognition of او between adjectives
const adjRes = parseAdjective(tokens, lookup);
const withAdj = adjRes.flatMap(([tkns, adj]) =>
const withAdj = adjRes.flatMap(({ tokens: tkns, body: adj }) =>
parseNounAfterPossesor(tkns, lookup, possesor, [...adjectives, adj])
);
const [first, ...rest] = tokens;
@ -141,9 +147,9 @@ function parseNounAfterPossesor(
convertInflection(inf, entry, gender, deets.plural).forEach(
({ inflected, number }) => {
const selection = makeNounSelection(entry, undefined);
w.push([
rest,
{
w.push({
tokens: rest,
body: {
inflected,
selection: {
...selection,
@ -167,7 +173,7 @@ function parseNounAfterPossesor(
: undefined,
},
},
[
errors: [
...(possesor?.inflected === false
? [{ message: "possesor should be inflected" }]
: []),
@ -175,7 +181,7 @@ function parseNounAfterPossesor(
message,
})),
],
] as T.ParseResult<NounResult>);
});
}
);
});

View File

@ -8,15 +8,13 @@ export function parsePhrase(
success: { inflected: boolean; selection: T.NPSelection }[];
errors: string[];
} {
const nps = parseNP(s, lookup).filter(([tkns]) => !tkns.length);
const nps = parseNP(s, lookup).filter(({ tokens }) => !tokens.length);
const success = nps.map((x) => x[1]);
const success = nps.map((x) => x.body);
return {
success,
errors: [
...new Set(
nps.flatMap(([tkns, r, errors]) => errors.map((e) => e.message))
),
...new Set(nps.flatMap(({ errors }) => errors.map((e) => e.message))),
],
};
}

View File

@ -9,9 +9,9 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
}>[] {
const [{ s }, ...rest] = tokens;
if (s === "زه") {
return [0, 1].map((person) => [
rest,
{
return [0, 1].map((person) => ({
tokens: rest,
body: {
inflected: false,
selection: {
type: "pronoun",
@ -19,12 +19,12 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]);
errors: [],
}));
} else if (s === "ته") {
return [2, 3].map((person) => [
rest,
{
return [2, 3].map((person) => ({
tokens: rest,
body: {
inflected: false,
selection: {
type: "pronoun",
@ -32,13 +32,13 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]);
errors: [],
}));
} else if (s === "هغه") {
return [
...[false, true].map<Result>((inflected) => [
rest,
{
...[false, true].map<Result>((inflected) => ({
tokens: rest,
body: {
inflected,
selection: {
type: "pronoun",
@ -46,11 +46,11 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]),
[
rest,
errors: [],
})),
{
tokens: rest,
body: {
inflected: false,
selection: {
type: "pronoun",
@ -58,14 +58,14 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
],
errors: [],
},
];
} else if (s === "هغې") {
return [
[
rest,
{
tokens: rest,
body: {
inflected: true,
selection: {
type: "pronoun",
@ -73,14 +73,14 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
],
errors: [],
},
];
} else if (s === "دی") {
return [
[
rest,
{
tokens: rest,
body: {
inflected: false,
selection: {
type: "pronoun",
@ -88,14 +88,14 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "near",
},
},
[],
],
errors: [],
},
];
} else if (s === "ده") {
return [
[
rest,
{
tokens: rest,
body: {
inflected: true,
selection: {
type: "pronoun",
@ -103,14 +103,14 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "near",
},
},
[],
],
errors: [],
},
];
} else if (s === "دا") {
return [
[
rest,
{
tokens: rest,
body: {
inflected: false,
selection: {
type: "pronoun",
@ -118,14 +118,14 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "near",
},
},
[],
],
errors: [],
},
];
} else if (s === "دې") {
return [
[
rest,
{
tokens: rest,
body: {
inflected: true,
selection: {
type: "pronoun",
@ -133,15 +133,15 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "near",
},
},
[],
],
errors: [],
},
];
} else if (["مونږ", "موږ"].includes(s)) {
return [false, true].flatMap<Result>((inflected) =>
[T.Person.FirstPlurMale, T.Person.FirstPlurFemale].map<Result>(
(person) => [
rest,
{
(person) => ({
tokens: rest,
body: {
inflected,
selection: {
type: "pronoun",
@ -149,16 +149,16 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]
errors: [],
})
)
);
} else if (["تاسو", "تاسې"].includes(s)) {
return [false, true].flatMap<Result>((inflected) =>
[T.Person.SecondPlurMale, T.Person.SecondPlurFemale].map<Result>(
(person) => [
rest,
{
(person) => ({
tokens: rest,
body: {
inflected,
selection: {
type: "pronoun",
@ -166,16 +166,16 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]
errors: [],
})
)
);
} else if (["هغوي", "هغوی"].includes(s)) {
return [false, true].flatMap<Result>((inflected) =>
[T.Person.ThirdPlurMale, T.Person.ThirdPlurFemale].map<Result>(
(person) => [
rest,
{
(person) => ({
tokens: rest,
body: {
inflected,
selection: {
type: "pronoun",
@ -183,16 +183,16 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "far",
},
},
[],
]
errors: [],
})
)
);
} else if (["دوي", "دوی"].includes(s)) {
return [false, true].flatMap<Result>((inflected) =>
[T.Person.ThirdPlurMale, T.Person.ThirdPlurFemale].map<Result>(
(person) => [
rest,
{
(person) => ({
tokens: rest,
body: {
inflected,
selection: {
type: "pronoun",
@ -200,8 +200,8 @@ export function parsePronoun(tokens: Readonly<T.Token[]>): T.ParseResult<{
distance: "near",
},
},
[],
]
errors: [],
})
)
);
}

View File

@ -1281,4 +1281,8 @@ export type ParseError = {
};
/** the result of a parse step: the leftover tokens, the parsed body, and any errors associated with the result */
export type ParseResult<P> = [Readonly<Token[]>, P, ParseError[]];
export type ParseResult<P> = {
tokens: Readonly<Token[]>;
body: P;
errors: ParseError[];
};