diff --git a/src/demo-components/ParserDemo.tsx b/src/demo-components/ParserDemo.tsx index 5c7b03e..d15c045 100644 --- a/src/demo-components/ParserDemo.tsx +++ b/src/demo-components/ParserDemo.tsx @@ -31,7 +31,11 @@ function ParserDemo({ opts }: { opts: T.TextOptions }) { 0 && ( <>
-
{errors[0]}
+ {errors.map((e) => ( +
{e}
+ ))}
Did you mean:
diff --git a/src/lib/src/fp-ps.ts b/src/lib/src/fp-ps.ts index 6586e3a..36a259a 100644 --- a/src/lib/src/fp-ps.ts +++ b/src/lib/src/fp-ps.ts @@ -46,11 +46,11 @@ export function fmapParseResult( f: (x: A) => B, x: T.ParseResult[] ): T.ParseResult[] { - return x.map>(([tokens, result, errors]) => [ - tokens, - f(result), - errors, - ]); + return x.map>((xi) => ({ + tokens: xi.tokens, + body: f(xi.body), + errors: xi.errors, + })); } export function fmapSingleOrLengthOpts( diff --git a/src/lib/src/parsing/parse-adjective.ts b/src/lib/src/parsing/parse-adjective.ts index b21b1c8..c9d5f07 100644 --- a/src/lib/src/parsing/parse-adjective.ts +++ b/src/lib/src/parsing/parse-adjective.ts @@ -24,16 +24,16 @@ export function parseAdjective( const matches = wideMatches.filter(deets.predicate); matches.forEach((m) => { const selection = makeAdjectiveSelection(m); - w.push([ - rest, - { + w.push({ + tokens: rest, + body: { selection, inflection: deets.inflection, gender: deets.gender, given: first.s, }, - [], - ]); + errors: [], + }); }); }); }); diff --git a/src/lib/src/parsing/parse-noun.ts b/src/lib/src/parsing/parse-noun.ts index ebf8fdd..6008492 100644 --- a/src/lib/src/parsing/parse-noun.ts +++ b/src/lib/src/parsing/parse-noun.ts @@ -30,35 +30,41 @@ export function parseNoun( if (possesor) { const runsAfterPossesor: T.ParseResult[] = possesor ? possesor - : [[tokens, undefined, []]]; + : [{ tokens, body: undefined, errors: [] }]; // could be a case for a monad ?? return removeUnneccesaryFailing( - runsAfterPossesor.flatMap(([tokens, possesor, errors]) => - parseNoun( - tokens, - lookup, - possesor - ? { - inflected: possesor.inflected, - selection: { - ...possesor.selection, - possesor: prevPossesor - ? 
{ - shrunken: false, - np: { - type: "NP", - selection: prevPossesor.selection, - }, - } - : undefined, - }, - } - : undefined - ).map>(([t, r, errs]) => [ - t, - r, - [...errs, ...errors], - ]) + runsAfterPossesor.flatMap( + ({ tokens, body: possesor, errors }) => + parseNoun( + tokens, + lookup, + possesor + ? { + inflected: possesor.inflected, + selection: { + ...possesor.selection, + possesor: prevPossesor + ? { + shrunken: false, + np: { + type: "NP", + selection: prevPossesor.selection, + }, + } + : undefined, + }, + } + : undefined + ) + // .map>(([t, r, errs]) => [ + // t, + // r, + // // TODO: should the errors from the runsAfterPossesor be thrown out? + // // or ...errors should be kept? + // // to show an error like د غتو ماشومان نومونه + // // adj error غټ should be first inflection (seems confusing) + // [...errs, ...errors], + // ]) ) ); } else { @@ -73,20 +79,20 @@ function removeUnneccesaryFailing( ): T.ParseResult[] { // group by identical results const groups = groupWith( - (a, b) => equals(a[1].selection, b[1].selection), + (a, b) => equals(a.body.selection, b.body.selection), results ); // if there's a group of identical results with some success in it // remove any erroneous results const stage1 = groups.flatMap((group) => { - if (group.find((x) => x[2].length === 0)) { - return group.filter((x) => x[2].length === 0); + if (group.find((x) => x.errors.length === 0)) { + return group.filter((x) => x.errors.length === 0); } return group; }); // finally, if there's any success anywhere, remove any of the errors - if (stage1.find((x) => x[2].length === 0)) { - return stage1.filter((x) => x[2].length === 0); + if (stage1.find((x) => x.errors.length === 0)) { + return stage1.filter((x) => x.errors.length === 0); } else { return stage1; } @@ -111,7 +117,7 @@ function parseNounAfterPossesor( } // TODO: add recognition of او between adjectives const adjRes = parseAdjective(tokens, lookup); - const withAdj = adjRes.flatMap(([tkns, adj]) => + const withAdj 
= adjRes.flatMap(({ tokens: tkns, body: adj }) => parseNounAfterPossesor(tkns, lookup, possesor, [...adjectives, adj]) ); const [first, ...rest] = tokens; @@ -141,9 +147,9 @@ function parseNounAfterPossesor( convertInflection(inf, entry, gender, deets.plural).forEach( ({ inflected, number }) => { const selection = makeNounSelection(entry, undefined); - w.push([ - rest, - { + w.push({ + tokens: rest, + body: { inflected, selection: { ...selection, @@ -167,7 +173,7 @@ function parseNounAfterPossesor( : undefined, }, }, - [ + errors: [ ...(possesor?.inflected === false ? [{ message: "possesor should be inflected" }] : []), @@ -175,7 +181,7 @@ function parseNounAfterPossesor( message, })), ], - ] as T.ParseResult); + }); } ); }); diff --git a/src/lib/src/parsing/parse-phrase.ts b/src/lib/src/parsing/parse-phrase.ts index 5138f13..cbf5d3b 100644 --- a/src/lib/src/parsing/parse-phrase.ts +++ b/src/lib/src/parsing/parse-phrase.ts @@ -8,15 +8,13 @@ export function parsePhrase( success: { inflected: boolean; selection: T.NPSelection }[]; errors: string[]; } { - const nps = parseNP(s, lookup).filter(([tkns]) => !tkns.length); + const nps = parseNP(s, lookup).filter(({ tokens }) => !tokens.length); - const success = nps.map((x) => x[1]); + const success = nps.map((x) => x.body); return { success, errors: [ - ...new Set( - nps.flatMap(([tkns, r, errors]) => errors.map((e) => e.message)) - ), + ...new Set(nps.flatMap(({ errors }) => errors.map((e) => e.message))), ], }; } diff --git a/src/lib/src/parsing/parse-pronoun.ts b/src/lib/src/parsing/parse-pronoun.ts index 9c792d0..2449548 100644 --- a/src/lib/src/parsing/parse-pronoun.ts +++ b/src/lib/src/parsing/parse-pronoun.ts @@ -9,9 +9,9 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ }>[] { const [{ s }, ...rest] = tokens; if (s === "زه") { - return [0, 1].map((person) => [ - rest, - { + return [0, 1].map((person) => ({ + tokens: rest, + body: { inflected: false, selection: { type: "pronoun", @@ -19,12 +19,12 
@@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ]); + errors: [], + })); } else if (s === "ته") { - return [2, 3].map((person) => [ - rest, - { + return [2, 3].map((person) => ({ + tokens: rest, + body: { inflected: false, selection: { type: "pronoun", @@ -32,13 +32,13 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ]); + errors: [], + })); } else if (s === "هغه") { return [ - ...[false, true].map((inflected) => [ - rest, - { + ...[false, true].map((inflected) => ({ + tokens: rest, + body: { inflected, selection: { type: "pronoun", @@ -46,11 +46,11 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ]), - [ - rest, - { + errors: [], + })), + { + tokens: rest, + body: { inflected: false, selection: { type: "pronoun", @@ -58,14 +58,14 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ], + errors: [], + }, ]; } else if (s === "هغې") { return [ - [ - rest, - { + { + tokens: rest, + body: { inflected: true, selection: { type: "pronoun", @@ -73,14 +73,14 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ], + errors: [], + }, ]; } else if (s === "دی") { return [ - [ - rest, - { + { + tokens: rest, + body: { inflected: false, selection: { type: "pronoun", @@ -88,14 +88,14 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "near", }, }, - [], - ], + errors: [], + }, ]; } else if (s === "ده") { return [ - [ - rest, - { + { + tokens: rest, + body: { inflected: true, selection: { type: "pronoun", @@ -103,14 +103,14 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "near", }, }, - [], - ], + errors: [], + }, ]; } else if (s === "دا") { return [ - [ - rest, - { + { + tokens: rest, + body: { inflected: false, selection: { type: "pronoun", @@ -118,14 +118,14 @@ export function 
parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "near", }, }, - [], - ], + errors: [], + }, ]; } else if (s === "دې") { return [ - [ - rest, - { + { + tokens: rest, + body: { inflected: true, selection: { type: "pronoun", @@ -133,15 +133,15 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "near", }, }, - [], - ], + errors: [], + }, ]; } else if (["مونږ", "موږ"].includes(s)) { return [false, true].flatMap((inflected) => [T.Person.FirstPlurMale, T.Person.FirstPlurFemale].map( - (person) => [ - rest, - { + (person) => ({ + tokens: rest, + body: { inflected, selection: { type: "pronoun", @@ -149,16 +149,16 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ] + errors: [], + }) ) ); } else if (["تاسو", "تاسې"].includes(s)) { return [false, true].flatMap((inflected) => [T.Person.SecondPlurMale, T.Person.SecondPlurFemale].map( - (person) => [ - rest, - { + (person) => ({ + tokens: rest, + body: { inflected, selection: { type: "pronoun", @@ -166,16 +166,16 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ] + errors: [], + }) ) ); } else if (["هغوي", "هغوی"].includes(s)) { return [false, true].flatMap((inflected) => [T.Person.ThirdPlurMale, T.Person.ThirdPlurFemale].map( - (person) => [ - rest, - { + (person) => ({ + tokens: rest, + body: { inflected, selection: { type: "pronoun", @@ -183,16 +183,16 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "far", }, }, - [], - ] + errors: [], + }) ) ); } else if (["دوي", "دوی"].includes(s)) { return [false, true].flatMap((inflected) => [T.Person.ThirdPlurMale, T.Person.ThirdPlurFemale].map( - (person) => [ - rest, - { + (person) => ({ + tokens: rest, + body: { inflected, selection: { type: "pronoun", @@ -200,8 +200,8 @@ export function parsePronoun(tokens: Readonly): T.ParseResult<{ distance: "near", }, }, - [], - ] + errors: [], + }) ) ); } diff --git a/src/types.ts 
b/src/types.ts index 788178b..efe1b8c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1281,4 +1281,8 @@ export type ParseError = { }; /** an object containing the left-over tokens, the parse result body, and the errors associated with the result */ -export type ParseResult

= [Readonly, P, ParseError[]]; +export type ParseResult

= { + tokens: Readonly; + body: P; + errors: ParseError[]; +};