Commit 7397d07

remove unneeded as const
1 parent 1dbcb75 commit 7397d07

9 files changed: +120 -144 lines changed

9 files changed

+120
-144
lines changed
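Note on the change: as const on an object literal is only needed when nothing supplies an expected type, because TypeScript then widens literal properties such as type: "adverb" to plain string. Wherever an annotation, a declared return type, or an explicit generic such as Parser<Token> or choice<Modifier> (both visible in the hunks below) provides that expected type, the literal is checked against it directly and the assertion adds nothing. A minimal sketch of that general rule, using hypothetical shapes rather than this repository's actual definition types:

// Sketch only: Definition is a stand-in union, not the project's real types.
type Definition =
  | { type: "interjection"; interjection: string }
  | { type: "adverb"; adverb: string };

// No expected type: "adverb" widens to string, so an assertion or an
// annotation would be required to keep the literal.
const widened = { type: "adverb", adverb: "a" };
// inferred as { type: string; adverb: string }

// Expected type present (annotation or declared return type): the literal is
// checked against the union as written, and "as const" is redundant.
const adverb: Definition = { type: "adverb", adverb: "a" };

function interjection(interjection: string): Definition {
  return { type: "interjection", interjection }; // no "as const" needed
}

Whether every site in this commit falls under that rule depends on the surrounding Parser and choice signatures, which the diff only shows in part.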

dictionary/parser.ts

Lines changed: 37 additions & 47 deletions
@@ -193,14 +193,12 @@ const determiner = checkedSequence(
   ),
   sequence(determinerType, optionalNumber.skip(closeParenthesis)),
 )
-  .map(([[determiner, plural], [kind, quantity]]) =>
-    ({
-      determiner,
-      plural,
-      kind,
-      quantity: quantity ?? "both",
-    }) as const
-  );
+  .map(([[determiner, plural], [kind, quantity]]) => ({
+    determiner,
+    plural,
+    kind,
+    quantity: quantity ?? "both",
+  }));
 const adjectiveKind = choiceWithCheck(
   checkedSequence(keyword("physical"), keyword("quality"))
     .map(() => "physical quality" as const),
@@ -244,14 +242,12 @@ const noun = sequence(
       .map(([adjective, name]) => ({ adjective, name })),
   ),
 )
-  .map(([determiner, adjective, noun, postAdjective]) =>
-    ({
-      ...noun,
-      determiner,
-      adjective,
-      postAdjective,
-    }) as const
-  );
+  .map(([determiner, adjective, noun, postAdjective]) => ({
+    ...noun,
+    determiner,
+    adjective,
+    postAdjective,
+  }));
 const checkedNoun = new CheckedParser(
   choiceOnlyOne(
     determiner.check,
@@ -277,28 +273,26 @@ function checkedSimpleUnitWithTemplate(
     .map(([word]) => word);
 }
 const interjectionDefinition = checkedSimpleUnit("i")
-  .map((interjection) => ({ type: "interjection", interjection }) as const);
+  .map((interjection) => ({ type: "interjection", interjection }));
 const particleDefinition = checkedSequence(
   word.skip(openParenthesis).skip(keyword("particle")),
   sequence(keyword("def"), closeParenthesis),
 )
-  .map(([definition]) =>
-    ({ type: "particle definition", definition }) as const
-  );
+  .map(([definition]) => ({ type: "particle definition", definition }));
 const adverbDefinition = checkedSimpleUnit("adv")
-  .map((adverb) => ({ type: "adverb", adverb }) as const);
+  .map((adverb) => ({ type: "adverb", adverb }));
 const prepositionDefinition = checkedSimpleUnitWithTemplate(
   "prep",
   sequence(keyword("indirect"), keyword("object")),
 )
-  .map((preposition) => ({ type: "preposition", preposition }) as const);
+  .map((preposition) => ({ type: "preposition", preposition }));
 const numeralDefinition = checkedSimpleUnit("num")
   .mapWithPositionedError((num) => {
     const numeral = +num;
     if (!Number.isInteger(numeral)) {
       throw `"${num}" is not a number`;
     } else {
-      return { type: "numeral", numeral } as const;
+      return { type: "numeral", numeral };
     }
   });
 const fillerDefinition = checkedSequence(
@@ -320,7 +314,7 @@ const fillerDefinition = checkedSequence(
         before: forms[0],
         repeat: "",
         after: "",
-      } as const;
+      };
     }
     const [first, ...rest] = forms;
     for (let i = 0; i < first.length; i++) {
@@ -332,7 +326,7 @@ const fillerDefinition = checkedSequence(
          test === `${before}${repeatString.repeat(i + 2)}${after}`
      );
      if (passed) {
-        return { type: "filler", before, repeat: repeatString, after } as const;
+        return { type: "filler", before, repeat: repeatString, after };
      }
    }
    throw `"${forms.join("/")}" has no repetition pattern found`;
@@ -349,14 +343,12 @@ const fourFormPersonalPronounDefinition = checkedSequence(
   .map(([
     [singularSubject, singularObject, pluralSubject, pluralObject],
     perspective,
-  ]) =>
-    ({
-      type: "personal pronoun",
-      singular: { subject: singularSubject, object: singularObject },
-      plural: { subject: pluralSubject, object: pluralObject },
-      perspective,
-    }) as const
-  );
+  ]) => ({
+    type: "personal pronoun",
+    singular: { subject: singularSubject, object: singularObject },
+    plural: { subject: pluralSubject, object: pluralObject },
+    perspective,
+  }));
 const twoFormPersonalPronounDefinition = checkedSequence(
   sequence(
     word.skip(slash),
@@ -367,15 +359,13 @@ const twoFormPersonalPronounDefinition = checkedSequence(
     number.skip(closeParenthesis),
   ),
 )
-  .map(([[subject, object], [perspective, number]]) =>
-    ({
-      type: "personal pronoun",
-      singular: null,
-      plural: null,
-      [number]: { subject, object },
-      perspective,
-    }) as const
-  );
+  .map(([[subject, object], [perspective, number]]) => ({
+    type: "personal pronoun",
+    singular: null,
+    plural: null,
+    [number]: { subject, object },
+    perspective,
+  }));
 const nounDefinition = new CheckedParser(
   choiceWithCheck(
     new CheckedParser(
@@ -403,8 +393,8 @@ const nounDefinition = new CheckedParser(
   )
     .map(([noun, preposition]) =>
       preposition == null
-        ? { ...noun, type: "noun" } as const
-        : { type: "noun preposition", noun, preposition } as const
+        ? { ...noun, type: "noun" }
+        : { type: "noun preposition", noun, preposition }
     );
 const compoundAdjectiveDefinition = checkedSequence(
   adjective
@@ -414,7 +404,7 @@ const compoundAdjectiveDefinition = checkedSequence(
     .skip(keyword("c")),
   closeParenthesis.with(adjective.parser),
 )
-  .map((adjective) => ({ type: "compound adjective", adjective }) as const)
+  .map((adjective) => ({ type: "compound adjective", adjective }))
   .filterWithPositionedError(({ adjective }) =>
     adjective.every((adjective) => adjective.adverb.length === 0) ||
     throwError("compound adjective cannot have adverb")
@@ -434,7 +424,7 @@ const verbDefinition = checkedSequence(
   closeBracket
     .with(optionalWithCheck(
       checkedSimpleUnitWith("prep", noun)
-        .map(([preposition, object]) => ({ preposition, object }) as const),
+        .map(([preposition, object]) => ({ preposition, object })),
     ))
     .map(nullableAsArray),
 )
@@ -481,7 +471,7 @@ const verbDefinition = checkedSequence(
       openBracket,
       sequence(keyword("object"), closeBracket),
     )
-      .map(() => "template" as const),
+      .map(() => "template"),
     checkedNoun,
   ),
 ),

src/parser/lexer.ts

Lines changed: 14 additions & 14 deletions
@@ -66,7 +66,7 @@ const properWords = allAtLeastOnce(
   match(/[A-Z][a-zA-Z]*/, "proper word").skip(spaces),
 )
   .map((array) => array.join(" "))
-  .map((words) => ({ type: "proper word", words, kind: "latin" }) as const);
+  .map((words) => ({ type: "proper word", words, kind: "latin" }) );
 
 const specificWord = memoize((thatWord: string) =>
   word.filter((thisWord) =>
@@ -76,7 +76,7 @@ const specificWord = memoize((thatWord: string) =>
 );
 const multipleA = specificWord("a")
   .with(count(allAtLeastOnce(specificWord("a"))))
-  .map((count) => ({ type: "multiple a", count: count + 1 }) as const);
+  .map((count) => ({ type: "multiple a", count: count + 1 }) );
 const repeatingLetter = match(/[a-zA-Z]/, "latin letter")
   .then(memoize((letter) =>
     count(all(matchString(letter)))
@@ -87,15 +87,15 @@ const longWord = allAtLeastOnce(repeatingLetter)
   .map((letters) => {
     const word = letters.map(([letter]) => letter).join("");
     const length = sumOf(letters, ([_, count]) => count) - word.length + 1;
-    return { type: "long word", word, length } as const;
+    return { type: "long word", word, length } ;
   })
   .filter(({ word, length }) => /^[a-z]/.test(word) && length > 1);
 
 const alaX = memoize((word: string) =>
   sequence(specificWord("ala"), specificWord(word)).map(() => word)
 );
 const xAlaX = lazy(() => settings.xAlaXPartialParsing ? empty : word.then(alaX))
-  .map((word) => ({ type: "x ala x", word }) as const);
+  .map((word) => ({ type: "x ala x", word }) );
 const punctuation = choiceOnlyOne(
   allAtLeastOnce(
     match(SENTENCE_TERMINATOR, "punctuation")
@@ -105,7 +105,7 @@ const punctuation = choiceOnlyOne(
     .map((punctuation) => punctuation.join("").replaceAll("...", ELLIPSIS)),
   newline.map(() => "."),
 )
-  .map((punctuation) => ({ type: "punctuation", punctuation }) as const);
+  .map((punctuation) => ({ type: "punctuation", punctuation }) );
 const cartoucheElement = choiceOnlyOne(
   singleUcsurWord
     .skip(match(NSK_COLON, "full width colon").skip(spaces)),
@@ -146,7 +146,7 @@ const cartouches = allAtLeastOnce(cartouche)
     type: "proper word",
     words,
     kind: "cartouche",
-  }) as const
+  })
 );
 const longSpaceContainer = specificSpecialUcsur(START_OF_LONG_GLYPH)
   .with(count(spacesWithoutNewline).filter((length) => length > 0))
@@ -165,31 +165,31 @@ const spaceLongGlyph = sequence(
     type: "space long glyph",
     words,
     spaceLength,
-  }) as const
+  })
 );
 const headedLongGlyphStart = longGlyphHead
   .skip(specificSpecialUcsur(START_OF_LONG_GLYPH))
   .skip(spaces)
-  .map((words) => ({ type: "headed long glyph start", words }) as const);
+  .map((words) => ({ type: "headed long glyph start", words }) );
 const headlessLongGlyphEnd = specificSpecialUcsur(END_OF_LONG_GLYPH)
   .skip(spaces)
-  .map(() => ({ type: "headless long glyph end" }) as const);
+  .map(() => ({ type: "headless long glyph end" }) );
 const headlessLongGlyphStart = specificSpecialUcsur(START_OF_REVERSE_LONG_GLYPH)
   .skip(spaces)
-  .map(() => ({ type: "headless long glyph end" }) as const);
+  .map(() => ({ type: "headless long glyph end" }) );
 const headedLongGlyphEnd = specificSpecialUcsur(END_OF_REVERSE_LONG_GLYPH)
   .with(longGlyphHead)
   .skip(spaces)
-  .map((words) => ({ type: "headed long glyph start", words }) as const);
+  .map((words) => ({ type: "headed long glyph start", words }) );
 const insideLongGlyph = specificSpecialUcsur(END_OF_REVERSE_LONG_GLYPH)
   .with(longGlyphHead)
   .skip(specificSpecialUcsur(START_OF_LONG_GLYPH))
   .skip(spaces)
-  .map((words) => ({ type: "inside long glyph", words }) as const);
+  .map((words) => ({ type: "inside long glyph", words }) );
 const combinedGlyphsToken = combinedGlyphs
   .skip(spaces)
-  .map((words) => ({ type: "combined glyphs", words }) as const);
-const wordToken = word.map((word) => ({ type: "word", word }) as const);
+  .map((words) => ({ type: "combined glyphs", words }) );
+const wordToken = word.map((word) => ({ type: "word", word }) );
 
 export const token: Parser<Token> = choiceOnlyOne<Token>(
   xAlaX,

src/parser/parser.ts

Lines changed: 25 additions & 31 deletions
@@ -129,11 +129,11 @@ const optionalEmphasis = optional(emphasis);
 const alaXLongGlyph = memoize((word: string) =>
   specificWord(word)
     .skip(specificToken("headless long glyph end"))
-    .map(() => ({ type: "x ala x", word }) as const)
+    .map(() => ({ type: "x ala x", word }))
 );
 const alaX = memoize((word: string) =>
   sequence(specificWord("ala"), specificWord(word))
-    .map(() => ({ type: "x ala x", word }) as const)
+    .map(() => ({ type: "x ala x", word }))
 );
 function xAlaX(useWord: Set<string>, description: string) {
   return choice(
@@ -145,20 +145,18 @@ function xAlaX(useWord: Set<string>, description: string) {
     )
       .then(alaXLongGlyph),
     specificToken("x ala x")
-      .map(({ word }) => ({ type: "x ala x", word }) as const),
+      .map(({ word }) => ({ type: "x ala x", word })),
     word
       .then(alaX),
   );
 }
 const reduplicateRest = memoize((word: string) =>
   count(manyAtLeastOnce(specificWord(word)))
-    .map((count) =>
-      ({
-        type: "reduplication",
-        word,
-        count: count + 1,
-      }) as const
-    )
+    .map((count) => ({
+      type: "reduplication",
+      word,
+      count: count + 1,
+    }))
 );
 function simpleWordUnit(word: Set<string>, description: string) {
   return choice<SimpleHeadedWordUnit>(
@@ -277,25 +275,23 @@ const modifiers = sequence(
   many(
     choice<Modifier>(
       sequence(number, optionalEmphasis)
-        .map(([words, emphasis]) =>
-          ({
-            type: "default",
-            word: { type: "number", words, emphasis },
-          }) as const
-        )
+        .map(([words, emphasis]) => ({
+          type: "default",
+          word: { type: "number", words, emphasis },
+        }))
         .filter(filter(MODIFIER_RULES)),
       wordUnit(contentWordSet, "modifier")
-        .map((word) => ({ type: "default", word }) as const)
+        .map((word) => ({ type: "default", word }))
         .filter(filter(MODIFIER_RULES)),
      properWords
-        .map((words) => ({ type: "proper words", words }) as const)
+        .map((words) => ({ type: "proper words", words }))
        .filter(filter(MODIFIER_RULES)),
    ),
  ),
-  many(nanpa.map((nanpa) => ({ ...nanpa, type: "nanpa" }) as const)),
+  many(nanpa.map((nanpa) => ({ ...nanpa, type: "nanpa" }))),
  many(
    pi
-      .map((phrase) => ({ type: "pi", phrase }) as const)
+      .map((phrase) => ({ type: "pi", phrase }))
      .filter(filter(MODIFIER_RULES)),
  ),
 )
@@ -307,7 +303,7 @@ const modifiers = sequence(
   ])
   .filter(filter(MULTIPLE_MODIFIERS_RULES));
 const singlePhrase = phrase
-  .map((phrase) => ({ type: "single", phrase }) as const);
+  .map((phrase) => ({ type: "single", phrase }));
 const longAnu = sequence(
   specificToken("headless long glyph start").with(phrase),
   manyAtLeastOnce(
@@ -466,14 +462,12 @@ function associatedPredicates(nestingRule: ReadonlyArray<"li" | "o" | "anu">) {
     ),
     many(optionalComma.with(preposition)),
   )
-    .map(([predicates, objects, prepositions]) =>
-      ({
-        type: "associated",
-        predicates,
-        objects,
-        prepositions,
-      }) as const
-    )
+    .map(([predicates, objects, prepositions]) => ({
+      type: "associated",
+      predicates,
+      objects,
+      prepositions,
+    }))
     .filter(({ objects, prepositions }) =>
       objects != null || prepositions.length > 0
    )
@@ -485,7 +479,7 @@ function multiplePredicates(
   if (nestingRule.length === 0) {
     return choice<Predicate>(
       associatedPredicates([]),
-      phrase.map((predicate) => ({ type: "single", predicate }) as const),
+      phrase.map((predicate) => ({ type: "single", predicate })),
     );
   } else {
     const [first, ...rest] = nestingRule;
@@ -684,7 +678,7 @@ const sentence = choice<Sentence>(
       )
         ? "seme"
         : null;
-      return { ...sentence, interrogative } as const;
+      return { ...sentence, interrogative };
     })
     .sortBy(({ anuSeme }) => anuSeme == null ? 1 : 0),
   sequence(filler, optional(punctuation))
