This commit is contained in:
469
frontend/scripts/check-assets.ts
Normal file
469
frontend/scripts/check-assets.ts
Normal file
@@ -0,0 +1,469 @@
|
||||
/**
|
||||
* Example usage in root or frontend:
|
||||
* pnpm check-assets (npm run check-assets)
|
||||
* pnpm check-assets -- -- quotes others (npm run check-assets -- -- quotes others)
|
||||
* pnpm check-assets -- -- challenges sound -p (npm run check-assets -- -- challenges sound -p)
|
||||
*/
|
||||
|
||||
import * as fs from "fs";
|
||||
import { LanguageGroups, LanguageList } from "../src/ts/constants/languages";
|
||||
import {
|
||||
Language,
|
||||
LanguageObject,
|
||||
LanguageObjectSchema,
|
||||
} from "@monkeytype/schemas/languages";
|
||||
import { Layout, ThemeName } from "@monkeytype/schemas/configs";
|
||||
import { LayoutsList } from "../src/ts/constants/layouts";
|
||||
import { KnownFontName } from "@monkeytype/schemas/fonts";
|
||||
import { Fonts } from "../src/ts/constants/fonts";
|
||||
import { themes, ThemeSchema, ThemesList } from "../src/ts/constants/themes";
|
||||
import { z } from "zod";
|
||||
import { ChallengeSchema, Challenge } from "@monkeytype/schemas/challenges";
|
||||
import { LayoutObject, LayoutObjectSchema } from "@monkeytype/schemas/layouts";
|
||||
import { QuoteDataSchema, QuoteData } from "@monkeytype/schemas/quotes";
|
||||
|
||||
class Problems<K extends string, T extends string> {
|
||||
private type: string;
|
||||
private labels: Record<T, string>;
|
||||
private problems: Partial<Record<K | T, string[]>> = {};
|
||||
|
||||
constructor(type: string, labels: Record<T, string>) {
|
||||
this.type = type;
|
||||
this.labels = labels;
|
||||
}
|
||||
|
||||
public add(key: K | T, problem: string): void {
|
||||
this.problems[key] = [...(this.problems[key] ?? []), problem];
|
||||
}
|
||||
|
||||
public addValidation(
|
||||
key: K | T,
|
||||
validationResult: z.SafeParseReturnType<unknown, unknown>,
|
||||
): void {
|
||||
if (validationResult.success) return;
|
||||
validationResult.error.errors.forEach((e) =>
|
||||
this.add(key, `${e.path.join(".")}: ${e.message}`),
|
||||
);
|
||||
}
|
||||
|
||||
public hasError(): boolean {
|
||||
return Object.keys(this.problems).length !== 0;
|
||||
}
|
||||
public toString(): string {
|
||||
if (!this.hasError()) {
|
||||
return `${this.type} are all \u001b[32mvalid\u001b[0m`;
|
||||
}
|
||||
|
||||
return (
|
||||
`${this.type} are \u001b[31minvalid\u001b[0m\n` +
|
||||
Object.entries(this.problems)
|
||||
.map(([key, problems]) => {
|
||||
let label: string = this.labels[key as T] ?? `${key}`;
|
||||
|
||||
return `${label}:\n ${(problems as string[])
|
||||
.map((error) => "\t- " + error)
|
||||
.join("\n")}`;
|
||||
})
|
||||
.join("\n")
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function findDuplicates<T>(items: T[]): T[] {
|
||||
const seen = new Set<T>();
|
||||
const duplicates = new Set<T>();
|
||||
|
||||
for (const item of items) {
|
||||
if (seen.has(item)) {
|
||||
duplicates.add(item);
|
||||
} else {
|
||||
seen.add(item);
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(duplicates);
|
||||
}
|
||||
|
||||
async function validateChallenges(): Promise<void> {
|
||||
const problems = new Problems<"_list.json", never>("Challenges", {});
|
||||
|
||||
const challengesData = JSON.parse(
|
||||
fs.readFileSync("./static/challenges/_list.json", {
|
||||
encoding: "utf8",
|
||||
flag: "r",
|
||||
}),
|
||||
) as Challenge;
|
||||
const validationResult = z.array(ChallengeSchema).safeParse(challengesData);
|
||||
problems.addValidation("_list.json", validationResult);
|
||||
|
||||
console.log(problems.toString());
|
||||
if (problems.hasError()) {
|
||||
throw new Error("challenges with errors");
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Validates keyboard layouts:
 *  - every layout in LayoutsList has a JSON file in ./static/layouts
 *  - every layout file parses and matches LayoutObjectSchema
 *  - no layout files exist on disk that are not declared in LayoutsList
 *
 * Logs a summary and throws when any problem was found.
 */
async function validateLayouts(): Promise<void> {
  const problems = new Problems<Layout, "_additional">("Layouts", {
    _additional:
      "Layout files present but missing in packages/schemas/src/layouts.ts",
  });

  for (let layoutName of LayoutsList) {
    let layoutData = undefined;
    // Missing file: record the problem and move on to the next layout.
    if (!fs.existsSync(`./static/layouts/${layoutName}.json`)) {
      problems.add(
        layoutName,
        `missing json file frontend/static/layouts/${layoutName}.json`,
      );
      continue;
    }
    try {
      layoutData = JSON.parse(
        fs.readFileSync(`./static/layouts/${layoutName}.json`, "utf-8"),
      ) as LayoutObject;
    } catch (e) {
      // Unparseable JSON: record and skip schema validation for this layout.
      problems.add(
        layoutName,
        `Unable to parse ${e instanceof Error ? e.message : e}`,
      );
      continue;
    }

    const validationResult = LayoutObjectSchema.safeParse(layoutData);
    problems.addValidation(layoutName, validationResult);
  }

  //no files not defined in LayoutsList
  const additionalLayoutFiles = fs
    .readdirSync("./static/layouts")
    .filter((it) => !LayoutsList.some((layout) => layout + ".json" === it));
  if (additionalLayoutFiles.length !== 0) {
    additionalLayoutFiles.forEach((it) => problems.add("_additional", it));
  }

  console.log(problems.toString());

  if (problems.hasError()) {
    throw new Error("layouts with errors");
  }
}
|
||||
|
||||
/**
 * Validates quote files in ./static/quotes:
 *  - file parses and its `language` matches the filename
 *  - data matches QuoteDataSchema
 *  - no duplicate quote ids
 *  - each quote's stored `length` equals its actual text length
 *  - quotes under 60 characters must be whitelisted in scripts/short-quotes.json
 *  - `groups` ranges are contiguous (each starts right after the previous
 *    one ends) and strictly ascending
 *
 * Logs a summary and throws when any problem was found.
 */
async function validateQuotes(): Promise<void> {
  const problems = new Problems<string, never>("Quotes", {});

  // Whitelist of quote ids per language that may be shorter than 60 chars.
  const shortQuotes = JSON.parse(
    fs.readFileSync("./scripts/short-quotes.json", "utf8"),
  ) as Record<QuoteData["language"], number[]>;

  const quotesFiles = fs.readdirSync("./static/quotes/");
  for (let quotefilename of quotesFiles) {
    // Strip the ".json" extension; the base name doubles as the language key.
    quotefilename = quotefilename.split(".")[0] as string;
    let quoteData;

    try {
      quoteData = JSON.parse(
        fs.readFileSync(`./static/quotes/${quotefilename}.json`, {
          encoding: "utf8",
          flag: "r",
        }),
      ) as QuoteData;
    } catch (e) {
      // Unparseable JSON: record and skip all further checks for this file.
      problems.add(
        quotefilename,
        `Unable to parse ${e instanceof Error ? e.message : e}`,
      );
      continue;
    }

    //check filename matching language
    if (quoteData.language !== quotefilename) {
      problems.add(
        quotefilename,
        `Name not matching language ${quoteData.language}`,
      );
    }

    //check schema
    problems.addValidation(quotefilename, QuoteDataSchema.safeParse(quoteData));

    //check for duplicate ids
    const duplicates = findDuplicates(quoteData.quotes.map((it) => it.id));
    if (duplicates.length !== 0) {
      problems.add(
        quotefilename,
        `contains ${duplicates.length} duplicates:\n ${duplicates.join(",")}`,
      );
    }

    //check quote length
    quoteData.quotes.forEach((quote) => {
      // The stored `length` field must match the actual text length.
      if (quote.text.length !== quote.length) {
        problems.add(
          quotefilename,
          `ID ${quote.id}: expected length ${quote.text.length}`,
        );
      }

      // Quotes under 60 characters are only allowed when whitelisted.
      if (!shortQuotes[quoteData.language]?.includes(quote.id)) {
        if (quote.text.length < 60) {
          problems.add(
            quotefilename,
            `ID ${quote.id}: length too short (under 60 characters)`,
          );
        }
      }
    });

    //check groups: each [start, end] must begin right after the previous
    //group's end, and start must be below end
    let last = -1;
    for (const group of quoteData.groups) {
      if (group[0] !== last + 1) {
        problems.add(
          quotefilename,
          `error in group ${group}: expect to start at ${last + 1}`,
        );
      } else if (group[0] >= group[1]) {
        problems.add(
          quotefilename,
          `error in group ${group}: second number to be greater than first number`,
        );
      }
      last = group[1];
    }
  }

  console.log(problems.toString());

  if (problems.hasError()) {
    throw new Error("quotes with errors");
  }
}
|
||||
|
||||
/**
 * Validates languages:
 *  - every language in LanguageList has a JSON file that matches
 *    LanguageObjectSchema (an extra optional `_comment` field is tolerated)
 *  - the file's `name` matches the language key
 *  - the word list is (near-)free of duplicate words
 *  - no language files exist on disk that are not declared in LanguageList
 *  - every language belongs to exactly one group in LanguageGroups
 *
 * Logs a summary and throws when any problem was found.
 */
async function validateLanguages(): Promise<void> {
  const problems = new Problems<Language, "_additional" | "_groups">(
    "Languages",
    {
      _additional:
        "Language files present but missing in packages/schemas/src/languages.ts",
      _groups:
        "Problems in LanguageGroups in frontend/src/ts/constants/languages.ts",
    },
  );

  // Maximum tolerated share of duplicate words, in percent.
  const duplicatePercentageThreshold = 0.0001;
  for (const language of LanguageList) {
    let languageFileData: LanguageObject;
    try {
      languageFileData = JSON.parse(
        fs.readFileSync(`./static/languages/${language}.json`, {
          encoding: "utf8",
          flag: "r",
        }),
      ) as LanguageObject;
    } catch (e) {
      // Read/parse failure is reported as a missing file; skip further checks.
      problems.add(
        language,
        `missing json file frontend/static/languages/${language}.json`,
      );

      continue;
    }
    // Allow an extra `_comment` field on top of the regular schema.
    problems.addValidation(
      language,
      LanguageObjectSchema.extend({
        _comment: z.string().optional(),
      }).safeParse(languageFileData),
    );

    if (languageFileData.name !== language) {
      problems.add(language, "Name is not " + language);
    }
    const duplicates = findDuplicates(languageFileData.words);
    const duplicatePercentage =
      (duplicates.length / languageFileData.words.length) * 100;
    if (duplicatePercentage >= duplicatePercentageThreshold) {
      problems.add(
        language,
        `contains ${duplicates.length} (${Math.round(
          duplicatePercentage,
        )}%) duplicates:\n ${duplicates.join(",")}`,
      );
    }
  }

  //no files not defined in LanguageList
  fs.readdirSync("./static/languages")
    .filter((it) => !LanguageList.some((language) => language + ".json" === it))
    .forEach((it) => problems.add("_additional", it));

  //check groups
  const languagesWithMultipleGroups = [];
  // Maps each language to the (last seen) group that contains it.
  const groupByLanguage = new Map<Language, string>();

  for (const group of Object.keys(LanguageGroups)) {
    for (const language of LanguageGroups[group] as Language[]) {
      if (groupByLanguage.has(language)) {
        languagesWithMultipleGroups.push(language);
      }
      groupByLanguage.set(language, group);
    }
  }
  if (languagesWithMultipleGroups.length !== 0) {
    problems.add(
      "_groups",
      `languages with multiple groups: ${languagesWithMultipleGroups.join(
        ", ",
      )}`,
    );
  }

  const languagesMissingGroup = LanguageList.filter(
    (lang) => !groupByLanguage.has(lang),
  );
  if (languagesMissingGroup.length !== 0) {
    problems.add(
      "_groups",
      `languages missing group: ${languagesMissingGroup.join(", ")}`,
    );
  }

  console.log(problems.toString());

  if (problems.hasError()) {
    throw new Error("languages with errors");
  }
}
|
||||
|
||||
async function validateFonts(): Promise<void> {
|
||||
const problems = new Problems<KnownFontName, "_additional">("Fonts", {
|
||||
_additional:
|
||||
"Font files present but missing in frontend/src/ts/constants/fonts.ts",
|
||||
});
|
||||
|
||||
//no missing files
|
||||
const ignoredFonts = new Set([
|
||||
"GallaudetRegular.woff2", //used for asl
|
||||
"Vazirharf-NL-Regular.woff2", //default font
|
||||
]);
|
||||
|
||||
const fontFiles = fs
|
||||
.readdirSync("./static/webfonts")
|
||||
.filter((it) => !ignoredFonts.has(it));
|
||||
|
||||
//missing font files
|
||||
Object.entries(Fonts)
|
||||
.filter(([_name, config]) => !config.systemFont)
|
||||
.filter(([_name, config]) => !fontFiles.includes(config.fileName as string))
|
||||
.forEach(([name, config]) =>
|
||||
problems.add(
|
||||
name as KnownFontName,
|
||||
`missing file frontend/static/webfonts/${config.fileName}`,
|
||||
),
|
||||
);
|
||||
|
||||
//additional font files
|
||||
const expectedFontFiles = new Set(
|
||||
Object.entries(Fonts)
|
||||
.filter(([_name, config]) => !config.systemFont)
|
||||
.map(([_name, config]) => config.fileName as string),
|
||||
);
|
||||
|
||||
fontFiles
|
||||
.filter((name) => !expectedFontFiles.has(name))
|
||||
.forEach((file) => problems.add("_additional", file));
|
||||
|
||||
console.log(problems.toString());
|
||||
|
||||
if (problems.hasError()) {
|
||||
throw new Error("layouts with errors");
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Validates themes:
 *  - a <name>.css file exists in ./static/themes exactly when the theme
 *    declares `hasCss` (mismatches in either direction are reported)
 *  - no CSS files exist on disk that are not declared in ThemesList
 *  - every theme matches ThemeSchema and its name contains only lowercase
 *    letters, digits, and underscores
 *
 * Logs a summary and throws when any problem was found.
 */
async function validateThemes(): Promise<void> {
  const problems = new Problems<ThemeName, "_additional">("Themes", {
    _additional:
      "Theme files present but missing in frontend/src/ts/constants/themes.ts",
  });

  //no missing files
  const themeFiles = fs.readdirSync("./static/themes");

  //missing or additional theme files (mismatch in hasCss)
  ThemesList.filter(
    (it) => themeFiles.includes(it.name + ".css") !== (it.hasCss ?? false),
  ).forEach((it) =>
    problems.add(
      it.name,
      `${it.hasCss ? "missing" : "additional"} file frontend/static/themes/${it.name}.css`,
    ),
  );

  //additional theme files
  themeFiles
    .filter((it) => !ThemesList.some((theme) => theme.name + ".css" === it))
    .forEach((it) => problems.add("_additional", it));

  //validate theme colors are valid hex colors, not covered by typescript
  const themeNameSchema = z.string().regex(/^[a-z0-9_]+$/, {
    message:
      "theme name can only contain lowercase letters, digits and underscore",
  });
  for (const name of Object.keys(themes)) {
    const theme = themes[name as ThemeName];
    problems.addValidation(name as ThemeName, ThemeSchema.safeParse(theme));
    problems.addValidation(name as ThemeName, themeNameSchema.safeParse(name));
  }

  console.log(problems.toString());

  if (problems.hasError()) {
    throw new Error("themes with errors");
  }
}
|
||||
|
||||
type Validator = () => Promise<void>;
|
||||
|
||||
/**
 * Entry point. Parses CLI arguments and runs the selected validators.
 *
 * Positional args select validator groups (quotes, languages, layouts,
 * challenges, fonts, themes, others). Flags:
 *   --all / -a                      run everything (also the default when
 *                                   no keys are given)
 *   --pass-with-no-validators / -p  don't exit(1) on unknown validator keys
 */
async function main(): Promise<void> {
  const args = process.argv.slice(2);

  // Split args into flags (leading "-") and validator keys.
  const flags = new Set(args.filter((arg) => arg.startsWith("-")));
  const keys = args.filter((arg) => !arg.startsWith("-"));

  const validators: Record<string, Validator[]> = {
    quotes: [validateQuotes],
    languages: [validateLanguages],
    layouts: [validateLayouts],
    challenges: [validateChallenges],
    fonts: [validateFonts],
    themes: [validateThemes],
    // "others" bundles everything except quotes and languages.
    others: [
      validateChallenges,
      validateLayouts,
      validateFonts,
      validateThemes,
    ],
  };

  // flags
  const validateAll = keys.length < 1 || flags.has("--all") || flags.has("-a");
  const passWithNoValidators =
    flags.has("--pass-with-no-validators") || flags.has("-p");

  // A Set deduplicates validators selected via multiple overlapping keys.
  const tasks: Set<Validator> = new Set(
    validateAll ? Object.values(validators).flat() : [],
  );
  for (const key of keys) {
    if (!Object.keys(validators).includes(key)) {
      console.error(`There is no validator for key '${key}'.`);
      if (!passWithNoValidators) process.exit(1);
    } else if (!validateAll) {
      validators[key]?.forEach((validator) => tasks.add(validator));
    }
  }

  if (tasks.size > 0) {
    // Run all selected validators concurrently; any throw rejects the whole run.
    await Promise.all([...tasks].map(async (validator) => validator()));
    return;
  }
}
|
||||
void main();
|
||||
59
frontend/scripts/fill-colors.js
Normal file
59
frontend/scripts/fill-colors.js
Normal file
@@ -0,0 +1,59 @@
|
||||
// oxlint-disable no-require-imports
|
||||
const fs = require("fs");
|
||||
|
||||
/**
 * Reads every theme CSS file in ../static/themes and copies its
 * --bg/--main/--sub/--text color values into the matching entry
 * of _list.json, then rewrites _list.json.
 *
 * Returns a promise that resolves once _list.json has been written.
 */
function main() {
  return new Promise((resolve) => {
    const listFile = JSON.parse(
      fs.readFileSync("../static/themes/_list.json", {
        encoding: "utf8",
      }),
    );
    const themeFiles = fs.readdirSync("../static/themes/");
    for (let filename of themeFiles) {
      if (filename === "_list.json") continue;

      filename = filename.split(".")[0];
      // Fixed: the path previously contained a broken "$(unknown)"
      // placeholder instead of interpolating the theme name, so no theme
      // CSS file could ever be read.
      const themeData = fs.readFileSync(`../static/themes/${filename}.css`, {
        encoding: "utf8",
        flag: "r",
      });

      const subMatches = /--sub-color: (#.+);/g.exec(themeData);
      const bgMatches = /--bg-color: (#.+);/g.exec(themeData);
      const mainMatches = /--main-color: (#.+);/g.exec(themeData);
      const textMatches = /--text-color: (#.+);/g.exec(themeData);

      // Look the theme entry up once instead of four times.
      const theme = listFile.find((it) => it.name === filename);
      theme.subColor = subMatches[1];
      theme.textColor = textMatches[1];
      theme.bgColor = bgMatches[1];
      theme.mainColor = mainMatches[1];
    }
    fs.writeFileSync(
      `../static/themes/_list.json`,
      JSON.stringify(listFile, null, 2),
    );
    resolve();
  });
}
|
||||
|
||||
main();
|
||||
29
frontend/scripts/fix-quote-lengths.cjs
Normal file
29
frontend/scripts/fix-quote-lengths.cjs
Normal file
@@ -0,0 +1,29 @@
|
||||
// oxlint-disable no-require-imports
|
||||
const fs = require("fs");
|
||||
|
||||
/**
 * Rewrites every quote file under ../static/quotes so that each quote's
 * `length` field matches the actual character count of its `text`.
 *
 * Returns a promise that resolves after all files have been rewritten.
 */
function fixQuoteLengths() {
  return new Promise((resolve) => {
    for (const file of fs.readdirSync("../static/quotes/")) {
      // The base name (without extension) identifies the quote file.
      const language = file.split(".")[0];
      const quotePath = `../static/quotes/${language}.json`;

      const quoteData = JSON.parse(
        fs.readFileSync(quotePath, { encoding: "utf8", flag: "r" }),
      );

      for (const quote of quoteData.quotes) {
        quote.length = quote.text.length;
      }

      fs.writeFileSync(quotePath, JSON.stringify(quoteData, null, 2));
    }
    resolve();
  });
}
|
||||
|
||||
fixQuoteLengths();
|
||||
39
frontend/scripts/get-short-quotes.ts
Normal file
39
frontend/scripts/get-short-quotes.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { QuoteData } from "@monkeytype/schemas/quotes";
|
||||
import * as fs from "fs";
|
||||
|
||||
import path, { dirname } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
const FRONTEND_ROOT = path.resolve(__dirname, "..");
|
||||
|
||||
/**
 * Scans every quote file under static/quotes and writes
 * scripts/short-quotes.json: a map of language -> ids of quotes whose
 * stored `length` is under 60 characters. check-assets reads this file
 * as the whitelist of allowed short quotes.
 */
async function getShortQuotes(): Promise<void> {
  const shortQuotes = {} as Record<QuoteData["language"], number[]>;
  let count = 0;
  const quotesFiles = fs.readdirSync(
    path.resolve(FRONTEND_ROOT, "static/quotes"),
  );
  for (const quotefilename of quotesFiles) {
    const quoteJson = fs.readFileSync(
      path.resolve(FRONTEND_ROOT, `static/quotes/${quotefilename}`),
      "utf8",
    );
    //const quoteJson = await (await fetch(`https://raw.githubusercontent.com/monkeytypegame/monkeytype/refs/heads/master/frontend/static/quotes/${quotefilename}`)).json();
    const quoteData = JSON.parse(quoteJson) as QuoteData;
    for (const quote of quoteData.quotes) {
      // Collect ids of quotes under 60 chars, keyed by language.
      if (quote.length < 60) {
        shortQuotes[quoteData.language] ??= [];
        shortQuotes[quoteData.language].push(quote.id);
        count++;
      }
    }
  }
  fs.writeFileSync(
    path.resolve(__dirname, "short-quotes.json"),
    JSON.stringify(shortQuotes),
  );
  console.log(`There are ${count} allowed short quotes`);
}
|
||||
|
||||
void getShortQuotes();
|
||||
389
frontend/scripts/import-tree.ts
Normal file
389
frontend/scripts/import-tree.ts
Normal file
@@ -0,0 +1,389 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import ts from "typescript";
|
||||
|
||||
const ROOT = path.resolve(import.meta.dirname, "..");
|
||||
|
||||
// --- Argument handling ---
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const maxDepthFlagIdx = args.indexOf("--depth");
|
||||
let maxDepthLimit = Infinity;
|
||||
if (maxDepthFlagIdx !== -1) {
|
||||
maxDepthLimit = Number(args[maxDepthFlagIdx + 1]);
|
||||
args.splice(maxDepthFlagIdx, 2);
|
||||
}
|
||||
|
||||
const target = args[0];
|
||||
if (target === undefined || target === "") {
|
||||
console.log("Usage: pnpm import-tree <file-or-directory> [--depth <n>]");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const resolved = path.resolve(target);
|
||||
|
||||
function collectTsFiles(dir: string): string[] {
|
||||
const results: string[] = [];
|
||||
for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
|
||||
const full = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
results.push(...collectTsFiles(full));
|
||||
} else if (/\.tsx?$/.test(entry.name)) {
|
||||
results.push(full);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
const isDir = fs.statSync(resolved).isDirectory();
|
||||
const boundary = isDir ? resolved : null;
|
||||
|
||||
let entryPoints: string[];
|
||||
if (isDir) {
|
||||
entryPoints = collectTsFiles(resolved);
|
||||
} else {
|
||||
entryPoints = [resolved];
|
||||
}
|
||||
|
||||
if (entryPoints.length === 0) {
|
||||
console.log("No .ts/.tsx files found.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// --- Import extraction (type-aware) ---
|
||||
|
||||
const tsConfig: ts.CompilerOptions = {
|
||||
module: ts.ModuleKind.ESNext,
|
||||
target: ts.ScriptTarget.ESNext,
|
||||
jsx: ts.JsxEmit.Preserve,
|
||||
sourceMap: false,
|
||||
declaration: false,
|
||||
isolatedModules: true,
|
||||
};
|
||||
|
||||
const JS_IMPORT_RE =
|
||||
/(?:import|export)\s+(?:(?:\{[^}]*\}|[\w*]+(?:\s*,\s*\{[^}]*\})?)\s+from\s+)?["']([^"']+)["']/g;
|
||||
|
||||
/**
 * Returns the module specifiers imported/exported by `filePath`.
 *
 * The file is transpiled to JS first so that type-only imports (which tsc
 * erases) do not show up; a regex then scans the emitted output for
 * import/export `from "..."` clauses.
 */
function extractImports(filePath: string): string[] {
  const content = fs.readFileSync(filePath, "utf-8");
  let outputText: string;
  try {
    ({ outputText } = ts.transpileModule(content, {
      compilerOptions: tsConfig,
      fileName: filePath,
    }));
  } catch {
    // Some files (e.g. declaration files) can't be transpiled — fall back to
    // regex on the original source, which still strips type-only imports.
    outputText = content;
  }
  const specifiers: string[] = [];
  for (const match of outputText.matchAll(JS_IMPORT_RE)) {
    const spec = match[1];
    if (spec !== undefined) specifiers.push(spec);
  }
  return specifiers;
}
|
||||
|
||||
// --- Resolution ---
|
||||
|
||||
const EXTENSIONS = [".ts", ".tsx", "/index.ts", "/index.tsx"];
|
||||
|
||||
/**
 * Maps an import specifier to an absolute file path.
 *
 * Relative specifiers are resolved against `importerDir`, trying an exact
 * file match first and then the known extensions / index files.
 * "@monkeytype/..." package specifiers are returned unchanged (they are
 * treated as leaf nodes). Anything else (third-party or virtual modules)
 * resolves to null and is excluded from the tree.
 */
function resolveSpecifier(
  specifier: string,
  importerDir: string,
): string | null {
  if (specifier.startsWith("./") || specifier.startsWith("../")) {
    const base = path.resolve(importerDir, specifier);
    // exact match
    if (fs.existsSync(base) && fs.statSync(base).isFile()) return base;
    for (const ext of EXTENSIONS) {
      const candidate = base + ext;
      if (fs.existsSync(candidate)) return candidate;
    }
    return null;
  }

  // @monkeytype packages are treated as leaf nodes (no recursion into them)
  if (specifier.startsWith("@monkeytype/")) return specifier;

  return null; // third-party / virtual
}
|
||||
|
||||
const printed = new Set<string>();
|
||||
|
||||
// --- Graph traversal ---
|
||||
|
||||
type NodeInfo = {
|
||||
directImports: string[];
|
||||
totalReachable: number;
|
||||
maxDepth: number;
|
||||
};
|
||||
|
||||
const cache = new Map<string, NodeInfo>();
|
||||
|
||||
/**
 * Depth-first traversal of the import graph starting at `filePath`.
 *
 * Populates `cache` with each visited file's direct imports, transitive
 * reachable count, and max import depth. `ancestors` holds the current DFS
 * path and is used to detect (and skip recursing into) circular imports.
 *
 * Returns the set of files reachable from `filePath` and the max depth.
 */
function walk(
  filePath: string,
  ancestors: Set<string>,
): { reachable: Set<string>; maxDepth: number } {
  const cached = cache.get(filePath);
  if (cached !== undefined) {
    // Already analyzed: rebuild the reachable set from cached direct imports.
    return {
      reachable: new Set(getAllReachable(filePath, new Set())),
      maxDepth: cached.maxDepth,
    };
  }

  const importerDir = path.dirname(filePath);
  const specifiers = extractImports(filePath);
  const directImports: string[] = [];

  const reachable = new Set<string>();
  let maxDepth = 0;

  for (const spec of specifiers) {
    const resolved = resolveSpecifier(spec, importerDir);
    if (resolved === null) continue;
    // Deduplicate repeated imports of the same module.
    if (directImports.includes(resolved)) continue;
    directImports.push(resolved);

    if (ancestors.has(resolved)) continue; // circular

    reachable.add(resolved);

    // @monkeytype packages are leaf nodes — don't recurse
    if (resolved.startsWith("@monkeytype/")) {
      maxDepth = Math.max(maxDepth, 1);
      continue;
    }

    ancestors.add(resolved);
    const sub = walk(resolved, ancestors);
    ancestors.delete(resolved);

    for (const r of sub.reachable) reachable.add(r);
    maxDepth = Math.max(maxDepth, 1 + sub.maxDepth);
  }

  // A file whose imports were all circular/unrecursed still has depth 1.
  if (directImports.length > 0 && maxDepth === 0) {
    maxDepth = 1;
  }

  cache.set(filePath, {
    directImports,
    totalReachable: reachable.size,
    maxDepth,
  });

  return { reachable, maxDepth };
}
|
||||
|
||||
function getAllReachable(filePath: string, visited: Set<string>): string[] {
|
||||
const info = cache.get(filePath);
|
||||
if (!info) return [];
|
||||
const result: string[] = [];
|
||||
for (const dep of info.directImports) {
|
||||
if (visited.has(dep)) continue;
|
||||
visited.add(dep);
|
||||
result.push(dep);
|
||||
result.push(...getAllReachable(dep, visited));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// --- Colors ---
|
||||
|
||||
const c = {
|
||||
reset: "\x1b[0m",
|
||||
dim: "\x1b[2m",
|
||||
bold: "\x1b[1m",
|
||||
cyan: "\x1b[36m",
|
||||
green: "\x1b[32m",
|
||||
yellow: "\x1b[33m",
|
||||
magenta: "\x1b[35m",
|
||||
red: "\x1b[31m",
|
||||
blue: "\x1b[34m",
|
||||
white: "\x1b[37m",
|
||||
};
|
||||
|
||||
const DEPTH_COLORS = [c.cyan, c.green, c.yellow, c.blue, c.magenta, c.white];
|
||||
|
||||
function depthColor(depth: number): string {
|
||||
return DEPTH_COLORS[depth % DEPTH_COLORS.length] ?? c.cyan;
|
||||
}
|
||||
|
||||
// --- Display ---
|
||||
|
||||
/**
 * True when `filePath` lies outside the analyzed directory (`boundary`),
 * i.e. the import "leaves" the target folder. Always false when a single
 * file is being analyzed (no boundary); @monkeytype packages always count
 * as leaving.
 */
function leavesFolder(filePath: string): boolean {
  if (boundary === null) return false;
  if (filePath.startsWith("@monkeytype/")) return true;
  return !filePath.startsWith(boundary + "/");
}
|
||||
|
||||
function displayPath(filePath: string): string {
|
||||
if (filePath.startsWith(ROOT + "/")) {
|
||||
return path.relative(ROOT, filePath);
|
||||
}
|
||||
return filePath;
|
||||
}
|
||||
|
||||
/**
 * Recursively prints the import tree rooted at `filePath`.
 *
 * Uses the populated `cache` for per-file stats, `printed` to collapse
 * files already shown ("[seen above]"), and `ancestors` to mark circular
 * imports. `prefix`/`isLast` drive the box-drawing connectors; `depth`
 * selects the line color and is capped by the --depth CLI limit.
 */
function printTree(
  filePath: string,
  ancestors: Set<string>,
  prefix: string,
  isLast: boolean,
  isRoot: boolean,
  depth: number = 0,
): void {
  const info = cache.get(filePath);
  const dp = displayPath(filePath);
  const connector = isRoot ? "" : isLast ? "└── " : "├── ";
  const dc = depthColor(depth);

  // Tag imports that leave the analyzed folder with a red [↑].
  const leaves = !isRoot && leavesFolder(filePath);
  const leavesTag = leaves ? ` ${c.red}[↑]${c.reset}` : "";

  if (!info) {
    // leaf node (e.g. @monkeytype package)
    console.log(`${c.dim}${prefix}${connector}${dp}${c.reset}${leavesTag}`);
    return;
  }

  const stats =
    info.directImports.length > 0
      ? ` ${c.dim}(direct: ${info.directImports.length}, total: ${info.totalReachable}, depth: ${info.maxDepth})${c.reset}`
      : "";

  const nameStyle = isRoot ? c.bold + dc : dc;
  const seen = !isRoot && printed.has(filePath);
  const seenTag = seen ? ` ${c.dim}[seen above]${c.reset}` : "";
  console.log(
    `${c.dim}${prefix}${connector}${c.reset}${nameStyle}${dp}${c.reset}${stats}${leavesTag}${seenTag}`,
  );

  // Don't expand children for files already printed or past the depth cap.
  if (seen || depth >= maxDepthLimit) return;
  printed.add(filePath);

  const childPrefix = isRoot ? "" : prefix + (isLast ? " " : "│ ");

  // Top-level children are ordered by transitive import count (largest first).
  const deps = [...info.directImports];
  if (depth === 0) {
    deps.sort((a, b) => {
      const ta = cache.get(a)?.totalReachable ?? 0;
      const tb = cache.get(b)?.totalReachable ?? 0;
      return tb - ta;
    });
  }

  for (let i = 0; i < deps.length; i++) {
    const dep = deps[i];
    if (dep === undefined) continue;
    const last = i === deps.length - 1;

    if (ancestors.has(dep)) {
      // Circular import: print a marker instead of recursing.
      const cc = last ? "└── " : "├── ";
      console.log(
        `${c.dim}${childPrefix}${cc}${c.reset}${c.red}[circular] ${displayPath(dep)}${c.reset}`,
      );
      continue;
    }

    ancestors.add(dep);
    printTree(dep, ancestors, childPrefix, last, false, depth + 1);
    ancestors.delete(dep);
  }
}
|
||||
|
||||
// --- Main ---
|
||||
|
||||
for (const entry of entryPoints) {
|
||||
if (!fs.existsSync(entry)) {
|
||||
console.log(`File not found: ${entry}`);
|
||||
continue;
|
||||
}
|
||||
walk(entry, new Set([entry]));
|
||||
}
|
||||
|
||||
entryPoints.sort((a, b) => {
|
||||
const ta = cache.get(a)?.totalReachable ?? 0;
|
||||
const tb = cache.get(b)?.totalReachable ?? 0;
|
||||
return tb - ta;
|
||||
});
|
||||
|
||||
for (const entry of entryPoints) {
|
||||
if (!cache.has(entry)) continue;
|
||||
printTree(entry, new Set([entry]), "", true, true);
|
||||
if (entryPoints.length > 1) console.log();
|
||||
}
|
||||
|
||||
// --- Summary ---
|
||||
|
||||
let totalDirect = 0;
|
||||
let totalTransitive = 0;
|
||||
const uniqueDirect = new Set<string>();
|
||||
const uniqueTransitive = new Set<string>();
|
||||
let maxDirect = 0;
|
||||
let maxDirectFile = "";
|
||||
let maxTransitive = 0;
|
||||
let maxTransitiveFile = "";
|
||||
let maxDepthSeen = 0;
|
||||
let maxDepthFile = "";
|
||||
|
||||
for (const entry of entryPoints) {
|
||||
const info = cache.get(entry);
|
||||
if (!info) continue;
|
||||
totalDirect += info.directImports.length;
|
||||
totalTransitive += info.totalReachable;
|
||||
for (const dep of info.directImports) {
|
||||
uniqueDirect.add(dep);
|
||||
}
|
||||
for (const dep of getAllReachable(entry, new Set())) {
|
||||
uniqueTransitive.add(dep);
|
||||
}
|
||||
if (info.directImports.length > maxDirect) {
|
||||
maxDirect = info.directImports.length;
|
||||
maxDirectFile = entry;
|
||||
}
|
||||
if (info.totalReachable > maxTransitive) {
|
||||
maxTransitive = info.totalReachable;
|
||||
maxTransitiveFile = entry;
|
||||
}
|
||||
if (info.maxDepth > maxDepthSeen) {
|
||||
maxDepthSeen = info.maxDepth;
|
||||
maxDepthFile = entry;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`${c.dim}───────────────────────────${c.reset}`);
|
||||
console.log(`Target: ${c.bold}${displayPath(resolved)}${c.reset}`);
|
||||
console.log(`Total direct: ${c.bold}${totalDirect}${c.reset}`);
|
||||
console.log(`Total transitive: ${c.bold}${totalTransitive}${c.reset}`);
|
||||
console.log(`Unique direct: ${c.bold}${uniqueDirect.size}${c.reset}`);
|
||||
console.log(`Unique transitive: ${c.bold}${uniqueTransitive.size}${c.reset}`);
|
||||
console.log(
|
||||
`Max direct: ${c.bold}${maxDirect}${c.reset} ${c.dim}(${displayPath(maxDirectFile)})${c.reset}`,
|
||||
);
|
||||
console.log(
|
||||
`Max transitive: ${c.bold}${maxTransitive}${c.reset} ${c.dim}(${displayPath(maxTransitiveFile)})${c.reset}`,
|
||||
);
|
||||
console.log(
|
||||
`Max depth: ${c.bold}${maxDepthSeen}${c.reset} ${c.dim}(${displayPath(maxDepthFile)})${c.reset}`,
|
||||
);
|
||||
|
||||
if (boundary !== null) {
|
||||
const externalDirect = new Set<string>();
|
||||
const externalTransitive = new Set<string>();
|
||||
for (const entry of entryPoints) {
|
||||
const info = cache.get(entry);
|
||||
if (!info) continue;
|
||||
for (const dep of info.directImports) {
|
||||
if (leavesFolder(dep)) externalDirect.add(dep);
|
||||
}
|
||||
for (const dep of getAllReachable(entry, new Set())) {
|
||||
if (leavesFolder(dep)) externalTransitive.add(dep);
|
||||
}
|
||||
}
|
||||
console.log(
|
||||
`Leaves folder ${c.red}[↑]${c.reset}: ${c.bold}${externalDirect.size}${c.reset} direct, ${c.bold}${externalTransitive.size}${c.reset} transitive`,
|
||||
);
|
||||
}
|
||||
145
frontend/scripts/short-quotes.json
Normal file
145
frontend/scripts/short-quotes.json
Normal file
@@ -0,0 +1,145 @@
|
||||
{
|
||||
"arabic": [20, 21, 25, 44, 45, 54, 72, 78],
|
||||
"arabic_egypt": [23, 24, 25, 26, 27, 28, 29, 30, 32, 43, 47, 50, 51],
|
||||
"azerbaijani": [5, 13, 14, 24],
|
||||
"bangla": [
|
||||
1, 5, 11, 15, 20, 21, 22, 25, 26, 27, 28, 30, 34, 38, 40, 41, 42, 43, 47,
|
||||
50, 51, 53, 54, 63, 66, 72, 73, 76, 77, 81, 86, 87, 88, 89, 92, 93, 98, 101,
|
||||
102, 109, 110, 112, 116, 118, 119, 120, 122, 123, 124, 126, 127, 128, 129,
|
||||
131, 132, 139, 145, 149, 150, 152, 158, 165, 166, 167, 168, 170, 172, 174,
|
||||
175, 179, 180, 181, 182, 184, 188, 191, 193, 198, 199, 200, 202, 203, 206,
|
||||
207, 210, 213, 214, 217, 218, 220, 222, 223, 224, 225, 227, 232, 234, 236,
|
||||
237, 239, 240, 241, 244, 245, 246, 249, 250, 255, 258, 259, 262, 263, 267,
|
||||
270, 272, 273, 275
|
||||
],
|
||||
"belarusian": [10, 12, 13, 94],
|
||||
"belarusian_lacinka": [10, 12, 13],
|
||||
"chinese_simplified": [
|
||||
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22,
|
||||
23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
|
||||
42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60,
|
||||
61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
|
||||
80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98,
|
||||
99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113,
|
||||
114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128,
|
||||
129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143,
|
||||
144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158,
|
||||
159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173,
|
||||
174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188,
|
||||
189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203,
|
||||
204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
|
||||
219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233,
|
||||
234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248,
|
||||
249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263,
|
||||
264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278,
|
||||
279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293,
|
||||
294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308,
|
||||
309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323,
|
||||
324, 325, 326, 327, 328, 329, 330
|
||||
],
|
||||
"code_assembly": [2],
|
||||
"code_bash": [1],
|
||||
"code_c++": [4, 10, 11, 17, 18, 20],
|
||||
"code_c": [23],
|
||||
"code_css": [1],
|
||||
"code_java": [3],
|
||||
"code_jule": [1, 10, 15, 19, 21, 23, 25],
|
||||
"code_lua": [5, 8],
|
||||
"code_nim": [
|
||||
1, 2, 3, 8, 9, 15, 17, 19, 20, 25, 31, 33, 35, 38, 39, 41, 45, 48, 51, 56,
|
||||
57, 59, 62, 63, 65, 68, 70, 71, 74, 76, 77, 81, 82, 84, 86, 87, 88, 90, 94,
|
||||
95, 98, 99, 105
|
||||
],
|
||||
"code_python": [3],
|
||||
"code_ruby": [12],
|
||||
"code_rust": [
|
||||
1, 2, 3, 5, 6, 7, 8, 17, 18, 19, 24, 26, 27, 29, 30, 31, 32, 34, 37, 38, 39,
|
||||
40, 41, 42, 48, 56, 57, 58, 59, 62, 66, 67, 68, 69, 70, 72, 73, 74, 75, 76,
|
||||
78, 81, 83, 84, 85, 86, 88, 89, 91, 98, 100, 101, 102, 103, 104, 106, 109,
|
||||
110, 115, 117, 118, 119, 120, 124, 130, 132, 133, 135, 138, 140, 141, 142,
|
||||
144, 150, 151, 152, 154, 155, 159, 165, 166, 169, 172, 174, 176, 178, 179,
|
||||
180, 183, 185, 186, 187, 188, 189, 190, 192, 199, 200, 201, 202, 203, 204,
|
||||
205, 206, 207, 208, 209, 210, 218, 222, 224, 227, 231, 233, 235, 238, 241,
|
||||
252, 259, 261, 262, 263, 264, 265, 266, 267, 271, 272, 273, 274, 275, 276,
|
||||
277
|
||||
],
|
||||
"code_yoptascript": [3],
|
||||
"czech": [1, 3, 4, 6, 7, 8, 10, 13, 17],
|
||||
"danish": [14, 23],
|
||||
"docker_file": [5, 9, 10, 17],
|
||||
"dutch": [16, 22, 29],
|
||||
"english": [
|
||||
2, 9, 39, 41, 42, 79, 82, 120, 131, 141, 225, 312, 332, 425, 621, 733, 770,
|
||||
968, 999, 1154, 1270, 1439, 1471, 1621, 2262, 2299, 2313, 2503, 2923, 3264,
|
||||
3814, 4567, 5005, 5015, 5024, 5045, 5049, 5050, 5055, 5074, 5089, 5095,
|
||||
5097, 5098, 5130, 5138, 5143, 5169, 5192, 5206, 5209, 5212, 5257, 5268,
|
||||
5269, 5282, 5286, 5321, 5341, 5346, 5348, 5398, 5409, 5444, 5452, 5453,
|
||||
5561, 5562, 5568, 5574, 5576, 5578, 5579, 5581, 5586, 5589, 5593, 5596,
|
||||
5609, 5610, 5616, 5631, 5637, 5639, 5646, 5668, 5669, 5680, 5688, 5690,
|
||||
5781, 5795, 6104, 6123, 6210, 6214, 6246, 6731, 6788, 6795, 6842, 6844,
|
||||
6847, 6849, 6944, 6945, 6948, 7027, 7643, 7690, 7737
|
||||
],
|
||||
"estonian": [2, 4, 5, 12],
|
||||
"filipino": [6],
|
||||
"french": [15, 36, 46, 47, 51, 52, 59, 61, 85, 86, 87, 88, 89, 90, 104, 106],
|
||||
"georgian": [3, 4],
|
||||
"german": [
|
||||
6, 26, 33, 36, 39, 44, 45, 47, 48, 58, 60, 64, 69, 72, 73, 82, 87, 110, 233,
|
||||
252, 276, 303, 313, 366, 369, 375, 400, 432, 436, 444, 446, 463, 476
|
||||
],
|
||||
"hebrew": [
|
||||
3, 6, 7, 8, 9, 11, 12, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 38, 48,
|
||||
68, 71, 72, 73
|
||||
],
|
||||
"hindi": [38],
|
||||
"hungarian": [9],
|
||||
"icelandic": [7],
|
||||
"indonesian": [
|
||||
4, 7, 11, 12, 14, 16, 23, 33, 45, 75, 81, 83, 101, 104, 117, 120, 122, 153,
|
||||
200, 204, 205, 207, 211, 212
|
||||
],
|
||||
"irish": [
|
||||
2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24,
|
||||
25, 26, 28, 29, 31, 32, 33, 35, 46, 47, 48, 49, 51, 52, 53, 54, 57, 59, 60,
|
||||
61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79,
|
||||
80, 81, 82, 83, 84, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 98, 99, 100,
|
||||
101, 102, 103, 105, 106, 107, 108, 109, 110, 111, 112, 114, 115, 117, 118
|
||||
],
|
||||
"italian": [23, 28, 36, 74, 75, 78, 79, 80, 88, 93, 96, 151],
|
||||
"kannada": [8, 17, 20, 21, 22, 23, 24, 26, 27],
|
||||
"kazakh": [18],
|
||||
"korean": [1, 3, 4, 5, 6, 7, 8, 9, 13, 15, 17],
|
||||
"lithuanian": [45],
|
||||
"malagasy": [2],
|
||||
"mongolian": [25, 44, 58, 59],
|
||||
"norwegian_bokmal": [
|
||||
6, 10, 13, 17, 22, 24, 28, 30, 33, 39, 47, 49, 114, 126, 127, 128, 129
|
||||
],
|
||||
"norwegian_nynorsk": [1, 2, 7, 26, 28],
|
||||
"persian": [4, 8, 15],
|
||||
"polish": [
|
||||
2, 7, 9, 10, 12, 16, 17, 29, 31, 34, 40, 42, 43, 67, 68, 74, 82, 83, 84, 91,
|
||||
119, 121, 123, 134, 145, 155, 161, 206, 208, 212, 218, 236, 237
|
||||
],
|
||||
"portuguese": [11, 20, 57, 66, 91],
|
||||
"romanian": [68],
|
||||
"russian": [
|
||||
1, 5, 14, 15, 26, 60, 68, 71, 74, 80, 91, 95, 101, 116, 130, 137, 150, 152,
|
||||
163, 170, 266, 285, 358, 373, 393, 414, 457, 461, 463, 500, 574, 575, 607,
|
||||
630, 663, 668, 736, 748, 771, 790, 806, 918, 922, 935, 940, 1009, 1029
|
||||
],
|
||||
"serbian": [6, 7],
|
||||
"spanish": [27, 35, 60, 67, 150],
|
||||
"swedish": [62],
|
||||
"tamil": [27, 30, 32],
|
||||
"thai": [3, 14, 15],
|
||||
"turkish": [
|
||||
1, 8, 9, 10, 11, 13, 14, 15, 16, 17, 20, 22, 23, 24, 25, 26, 27, 28, 29, 30,
|
||||
50, 61, 64, 66, 67, 68, 69, 117, 122
|
||||
],
|
||||
"ukrainian": [
|
||||
9, 119, 120, 121, 122, 123, 124, 126, 127, 131, 132, 133, 135, 136, 137,
|
||||
138, 139, 140, 141, 143, 145, 147
|
||||
],
|
||||
"vietnamese": [18, 25, 33, 46, 63, 66, 78, 88]
|
||||
}
|
||||
Reference in New Issue
Block a user