Merged in feat/lokalise-rebuild (pull request #2993)

Feat/lokalise rebuild

* chore(lokalise): update translation ids

* chore(lokalise): easier to switch between projects

* chore(lokalise): update translation ids

* .

* .

* .

* .

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* .

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* chore(lokalise): new translations

* merge

* switch to errors for missing id's

* merge

* sync translations


Approved-by: Linus Flood
Author: Joakim Jäderberg
Date: 2025-10-22 11:00:03 +00:00
parent bdfe2ab213
commit aafad9781f
499 changed files with 93363 additions and 99164 deletions

View File

@@ -1,268 +1,279 @@
import fs from "node:fs/promises"
import { performance, PerformanceObserver } from "node:perf_hooks"
import fs from "node:fs/promises";
import { performance, PerformanceObserver } from "node:perf_hooks";
import { LokaliseApi } from "@lokalise/node-api"
import AdmZip from "adm-zip"
import { LokaliseApi } from "@lokalise/node-api";
import AdmZip from "adm-zip";
import { createLogger } from "@scandic-hotels/common/logger/createLogger"
import { createLogger } from "@scandic-hotels/common/logger/createLogger";
const projectId = "4194150766ff28c418f010.39532200"
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY })
const newProjectId = "7959806168ede449b7bf69.36591916";
const oldProjectId = "4194150766ff28c418f010.39532200";
const projectId = newProjectId;
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY });
const lokaliseLogger = createLogger("lokalise")
const lokaliseLogger = createLogger("lokalise");
let resolvePerf: (value?: unknown) => void
let resolvePerf: (value?: unknown) => void;
const performanceMetrics = new Promise((resolve) => {
resolvePerf = resolve
})
resolvePerf = resolve;
});
const perf = new PerformanceObserver((items) => {
const entries = items.getEntries()
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks()
perf.disconnect()
if (typeof resolvePerf === "function") {
resolvePerf()
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
)
const entries = items.getEntries();
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks();
perf.disconnect();
if (typeof resolvePerf === "function") {
resolvePerf();
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
);
}
}
}
performance.clearMeasures()
})
performance.clearMeasures();
});
async function waitUntilUploadDone(processId: string) {
return new Promise<void>((resolve, reject) => {
const interval = setInterval(async () => {
try {
performance.mark("waitUntilUploadDoneStart")
return new Promise<void>((resolve, reject) => {
const interval = setInterval(async () => {
try {
performance.mark("waitUntilUploadDoneStart");
lokaliseLogger.debug("Checking upload status...")
lokaliseLogger.debug("Checking upload status...");
performance.mark("getProcessStart")
const process = await lokaliseApi.queuedProcesses().get(processId, {
project_id: projectId,
})
performance.mark("getProcessEnd")
performance.measure(
"Get Queued Process",
"getProcessStart",
"getProcessEnd"
)
performance.mark("getProcessStart");
const process = await lokaliseApi
.queuedProcesses()
.get(processId, {
project_id: projectId,
});
performance.mark("getProcessEnd");
performance.measure(
"Get Queued Process",
"getProcessStart",
"getProcessEnd"
);
lokaliseLogger.debug(`Status: ${process.status}`)
lokaliseLogger.debug(`Status: ${process.status}`);
if (process.status === "finished") {
clearInterval(interval)
performance.mark("waitUntilUploadDoneEnd", { detail: "success" })
performance.measure(
"Wait on upload",
"waitUntilUploadDoneStart",
"waitUntilUploadDoneEnd"
)
resolve()
} else if (process.status === "failed") {
throw process
}
} catch (e) {
clearInterval(interval)
lokaliseLogger.error("An error occurred:", e)
performance.mark("waitUntilUploadDoneEnd", { detail: e })
performance.measure(
"Wait on upload",
"waitUntilUploadDoneStart",
"waitUntilUploadDoneEnd"
)
reject()
}
}, 1000)
})
if (process.status === "finished") {
clearInterval(interval);
performance.mark("waitUntilUploadDoneEnd", {
detail: "success",
});
performance.measure(
"Wait on upload",
"waitUntilUploadDoneStart",
"waitUntilUploadDoneEnd"
);
resolve();
} else if (process.status === "failed") {
throw process;
}
} catch (e) {
clearInterval(interval);
lokaliseLogger.error("An error occurred:", e);
performance.mark("waitUntilUploadDoneEnd", { detail: e });
performance.measure(
"Wait on upload",
"waitUntilUploadDoneStart",
"waitUntilUploadDoneEnd"
);
reject();
}
}, 1000);
});
}
export async function upload(filepath: string) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(`Uploading ${filepath}...`)
try {
lokaliseLogger.debug(`Uploading ${filepath}...`);
performance.mark("uploadStart")
performance.mark("uploadStart");
performance.mark("sourceFileReadStart")
const data = await fs.readFile(filepath, "utf8")
const buff = Buffer.from(data, "utf8")
const base64 = buff.toString("base64")
performance.mark("sourceFileReadEnd")
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
)
performance.mark("sourceFileReadStart");
const data = await fs.readFile(filepath, "utf8");
const buff = Buffer.from(data, "utf8");
const base64 = buff.toString("base64");
performance.mark("sourceFileReadEnd");
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
);
performance.mark("lokaliseUploadInitStart")
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: true,
})
performance.mark("lokaliseUploadInitEnd")
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
)
performance.mark("lokaliseUploadInitStart");
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: false,
});
performance.mark("lokaliseUploadInitEnd");
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
);
performance.mark("lokaliseUploadStart")
await waitUntilUploadDone(bgProcess.process_id)
performance.mark("lokaliseUploadEnd")
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
)
performance.mark("lokaliseUploadStart");
await waitUntilUploadDone(bgProcess.process_id);
performance.mark("lokaliseUploadEnd");
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
);
lokaliseLogger.debug("Upload successful")
} catch (e) {
lokaliseLogger.error("Upload failed", e)
} finally {
performance.mark("uploadEnd")
lokaliseLogger.debug("Upload successful");
} catch (e) {
lokaliseLogger.error("Upload failed", e);
} finally {
performance.mark("uploadEnd");
performance.measure("Upload operation", "uploadStart", "uploadEnd")
}
performance.measure("Upload operation", "uploadStart", "uploadEnd");
}
performance.measure("done")
performance.measure("done");
await performanceMetrics
await performanceMetrics;
}
export async function download(extractPath: string, all: boolean = false) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
)
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
);
performance.mark("downloadStart")
performance.mark("downloadStart");
performance.mark("lokaliseDownloadInitStart")
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: true,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
})
performance.mark("lokaliseDownloadInitEnd")
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
)
performance.mark("lokaliseDownloadInitStart");
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: false,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
});
const { bundle_url } = downloadResponse
performance.mark("lokaliseDownloadInitEnd");
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
);
performance.mark("lokaliseDownloadStart")
const bundleResponse = await fetch(bundle_url)
performance.mark("lokaliseDownloadEnd")
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
)
const { bundle_url } = downloadResponse;
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart")
const arrayBuffer = await bundleResponse.arrayBuffer()
const buffer = Buffer.from(new Uint8Array(arrayBuffer))
const zip = new AdmZip(buffer)
zip.extractAllTo(extractPath, true)
performance.mark("unpackTranslationsEnd")
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
)
performance.mark("lokaliseDownloadStart");
const bundleResponse = await fetch(bundle_url);
performance.mark("lokaliseDownloadEnd");
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
);
lokaliseLogger.debug("Download successful")
} else {
throw bundleResponse
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart");
const arrayBuffer = await bundleResponse.arrayBuffer();
const buffer = Buffer.from(new Uint8Array(arrayBuffer));
const zip = new AdmZip(buffer);
zip.extractAllTo(extractPath, true);
performance.mark("unpackTranslationsEnd");
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
);
lokaliseLogger.debug("Download successful");
} else {
throw bundleResponse;
}
} catch (e) {
lokaliseLogger.error("Download failed", e);
} finally {
performance.mark("downloadEnd");
performance.measure(
"Download operation",
"downloadStart",
"downloadEnd"
);
}
} catch (e) {
lokaliseLogger.error("Download failed", e)
} finally {
performance.mark("downloadEnd")
performance.measure("Download operation", "downloadStart", "downloadEnd")
}
performance.measure("done");
performance.measure("done")
await performanceMetrics
await performanceMetrics;
}
export async function deleteBulk(keyNames: string[]) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
performance.mark("bulkDeleteStart")
try {
performance.mark("bulkDeleteStart");
let keysToDelete: number[] = []
let cursor: string | undefined = undefined
let hasNext = true
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
})
let keysToDelete: number[] = [];
let cursor: string | undefined = undefined;
let hasNext = true;
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
});
cursor = keys.nextCursor ?? undefined
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id)
}
}
})
cursor = keys.nextCursor ?? undefined;
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id);
}
}
});
if (!keys.hasNextCursor()) {
hasNext = false
}
} while (hasNext)
if (!keys.hasNextCursor()) {
hasNext = false;
}
} while (hasNext);
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId })
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId });
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
)
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
);
return response
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e)
} finally {
performance.mark("bulkDeleteEnd")
return response;
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e);
} finally {
performance.mark("bulkDeleteEnd");
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
)
}
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
);
}
}
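
For orientation, a minimal sketch of how these exports might be wired into a CLI entry point. The module path "./lokalise", the command names, and the default paths are assumptions for illustration, not part of this change:

// cli.ts - hypothetical wrapper around the exported upload/download/deleteBulk helpers.
import { upload, download, deleteBulk } from "./lokalise";

async function main() {
  const [command, ...rest] = process.argv.slice(2);
  switch (command) {
    case "upload":
      // Push the English source file to the Lokalise project.
      await upload(rest[0] ?? "dictionaries/en.json");
      break;
    case "download":
      // Pull translation bundles; "--all" skips the translated/nonhidden filter.
      await download(rest[0] ?? "dictionaries", rest.includes("--all"));
      break;
    case "delete":
      // Remove keys by name, e.g. bun cli.ts delete some.key another.key
      await deleteBulk(rest);
      break;
    default:
      console.log("Usage: bun cli.ts <upload|download|delete> [...args]");
      process.exit(1);
  }
}

main();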

View File

@@ -0,0 +1,79 @@
import fs from "fs";
import * as glob from "glob";
import { syncFile } from "./syncFile";
async function main() {
const args = process.argv.slice(2);
if (args.length < 2) {
console.log(`
Usage:
bun index.ts <path/to/en.json> glob-pattern [--dry-run]
Examples:
# Create mapping and update files in the same directory
bun index.ts dictionaries/en.json 'apps/**/*.{ts,tsx}'
# Dry run - show what would be changed without writing to files
bun index.ts dictionaries/en.json 'apps/**/*.{ts,tsx}' --dry-run
`);
process.exit(1);
}
// Load your messages from the JSON file
const englishTranslations = process.argv[2] || "./locales/en.json";
const translations = flattenTranslations(
JSON.parse(fs.readFileSync(englishTranslations, "utf-8")) as Record<
string,
string | { translation: string }
>
);
const isDryRun = args.includes("--dry-run");
const globPattern = args[1];
// Find all component files
const componentFiles = glob.sync(globPattern);
let filesUpdated = 0;
for (const filePath of componentFiles) {
if (isDryRun) {
console.log(`(dry run) Would sync file: ${filePath}`);
continue;
}
const { updated } = syncFile({ path: filePath, translations });
if (updated) {
filesUpdated++;
console.log(`Updated: ${filePath}`);
}
}
console.log(`\n✓ Sync complete! Updated ${filesUpdated} file(s)`);
}
function flattenTranslations(
translations: Record<string, string | { translation: string }>
): Record<string, string> {
const flat = Object.entries(translations).reduce(
(acc, [key, val]) => {
if (typeof val === "string") {
acc[key] = val;
} else if (
val &&
typeof val === "object" &&
"translation" in val &&
typeof val.translation === "string"
) {
acc[key] = val.translation;
}
return acc;
},
{} as Record<string, string>
);
return flat;
}
// Run CLI if this file is executed directly
if (require.main === module) {
main();
}
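
To make the expected dictionary shape concrete: flattenTranslations accepts both plain-string values and { translation } objects and collapses them to strings. A small self-contained illustration with invented keys, using a simplified inline version of the same flattening since the helper is not exported:

type Entry = string | { translation: string };

// Invented example entries: en.json values may be plain strings or objects.
const input: Record<string, Entry> = {
  "checkout.title": "Checkout",                // plain string form
  "checkout.cta": { translation: "Book now" }, // object form
};

// Simplified inline flattening, mirroring what index.ts does internally:
const flat = Object.fromEntries(
  Object.entries(input).map(([key, val]) => [
    key,
    typeof val === "string" ? val : val.translation,
  ])
);

console.log(flat);
// -> { "checkout.title": "Checkout", "checkout.cta": "Book now" }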

View File

@@ -0,0 +1,92 @@
import { vi, it, expect, beforeEach, afterEach, describe } from "vitest";
describe("syncFile", () => {
beforeEach(() => {
vi.resetModules();
vi.mock("fs", () => {
const existsMock = vi.fn();
const readMock = vi.fn();
const writeMock = vi.fn();
return {
existsSync: existsMock,
readFileSync: readMock,
writeFileSync: writeMock,
default: {
existsSync: existsMock,
readFileSync: readMock,
writeFileSync: writeMock,
},
};
});
});
afterEach(() => {
vi.restoreAllMocks();
});
it("throws if file does not exist", async () => {
const fsMock = (await import("fs")) as any;
fsMock.existsSync.mockReturnValue(false);
const { syncFile } = await import("./syncFile");
expect(() =>
syncFile({ path: "missing.ts", translations: {} })
).toThrow("File not found: missing.ts");
expect(fsMock.readFileSync).not.toHaveBeenCalled();
expect(fsMock.writeFileSync).not.toHaveBeenCalled();
});
it("reads file, calls syncIntlFormatMessage, writes updated content and returns it", async () => {
const fsMock = (await import("fs")) as any;
fsMock.existsSync.mockReturnValue(true);
fsMock.readFileSync.mockReturnValue(
createMockComponent("myKey", "old message")
);
const { syncFile } = await import("./syncFile");
const { fileContent: result } = syncFile({
path: "file.ts",
translations: { myKey: "new message" },
});
expect(fsMock.readFileSync).toHaveBeenCalledWith("file.ts", "utf-8");
expect(fsMock.writeFileSync).toHaveBeenCalled();
expect(result).toEqual(createMockComponent("myKey", "new message"));
});
it("reads file, calls syncIntlFormatMessage, ignores content if there are no matching keys, writes updated content and returns it", async () => {
const fsMock = (await import("fs")) as any;
fsMock.existsSync.mockReturnValue(true);
fsMock.readFileSync.mockReturnValue(
createMockComponent("myKey", "old message")
);
const { syncFile } = await import("./syncFile");
const { fileContent: result } = syncFile({
path: "file.ts",
translations: { someOtherKey: "not present" },
});
expect(fsMock.readFileSync).toHaveBeenCalledWith("file.ts", "utf-8");
expect(fsMock.writeFileSync).toHaveBeenCalled();
expect(result).toEqual(createMockComponent("myKey", "old message"));
});
});
function createMockComponent(translationId: string, defaultMessage: string) {
return `export function TestComponent() {
const intl = useIntl();
const message = intl.formatMessage({
id: "${translationId}",
defaultMessage: "${defaultMessage}",
});
return <div>{message}</div>;
}`;
}

View File

@@ -0,0 +1,25 @@
import fs from "fs";
import { syncIntlFormatMessage } from "./syncIntlFormatMessage";
export function syncFile({
path,
translations,
}: {
path: string;
translations: Record<string, string>;
}) {
if (!fs.existsSync(path)) {
throw new Error(`File not found: ${path}`);
}
const content = fs.readFileSync(path, "utf-8");
const { fileContent, updated } = syncIntlFormatMessage({
translations,
fileContent: content,
});
if (updated) {
fs.writeFileSync(path, fileContent, "utf-8");
}
return { updated, fileContent };
}
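
A short usage sketch of syncFile on its own, outside the CLI. The component path and translation key below are illustrative assumptions:

import { syncFile } from "./syncFile";

// Rewrites defaultMessage values in one source file so they match en.json.
// The path and key are invented; syncFile throws if the file does not exist.
const { updated, fileContent } = syncFile({
  path: "apps/web/Header.tsx",
  translations: { "header.title": "Welcome" },
});

console.log(
  updated
    ? "defaultMessage values were rewritten in place"
    : "no matching translation ids; file left untouched"
);
console.log(`${fileContent.length} characters returned`);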

View File

@@ -0,0 +1,69 @@
import { describe, it, expect } from "vitest";
import { syncFormattedMessage } from "./syncFormattedMessage";
describe("syncFormattedMessage", () => {
it("updated false when given empty file and no translations", () => {
expect(
syncFormattedMessage({ fileContent: "", translations: {} })
).toEqual({ updated: false });
});
it("updates <FormattedMessage> components", () => {
expect(
syncFormattedMessage({
fileContent:
'<FormattedMessage id="myKey" defaultMessage="old message" />',
translations: { myKey: "new message" },
})
).toEqual({
updated: true,
fileContent:
'<FormattedMessage id="myKey" defaultMessage="new message" />',
});
});
it("updates multiline <FormattedMessage>", () => {
expect(
syncFormattedMessage({
fileContent: `<FormattedMessage\n\tid="myKey"\n\tdefaultMessage="old message" />`,
translations: { myKey: "new message" },
})
).toEqual({
updated: true,
fileContent: `<FormattedMessage\n\tid="myKey"\n\tdefaultMessage="new message" />`,
});
});
it("updates multiple <FormattedMessage> components", () => {
expect(
syncFormattedMessage({
fileContent:
'<FormattedMessage id="myKey" defaultMessage="old message" />' +
'<FormattedMessage id="anotherKey" defaultMessage="another old message" />',
translations: {
myKey: "new message",
anotherKey: "another new message",
},
})
).toEqual({
updated: true,
fileContent:
'<FormattedMessage id="myKey" defaultMessage="new message" />' +
'<FormattedMessage id="anotherKey" defaultMessage="another new message" />',
});
});
it("updates nothing if no key was found", () => {
expect(
syncFormattedMessage({
fileContent:
'<FormattedMessage id="myKey" defaultMessage="old message" />' +
'<FormattedMessage id="anotherKey" defaultMessage="another old message" />',
translations: {
unusedKey: "new message",
},
})
).toEqual({
updated: false,
});
});
});

View File

@@ -0,0 +1,28 @@
/**
* Pattern 1: FormattedMessage with id and defaultMessage
* @code <FormattedMessage id="myKey" defaultMessage="old message" />
*/
export function syncFormattedMessage({
translations,
fileContent,
}: {
translations: Record<string, string>;
fileContent: string;
}): { updated: false } | { updated: true; fileContent: string } {
let updated = false;
Object.entries(translations).forEach(([messageId, messageValue]) => {
const regex = new RegExp(
`(id=["']${messageId.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}["'].*?defaultMessage=)["']([^"']*?)["']`,
"gs"
);
if (regex.test(fileContent)) {
const escapedValue = messageValue
.replace(/"/g, '\\"')
.replace(/\n/g, "\\n");
fileContent = fileContent.replace(regex, `$1"${escapedValue}"`);
updated = true;
}
});
return updated ? { updated, fileContent } : { updated };
}

View File

@@ -0,0 +1,102 @@
import { describe, it, expect } from "vitest";
import { syncIntlFormatMessage } from "./syncIntlFormatMessage";
describe("syncIntlFormatMessage", () => {
it("updated false when given empty file and no translations", () => {
expect(
syncIntlFormatMessage({ fileContent: "", translations: {} })
).toEqual({ updated: false, fileContent: "" });
});
it("updates int.formatMessage components", () => {
expect(
syncIntlFormatMessage({
fileContent:
'intl.formatMessage({ id: "myKey", defaultMessage: "old message" })',
translations: { myKey: "new message" },
})
).toEqual({
updated: true,
fileContent:
'intl.formatMessage({ id: "myKey", defaultMessage: "new message" })',
});
});
it("updates multiline int.formatMessage", () => {
expect(
syncIntlFormatMessage({
fileContent: `intl.formatMessage({\n\tid: "myKey",\n\tdefaultMessage: "old message"\n })`,
translations: { myKey: "new message" },
})
).toEqual({
updated: true,
fileContent: `intl.formatMessage({\n\tid: "myKey",\n\tdefaultMessage: "new message"\n })`,
});
});
it("updates multiple int.formatMessage components", () => {
expect(
syncIntlFormatMessage({
fileContent:
'intl.formatMessage({ id: "myKey", defaultMessage: "old message" })' +
'intl.formatMessage({ id: "anotherKey", defaultMessage: "another old message" })',
translations: {
myKey: "new message",
anotherKey: "another new message",
},
})
).toEqual({
updated: true,
fileContent:
'intl.formatMessage({ id: "myKey", defaultMessage: "new message" })' +
'intl.formatMessage({ id: "anotherKey", defaultMessage: "another new message" })',
});
});
it("updates nothing if no key was found", () => {
const fileContent =
'intl.formatMessage({ id: "myKey", defaultMessage: "old message" })' +
'intl.formatMessage({ id: "anotherKey", defaultMessage: "another old message" })';
expect(
syncIntlFormatMessage({
fileContent,
translations: {
unusedKey: "new message",
},
})
).toEqual({
updated: false,
fileContent,
});
});
it("updates nothing if not using intl.formatMessage", () => {
const fileContent =
'formatMessage({ id: "myKey", defaultMessage: "old message" })';
expect(
syncIntlFormatMessage({
fileContent,
translations: {
myKey: "new message",
},
})
).toEqual({
updated: false,
fileContent,
});
});
it("updates nothing if no defaultMessage is present", () => {
const fileContent = 'formatMessage({ id: "myKey" })';
expect(
syncIntlFormatMessage({
fileContent,
translations: {
myKey: "new message",
},
})
).toEqual({
updated: false,
fileContent,
});
});
});

View File

@@ -0,0 +1,47 @@
export function syncIntlFormatMessage({
translations,
fileContent,
}: {
translations: Record<string, string>;
fileContent: string;
}): { updated: boolean; fileContent: string } {
let updated = false;
const entries = Object.entries(translations);
for (const [messageId, messageValue] of entries) {
const escapedId = messageId.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
// Find intl.formatMessage({...}) blocks that contain the specific id
const outerRegex = new RegExp(
`intl\\.formatMessage\\(\\s*\\{([^}]*?\\bid\\s*:\\s*['"]${escapedId}['"][^}]*?)\\}\\s*\\)`,
"gs"
);
fileContent = fileContent.replace(
outerRegex,
(fullMatch, innerObject) => {
// Find defaultMessage: '...' or "..."
const dmRegex =
/defaultMessage\s*:\s*(['"])((?:\\.|[\s\S])*?)\1/;
if (!dmRegex.test(innerObject)) return fullMatch;
const newInner = innerObject.replace(
dmRegex,
(_m: unknown, quote: string, _old: unknown) => {
// Escape backslashes first, then the surrounding quote, and newlines
const escaped = messageValue
.replace(/\\/g, "\\\\")
.replace(new RegExp(quote, "g"), `\\${quote}`)
.replace(/\n/g, "\\n");
return `defaultMessage: ${quote}${escaped}${quote}`;
}
);
updated = true;
return `intl.formatMessage({${newInner}})`;
}
);
}
return { updated, fileContent };
}
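
The escaping comment above (backslashes first, then the matched quote, then newlines) is easiest to see with a replacement value that contains the surrounding quote character. A hedged example with an invented key, assuming a sibling file importing from "./syncIntlFormatMessage":

import { syncIntlFormatMessage } from "./syncIntlFormatMessage";

// The source uses single quotes and the new value contains one, so the
// apostrophe is escaped to keep the string literal valid.
const { updated, fileContent } = syncIntlFormatMessage({
  fileContent: "intl.formatMessage({ id: 'offer.title', defaultMessage: 'old' })",
  translations: { "offer.title": "Member's rate" },
});

console.log(updated); // true
console.log(fileContent);
// -> intl.formatMessage({ id: 'offer.title', defaultMessage: 'Member\'s rate' })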

View File

@@ -0,0 +1,3 @@
export function TestComponent() {
return <div>Test</div>;
}

View File

@@ -0,0 +1,6 @@
export function TestComponent() {
const intl = useIntl();
return (
<div>{intl.formatMessage({ id: "myKey", defaultMessage: "Test" })}</div>
);
}

View File

@@ -0,0 +1,9 @@
import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
globals: true,
environment: "jsdom",
setupFiles: ["./vitest-setup.ts"],
},
});

scripts/i18n/updateIds/.gitignore
View File

@@ -0,0 +1 @@
dictionaries

View File

@@ -0,0 +1,335 @@
#!/usr/bin/env node
import * as fs from "fs/promises";
import * as path from "path";
interface TranslationEntry {
translation: string;
}
interface TranslationFile {
[key: string]: TranslationEntry;
}
interface IdMapping {
oldId: string;
newId: string;
englishText: string;
}
/**
* Creates a normalized string representation of a single translation entry for comparison
* @param entries A translation entry object
* @returns Normalized string for comparison
*/
function normalizeTranslationEntries(entries: TranslationEntry): string {
return JSON.stringify(entries);
}
/**
* Compares two translation files and creates a mapping of old IDs to new IDs
* @param oldFilePath Path to the file with autogenerated IDs
* @param newFilePath Path to the file with manually set IDs
* @returns Array of ID mappings
*/
export async function createIdMapping(
oldFilePath: string,
newFilePath: string
): Promise<IdMapping[]> {
try {
const oldFileContent = await fs.readFile(oldFilePath, "utf-8");
const newFileContent = await fs.readFile(newFilePath, "utf-8");
const oldTranslations: TranslationFile = JSON.parse(oldFileContent);
const newTranslations: TranslationFile = JSON.parse(newFileContent);
const mappings: IdMapping[] = [];
// Create a reverse lookup for new translations based on complete entry structure
const newTranslationsByText = new Map<string, string>();
for (const [newId, entries] of Object.entries(newTranslations)) {
const normalizedEntries = normalizeTranslationEntries(entries);
if (normalizedEntries && normalizedEntries.length > 0) {
newTranslationsByText.set(normalizedEntries, newId);
}
}
// Match old IDs with new IDs based on complete entry structure
for (const [oldId, entries] of Object.entries(oldTranslations)) {
const normalizedEntries = normalizeTranslationEntries(entries);
if (normalizedEntries && normalizedEntries.length > 0) {
const englishText = entries.translation; // Keep the English text for display purposes
const newId = newTranslationsByText.get(normalizedEntries);
if (newId && newId !== oldId) {
mappings.push({
oldId,
newId,
englishText,
});
}
}
}
return mappings;
} catch (error) {
console.error("Error creating ID mapping:", error);
throw error;
}
}
/**
* Updates a translation file by replacing old IDs with new IDs
* @param filePath Path to the translation file to update
* @param mappings Array of ID mappings
* @returns Number of replacements made
*/
export async function updateTranslationFile(
filePath: string,
mappings: IdMapping[]
): Promise<number> {
try {
const fileContent = await fs.readFile(filePath, "utf-8");
const translations: TranslationFile = JSON.parse(fileContent);
let replacementCount = 0;
const updatedTranslations: TranslationFile = {};
// Create a mapping lookup for efficient searching
const mappingLookup = new Map(mappings.map((m) => [m.oldId, m.newId]));
for (const [oldId, entries] of Object.entries(translations)) {
const newId = mappingLookup.get(oldId);
if (newId) {
updatedTranslations[newId] = entries;
replacementCount++;
console.log(`  ${oldId} → ${newId}`);
} else {
updatedTranslations[oldId] = entries;
}
}
// Write the updated file with proper formatting
await fs.writeFile(
filePath,
JSON.stringify(updatedTranslations, null, 2) + "\n",
"utf-8"
);
return replacementCount;
} catch (error) {
console.error(`Error updating file ${filePath}:`, error);
throw error;
}
}
/**
* Updates multiple translation files with the same ID mappings
* @param mappings Array of ID mappings
* @param translationFilePaths Array of file paths to update
*/
export async function updateAllTranslationFiles(
mappings: IdMapping[],
translationFilePaths: string[]
): Promise<void> {
console.log(`Found ${mappings.length} ID mappings to apply`);
for (const filePath of translationFilePaths) {
try {
const fileName = path.basename(filePath);
console.log(`\nUpdating ${fileName}...`);
const replacementCount = await updateTranslationFile(
filePath,
mappings
);
console.log(
` ✓ Made ${replacementCount} replacements in ${fileName}`
);
} catch (error) {
console.error(
` ✗ Failed to update ${path.basename(filePath)}:`,
error
);
}
}
}
/**
* Validates that the mapping is correct by checking if the English text matches
* @param mappings Array of ID mappings
* @param oldFilePath Path to the old translation file
* @param newFilePath Path to the new translation file
*/
export async function validateMappings(
mappings: IdMapping[],
oldFilePath: string,
newFilePath: string
): Promise<{ validCount: number; invalidCount: number }> {
const oldTranslations: TranslationFile = JSON.parse(
await fs.readFile(oldFilePath, "utf-8")
);
const newTranslations: TranslationFile = JSON.parse(
await fs.readFile(newFilePath, "utf-8")
);
console.log("\nValidating mappings:");
let validCount = 0;
let invalidCount = 0;
for (const mapping of mappings) {
const oldEntry = oldTranslations[mapping.oldId];
const newEntry = newTranslations[mapping.newId];
if (!oldEntry || !newEntry) {
console.log(
` ✗ Missing entry for ${mapping.oldId} → ${mapping.newId}`
);
invalidCount++;
continue;
}
const oldNormalized = normalizeTranslationEntries(oldEntry);
const newNormalized = normalizeTranslationEntries(newEntry);
if (oldNormalized === newNormalized) {
validCount++;
} else {
console.log(
` ✗ Entry structure mismatch for ${mapping.oldId} → ${mapping.newId}`
);
console.log(` Old: ${JSON.stringify(oldEntry, null, 2)}`);
console.log(` New: ${JSON.stringify(newEntry, null, 2)}`);
invalidCount++;
}
}
console.log(
`\nValidation complete: ${validCount} valid, ${invalidCount} invalid mappings`
);
return { validCount, invalidCount };
}
/**
* Finds all translation files in a directory
* @param directory Directory to search in
* @returns Array of translation file paths
*/
export async function findTranslationFiles(
directory: string
): Promise<string[]> {
try {
const files = await fs.readdir(directory);
return files
.filter((file) => file.endsWith(".json"))
.map((file) => path.join(directory, file));
} catch (error) {
console.error(`Error reading directory ${directory}:`, error);
return [];
}
}
// CLI functionality
async function main() {
const args = process.argv.slice(2);
if (args.length < 2) {
console.log(`
Usage:
npm run update-ids <old-en.json> <new-en.json> [translation-directory] [--dry-run]
Examples:
# Create mapping and update files in the same directory
npm run update-ids ./old-en.json ./new-en.json
# Create mapping and update files in specific directory
npm run update-ids ./old-en.json ./new-en.json ./translations/
# Just create mapping (no updates)
npm run update-ids ./old-en.json ./new-en.json --dry-run
`);
process.exit(1);
}
const oldFilePath = path.resolve(args[0]);
const newFilePath = path.resolve(args[1]);
const isDryRun = args.includes("--dry-run");
// Determine translation directory
let translationDirectory: string;
if (args[2] && !args[2].startsWith("--")) {
translationDirectory = path.resolve(args[2]);
} else {
translationDirectory = path.dirname(newFilePath);
}
if (isDryRun) {
console.log("\n🔍 DRY RUN MODE");
}
try {
// Create the ID mappings
const mappings = await createIdMapping(oldFilePath, newFilePath);
if (mappings.length === 0) {
console.log(
"No ID mappings found. Files might already be synchronized."
);
return;
}
console.log("Mappings that will be applied:");
mappings.forEach((m) => {
if (!m.englishText) {
console.warn(
` ⚠️ Invalid entry for old ID ${m.oldId} → ${m.newId}`
);
}
console.log(
` ${m.oldId} → ${m.newId} ("${m.englishText?.substring(0, 20)}...")`
);
});
// Validate mappings in dry-run mode (read-only)
const { invalidCount } = await validateMappings(
mappings,
oldFilePath,
newFilePath
);
if (isDryRun) {
console.log("\nExiting - No files were be modified");
return;
}
if (invalidCount > 0) {
console.log("\n✗ Aborting due to invalid mappings");
return;
}
// Find all translation files
const translationFiles =
await findTranslationFiles(translationDirectory);
const filesToUpdate = translationFiles.filter(
(f) => f !== newFilePath && f !== oldFilePath
);
if (filesToUpdate.length === 0) {
console.log("No translation files found to update.");
return;
}
// Update all translation files
await updateAllTranslationFiles(mappings, filesToUpdate);
console.log("\n✓ All translation files updated successfully!");
} catch (error) {
console.error("Error:", error);
process.exit(1);
}
}
// Run CLI if this file is executed directly
if (require.main === module) {
main();
}
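
To ground the matching rule in a concrete case: two dictionaries with identical { translation } entries under different ids produce exactly one mapping. A self-contained sketch in which the ids, the text, and the "./updateIds" import path are assumptions:

import * as fs from "fs/promises";
import * as os from "os";
import * as path from "path";
import { createIdMapping } from "./updateIds"; // import path is an assumption

// Entries are matched on their full JSON structure, so identical
// { translation } objects under different keys yield an oldId -> newId mapping.
const oldEn = { a1b2c3: { translation: "Book a room" } };        // autogenerated id
const newEn = { "booking.cta": { translation: "Book a room" } }; // manually set id

async function demo() {
  // Write both dictionaries to a temp directory so the file-based API can be reused.
  const dir = await fs.mkdtemp(path.join(os.tmpdir(), "update-ids-"));
  const oldPath = path.join(dir, "old-en.json");
  const newPath = path.join(dir, "new-en.json");
  await fs.writeFile(oldPath, JSON.stringify(oldEn));
  await fs.writeFile(newPath, JSON.stringify(newEn));

  console.log(await createIdMapping(oldPath, newPath));
  // -> [ { oldId: "a1b2c3", newId: "booking.cta", englishText: "Book a room" } ]
}

demo();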