Merged in feat/lokalise-rebuild (pull request #2993)

Feat/lokalise rebuild

* chore(lokalise): update translation ids

* chore(lokalise): easier to switch between projects

* chore(lokalise): update translation ids

* .

* .

* .

* .

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* .

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* .

* .

* chore(lokalise): update translation ids

* chore(lokalise): update translation ids

* chore(lokalise): new translations

* merge

* switch to errors for missing ids

* merge

* sync translations


Approved-by: Linus Flood
This commit is contained in:
Joakim Jäderberg
2025-10-22 11:00:03 +00:00
parent bdfe2ab213
commit aafad9781f
499 changed files with 93363 additions and 99164 deletions

View File

@@ -1,268 +1,279 @@
import fs from "node:fs/promises"
import { performance, PerformanceObserver } from "node:perf_hooks"
import fs from "node:fs/promises";
import { performance, PerformanceObserver } from "node:perf_hooks";
import { LokaliseApi } from "@lokalise/node-api"
import AdmZip from "adm-zip"
import { LokaliseApi } from "@lokalise/node-api";
import AdmZip from "adm-zip";
import { createLogger } from "@scandic-hotels/common/logger/createLogger"
import { createLogger } from "@scandic-hotels/common/logger/createLogger";
const projectId = "4194150766ff28c418f010.39532200"
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY })
const newProjectId = "7959806168ede449b7bf69.36591916";
const oldProjectId = "4194150766ff28c418f010.39532200";
const projectId = newProjectId;
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY });
const lokaliseLogger = createLogger("lokalise")
const lokaliseLogger = createLogger("lokalise");
let resolvePerf: (value?: unknown) => void
let resolvePerf: (value?: unknown) => void;
const performanceMetrics = new Promise((resolve) => {
resolvePerf = resolve
})
resolvePerf = resolve;
});
const perf = new PerformanceObserver((items) => {
const entries = items.getEntries()
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks()
perf.disconnect()
if (typeof resolvePerf === "function") {
resolvePerf()
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
)
const entries = items.getEntries();
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks();
perf.disconnect();
if (typeof resolvePerf === "function") {
resolvePerf();
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
);
}
}
}
performance.clearMeasures()
})
performance.clearMeasures();
});
/**
 * Polls Lokalise once per second until the queued upload process settles.
 *
 * Resolves when the process status becomes "finished"; rejects with the
 * underlying cause (the caught error, or the failed process object) when the
 * status becomes "failed" or the status check itself throws.
 *
 * NOTE(review): any status other than "finished"/"failed" keeps the poll
 * running indefinitely — presumably Lokalise always terminates in one of the
 * two; confirm there is no "cancelled" state that would hang this promise.
 */
async function waitUntilUploadDone(processId: string) {
  return new Promise<void>((resolve, reject) => {
    const interval = setInterval(async () => {
      try {
        performance.mark("waitUntilUploadDoneStart");
        lokaliseLogger.debug("Checking upload status...");

        performance.mark("getProcessStart");
        const process = await lokaliseApi.queuedProcesses().get(processId, {
          project_id: projectId,
        });
        performance.mark("getProcessEnd");
        performance.measure(
          "Get Queued Process",
          "getProcessStart",
          "getProcessEnd"
        );

        lokaliseLogger.debug(`Status: ${process.status}`);

        if (process.status === "finished") {
          clearInterval(interval);
          performance.mark("waitUntilUploadDoneEnd", { detail: "success" });
          performance.measure(
            "Wait on upload",
            "waitUntilUploadDoneStart",
            "waitUntilUploadDoneEnd"
          );
          resolve();
        } else if (process.status === "failed") {
          // Funnel the failed process through the catch block below so the
          // interval is cleared and the failure is logged in one place.
          throw process;
        }
      } catch (e) {
        clearInterval(interval);
        lokaliseLogger.error("An error occurred:", e);
        performance.mark("waitUntilUploadDoneEnd", { detail: e });
        performance.measure(
          "Wait on upload",
          "waitUntilUploadDoneStart",
          "waitUntilUploadDoneEnd"
        );
        // Fix: reject with the cause instead of a bare reject(), which made
        // the caller's catch receive undefined and lose all failure context.
        reject(e);
      }
    }, 1000);
  });
}
export async function upload(filepath: string) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(`Uploading ${filepath}...`)
try {
lokaliseLogger.debug(`Uploading ${filepath}...`);
performance.mark("uploadStart")
performance.mark("uploadStart");
performance.mark("sourceFileReadStart")
const data = await fs.readFile(filepath, "utf8")
const buff = Buffer.from(data, "utf8")
const base64 = buff.toString("base64")
performance.mark("sourceFileReadEnd")
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
)
performance.mark("sourceFileReadStart");
const data = await fs.readFile(filepath, "utf8");
const buff = Buffer.from(data, "utf8");
const base64 = buff.toString("base64");
performance.mark("sourceFileReadEnd");
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
);
performance.mark("lokaliseUploadInitStart")
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: true,
})
performance.mark("lokaliseUploadInitEnd")
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
)
performance.mark("lokaliseUploadInitStart");
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: false,
});
performance.mark("lokaliseUploadInitEnd");
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
);
performance.mark("lokaliseUploadStart")
await waitUntilUploadDone(bgProcess.process_id)
performance.mark("lokaliseUploadEnd")
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
)
performance.mark("lokaliseUploadStart");
await waitUntilUploadDone(bgProcess.process_id);
performance.mark("lokaliseUploadEnd");
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
);
lokaliseLogger.debug("Upload successful")
} catch (e) {
lokaliseLogger.error("Upload failed", e)
} finally {
performance.mark("uploadEnd")
lokaliseLogger.debug("Upload successful");
} catch (e) {
lokaliseLogger.error("Upload failed", e);
} finally {
performance.mark("uploadEnd");
performance.measure("Upload operation", "uploadStart", "uploadEnd")
}
performance.measure("Upload operation", "uploadStart", "uploadEnd");
}
performance.measure("done")
performance.measure("done");
await performanceMetrics
await performanceMetrics;
}
export async function download(extractPath: string, all: boolean = false) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
)
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
);
performance.mark("downloadStart")
performance.mark("downloadStart");
performance.mark("lokaliseDownloadInitStart")
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: true,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
})
performance.mark("lokaliseDownloadInitEnd")
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
)
performance.mark("lokaliseDownloadInitStart");
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: false,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
});
const { bundle_url } = downloadResponse
performance.mark("lokaliseDownloadInitEnd");
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
);
performance.mark("lokaliseDownloadStart")
const bundleResponse = await fetch(bundle_url)
performance.mark("lokaliseDownloadEnd")
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
)
const { bundle_url } = downloadResponse;
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart")
const arrayBuffer = await bundleResponse.arrayBuffer()
const buffer = Buffer.from(new Uint8Array(arrayBuffer))
const zip = new AdmZip(buffer)
zip.extractAllTo(extractPath, true)
performance.mark("unpackTranslationsEnd")
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
)
performance.mark("lokaliseDownloadStart");
const bundleResponse = await fetch(bundle_url);
performance.mark("lokaliseDownloadEnd");
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
);
lokaliseLogger.debug("Download successful")
} else {
throw bundleResponse
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart");
const arrayBuffer = await bundleResponse.arrayBuffer();
const buffer = Buffer.from(new Uint8Array(arrayBuffer));
const zip = new AdmZip(buffer);
zip.extractAllTo(extractPath, true);
performance.mark("unpackTranslationsEnd");
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
);
lokaliseLogger.debug("Download successful");
} else {
throw bundleResponse;
}
} catch (e) {
lokaliseLogger.error("Download failed", e);
} finally {
performance.mark("downloadEnd");
performance.measure(
"Download operation",
"downloadStart",
"downloadEnd"
);
}
} catch (e) {
lokaliseLogger.error("Download failed", e)
} finally {
performance.mark("downloadEnd")
performance.measure("Download operation", "downloadStart", "downloadEnd")
}
performance.measure("done");
performance.measure("done")
await performanceMetrics
await performanceMetrics;
}
export async function deleteBulk(keyNames: string[]) {
perf.observe({ type: "measure" })
perf.observe({ type: "measure" });
try {
performance.mark("bulkDeleteStart")
try {
performance.mark("bulkDeleteStart");
let keysToDelete: number[] = []
let cursor: string | undefined = undefined
let hasNext = true
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
})
let keysToDelete: number[] = [];
let cursor: string | undefined = undefined;
let hasNext = true;
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
});
cursor = keys.nextCursor ?? undefined
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id)
}
}
})
cursor = keys.nextCursor ?? undefined;
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id);
}
}
});
if (!keys.hasNextCursor()) {
hasNext = false
}
} while (hasNext)
if (!keys.hasNextCursor()) {
hasNext = false;
}
} while (hasNext);
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId })
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId });
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
)
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
);
return response
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e)
} finally {
performance.mark("bulkDeleteEnd")
return response;
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e);
} finally {
performance.mark("bulkDeleteEnd");
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
)
}
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
);
}
}