Feat/lokalise rebuild * chore(lokalise): update translation ids * chore(lokalise): easier to switch between projects * chore(lokalise): new translations * switch to errors for missing ids * sync translations Approved-by: Linus Flood
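// Lokalise sync helpers: upload pushes the local en.json source file to the
// Lokalise project, download fetches a translation bundle and unpacks it, and
// deleteBulk removes keys by name. Every step is wrapped in performance
// marks/measures that the PerformanceObserver below logs as `[metrics]` lines.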
import fs from "node:fs/promises";
import { performance, PerformanceObserver } from "node:perf_hooks";

import { LokaliseApi } from "@lokalise/node-api";
import AdmZip from "adm-zip";

import { createLogger } from "@scandic-hotels/common/logger/createLogger";

const newProjectId = "7959806168ede449b7bf69.36591916";
const oldProjectId = "4194150766ff28c418f010.39532200";
const projectId = newProjectId;
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY });

const lokaliseLogger = createLogger("lokalise");

let resolvePerf: (value?: unknown) => void;
const performanceMetrics = new Promise((resolve) => {
  resolvePerf = resolve;
});

const perf = new PerformanceObserver((items) => {
  const entries = items.getEntries();
  for (const entry of entries) {
    if (entry.name === "done") {
      // This is the last measure, meant for cleanup
      performance.clearMarks();
      perf.disconnect();
      if (typeof resolvePerf === "function") {
        resolvePerf();
      }
    } else {
      lokaliseLogger.info(
        `[metrics] ${entry.name} completed in ${entry.duration} ms`
      );
    }
  }
  performance.clearMeasures();
});

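/**
 * Polls the Lokalise queued-process endpoint once a second until the upload
 * process reports "finished". Rejects if the process reports "failed" or the
 * status check itself throws.
 */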
async function waitUntilUploadDone(processId: string) {
  return new Promise<void>((resolve, reject) => {
    const interval = setInterval(async () => {
      try {
        performance.mark("waitUntilUploadDoneStart");

        lokaliseLogger.debug("Checking upload status...");

        performance.mark("getProcessStart");
        const queuedProcess = await lokaliseApi
          .queuedProcesses()
          .get(processId, {
            project_id: projectId,
          });
        performance.mark("getProcessEnd");
        performance.measure(
          "Get Queued Process",
          "getProcessStart",
          "getProcessEnd"
        );

        lokaliseLogger.debug(`Status: ${queuedProcess.status}`);

        if (queuedProcess.status === "finished") {
          clearInterval(interval);
          performance.mark("waitUntilUploadDoneEnd", {
            detail: "success",
          });
          performance.measure(
            "Wait on upload",
            "waitUntilUploadDoneStart",
            "waitUntilUploadDoneEnd"
          );
          resolve();
        } else if (queuedProcess.status === "failed") {
          throw queuedProcess;
        }
      } catch (e) {
        clearInterval(interval);
        lokaliseLogger.error("An error occurred:", e);
        performance.mark("waitUntilUploadDoneEnd", { detail: e });
        performance.measure(
          "Wait on upload",
          "waitUntilUploadDoneStart",
          "waitUntilUploadDoneEnd"
        );
        reject(e);
      }
    }, 1000);
  });
}

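/**
 * Uploads the JSON file at `filepath` to Lokalise as a base64-encoded en.json,
 * waits for the background import process to finish, and logs timing metrics
 * for each step. Failures are logged rather than rethrown.
 */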
export async function upload(filepath: string) {
  perf.observe({ type: "measure" });

  try {
    lokaliseLogger.debug(`Uploading ${filepath}...`);

    performance.mark("uploadStart");

    performance.mark("sourceFileReadStart");
    const data = await fs.readFile(filepath, "utf8");
    const buff = Buffer.from(data, "utf8");
    const base64 = buff.toString("base64");
    performance.mark("sourceFileReadEnd");
    performance.measure(
      "Read source file",
      "sourceFileReadStart",
      "sourceFileReadEnd"
    );

    performance.mark("lokaliseUploadInitStart");
    const bgProcess = await lokaliseApi.files().upload(projectId, {
      data: base64,
      filename: "en.json",
      lang_iso: "en",
      detect_icu_plurals: true,
      format: "json",
      convert_placeholders: true,
      replace_modified: false,
    });
    performance.mark("lokaliseUploadInitEnd");
    performance.measure(
      "Upload init",
      "lokaliseUploadInitStart",
      "lokaliseUploadInitEnd"
    );

    performance.mark("lokaliseUploadStart");
    await waitUntilUploadDone(bgProcess.process_id);
    performance.mark("lokaliseUploadEnd");
    performance.measure(
      "Upload transfer",
      "lokaliseUploadStart",
      "lokaliseUploadEnd"
    );

    lokaliseLogger.debug("Upload successful");
  } catch (e) {
    lokaliseLogger.error("Upload failed", e);
  } finally {
    performance.mark("uploadEnd");

    performance.measure("Upload operation", "uploadStart", "uploadEnd");
  }

  performance.measure("done");

  await performanceMetrics;
}

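/**
 * Downloads a structured JSON bundle from Lokalise (all keys when `all` is
 * true, otherwise only translated, non-hidden keys) and extracts the zipped
 * bundle into `extractPath`. Failures are logged rather than rethrown.
 */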
export async function download(extractPath: string, all: boolean = false) {
  perf.observe({ type: "measure" });

  try {
    lokaliseLogger.debug(
      all
        ? "Downloading all translations..."
        : "Downloading filtered translations..."
    );

    performance.mark("downloadStart");

    performance.mark("lokaliseDownloadInitStart");
    const downloadResponse = await lokaliseApi.files().download(projectId, {
      format: "json_structured",
      indentation: "2sp",
      placeholder_format: "icu",
      plural_format: "icu",
      icu_numeric: false,
      bundle_structure: "%LANG_ISO%.%FORMAT%",
      directory_prefix: "",
      filter_data: all ? [] : ["translated", "nonhidden"],
      export_empty_as: "skip",
    });

    performance.mark("lokaliseDownloadInitEnd");
    performance.measure(
      "Download init",
      "lokaliseDownloadInitStart",
      "lokaliseDownloadInitEnd"
    );

    const { bundle_url } = downloadResponse;

    performance.mark("lokaliseDownloadStart");
    const bundleResponse = await fetch(bundle_url);
    performance.mark("lokaliseDownloadEnd");
    performance.measure(
      "Download transfer",
      "lokaliseDownloadStart",
      "lokaliseDownloadEnd"
    );

    if (bundleResponse.ok) {
      performance.mark("unpackTranslationsStart");
      const arrayBuffer = await bundleResponse.arrayBuffer();
      const buffer = Buffer.from(new Uint8Array(arrayBuffer));
      const zip = new AdmZip(buffer);
      zip.extractAllTo(extractPath, true);
      performance.mark("unpackTranslationsEnd");
      performance.measure(
        "Unpacking translations",
        "unpackTranslationsStart",
        "unpackTranslationsEnd"
      );

      lokaliseLogger.debug("Download successful");
    } else {
      throw bundleResponse;
    }
  } catch (e) {
    lokaliseLogger.error("Download failed", e);
  } finally {
    performance.mark("downloadEnd");

    performance.measure(
      "Download operation",
      "downloadStart",
      "downloadEnd"
    );
  }

  performance.measure("done");

  await performanceMetrics;
}

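/**
 * Resolves the given web key names to Lokalise key ids via cursor pagination
 * and removes them in a single bulk delete. Returns the bulk delete response,
 * or undefined when the operation fails.
 */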
export async function deleteBulk(keyNames: string[]) {
  perf.observe({ type: "measure" });

  try {
    performance.mark("bulkDeleteStart");

    const keysToDelete: number[] = [];
    let cursor: string | undefined = undefined;
    let hasNext = true;
    do {
      const keys = await lokaliseApi.keys().list({
        project_id: projectId,
        limit: 100,
        pagination: "cursor",
        cursor,
      });

      cursor = keys.nextCursor ?? undefined;
      keys.items.forEach((key) => {
        if (key.key_id && key.key_name.web) {
          if (keyNames.includes(key.key_name.web)) {
            keysToDelete.push(key.key_id);
          }
        }
      });

      if (!keys.hasNextCursor()) {
        hasNext = false;
      }
    } while (hasNext);

    const response = await lokaliseApi
      .keys()
      .bulk_delete(keysToDelete, { project_id: projectId });

    lokaliseLogger.debug(
      `Bulk delete successful, removed ${keysToDelete.length} keys`
    );

    return response;
  } catch (e) {
    lokaliseLogger.error("Bulk delete failed", e);
  } finally {
    performance.mark("bulkDeleteEnd");

    performance.measure(
      "Bulk delete operation",
      "bulkDeleteStart",
      "bulkDeleteEnd"
    );
  }
}

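// Example usage, as a minimal sketch. The paths and key name below are
// placeholders, not values from this repository, and LOKALISE_API_KEY must be
// set before the module is loaded, since the LokaliseApi client above is
// constructed at import time:
//
//   await upload("./translations/en.json");
//   await download("./translations");
//   await deleteBulk(["some.obsolete.key"]);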