Merged in feat/lokalise-sync-260105 (pull request #3387)

feat(lokalise): fix message correctness and sync

* feat(lokalise): fix message correctness and sync
This commit is contained in:
Linus Flood
2026-01-05 11:33:08 +00:00
parent d23137a69d
commit 3d62c16899
4 changed files with 937 additions and 221 deletions

View File

@@ -15,265 +15,259 @@ const lokaliseLogger = createLogger("lokalise");
let resolvePerf: (value?: unknown) => void;
const performanceMetrics = new Promise((resolve) => {
resolvePerf = resolve;
resolvePerf = resolve;
});
const perf = new PerformanceObserver((items) => {
const entries = items.getEntries();
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks();
perf.disconnect();
if (typeof resolvePerf === "function") {
resolvePerf();
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
);
}
const entries = items.getEntries();
for (const entry of entries) {
if (entry.name === "done") {
// This is the last measure meant for clean up
performance.clearMarks();
perf.disconnect();
if (typeof resolvePerf === "function") {
resolvePerf();
}
} else {
lokaliseLogger.info(
`[metrics] ${entry.name} completed in ${entry.duration} ms`
);
}
performance.clearMeasures();
}
performance.clearMeasures();
});
/**
 * Polls the Lokalise queued-process endpoint once per second until the
 * background process identified by `processId` completes.
 *
 * Resolves when the process status becomes "finished". Rejects with the
 * underlying error (or the failed process object when the status is
 * "failed") once polling stops.
 */
async function waitUntilUploadDone(processId: string) {
  return new Promise<void>((resolve, reject) => {
    // Guard against overlapping polls: the interval callback is async and
    // a slow API response could otherwise outlive the 1 s tick.
    let polling = false;
    const interval = setInterval(async () => {
      if (polling) {
        return;
      }
      polling = true;
      try {
        performance.mark("waitUntilUploadDoneStart");

        lokaliseLogger.debug("Checking upload status...");

        performance.mark("getProcessStart");
        const process = await lokaliseApi.queuedProcesses().get(processId, {
          project_id: projectId,
        });
        performance.mark("getProcessEnd");
        performance.measure(
          "Get Queued Process",
          "getProcessStart",
          "getProcessEnd"
        );

        lokaliseLogger.debug(`Status: ${process.status}`);

        if (process.status === "finished") {
          clearInterval(interval);
          performance.mark("waitUntilUploadDoneEnd", {
            detail: "success",
          });
          performance.measure(
            "Wait on upload",
            "waitUntilUploadDoneStart",
            "waitUntilUploadDoneEnd"
          );
          resolve();
        } else if (process.status === "failed") {
          // Surface the failed process object through the catch below.
          throw process;
        }
      } catch (e) {
        clearInterval(interval);
        lokaliseLogger.error("An error occurred:", e);
        performance.mark("waitUntilUploadDoneEnd", { detail: e });
        performance.measure(
          "Wait on upload",
          "waitUntilUploadDoneStart",
          "waitUntilUploadDoneEnd"
        );
        // Bug fix: propagate the failure reason instead of rejecting with
        // undefined, so callers can inspect what went wrong.
        reject(e);
      } finally {
        polling = false;
      }
    }, 1000);
  });
}
export async function upload(filepath: string) {
perf.observe({ type: "measure" });
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(`Uploading ${filepath}...`);
try {
lokaliseLogger.debug(`Uploading ${filepath}...`);
performance.mark("uploadStart");
performance.mark("uploadStart");
performance.mark("sourceFileReadStart");
const data = await fs.readFile(filepath, "utf8");
const buff = Buffer.from(data, "utf8");
const base64 = buff.toString("base64");
performance.mark("sourceFileReadEnd");
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
);
performance.mark("sourceFileReadStart");
const data = await fs.readFile(filepath, "utf8");
const buff = Buffer.from(data, "utf8");
const base64 = buff.toString("base64");
performance.mark("sourceFileReadEnd");
performance.measure(
"Read source file",
"sourceFileReadStart",
"sourceFileReadEnd"
);
performance.mark("lokaliseUploadInitStart");
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: false,
});
performance.mark("lokaliseUploadInitEnd");
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
);
performance.mark("lokaliseUploadInitStart");
const bgProcess = await lokaliseApi.files().upload(projectId, {
data: base64,
filename: "en.json",
lang_iso: "en",
detect_icu_plurals: true,
format: "json",
convert_placeholders: true,
replace_modified: false,
});
performance.mark("lokaliseUploadInitEnd");
performance.measure(
"Upload init",
"lokaliseUploadInitStart",
"lokaliseUploadInitEnd"
);
performance.mark("lokaliseUploadStart");
await waitUntilUploadDone(bgProcess.process_id);
performance.mark("lokaliseUploadEnd");
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
);
performance.mark("lokaliseUploadStart");
await waitUntilUploadDone(bgProcess.process_id);
performance.mark("lokaliseUploadEnd");
performance.measure(
"Upload transfer",
"lokaliseUploadStart",
"lokaliseUploadEnd"
);
lokaliseLogger.debug("Upload successful");
} catch (e) {
lokaliseLogger.error("Upload failed", e);
} finally {
performance.mark("uploadEnd");
lokaliseLogger.debug("Upload successful");
} catch (e) {
lokaliseLogger.error("Upload failed", e);
} finally {
performance.mark("uploadEnd");
performance.measure("Upload operation", "uploadStart", "uploadEnd");
}
performance.measure("Upload operation", "uploadStart", "uploadEnd");
}
performance.measure("done");
performance.measure("done");
await performanceMetrics;
await performanceMetrics;
}
export async function download(extractPath: string, all: boolean = false) {
perf.observe({ type: "measure" });
perf.observe({ type: "measure" });
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
);
try {
lokaliseLogger.debug(
all
? "Downloading all translations..."
: "Downloading filtered translations..."
);
performance.mark("downloadStart");
performance.mark("downloadStart");
performance.mark("lokaliseDownloadInitStart");
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: false,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
});
performance.mark("lokaliseDownloadInitStart");
const downloadResponse = await lokaliseApi.files().download(projectId, {
format: "json_structured",
indentation: "2sp",
placeholder_format: "icu",
plural_format: "icu",
icu_numeric: false,
bundle_structure: "%LANG_ISO%.%FORMAT%",
directory_prefix: "",
filter_data: all ? [] : ["translated", "nonhidden"],
export_empty_as: "skip",
});
performance.mark("lokaliseDownloadInitEnd");
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
);
performance.mark("lokaliseDownloadInitEnd");
performance.measure(
"Download init",
"lokaliseDownloadInitStart",
"lokaliseDownloadInitEnd"
);
const { bundle_url } = downloadResponse;
const { bundle_url } = downloadResponse;
performance.mark("lokaliseDownloadStart");
const bundleResponse = await fetch(bundle_url);
performance.mark("lokaliseDownloadEnd");
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
);
performance.mark("lokaliseDownloadStart");
const bundleResponse = await fetch(bundle_url);
performance.mark("lokaliseDownloadEnd");
performance.measure(
"Download transfer",
"lokaliseDownloadStart",
"lokaliseDownloadEnd"
);
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart");
const arrayBuffer = await bundleResponse.arrayBuffer();
const buffer = Buffer.from(new Uint8Array(arrayBuffer));
const zip = new AdmZip(buffer);
zip.extractAllTo(extractPath, true);
performance.mark("unpackTranslationsEnd");
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
);
if (bundleResponse.ok) {
performance.mark("unpackTranslationsStart");
const arrayBuffer = await bundleResponse.arrayBuffer();
const buffer = Buffer.from(new Uint8Array(arrayBuffer));
const zip = new AdmZip(buffer);
zip.extractAllTo(extractPath, true);
performance.mark("unpackTranslationsEnd");
performance.measure(
"Unpacking translations",
"unpackTranslationsStart",
"unpackTranslationsEnd"
);
lokaliseLogger.debug("Download successful");
} else {
throw bundleResponse;
}
} catch (e) {
lokaliseLogger.error("Download failed", e);
} finally {
performance.mark("downloadEnd");
performance.measure(
"Download operation",
"downloadStart",
"downloadEnd"
);
lokaliseLogger.debug("Download successful");
} else {
throw bundleResponse;
}
} catch (e) {
lokaliseLogger.error("Download failed", e);
} finally {
performance.mark("downloadEnd");
performance.measure("done");
performance.measure("Download operation", "downloadStart", "downloadEnd");
}
await performanceMetrics;
performance.measure("done");
await performanceMetrics;
}
export async function deleteBulk(keyNames: string[]) {
perf.observe({ type: "measure" });
perf.observe({ type: "measure" });
try {
performance.mark("bulkDeleteStart");
try {
performance.mark("bulkDeleteStart");
let keysToDelete: number[] = [];
let cursor: string | undefined = undefined;
let hasNext = true;
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
});
let keysToDelete: number[] = [];
let cursor: string | undefined = undefined;
let hasNext = true;
do {
const keys = await lokaliseApi.keys().list({
project_id: projectId,
limit: 100,
pagination: "cursor",
cursor,
});
cursor = keys.nextCursor ?? undefined;
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id);
}
}
});
cursor = keys.nextCursor ?? undefined;
keys.items.forEach((key) => {
if (key.key_id && key.key_name.web) {
if (keyNames.includes(key.key_name.web)) {
keysToDelete.push(key.key_id);
}
}
});
if (!keys.hasNextCursor()) {
hasNext = false;
}
} while (hasNext);
if (!keys.hasNextCursor()) {
hasNext = false;
}
} while (hasNext);
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId });
const response = await lokaliseApi
.keys()
.bulk_delete(keysToDelete, { project_id: projectId });
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
);
lokaliseLogger.debug(
`Bulk delete successful, removed ${keysToDelete.length} keys`
);
return response;
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e);
} finally {
performance.mark("bulkDeleteEnd");
return response;
} catch (e) {
lokaliseLogger.error("Bulk delete failed", e);
} finally {
performance.mark("bulkDeleteEnd");
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
);
}
performance.measure(
"Bulk delete operation",
"bulkDeleteStart",
"bulkDeleteEnd"
);
}
}