web/apps/scandic-web/i18n/tooling/lokalise.ts
Michael Zetterberg 2c9f3c4c5e feat: lokalise clean
2025-06-12 08:56:10 +00:00


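// Tooling for syncing scandic-web translations with Lokalise: uploading the
// English source file, downloading translated bundles, and bulk-deleting keys,
// with timing measurements reported through a PerformanceObserver.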
import fs from "node:fs/promises"
import { performance, PerformanceObserver } from "node:perf_hooks"
import { LokaliseApi } from "@lokalise/node-api"
import AdmZip from "adm-zip"
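
// Lokalise project for scandic-web; the API client reads its key from the
// LOKALISE_API_KEY environment variable.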
const projectId = "4194150766ff28c418f010.39532200"
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY })
function log(msg: string, ...args: any[]) {
  console.log(`[lokalise] ${msg}`, ...args)
}

function error(msg: string, ...args: any[]) {
  console.error(`[lokalise] ${msg}`, ...args)
}
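
// Performance plumbing: completed measures are logged as they arrive, and the
// special "done" measure triggers mark cleanup, disconnects the observer and
// resolves `performanceMetrics` so callers can await the final metrics flush.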
let resolvePerf: (value?: unknown) => void
const performanceMetrics = new Promise((resolve) => {
  resolvePerf = resolve
})

const perf = new PerformanceObserver((items) => {
  const entries = items.getEntries()
  for (const entry of entries) {
    if (entry.name === "done") {
      // The "done" measure is the last one and is only used for cleanup
      performance.clearMarks()
      perf.disconnect()
      if (typeof resolvePerf === "function") {
        resolvePerf()
      }
    } else {
      log(`[metrics] ${entry.name} completed in ${entry.duration} ms`)
    }
  }
  performance.clearMeasures()
})
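
// Polls the queued Lokalise process every second until it is "finished",
// rejecting if the process fails or a status check throws.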
async function waitUntilUploadDone(processId: string) {
  return new Promise<void>((resolve, reject) => {
    const interval = setInterval(async () => {
      try {
        performance.mark("waitUntilUploadDoneStart")
        log("Checking upload status...")
        performance.mark("getProcessStart")
        const process = await lokaliseApi.queuedProcesses().get(processId, {
          project_id: projectId,
        })
        performance.mark("getProcessEnd")
        performance.measure(
          "Get Queued Process",
          "getProcessStart",
          "getProcessEnd"
        )
        log(`Status: ${process.status}`)
        if (process.status === "finished") {
          clearInterval(interval)
          performance.mark("waitUntilUploadDoneEnd", { detail: "success" })
          performance.measure(
            "Wait on upload",
            "waitUntilUploadDoneStart",
            "waitUntilUploadDoneEnd"
          )
          resolve()
        } else if (process.status === "failed") {
          throw process
        }
      } catch (e) {
        clearInterval(interval)
        error("An error occurred:", e)
        performance.mark("waitUntilUploadDoneEnd", { detail: "error" })
        performance.measure(
          "Wait on upload",
          "waitUntilUploadDoneStart",
          "waitUntilUploadDoneEnd"
        )
        reject(e)
      }
    }, 1000)
  })
}
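
// Uploads the given JSON file to Lokalise as the English source ("en.json")
// and waits for the queued background import to finish.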
export async function upload(filepath: string) {
  perf.observe({ type: "measure" })
  try {
    log(`Uploading ${filepath}...`)
    performance.mark("uploadStart")
    performance.mark("sourceFileReadStart")
    const data = await fs.readFile(filepath, "utf8")
    const buff = Buffer.from(data, "utf8")
    const base64 = buff.toString("base64")
    performance.mark("sourceFileReadEnd")
    performance.measure(
      "Read source file",
      "sourceFileReadStart",
      "sourceFileReadEnd"
    )
    performance.mark("lokaliseUploadInitStart")
    const bgProcess = await lokaliseApi.files().upload(projectId, {
      data: base64,
      filename: "en.json",
      lang_iso: "en",
      detect_icu_plurals: true,
      format: "json",
      convert_placeholders: true,
      replace_modified: true,
    })
    performance.mark("lokaliseUploadInitEnd")
    performance.measure(
      "Upload init",
      "lokaliseUploadInitStart",
      "lokaliseUploadInitEnd"
    )
    performance.mark("lokaliseUploadStart")
    await waitUntilUploadDone(bgProcess.process_id)
    performance.mark("lokaliseUploadEnd")
    performance.measure(
      "Upload transfer",
      "lokaliseUploadStart",
      "lokaliseUploadEnd"
    )
    log("Upload successful")
  } catch (e) {
    error("Upload failed", e)
  } finally {
    performance.mark("uploadEnd")
    performance.measure("Upload operation", "uploadStart", "uploadEnd")
  }
  performance.measure("done")
  await performanceMetrics
}
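
// Downloads a translation bundle from Lokalise and extracts it to `extractPath`.
// By default only translated, non-hidden keys are exported; pass `all = true`
// to include everything.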
export async function download(extractPath: string, all: boolean = false) {
  perf.observe({ type: "measure" })
  try {
    log(
      all
        ? "Downloading all translations..."
        : "Downloading filtered translations..."
    )
    performance.mark("downloadStart")
    performance.mark("lokaliseDownloadInitStart")
    const downloadResponse = await lokaliseApi.files().download(projectId, {
      format: "json_structured",
      indentation: "2sp",
      placeholder_format: "icu",
      plural_format: "icu",
      icu_numeric: true,
      bundle_structure: "%LANG_ISO%.%FORMAT%",
      directory_prefix: "",
      filter_data: all ? [] : ["translated", "nonhidden"],
      export_empty_as: "skip",
    })
    performance.mark("lokaliseDownloadInitEnd")
    performance.measure(
      "Download init",
      "lokaliseDownloadInitStart",
      "lokaliseDownloadInitEnd"
    )
    const { bundle_url } = downloadResponse
    performance.mark("lokaliseDownloadStart")
    const bundleResponse = await fetch(bundle_url)
    performance.mark("lokaliseDownloadEnd")
    performance.measure(
      "Download transfer",
      "lokaliseDownloadStart",
      "lokaliseDownloadEnd"
    )
    if (bundleResponse.ok) {
      performance.mark("unpackTranslationsStart")
      const arrayBuffer = await bundleResponse.arrayBuffer()
      const buffer = Buffer.from(new Uint8Array(arrayBuffer))
      const zip = new AdmZip(buffer)
      zip.extractAllTo(extractPath, true)
      performance.mark("unpackTranslationsEnd")
      performance.measure(
        "Unpacking translations",
        "unpackTranslationsStart",
        "unpackTranslationsEnd"
      )
      log("Download successful")
    } else {
      throw bundleResponse
    }
  } catch (e) {
    error("Download failed", e)
  } finally {
    performance.mark("downloadEnd")
    performance.measure("Download operation", "downloadStart", "downloadEnd")
  }
  performance.measure("done")
  await performanceMetrics
}
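
// Deletes keys from the Lokalise project whose "web" platform name matches one
// of `keyNames`, paging through all keys with cursor pagination before issuing
// a single bulk delete.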
export async function deleteBulk(keyNames: string[]) {
  perf.observe({ type: "measure" })
  try {
    performance.mark("bulkDeleteStart")
    const keysToDelete: number[] = []
    let cursor: string | undefined = undefined
    let hasNext = true
    do {
      const keys = await lokaliseApi.keys().list({
        project_id: projectId,
        limit: 100,
        pagination: "cursor",
        cursor,
      })
      cursor = keys.nextCursor ?? undefined
      keys.items.forEach((key) => {
        if (key.key_id && key.key_name.web) {
          if (keyNames.includes(key.key_name.web)) {
            keysToDelete.push(key.key_id)
          }
        }
      })
      if (!keys.hasNextCursor()) {
        hasNext = false
      }
    } while (hasNext)
    const response = await lokaliseApi
      .keys()
      .bulk_delete(keysToDelete, { project_id: projectId })
    log(`Bulk delete successful, removed ${keysToDelete.length} keys`)
    return response
  } catch (e) {
    error("Bulk delete failed", e)
  } finally {
    performance.mark("bulkDeleteEnd")
    performance.measure(
      "Bulk delete operation",
      "bulkDeleteStart",
      "bulkDeleteEnd"
    )
  }
}