Merged in feat/sw-2333-package-and-sas-i18n (pull request #2538)
feat(SW-2333): I18n for multiple apps and packages * Set up i18n in partner-sas * Adapt lokalise workflow to monorepo * Fix layout props Approved-by: Linus Flood
This commit is contained in:
55
scripts/i18n/clean.mjs
Normal file
55
scripts/i18n/clean.mjs
Normal file
@@ -0,0 +1,55 @@
|
||||
import { stdin as input, stdout as output } from 'node:process'
|
||||
import * as readline from 'node:readline/promises'
|
||||
|
||||
import { config } from 'dotenv'
|
||||
|
||||
// Interactive prompt used by main() below to confirm prerequisites before deleting keys.
const rl = readline.createInterface({ input, output })

// Load secrets (e.g. LOKALISE_API_KEY, read by ./lokalise) from .env.local.
config({ path: `${process.cwd()}/.env.local` })
|
||||
|
||||
/**
 * Returns the keys present in `json1` but missing from `json2`.
 *
 * @param {Record<string, unknown>} json1 - Object whose keys are checked.
 * @param {Record<string, unknown>} json2 - Object providing the reference key set.
 * @returns {string[]} Keys of `json1` that do not exist in `json2`.
 */
function diffArray(json1, json2) {
  // A Set gives O(1) membership checks instead of Array#includes (O(n) per key).
  const keys2 = new Set(Object.keys(json2))
  return Object.keys(json1).filter((key) => !keys2.has(key))
}
|
||||
|
||||
/**
 * Deletes Lokalise keys that no longer exist in the codebase.
 *
 * Prompts for confirmation that the local snapshots (translations-all/en.json
 * and extracted.json) are fresh, diffs them, and bulk-deletes the stale keys.
 * Exits with code 1 if the user does not confirm.
 */
async function main() {
  const answer = await rl.question(
    'To make sure we use the latest data for the diff, have you run i18n:download AND i18n:extract BEFORE running this? Type yes or no '
  )

  if (answer !== 'yes') {
    console.log('')
    console.warn('Please run i18n:download AND i18n:extract BEFORE running this.')
    rl.close()
    process.exit(1)
  }
  rl.close()

  // Bug fix: a dynamic JSON import resolves to a module *namespace* whose only
  // key is "default" — the parsed JSON lives on `.default`. Without
  // destructuring it, diffArray compared `['default']` vs `['default']` and
  // always returned an empty diff.
  const { default: allLokalise } = await import('./translations-all/en.json', {
    with: {
      type: 'json',
    },
  })
  const { default: fromCodebase } = await import('./extracted.json', {
    with: {
      type: 'json',
    },
  })

  const labelsToRemove = diffArray(allLokalise, fromCodebase)

  // Imported lazily so config() above runs before the Lokalise client reads
  // process.env.LOKALISE_API_KEY.
  const { deleteBulk } = await import('./lokalise')

  await deleteBulk(labelsToRemove)
}

// Bug fix: main() was a floating promise; surface failures and fail the script.
main().catch((e) => {
  console.error(e)
  process.exitCode = 1
})
|
||||
70
scripts/i18n/diff.mjs
Normal file
70
scripts/i18n/diff.mjs
Normal file
@@ -0,0 +1,70 @@
|
||||
import filteredLokalise from "./translations/en.json" with { type: "json" }
|
||||
import allLokalise from "./translations-all/en.json" with { type: "json" }
|
||||
import fromCodebase from "./extracted.json" with { type: "json" }
|
||||
|
||||
/**
 * Returns the keys present in `json1` but missing from `json2`.
 *
 * @param {Record<string, unknown>} json1 - Object whose keys are checked.
 * @param {Record<string, unknown>} json2 - Object providing the reference key set.
 * @returns {string[]} Keys of `json1` that do not exist in `json2`.
 */
function diffArray(json1, json2) {
  // A Set gives O(1) membership checks instead of Array#includes (O(n) per key).
  const keys2 = new Set(Object.keys(json2))
  return Object.keys(json1).filter((key) => !keys2.has(key))
}
|
||||
|
||||
/**
 * Expands a list of label ids into objects carrying the id alongside the
 * matching entry from `arr` (spread, so a missing entry yields just `{ id }`).
 *
 * @param {string[]} ids - Label ids to resolve.
 * @param {Record<string, object>} arr - Lookup table of label entries.
 * @returns {object[]} One object per id, suitable for console.table.
 */
function resolveLabels(ids, arr) {
  const resolved = []
  for (const id of ids) {
    resolved.push({ id, ...arr[id] })
  }
  return resolved
}
|
||||
|
||||
// Keys present in the full export but missing from the filtered
// (translated + non-hidden) export — i.e. keys hidden in Lokalise.
const labelsHidden = diffArray(allLokalise, filteredLokalise)
// Keys Lokalise still has that the codebase no longer references.
const labelsToRemove = diffArray(filteredLokalise, fromCodebase)
// Keys the codebase references that Lokalise lacks — excluding hidden keys,
// which do exist in Lokalise and must not be re-added.
const labelsToAdd = diffArray(fromCodebase, filteredLokalise).filter(
  (key) => !labelsHidden.includes(key)
)

// Summary counts (repeated again after the detail tables below).
if (labelsToRemove.length === 0 && labelsToAdd.length === 0) {
  console.log(`Nothing has changed!`)
} else {
  console.log(`Labels to REMOVE from Lokalise: ${labelsToRemove.length}`)
  console.log(`Labels to ADD to Lokalise: ${labelsToAdd.length}`)
  console.log(`Labels HIDDEN in Lokalise: ${labelsHidden.length}`)
  console.log("")
}

if (labelsToRemove.length) {
  console.log(`${labelsToRemove.length} labels to remove from Lokalise:`)
  console.table(resolveLabels(labelsToRemove, filteredLokalise))
  console.log("")
}

if (labelsToAdd.length) {
  console.log("")
  console.log(`${labelsToAdd.length} labels to add to Lokalise`)
  console.table(resolveLabels(labelsToAdd, fromCodebase))
  console.log("")
}

if (labelsHidden.length) {
  console.log("")
  console.log(`${labelsHidden.length} labels are hidden in Lokalise`)
  console.table(resolveLabels(labelsHidden, allLokalise))
  console.log("")
}

// NOTE(review): this block is a byte-identical repeat of the summary above —
// presumably intentional so counts remain visible after long tables; confirm,
// or drop one copy.
if (labelsToRemove.length === 0 && labelsToAdd.length === 0) {
  console.log(`Nothing has changed!`)
} else {
  console.log(`Labels to REMOVE from Lokalise: ${labelsToRemove.length}`)
  console.log(`Labels to ADD to Lokalise: ${labelsToAdd.length}`)
  console.log(`Labels HIDDEN in Lokalise: ${labelsHidden.length}`)
  console.log("")
}
|
||||
35
scripts/i18n/distribute.ts
Normal file
35
scripts/i18n/distribute.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import fs from 'fs-extra'
|
||||
import path from 'path'
|
||||
|
||||
// Get the list of apps from command-line arguments
|
||||
const apps = process.argv.slice(2)
|
||||
|
||||
if (apps.length === 0) {
|
||||
console.error('Please provide at least one app name.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Update the source directory to the correct path
|
||||
const sourceDir = path.resolve(process.cwd(), 'scripts/i18n/dictionaries')
|
||||
|
||||
if (!fs.existsSync(sourceDir)) {
|
||||
console.error(`Source directory does not exist: ${sourceDir}`)
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Iterate over each app and copy the dictionaries folder
|
||||
apps.forEach((app) => {
|
||||
const targetDir = path.resolve(process.cwd(), `apps/${app}/i18n/dictionaries`)
|
||||
|
||||
try {
|
||||
// Ensure the target directory exists
|
||||
fs.ensureDirSync(targetDir)
|
||||
|
||||
// Copy the dictionaries folder
|
||||
fs.copySync(sourceDir, targetDir)
|
||||
|
||||
console.log(`Copied dictionaries to ${targetDir}`)
|
||||
} catch (error) {
|
||||
console.error(`Failed to copy dictionaries to ${targetDir}:`, error)
|
||||
}
|
||||
})
|
||||
16
scripts/i18n/download.ts
Normal file
16
scripts/i18n/download.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import path from 'node:path'
|
||||
|
||||
import { config } from 'dotenv'
|
||||
|
||||
config({ path: `${process.cwd()}/.env.local` })
|
||||
|
||||
const filteredExtractPath = path.resolve(__dirname, 'translations')
|
||||
const allExtractPath = path.resolve(__dirname, 'translations-all')
|
||||
|
||||
async function main() {
|
||||
const { download } = await import('./lokalise')
|
||||
await download(filteredExtractPath, false)
|
||||
await download(allExtractPath, true)
|
||||
}
|
||||
|
||||
main()
|
||||
10
scripts/i18n/formatter.mjs
Normal file
10
scripts/i18n/formatter.mjs
Normal file
@@ -0,0 +1,10 @@
|
||||
// Run the formatter.ts through Jiti

import { fileURLToPath } from "node:url"

import createJiti from "jiti"

// Jiti transpiles and loads the TypeScript module at runtime, so this .mjs
// wrapper can be consumed by tooling that only understands plain JS.
const formatter = createJiti(fileURLToPath(import.meta.url))("./formatter.ts")

// Re-export the two entry points implemented in formatter.ts.
export const format = formatter.format
export const compile = formatter.compile
|
||||
97
scripts/i18n/formatter.ts
Normal file
97
scripts/i18n/formatter.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
// https://docs.lokalise.com/en/articles/3229161-structured-json
|
||||
|
||||
import { logger } from '@scandic-hotels/common/logger'
|
||||
import type { MessageDescriptor } from '@formatjs/intl'
|
||||
|
||||
// Message descriptor as produced by the codebase's i18n extraction, where
// `description` carries structured Lokalise metadata instead of a plain string.
interface LokaliseMessageDescriptor extends Omit<MessageDescriptor, 'description'> {
  description: {
    context?: string // Translator-facing context, copied into the entry by format().
    limit?: number // Character limit for the translation.
    tags?: string // Comma-separated tag list; split into an array by format().
  }
}

// One entry in Lokalise's structured JSON format.
type TranslationEntry = {
  translation: string
  notes?: string
  context?: string
  limit?: number
  tags?: string[]
}

// Flat id -> translation map, the output of compile().
type CompiledEntries = Record<string, string>

// Full structured document: message id -> entry.
type LokaliseStructuredJson = Record<string, TranslationEntry>
|
||||
|
||||
export function format(msgs: LokaliseMessageDescriptor[]): LokaliseStructuredJson {
|
||||
const results: LokaliseStructuredJson = {}
|
||||
for (const [id, msg] of Object.entries(msgs)) {
|
||||
const { defaultMessage, description } = msg
|
||||
|
||||
if (typeof defaultMessage === 'string') {
|
||||
const entry: TranslationEntry = {
|
||||
translation: defaultMessage,
|
||||
}
|
||||
|
||||
if (description) {
|
||||
if (typeof description === 'string') {
|
||||
logger.warn(
|
||||
`Unsupported type for description, expected 'object', got ${typeof description}. Skipping!`,
|
||||
msg
|
||||
)
|
||||
} else {
|
||||
const { context, limit, tags } = description
|
||||
|
||||
if (context) {
|
||||
if (typeof context === 'string') {
|
||||
entry.context = context
|
||||
} else {
|
||||
logger.warn(`Unsupported type for context, expected 'string', got ${typeof context}`, msg)
|
||||
}
|
||||
}
|
||||
|
||||
if (limit) {
|
||||
if (limit && typeof limit === 'number') {
|
||||
entry.limit = limit
|
||||
} else {
|
||||
logger.warn(`Unsupported type for limit, expected 'number', got ${typeof limit}`, msg)
|
||||
}
|
||||
}
|
||||
|
||||
if (tags) {
|
||||
if (tags && typeof tags === 'string') {
|
||||
const tagArray = tags.split(',').map((s) => s.trim())
|
||||
if (tagArray.length) {
|
||||
entry.tags = tagArray
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Unsupported type for tags, expected Array, got ${typeof tags}`, msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
results[id] = entry
|
||||
} else {
|
||||
logger.warn(
|
||||
`Skipping message, unsupported type for defaultMessage, expected string, got ${typeof defaultMessage}`,
|
||||
{
|
||||
id,
|
||||
msg,
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
export function compile(msgs: LokaliseStructuredJson): CompiledEntries {
|
||||
const results: CompiledEntries = {}
|
||||
|
||||
for (const [id, msg] of Object.entries(msgs)) {
|
||||
results[id] = msg.translation
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
268
scripts/i18n/lokalise.ts
Normal file
268
scripts/i18n/lokalise.ts
Normal file
@@ -0,0 +1,268 @@
|
||||
import fs from "node:fs/promises"
|
||||
import { performance, PerformanceObserver } from "node:perf_hooks"
|
||||
|
||||
import { LokaliseApi } from "@lokalise/node-api"
|
||||
import AdmZip from "adm-zip"
|
||||
|
||||
import { createLogger } from "@scandic-hotels/common/logger/createLogger"
|
||||
|
||||
// Lokalise project that holds the web translations.
const projectId = "4194150766ff28c418f010.39532200"
// API key is expected in the environment (loaded from .env.local by the caller scripts).
const lokaliseApi = new LokaliseApi({ apiKey: process.env.LOKALISE_API_KEY })

const lokaliseLogger = createLogger("lokalise")

// Deferred resolved by the PerformanceObserver below once the final "done"
// sentinel measure has been observed; upload()/download() await it before
// returning so no metric log lines are lost.
let resolvePerf: (value?: unknown) => void
const performanceMetrics = new Promise((resolve) => {
  resolvePerf = resolve
})
|
||||
|
||||
// Logs every performance measure as it is recorded. The special "done"
// measure acts as a sentinel: instead of being logged, it triggers mark
// cleanup, disconnects the observer and resolves the performanceMetrics
// promise that upload()/download() await.
const perf = new PerformanceObserver((items) => {
  const entries = items.getEntries()
  for (const entry of entries) {
    if (entry.name === "done") {
      // This is the last measure meant for clean up
      performance.clearMarks()
      perf.disconnect()
      if (typeof resolvePerf === "function") {
        resolvePerf()
      }
    } else {
      lokaliseLogger.info(
        `[metrics] ${entry.name} completed in ${entry.duration} ms`
      )
    }
  }
  // Drop processed measures so they are not re-delivered.
  performance.clearMeasures()
})
|
||||
|
||||
async function waitUntilUploadDone(processId: string) {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const interval = setInterval(async () => {
|
||||
try {
|
||||
performance.mark("waitUntilUploadDoneStart")
|
||||
|
||||
lokaliseLogger.debug("Checking upload status...")
|
||||
|
||||
performance.mark("getProcessStart")
|
||||
const process = await lokaliseApi.queuedProcesses().get(processId, {
|
||||
project_id: projectId,
|
||||
})
|
||||
performance.mark("getProcessEnd")
|
||||
performance.measure(
|
||||
"Get Queued Process",
|
||||
"getProcessStart",
|
||||
"getProcessEnd"
|
||||
)
|
||||
|
||||
lokaliseLogger.debug(`Status: ${process.status}`)
|
||||
|
||||
if (process.status === "finished") {
|
||||
clearInterval(interval)
|
||||
performance.mark("waitUntilUploadDoneEnd", { detail: "success" })
|
||||
performance.measure(
|
||||
"Wait on upload",
|
||||
"waitUntilUploadDoneStart",
|
||||
"waitUntilUploadDoneEnd"
|
||||
)
|
||||
resolve()
|
||||
} else if (process.status === "failed") {
|
||||
throw process
|
||||
}
|
||||
} catch (e) {
|
||||
clearInterval(interval)
|
||||
lokaliseLogger.error("An error occurred:", e)
|
||||
performance.mark("waitUntilUploadDoneEnd", { detail: e })
|
||||
performance.measure(
|
||||
"Wait on upload",
|
||||
"waitUntilUploadDoneStart",
|
||||
"waitUntilUploadDoneEnd"
|
||||
)
|
||||
reject()
|
||||
}
|
||||
}, 1000)
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Uploads the given JSON file to Lokalise as the English source ("en.json")
 * and waits for the server-side import to finish.
 *
 * Each phase (file read, upload init, waiting on the queued process) is
 * wrapped in performance marks; the observer above logs the measures.
 * Errors are logged, not rethrown.
 *
 * @param filepath - Path of the JSON file to upload.
 */
export async function upload(filepath: string) {
  perf.observe({ type: "measure" })

  try {
    lokaliseLogger.debug(`Uploading ${filepath}...`)

    performance.mark("uploadStart")

    performance.mark("sourceFileReadStart")
    // The Lokalise files API expects the payload base64-encoded.
    const data = await fs.readFile(filepath, "utf8")
    const buff = Buffer.from(data, "utf8")
    const base64 = buff.toString("base64")
    performance.mark("sourceFileReadEnd")
    performance.measure(
      "Read source file",
      "sourceFileReadStart",
      "sourceFileReadEnd"
    )

    performance.mark("lokaliseUploadInitStart")
    // Lokalise processes uploads asynchronously and returns a queued-process
    // handle which we poll below.
    const bgProcess = await lokaliseApi.files().upload(projectId, {
      data: base64,
      filename: "en.json",
      lang_iso: "en",
      detect_icu_plurals: true,
      format: "json",
      convert_placeholders: true,
      replace_modified: true,
    })
    performance.mark("lokaliseUploadInitEnd")
    performance.measure(
      "Upload init",
      "lokaliseUploadInitStart",
      "lokaliseUploadInitEnd"
    )

    performance.mark("lokaliseUploadStart")
    await waitUntilUploadDone(bgProcess.process_id)
    performance.mark("lokaliseUploadEnd")
    performance.measure(
      "Upload transfer",
      "lokaliseUploadStart",
      "lokaliseUploadEnd"
    )

    lokaliseLogger.debug("Upload successful")
  } catch (e) {
    lokaliseLogger.error("Upload failed", e)
  } finally {
    performance.mark("uploadEnd")

    performance.measure("Upload operation", "uploadStart", "uploadEnd")
  }

  // Sentinel measure: tells the observer to clean up and resolve the
  // metrics promise awaited below.
  performance.measure("done")

  await performanceMetrics
}
|
||||
|
||||
/**
 * Downloads a translation bundle from Lokalise as structured JSON and
 * unpacks it into `extractPath`.
 *
 * Errors are logged, not rethrown. Timing for each phase is recorded via
 * performance marks and logged by the observer above.
 *
 * @param extractPath - Directory the zipped bundle is extracted into.
 * @param all - When true, export every key; when false (default), only
 *   translated, non-hidden keys (see `filter_data` below).
 */
export async function download(extractPath: string, all: boolean = false) {
  perf.observe({ type: "measure" })

  try {
    lokaliseLogger.debug(
      all
        ? "Downloading all translations..."
        : "Downloading filtered translations..."
    )

    performance.mark("downloadStart")

    performance.mark("lokaliseDownloadInitStart")
    // Ask Lokalise to build a zip bundle and return its URL.
    const downloadResponse = await lokaliseApi.files().download(projectId, {
      format: "json_structured",
      indentation: "2sp",
      placeholder_format: "icu",
      plural_format: "icu",
      icu_numeric: true,
      bundle_structure: "%LANG_ISO%.%FORMAT%",
      directory_prefix: "",
      filter_data: all ? [] : ["translated", "nonhidden"],
      export_empty_as: "skip",
    })
    performance.mark("lokaliseDownloadInitEnd")
    performance.measure(
      "Download init",
      "lokaliseDownloadInitStart",
      "lokaliseDownloadInitEnd"
    )

    const { bundle_url } = downloadResponse

    performance.mark("lokaliseDownloadStart")
    const bundleResponse = await fetch(bundle_url)
    performance.mark("lokaliseDownloadEnd")
    performance.measure(
      "Download transfer",
      "lokaliseDownloadStart",
      "lokaliseDownloadEnd"
    )

    if (bundleResponse.ok) {
      performance.mark("unpackTranslationsStart")
      // Unzip the bundle (one %LANG_ISO%.json per language) into extractPath,
      // overwriting any existing files.
      const arrayBuffer = await bundleResponse.arrayBuffer()
      const buffer = Buffer.from(new Uint8Array(arrayBuffer))
      const zip = new AdmZip(buffer)
      zip.extractAllTo(extractPath, true)
      performance.mark("unpackTranslationsEnd")
      performance.measure(
        "Unpacking translations",
        "unpackTranslationsStart",
        "unpackTranslationsEnd"
      )

      lokaliseLogger.debug("Download successful")
    } else {
      // Surfaced by the catch below; carries status/headers for debugging.
      throw bundleResponse
    }
  } catch (e) {
    lokaliseLogger.error("Download failed", e)
  } finally {
    performance.mark("downloadEnd")

    performance.measure("Download operation", "downloadStart", "downloadEnd")
  }

  // Sentinel measure: tells the observer to clean up and resolve the
  // metrics promise awaited below.
  performance.measure("done")

  await performanceMetrics
}
|
||||
|
||||
export async function deleteBulk(keyNames: string[]) {
|
||||
perf.observe({ type: "measure" })
|
||||
|
||||
try {
|
||||
performance.mark("bulkDeleteStart")
|
||||
|
||||
let keysToDelete: number[] = []
|
||||
let cursor: string | undefined = undefined
|
||||
let hasNext = true
|
||||
do {
|
||||
const keys = await lokaliseApi.keys().list({
|
||||
project_id: projectId,
|
||||
limit: 100,
|
||||
pagination: "cursor",
|
||||
cursor,
|
||||
})
|
||||
|
||||
cursor = keys.nextCursor ?? undefined
|
||||
keys.items.forEach((key) => {
|
||||
if (key.key_id && key.key_name.web) {
|
||||
if (keyNames.includes(key.key_name.web)) {
|
||||
keysToDelete.push(key.key_id)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (!keys.hasNextCursor()) {
|
||||
hasNext = false
|
||||
}
|
||||
} while (hasNext)
|
||||
|
||||
const response = await lokaliseApi
|
||||
.keys()
|
||||
.bulk_delete(keysToDelete, { project_id: projectId })
|
||||
|
||||
lokaliseLogger.debug(
|
||||
`Bulk delete successful, removed ${keysToDelete.length} keys`
|
||||
)
|
||||
|
||||
return response
|
||||
} catch (e) {
|
||||
lokaliseLogger.error("Bulk delete failed", e)
|
||||
} finally {
|
||||
performance.mark("bulkDeleteEnd")
|
||||
|
||||
performance.measure(
|
||||
"Bulk delete operation",
|
||||
"bulkDeleteStart",
|
||||
"bulkDeleteEnd"
|
||||
)
|
||||
}
|
||||
}
|
||||
14
scripts/i18n/upload.ts
Normal file
14
scripts/i18n/upload.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import path from 'node:path'
|
||||
|
||||
import { config } from 'dotenv'
|
||||
|
||||
config({ path: `${process.cwd()}/.env.local` })
|
||||
|
||||
const filepath = path.resolve(__dirname, './extracted.json')
|
||||
|
||||
async function main() {
|
||||
const { upload } = await import('./lokalise')
|
||||
await upload(filepath)
|
||||
}
|
||||
|
||||
main()
|
||||
Reference in New Issue
Block a user