Merged in feature/redis (pull request #1478)

Distributed cache

* cache deleteKey now uses an options object instead of a lone fuzzy argument

* merge

* remove debug logs and cleanup

* cleanup

* add fault handling

* add fault handling

* add pid when logging redis client creation

* add identifier when logging redis client creation

* cleanup

* feat: add redis-api as its own app

* feature: use http wrapper for redis

* feat: add the possibility to fall back to unstable_cache (see the sketch after this list)

* Add error handling if redis cache is unresponsive

* add logging for unstable_cache

* merge

* don't cache errors

* fix: metadataBase on branch deploys

* Handle when /en/destinations throws; add ErrorBoundary

* Add sentry-logging when ErrorBoundary catches exception

* Fix error handling for distributed cache

* cleanup code

* Added Application Insights back

* Update generateApiKeys script and remove duplicate

* Merge branch 'feature/redis' of bitbucket.org:scandic-swap/web into feature/redis

* merge
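
The fall-back to unstable_cache mentioned in the list above is not part of the files excerpted below, so the following is only a sketch of what such a fallback could look like. It assumes getCacheTimeInSeconds and CacheTime are exported from the shared Cache module (as they are in the set implementation further down) and that Next.js's unstable_cache is acceptable as the degraded path:

import { unstable_cache } from "next/cache"
import { getCacheTimeInSeconds, type CacheTime } from "@/services/dataCache/Cache"

// Illustrative fallback only: when the redis-api is unavailable, wrap the callback in
// Next.js unstable_cache so the same call site still gets memoized data.
export async function cacheOrGetWithFallback<T>(
  key: string | string[],
  callback: () => Promise<T>,
  ttl: CacheTime
) {
  const keyParts = Array.isArray(key) ? key : [key]
  const cached = unstable_cache(callback, keyParts, {
    revalidate: getCacheTimeInSeconds(ttl),
  })
  return cached()
}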


Approved-by: Linus Flood
Authored by Joakim Jäderberg on 2025-03-14 07:54:21 +00:00, committed by Linus Flood
parent a8304e543e
commit fa63b20ed0
141 changed files with 4404 additions and 1941 deletions

View File

@@ -0,0 +1,29 @@
import { type CacheTime, type DataCache } from "@/services/dataCache/Cache"
import { cacheLogger } from "../logger"
import { generateCacheKey } from "./generateCacheKey"
import { get } from "./get"
import { set } from "./set"

export const cacheOrGet: DataCache["cacheOrGet"] = async <T>(
key: string | string[],
callback: () => Promise<T>,
ttl: CacheTime
) => {
const cacheKey = generateCacheKey(key)
const cachedValue = await get<T>(cacheKey)
if (!cachedValue) {
const perf = performance.now()
const data = await callback()
cacheLogger.debug(
`Getting data '${cacheKey}' took ${(performance.now() - perf).toFixed(2)}ms`
)
await set<T>(cacheKey, data, ttl)
return data
}
return cachedValue
}
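
For orientation, a hypothetical call site for cacheOrGet; fetchDestinations and the surrounding helper are placeholders, not part of the diff:

import type { CacheTime } from "@/services/dataCache/Cache"
import { cacheOrGet } from "./cacheOrGet"

declare function fetchDestinations(locale: string): Promise<unknown>

// The callback only runs on a cache miss; its result is written back with the given TTL.
async function getDestinations(locale: string, ttl: CacheTime) {
  return cacheOrGet(["destinations", locale], () => fetchDestinations(locale), ttl)
}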

View File

@@ -0,0 +1,19 @@
import { env } from "@/env/server"
import { cacheOrGet } from "./cacheOrGet"
import { deleteKey } from "./deleteKey"
import { get } from "./get"
import { set } from "./set"
import type { DataCache } from "@/services/dataCache/Cache"

export const API_KEY = env.REDIS_API_KEY ?? ""

export async function createDistributedCache(): Promise<DataCache> {
return {
type: "redis",
get,
set,
cacheOrGet,
deleteKey,
} satisfies DataCache
}
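
A usage sketch for the factory itself; the key name and payload type are illustrative:

import type { CacheTime } from "@/services/dataCache/Cache"
import { createDistributedCache } from "./client"

async function example(ttl: CacheTime) {
  const cache = await createDistributedCache()
  // Every operation below goes over HTTP to the redis-api app rather than talking to Redis directly.
  await cache.set("site-settings", { locale: "en" }, ttl)
  const settings = await cache.get<{ locale: string }>("site-settings")
  return settings
}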

View File

@@ -0,0 +1,38 @@
import * as Sentry from "@sentry/nextjs"
import { cacheLogger } from "../logger"
import { API_KEY } from "./client"
import { getCacheEndpoint } from "./endpoints"

export async function deleteKey<T>(key: string) {
const perf = performance.now()
const response = await fetch(getCacheEndpoint(key), {
method: "DELETE",
cache: "no-cache",
headers: {
"x-api-key": API_KEY,
},
})
if (!response.ok) {
if (response.status !== 404) {
Sentry.captureMessage("Unable to DELETE cachekey", {
level: "error",
extra: {
cacheKey: key,
statusCode: response?.status,
statusText: response?.statusText,
},
})
}
return undefined
}
const data = (await response.json()) as { data: T }
cacheLogger.debug(
`Delete '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
)
return data.data
}
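
A hypothetical invalidation helper built on deleteKey; the key name is illustrative:

import { deleteKey } from "./deleteKey"

// A 404 (key already gone) is treated as a normal outcome; other HTTP failures are reported to
// Sentry and the call resolves to undefined either way. Note the fetch itself is not wrapped,
// so a network-level rejection could still propagate to the caller.
async function invalidateDestinations(locale: string) {
  return deleteKey<unknown>(`destinations_${locale}`)
}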

View File

@@ -0,0 +1,24 @@
import { env } from "@/env/server"
import { generateCacheKey } from "./generateCacheKey"

export function getCacheEndpoint(key: string) {
if (!env.REDIS_API_HOST) {
throw new Error("REDIS_API_HOST is not set")
}
const url = new URL(`/api/cache`, env.REDIS_API_HOST)
url.searchParams.set("key", encodeURIComponent(generateCacheKey(key)))
return url
}

export function getClearCacheEndpoint() {
if (!env.REDIS_API_HOST) {
throw new Error("REDIS_API_HOST is not set")
}
const url = new URL(`/api/cache/clear`, env.REDIS_API_HOST)
return url
}
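
To make the endpoint shape concrete, assume REDIS_API_HOST is https://redis-api.example.internal, BRANCH is main and GIT_SHA starts with abc1234 (all illustrative values):

import { getCacheEndpoint, getClearCacheEndpoint } from "./endpoints"

// getCacheEndpoint("destinations_en").toString()
//   → "https://redis-api.example.internal/api/cache?key=main%253Aabc1234%253Adestinations_en"
//     encodeURIComponent runs before URLSearchParams applies its own percent-encoding, so the
//     ":" separators arrive double-encoded (%253A); the redis-api side has to decode accordingly.
// getClearCacheEndpoint().toString()
//   → "https://redis-api.example.internal/api/cache/clear"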

View File

@@ -0,0 +1,20 @@
import { env } from "@/env/server"

export function generateCacheKey(key: string | string[]): string {
const prefix = getPrefix()
key = Array.isArray(key) ? key.join("_") : key
return `${prefix ? `${prefix}:` : ""}${key}`
}

function getPrefix(): string {
if (process.env.NODE_ENV === "development") {
const devPrefix = process.env.USER || process.env.USERNAME || "dev"
return `${devPrefix}`
}
const branch = env.BRANCH.trim()
const gitSha = env.GIT_SHA?.trim().substring(0, 7)
return `${branch}:${gitSha}`
}
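
Worked examples of the resulting keys, using this commit's own branch and SHA for the deploy case and an assumed USER of alice for development:

import { generateCacheKey } from "./generateCacheKey"

// Development (USER=alice):
//   generateCacheKey(["destinations", "en"]) → "alice:destinations_en"
// Branch deploy (BRANCH=feature/redis, GIT_SHA=fa63b20ed0…):
//   generateCacheKey(["destinations", "en"]) → "feature/redis:fa63b20:destinations_en"
// Array parts are joined with "_" before the environment prefix is prepended.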

View File

@@ -0,0 +1,62 @@
import * as Sentry from "@sentry/nextjs"
import { safeTry } from "@/utils/safeTry"
import { cacheLogger } from "../logger"
import { API_KEY } from "./client"
import { deleteKey } from "./deleteKey"
import { getCacheEndpoint } from "./endpoints"

export async function get<T>(key: string) {
const perf = performance.now()
const [response, error] = await safeTry(
fetch(getCacheEndpoint(key), {
method: "GET",
cache: "no-cache",
headers: {
"x-api-key": API_KEY,
},
})
)
if (!response || error || !response.ok) {
if (response?.status === 404) {
cacheLogger.debug(
`Miss '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
)
return undefined
}
Sentry.captureMessage("Unable to GET cachekey", {
level: "error",
extra: {
cacheKey: key,
errorMessage: error instanceof Error ? error.message : undefined,
statusCode: response?.status,
statusText: response?.statusText,
},
})
return undefined
}
const [data, jsonError] = await safeTry(
response.json() as Promise<{ data: T }>
)
if (jsonError) {
cacheLogger.error("Failed to parse cache response", {
key,
error: jsonError,
})
await deleteKey(key)
return undefined
}
cacheLogger.debug(
`Hit '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
)
return data?.data
}
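
A direct read via get, to spell out what undefined can mean to a caller; the payload type and helper name are placeholders:

import { get } from "./get"

async function readDestinations(locale: string) {
  // undefined covers three cases: a 404 miss, a fetch/HTTP failure (captured in Sentry),
  // and an unparsable payload (which is deleted so the next write can repopulate it).
  const cached = await get<{ name: string }[]>(`destinations_${locale}`)
  return cached ?? []
}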

View File

@@ -0,0 +1 @@
export { createDistributedCache } from "./client"

View File

@@ -0,0 +1,33 @@
import * as Sentry from "@sentry/nextjs"
import { safeTry } from "@/utils/safeTry"
import { type CacheTime, getCacheTimeInSeconds } from "../Cache"
import { API_KEY } from "./client"
import { getCacheEndpoint } from "./endpoints"

export async function set<T>(key: string, value: T, ttl: CacheTime) {
const [response, error] = await safeTry(
fetch(getCacheEndpoint(key), {
method: "PUT",
headers: {
"Content-Type": "application/json",
"x-api-key": API_KEY,
},
body: JSON.stringify({ data: value, ttl: getCacheTimeInSeconds(ttl) }),
cache: "no-cache",
})
)
if (!response || error || !response.ok) {
Sentry.captureMessage("Unable to SET cachekey", {
level: "error",
extra: {
cacheKey: key,
errorMessage: error instanceof Error ? error.message : undefined,
statusCode: response?.status,
statusText: response?.statusText,
},
})
}
}
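
And the matching write path: set reports failures to Sentry and then returns, so the sketch below never throws on HTTP errors (names are illustrative):

import type { CacheTime } from "@/services/dataCache/Cache"
import { set } from "./set"

async function writeDestinations(locale: string, destinations: unknown, ttl: CacheTime) {
  // Failures are captured in Sentry inside set; the call simply resolves and the cache stays cold.
  await set(`destinations_${locale}`, destinations, ttl)
}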

View File

@@ -0,0 +1,5 @@
import { env } from "@/env/server"

export function shouldHaveFullTtl() {
return env.BRANCH === "release"
}
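
How shouldHaveFullTtl is consumed is not shown in this excerpt; one plausible use, sketched here purely as an assumption, is capping TTLs on branch deploys:

import { shouldHaveFullTtl } from "./shouldHaveFullTtl" // filename assumed from the export name

// Illustrative only: non-release deploys could cap the TTL so stale data does not linger between pushes.
function effectiveTtlSeconds(requestedSeconds: number) {
  return shouldHaveFullTtl() ? requestedSeconds : Math.min(requestedSeconds, 60)
}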