Merged in feature/redis (pull request #1478)

Distributed cache

* cache deleteKey now uses an options object instead of a lone `fuzzy` argument

* merge

* remove debug logs and cleanup

* cleanup

* add fault handling

* add fault handling

* add pid when logging redis client creation

* add identifier when logging redis client creation

* cleanup

* feat: add redis-api as its own app

* feature: use http wrapper for redis

* feat: add the possibility to fallback to unstable_cache

* Add error handling if redis cache is unresponsive

* add logging for unstable_cache

* merge

* don't cache errors

* fix: metadatabase on branchdeploys

* Handle when /en/destinations throws
add ErrorBoundary

* Add sentry-logging when ErrorBoundary catches exception

* Fix error handling for distributed cache

* cleanup code

* Added Application Insights back

* Update generateApiKeys script and remove duplicate

* Merge branch 'feature/redis' of bitbucket.org:scandic-swap/web into feature/redis

* merge


Approved-by: Linus Flood
This commit is contained in:
Joakim Jäderberg
2025-03-14 07:54:21 +00:00
committed by Linus Flood
parent a8304e543e
commit fa63b20ed0
141 changed files with 4404 additions and 1941 deletions

View File

@@ -0,0 +1,96 @@
// Base units used to derive the named cache durations below.
const SECONDS_PER_HOUR = 3_600
const SECONDS_PER_DAY = 86_400

/** The set of human-friendly cache duration names. */
export const namedCacheTimes = [
  "no cache",
  "1m",
  "5m",
  "10m",
  "1h",
  "3h",
  "6h",
  "1d",
  "3d",
  "max",
] as const

export type NamedCacheTimes = (typeof namedCacheTimes)[number]

/** Maps each named cache time to its duration in seconds. */
export const namedCacheTimeMap: Record<NamedCacheTimes, number> = {
  "no cache": 0,
  "1m": 60,
  "5m": 5 * 60,
  "10m": 10 * 60,
  "1h": SECONDS_PER_HOUR,
  "3h": 3 * SECONDS_PER_HOUR,
  "6h": 6 * SECONDS_PER_HOUR,
  "1d": SECONDS_PER_DAY,
  "3d": 3 * SECONDS_PER_DAY,
  max: 30 * SECONDS_PER_DAY,
} as const

/**
 * Retrieves the cache time in seconds based on the given cache time.
 * Negative or non-integer numbers resolve to 0 (no caching), as do
 * unknown named values.
 * @param cacheTime - The time value to determine, either a named cache time or a number of seconds.
 * @returns The cache time in seconds.
 */
export const getCacheTimeInSeconds = (cacheTime: CacheTime): number => {
  if (typeof cacheTime !== "number") {
    return namedCacheTimeMap[cacheTime] ?? 0
  }
  return Number.isInteger(cacheTime) && cacheTime >= 0 ? cacheTime : 0
}
/** A duration for cached entries: a named preset or a number of seconds. */
export type CacheTime = NamedCacheTimes | number
/** Common contract implemented by every cache backend (redis, in-memory, unstable_cache). */
export type DataCache = {
  /**
   * Which backend this cache client uses.
   */
  type: "edge" | "redis" | "in-memory" | "unstable-cache"
  /**
   * Helper function that retrieves from the cache if it exists, otherwise calls the callback and caches the result.
   * If the call fails, the cache is not updated.
   * @param key The cache key
   * @param getDataFromSource An async function that provides a value to cache
   * @param ttl Time to live, either a named cache time or a number of seconds
   * @returns The cached value or the result from the callback
   */
  cacheOrGet: <T>(
    key: string | string[],
    getDataFromSource: () => Promise<T>,
    ttl: CacheTime
  ) => Promise<T>
  /**
   * Get a value from the cache, if it exists
   * @see `cacheOrGet` for a more convenient way to cache values
   * @param key The cache key to retrieve the value for
   * @returns The cached value or undefined if not found
   */
  get: <T>(key: string) => Promise<T | undefined>
  /**
   * Sets a value in the cache.
   * @see `cacheOrGet` for a more convenient way to cache values
   * @param key CacheKey to set
   * @param obj Value to be cached
   * @param ttl Time to live, either a named cache time or a number of seconds
   * @returns A promise that resolves when the value has been cached
   */
  set: <T>(key: string, obj: T, ttl: CacheTime) => Promise<void>
  /**
   * Deletes a key from the cache
   * @param key CacheKey to delete
   * @param opts Options object; when `opts.fuzzy` is true, performs a
   *   wildcard delete of every key matching *key*
   * @returns A promise that resolves when the key has been deleted
   */
  deleteKey: (key: string, opts?: { fuzzy?: boolean }) => Promise<void>
}

View File

@@ -0,0 +1,29 @@
import { type CacheTime, type DataCache } from "@/services/dataCache/Cache"
import { cacheLogger } from "../logger"
import { generateCacheKey } from "./generateCacheKey"
import { get } from "./get"
import { set } from "./set"
/**
 * Read-through helper for the distributed cache: returns the cached value
 * when present, otherwise calls `callback`, stores the result and returns it.
 * If the callback throws, nothing is cached and the error propagates.
 * @param key Cache key (or key parts, joined by generateCacheKey)
 * @param callback Async source of the value on a miss
 * @param ttl Time to live, a named cache time or a number of seconds
 */
export const cacheOrGet: DataCache["cacheOrGet"] = async <T>(
  key: string | string[],
  callback: () => Promise<T>,
  ttl: CacheTime
) => {
  const cacheKey = generateCacheKey(key)
  const cachedValue = await get<T>(cacheKey)
  // Only `undefined` signals a miss; falsy values (0, "", false) are valid
  // hits. The previous `!cachedValue` check re-fetched them on every call.
  if (cachedValue === undefined) {
    const perf = performance.now()
    const data = await callback()
    cacheLogger.debug(
      `Getting data '${cacheKey}' took ${(performance.now() - perf).toFixed(2)}ms`
    )
    await set<T>(cacheKey, data, ttl)
    return data
  }
  return cachedValue
}

View File

@@ -0,0 +1,19 @@
import { env } from "@/env/server"
import { cacheOrGet } from "./cacheOrGet"
import { deleteKey } from "./deleteKey"
import { get } from "./get"
import { set } from "./set"
import type { DataCache } from "@/services/dataCache/Cache"
/** API key sent with every request to the redis-api HTTP wrapper. */
export const API_KEY = env.REDIS_API_KEY ?? ""

/** Assembles the redis-backed DataCache implementation. */
export async function createDistributedCache(): Promise<DataCache> {
  const cache: DataCache = {
    type: "redis",
    get,
    set,
    cacheOrGet,
    deleteKey,
  }
  return cache
}

View File

@@ -0,0 +1,38 @@
import * as Sentry from "@sentry/nextjs"
import { cacheLogger } from "../logger"
import { API_KEY } from "./client"
import { getCacheEndpoint } from "./endpoints"
/**
 * Deletes a key from the distributed cache via the redis-api HTTP wrapper.
 * All failures (network errors and non-404 responses) are reported to Sentry
 * and swallowed so a flaky cache never takes the caller down.
 * @param key The cache key to delete
 * @returns The deleted value as reported by the API (if any), or undefined
 */
export async function deleteKey<T>(key: string) {
  // NOTE(review): DataCache["deleteKey"] accepts `opts?: { fuzzy?: boolean }`,
  // but this backend has no wildcard support — confirm whether the redis-api
  // exposes one before callers rely on fuzzy deletes against redis.
  const perf = performance.now()
  let response: Response
  try {
    response = await fetch(getCacheEndpoint(key), {
      method: "DELETE",
      cache: "no-cache",
      headers: {
        "x-api-key": API_KEY,
      },
    })
  } catch (error) {
    // Match the fault handling in get/set: a network failure must not throw.
    Sentry.captureMessage("Unable to DELETE cachekey", {
      level: "error",
      extra: {
        cacheKey: key,
        errorMessage: error instanceof Error ? error.message : undefined,
      },
    })
    return undefined
  }
  if (!response.ok) {
    // 404 just means the key was already gone — not worth reporting.
    if (response.status !== 404) {
      Sentry.captureMessage("Unable to DELETE cachekey", {
        level: "error",
        extra: {
          cacheKey: key,
          statusCode: response.status,
          statusText: response.statusText,
        },
      })
    }
    return undefined
  }
  let data: { data: T } | undefined
  try {
    data = (await response.json()) as { data: T }
  } catch {
    // A DELETE may legitimately return an empty body — still a success.
    data = undefined
  }
  cacheLogger.debug(
    `Delete '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
  )
  return data?.data
}

View File

@@ -0,0 +1,24 @@
import { env } from "@/env/server"
import { generateCacheKey } from "./generateCacheKey"
/** Returns the configured redis-api base URL or throws when it is missing. */
function requireRedisApiHost(): string {
  if (!env.REDIS_API_HOST) {
    throw new Error("REDIS_API_HOST is not set")
  }
  return env.REDIS_API_HOST
}

/**
 * Builds the URL for single-key cache operations (GET/PUT/DELETE).
 * @param key Raw cache key; it is namespaced via generateCacheKey.
 */
export function getCacheEndpoint(key: string) {
  const url = new URL(`/api/cache`, requireRedisApiHost())
  // NOTE(review): searchParams.set already percent-encodes its value, so the
  // key ends up double-encoded. Every operation goes through this function,
  // so it is internally consistent — but confirm the redis-api decodes twice
  // before simplifying this.
  url.searchParams.set("key", encodeURIComponent(generateCacheKey(key)))
  return url
}

/** Builds the URL used to flush the entire distributed cache. */
export function getClearCacheEndpoint() {
  return new URL(`/api/cache/clear`, requireRedisApiHost())
}

View File

@@ -0,0 +1,20 @@
import { env } from "@/env/server"
/**
 * Builds the namespaced cache key used against the shared cache.
 * Array keys are joined with underscores; the environment prefix is
 * prepended when one exists.
 */
export function generateCacheKey(key: string | string[]): string {
  const joinedKey = Array.isArray(key) ? key.join("_") : key
  const prefix = getPrefix()
  if (!prefix) {
    return joinedKey
  }
  return `${prefix}:${joinedKey}`
}

/**
 * Resolves the key namespace: the local username in development, otherwise
 * the branch name plus the short git sha, so deploys do not share entries.
 */
function getPrefix(): string {
  if (process.env.NODE_ENV === "development") {
    return process.env.USER || process.env.USERNAME || "dev"
  }
  const branch = env.BRANCH.trim()
  const shortSha = env.GIT_SHA?.trim().substring(0, 7)
  return `${branch}:${shortSha}`
}

View File

@@ -0,0 +1,62 @@
import * as Sentry from "@sentry/nextjs"
import { safeTry } from "@/utils/safeTry"
import { cacheLogger } from "../logger"
import { API_KEY } from "./client"
import { deleteKey } from "./deleteKey"
import { getCacheEndpoint } from "./endpoints"
/**
 * Reads a value from the distributed cache via the redis-api HTTP wrapper.
 * Every failure mode resolves to `undefined` (treated as a miss) so an
 * unresponsive cache never breaks the caller; unexpected failures are
 * reported to Sentry, and unparseable payloads are evicted.
 * @param key The (already namespaced) cache key to look up
 * @returns The cached value, or undefined on miss or any failure
 */
export async function get<T>(key: string) {
  const perf = performance.now()
  // safeTry resolves to a [value, error] tuple instead of throwing.
  const [response, error] = await safeTry(
    fetch(getCacheEndpoint(key), {
      method: "GET",
      cache: "no-cache",
      headers: {
        "x-api-key": API_KEY,
      },
    })
  )
  if (!response || error || !response.ok) {
    // 404 is the expected "key not found" answer — log the miss, no Sentry.
    if (response?.status === 404) {
      cacheLogger.debug(
        `Miss '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
      )
      return undefined
    }
    Sentry.captureMessage("Unable to GET cachekey", {
      level: "error",
      extra: {
        cacheKey: key,
        errorMessage: error instanceof Error ? error.message : undefined,
        statusCode: response?.status,
        statusText: response?.statusText,
      },
    })
    return undefined
  }
  const [data, jsonError] = await safeTry(
    response.json() as Promise<{ data: T }>
  )
  if (jsonError) {
    cacheLogger.error("Failed to parse cache response", {
      key,
      error: jsonError,
    })
    // The stored payload is corrupt — evict it so the next read repopulates.
    await deleteKey(key)
    return undefined
  }
  cacheLogger.debug(
    `Hit '${key}' took ${(performance.now() - perf).toFixed(2)}ms`
  )
  return data?.data
}

View File

@@ -0,0 +1 @@
export { createDistributedCache } from "./client"

View File

@@ -0,0 +1,33 @@
import * as Sentry from "@sentry/nextjs"
import { safeTry } from "@/utils/safeTry"
import { type CacheTime, getCacheTimeInSeconds } from "../Cache"
import { API_KEY } from "./client"
import { getCacheEndpoint } from "./endpoints"
/**
 * Writes a value to the distributed cache via the redis-api HTTP wrapper.
 * Failures are reported to Sentry and otherwise swallowed — callers never
 * fail just because the cache could not be updated.
 * @param key The (already namespaced) cache key to write
 * @param value Value to store; serialized with JSON.stringify
 * @param ttl Time to live, a named cache time or a number of seconds
 */
export async function set<T>(key: string, value: T, ttl: CacheTime) {
  // safeTry resolves to a [value, error] tuple instead of throwing.
  const [response, error] = await safeTry(
    fetch(getCacheEndpoint(key), {
      method: "PUT",
      headers: {
        "Content-Type": "application/json",
        "x-api-key": API_KEY,
      },
      body: JSON.stringify({ data: value, ttl: getCacheTimeInSeconds(ttl) }),
      cache: "no-cache",
    })
  )
  if (!response || error || !response.ok) {
    Sentry.captureMessage("Unable to SET cachekey", {
      level: "error",
      extra: {
        cacheKey: key,
        errorMessage: error instanceof Error ? error.message : undefined,
        statusCode: response?.status,
        statusText: response?.statusText,
      },
    })
  }
}

View File

@@ -0,0 +1,5 @@
import { env } from "@/env/server"
/** Whether this deployment should use full cache TTLs (only the release branch qualifies). */
export function shouldHaveFullTtl() {
  const isReleaseBranch = env.BRANCH === "release"
  return isReleaseBranch
}

View File

@@ -0,0 +1,9 @@
/**
 * Process-local backing store for the in-memory cache.
 * Entries are evicted lazily: expiry is checked on read, not by a timer.
 */
export const cacheMap = new Map<
  string,
  {
    /** Absolute expiration timestamp (`Date.now()` milliseconds) */
    expiresAt: number
    /** The cached data */
    data: unknown
  }
>()

View File

@@ -0,0 +1,36 @@
import { type CacheTime, type DataCache } from "@/services/dataCache/Cache"
import { cacheLogger } from "@/services/dataCache/logger"
import { get } from "./get"
import { set } from "./set"
/**
 * Read-through helper for the in-memory cache: returns a cached value when
 * present, otherwise fetches via `callback`, stores the result and returns
 * it. Errors from the callback are logged and rethrown without being cached.
 * @param key Cache key or key parts (joined with "-")
 * @param callback Async source of the value on a miss
 * @param ttl Time to live, a named cache time or a number of seconds
 */
export const cacheOrGet: DataCache["cacheOrGet"] = async <T>(
  key: string | string[],
  callback: () => Promise<T>,
  ttl: CacheTime
): Promise<T> => {
  if (Array.isArray(key)) {
    key = key.join("-")
  }
  const cached = await get(key)
  // Only `undefined` is a miss; falsy values (0, "", false) are valid hits.
  // The previous `if (cached)` check re-fetched them on every call.
  if (cached !== undefined) {
    return cached as T
  }
  cacheLogger.debug(`Miss for key '${key}'`)
  try {
    const data = await callback()
    await set(key, data, ttl)
    return data
  } catch (e) {
    cacheLogger.error(
      `Error while fetching data for key '${key}', avoid caching`,
      e
    )
    throw e
  }
}

View File

@@ -0,0 +1,9 @@
import { cacheLogger } from "@/services/dataCache/logger"
import { cacheMap } from "./cacheMap"
/** Removes every entry from the in-memory cache. */
export async function deleteAll() {
  cacheLogger.debug("Deleting all keys")
  cacheMap.clear()
}

View File

@@ -0,0 +1,17 @@
import { cacheLogger } from "@/services/dataCache/logger"
import { cacheMap } from "./cacheMap"
/**
 * Removes a key from the in-memory cache.
 * @param key The key to remove
 * @param opts When `fuzzy` is set, every key containing `key` as a substring is removed.
 */
export async function deleteKey(key: string, opts?: { fuzzy?: boolean }) {
  cacheLogger.debug("Deleting key", key)
  if (!opts?.fuzzy) {
    cacheMap.delete(key)
    return
  }
  // Wildcard delete: drop every entry whose key contains the given fragment.
  for (const existingKey of [...cacheMap.keys()]) {
    if (existingKey.includes(key)) {
      cacheMap.delete(existingKey)
    }
  }
}

View File

@@ -0,0 +1,23 @@
import { cacheLogger } from "@/services/dataCache/logger"
import { cacheMap } from "./cacheMap"
/**
 * Looks up a key in the in-memory cache.
 * Expired or undefined entries are evicted on read and reported as a miss.
 * @param key The cache key to look up
 * @returns The cached value, or undefined when absent, expired or empty
 */
export async function get<T>(key: string): Promise<T | undefined> {
  const entry = cacheMap.get(key)
  if (!entry) {
    return undefined
  }
  const isExpired = entry.expiresAt < Date.now()
  if (isExpired) {
    cacheLogger.debug(`Expired for key '${key}'`)
    cacheMap.delete(key)
    return undefined
  }
  if (entry.data === undefined) {
    cacheLogger.debug(`Data is undefined for key '${key}'`)
    cacheMap.delete(key)
    return undefined
  }
  return entry.data as T
}

View File

@@ -0,0 +1,10 @@
import { cacheOrGet } from "./cacheOrGet"
import { deleteKey } from "./deleteKey"
import { get } from "./get"
import { set } from "./set"
import type { DataCache } from "@/services/dataCache/Cache"
/** Assembles the process-local in-memory DataCache implementation. */
export async function createInMemoryCache(): Promise<DataCache> {
  const cache: DataCache = {
    type: "in-memory",
    cacheOrGet,
    deleteKey,
    get,
    set,
  }
  return cache
}

View File

@@ -0,0 +1,17 @@
import {
type CacheTime,
getCacheTimeInSeconds,
} from "@/services/dataCache/Cache"
import { cacheMap } from "./cacheMap"
/**
 * Stores a value in the in-memory cache.
 * A non-positive TTL (e.g. "no cache") stores nothing and evicts any
 * existing entry — previously such entries were written with an
 * already-elapsed expiry and could still be served within the same
 * millisecond, while stale values under the same key were left behind.
 * @param key Cache key
 * @param data Value to store
 * @param ttl Time to live, a named cache time or a number of seconds
 */
export async function set<T>(
  key: string,
  data: T,
  ttl: CacheTime
): Promise<void> {
  const ttlInSeconds = getCacheTimeInSeconds(ttl)
  if (ttlInSeconds <= 0) {
    cacheMap.delete(key)
    return
  }
  cacheMap.set(key, {
    data: data,
    expiresAt: Date.now() + ttlInSeconds * 1000,
  })
}

View File

@@ -0,0 +1,29 @@
import { unstable_cache } from "next/cache"
import {
type CacheTime,
type DataCache,
getCacheTimeInSeconds,
} from "@/services/dataCache/Cache"
import { cacheLogger } from "../../logger"
/**
 * Read-through helper backed by Next.js `unstable_cache`.
 * The key parts double as the tag list, so entries can later be invalidated
 * via `revalidateTag` (see deleteKey).
 * NOTE(review): a ttl of 0 ("no cache") is passed straight to `revalidate` —
 * confirm unstable_cache treats 0 as "do not cache" rather than "forever".
 */
export const cacheOrGet: DataCache["cacheOrGet"] = async <T>(
  key: string | string[],
  callback: () => Promise<T>,
  ttl: CacheTime
): Promise<T> => {
  // unstable_cache expects the key as an array of parts.
  if (!Array.isArray(key)) {
    key = [key]
  }
  const perf = performance.now()
  const res = await unstable_cache(callback, key, {
    revalidate: getCacheTimeInSeconds(ttl),
    tags: key,
  })()
  cacheLogger.debug(`'${key}' took ${(performance.now() - perf).toFixed(2)}ms`)
  return res
}

View File

@@ -0,0 +1,5 @@
import { revalidateTag } from "next/cache"
/**
 * Invalidates a key by revalidating its Next.js cache tag (cacheOrGet tags
 * every entry with its key parts).
 * @param key The cache tag to revalidate
 * @param opts Accepted for DataCache interface parity; fuzzy deletes are not
 *   supported by revalidateTag, so a warning is logged when requested
 *   instead of silently ignoring the flag.
 */
export async function deleteKey(key: string, opts?: { fuzzy?: boolean }) {
  if (opts?.fuzzy) {
    console.warn(
      "UnstableCache.deleteKey does not support fuzzy deletes, revalidating exact tag only"
    )
  }
  revalidateTag(key)
}

View File

@@ -0,0 +1,14 @@
import type { DataCache } from "@/services/dataCache/Cache"
/**
 * This function is not implemented for unstable_cache due to the underlying
 * cache implementation; it logs a warning and always resolves to undefined.
 * @see cacheOrGet
 * @param _key Ignored
 * @returns Always undefined
 */
export const get: DataCache["get"] = async <T>(
  _key: string
): Promise<T | undefined> => {
  console.warn("UnstableCache.get is not implemented, use cacheOrGet")
  return undefined
}

View File

@@ -0,0 +1,10 @@
import { cacheOrGet } from "./cacheOrGet"
import { deleteKey } from "./deleteKey"
import { get } from "./get"
import { set } from "./set"
import type { DataCache } from "@/services/dataCache/Cache"
/** Assembles the unstable_cache-backed DataCache implementation. */
export async function createUnstableCache(): Promise<DataCache> {
  const cache: DataCache = {
    type: "unstable-cache",
    cacheOrGet,
    deleteKey,
    get,
    set,
  }
  return cache
}

View File

@@ -0,0 +1,15 @@
import { type CacheTime, type DataCache } from "@/services/dataCache/Cache"
/**
 * This function is not implemented for unstable_cache due to the underlying
 * cache implementation; it only logs a warning. Writes happen through the
 * memoized callback in cacheOrGet.
 * @see cacheOrGet
 * @param _key Ignored
 * @param _data Ignored
 * @param _ttl Ignored
 */
export const set: DataCache["set"] = async <T>(
  _key: string,
  _data: T,
  _ttl: CacheTime
): Promise<void> => {
  console.warn("UnstableCache.set is not implemented, use cacheOrGet")
}

View File

@@ -0,0 +1,17 @@
import { isEdge } from "@/utils/isEdge"
import { createInMemoryCache } from "./InMemoryCache"
import { createUnstableCache } from "./UnstableCache"
import type { DataCache } from "@/services/dataCache/Cache"
/**
 * Picks the local cache backend for the current runtime: a plain in-memory
 * Map on the edge (unstable_cache is not available there), otherwise the
 * unstable_cache-backed implementation.
 */
export function createMemoryCache(): Promise<DataCache> {
  return isEdge ? createInMemoryCache() : createUnstableCache()
}

View File

@@ -0,0 +1,28 @@
import { env } from "@/env/server"
import { isEdge } from "@/utils/isEdge"
import { createMemoryCache } from "./MemoryCache/createMemoryCache"
import { createDistributedCache } from "./DistributedCache"
import { cacheLogger } from "./logger"
import type { DataCache } from "./Cache"
export type { CacheTime, DataCache } from "./Cache"
/**
 * Returns the process-wide cache client, creating it on first use.
 * Uses the distributed redis cache when REDIS_API_HOST is configured,
 * otherwise a runtime-appropriate local cache. The creation promise is
 * memoized on `global` so concurrent callers share one client; a failed
 * creation is cleared so the next call can retry instead of failing forever.
 */
export async function getCacheClient(): Promise<DataCache> {
  if (global.cacheClient) {
    return global.cacheClient
  }
  global.cacheClient = env.REDIS_API_HOST
    ? createDistributedCache()
    : createMemoryCache()
  try {
    const cacheClient = await global.cacheClient
    cacheLogger.debug(
      `Creating ${cacheClient.type} cache on ${isEdge ? "edge" : "server"} runtime`
    )
    return cacheClient
  } catch (error) {
    // Don't memoize a rejected promise — every later call would fail too.
    global.cacheClient = undefined
    throw error
  }
}

View File

@@ -0,0 +1,44 @@
/**
 * Minimal console-backed logger for the cache layer. The prefix includes the
 * active cache type once the client has been created.
 */
export const cacheLogger = {
  debug: async (message: string, ...args: unknown[]): Promise<void> => {
    const prefix = await loggerPrefix()
    console.debug(`${prefix} ${message}`, ...args)
  },
  warn: async (message: string, ...args: unknown[]): Promise<void> => {
    const prefix = await loggerPrefix()
    console.warn(`${prefix} Warning - ${message}`, ...args)
  },
  error: async (message: string, ...args: unknown[]): Promise<void> => {
    const prefix = await loggerPrefix()
    console.error(`${prefix} Error - ${message}`, ...args)
  },
}
/** Builds the log prefix, e.g. "[Cache] [redis]" once the client exists. */
async function loggerPrefix() {
  const instancePrefix = (await getCachePrefix()) ?? ""
  return `[Cache] ${instancePrefix}`.trim()
}
/**
 * Resolves the "[<cache type>]" fragment for the log prefix, or null while
 * the cache client has not finished (or started) being created.
 */
async function getCachePrefix() {
  const state = await isPromiseResolved(global.cacheClient)
  if (!state.resolved || !state.value?.type) {
    return null
  }
  return `[${state.value.type}]`
}
// Unique sentinel: unlike the old "__PENDING__" string, it can never collide
// with a promise that legitimately resolves to that string.
const PENDING = Symbol("pending")

/**
 * Non-blocking probe of a promise's state.
 * Resolves to { resolved, value }: `resolved` is true (with the settled
 * value) only when the promise has already fulfilled; pending, rejected and
 * missing promises all report `resolved: false`. Previously a rejected
 * promise made this check itself reject, breaking every logger call.
 */
function isPromiseResolved<T>(promise: Promise<T> | undefined) {
  if (!promise) {
    return { resolved: false, value: undefined }
  }
  // `promise` is listed first, so when it has already fulfilled it wins the
  // race over the immediately-resolved sentinel.
  return Promise.race([promise, Promise.resolve(PENDING)]).then(
    (result) => ({
      resolved: result !== PENDING,
      value: result !== PENDING ? (result as Awaited<T>) : undefined,
    }),
    // A rejected cache client must not blow up the logger.
    () => ({ resolved: false, value: undefined })
  )
}