Merged in feat/use-hash-for-graphql-cache (pull request #2251)
Feature: Use hash of query+variables for graphql cache instead of gitsha

* feature: use a hash of query+variables as part of the cache key instead of gitsha
* .
* Merge branch 'master' of bitbucket.org:scandic-swap/web into feat/use-hash-for-graphql-cache
* use correct json stringify
* merge
* remove edgeRequest in favor of request
* add more indicative logging

Approved-by: Linus Flood
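The change replaces the per-deploy git SHA in the GraphQL cache key with a hash of the query and its variables, so cached responses stay valid across deploys as long as the query text and variables are unchanged. Below is a minimal sketch of the resulting key scheme; it mirrors what the getOperationName and sha256 helpers in the diffs do, but sha256Hex and buildCacheKey are illustrative names and JSON.stringify stands in for the stable stringify the real code imports.

// Sketch only, not the committed implementation (see the request<T>() hunk below).
async function sha256Hex(input: string): Promise<string> {
  const bytes = new TextEncoder().encode(input)
  const digest = await crypto.subtle.digest("SHA-256", bytes)
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, "0"))
    .join("")
}

async function buildCacheKey(
  query: string,
  variables: Record<string, unknown>,
  key: string | string[]
): Promise<string> {
  // Operation name, e.g. "EntryByUrlBatch1" from "query EntryByUrlBatch1(...) { ... }"
  const operationName = query.match(/(query|mutation|subscription)\s+(\w+)/)?.[2] ?? ""
  // The hash replaces the git SHA: the key changes only when the query or its
  // variables change, not on every deploy.
  const queryHash = await sha256Hex(`${query}${JSON.stringify(variables)}`)
  const baseKey = Array.isArray(key) ? key.join("_") : key
  return `${operationName}:${queryHash}:${baseKey}`
}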
@@ -1,41 +0,0 @@
-import deepmerge from "deepmerge"
-
-import { arrayMerge } from "@/utils/merge"
-
-import { edgeRequest } from "./edgeRequest"
-
-import type { BatchRequestDocument } from "graphql-request"
-
-import type { Data } from "@/types/request"
-
-export async function batchEdgeRequest<T>(
-  queries: BatchRequestDocument[]
-): Promise<Data<T>> {
-  try {
-    const response = await Promise.allSettled(
-      queries.map((query) => edgeRequest<T>(query.document, query.variables))
-    )
-
-    let data = {} as T
-    const reasons: PromiseRejectedResult["reason"][] = []
-    response.forEach((res) => {
-      if (res.status === "fulfilled") {
-        data = deepmerge(data, res.value.data, { arrayMerge })
-      } else {
-        reasons.push(res.reason)
-      }
-    })
-
-    if (reasons.length) {
-      reasons.forEach((reason) => {
-        console.error(`Batch request failed`, reason)
-      })
-    }
-
-    return { data }
-  } catch (error) {
-    console.error("Error in batched graphql request")
-    console.error(error)
-    throw new Error("Something went wrong")
-  }
-}
@@ -1,22 +0,0 @@
-import { GraphQLClient } from "graphql-request"
-
-import { env } from "@/env/server"
-
-import { request as _request } from "./_request"
-
-import type { DocumentNode } from "graphql"
-
-import type { Data } from "@/types/request"
-
-export async function edgeRequest<T>(
-  query: string | DocumentNode,
-  variables?: {},
-  params?: RequestInit
-): Promise<Data<T>> {
-  // Creating a new client for each request to avoid conflicting parameters
-  const client = new GraphQLClient(env.CMS_URL, {
-    fetch: fetch,
-  })
-
-  return _request(client, query, variables, params)
-}
apps/scandic-web/lib/graphql/getOperationName.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
+import type { DocumentNode } from "graphql"
+
+export function getOperationName(query: string | DocumentNode): string {
+  let operationName = ""
+
+  if (typeof query === "string") {
+    const operationRegex = /(query|mutation|subscription)\s+(\w+)/
+    const match = query.match(operationRegex)
+    if (match && match[2]) {
+      operationName = match[2]
+    }
+  } else {
+    const opDefinition = query.definitions.find(
+      (def) => def.kind === "OperationDefinition" && def.name
+    )
+    if (opDefinition && "name" in opDefinition && opDefinition.name) {
+      operationName = opDefinition.name.value
+    }
+  }
+
+  return operationName || "AnonymousOperation"
+}
@@ -1,5 +1,6 @@
 import fetchRetry from "fetch-retry"
 import { GraphQLClient } from "graphql-request"
+import stringify from "json-stable-stringify-without-jsonify"
 import { cache as reactCache } from "react"

 import { env } from "@/env/server"
@@ -8,6 +9,7 @@ import { getPreviewHash, isPreviewByUid } from "@/lib/previewContext"
 import { type CacheTime, getCacheClient } from "@/services/dataCache"

 import { request as _request } from "./_request"
+import { getOperationName } from "./getOperationName"

 import type { DocumentNode } from "graphql"
@@ -37,12 +39,23 @@ export async function request<T>(
     return doCall()
   }

+  const queryString = typeof query === "string" ? query : stringify(query)
+  const variablesString = stringify(variables)
+
+  const fullQuery = `${queryString}${variablesString}`
+  const queryHash = await sha256(fullQuery)
+  const operationName = getOperationName(query)
+
   const cacheKey: string = Array.isArray(cacheOptions.key)
     ? cacheOptions.key.join("_")
     : cacheOptions.key

+  const extendedCacheKey = `${operationName}:${queryHash}:${cacheKey}`
+
   const _dataCache = await getCacheClient()
-  return _dataCache.cacheOrGet(cacheKey, doCall, cacheOptions.ttl)
+  return _dataCache.cacheOrGet(extendedCacheKey, doCall, cacheOptions.ttl, {
+    includeGitHashInKey: false,
+  })
 }

 function internalRequest<T>(
@@ -81,3 +94,13 @@ function internalRequest<T>(

   return _request(client, query, variables, mergedParams)
 }
+
+async function sha256(input: string) {
+  const encoder = new TextEncoder()
+  const data = encoder.encode(input)
+  const hashBuffer = await crypto.subtle.digest("SHA-256", data)
+  const hashArray = Array.from(new Uint8Array(hashBuffer))
+  const hashHex = hashArray.map((b) => b.toString(16).padStart(2, "0")).join("")
+
+  return hashHex
+}
@@ -9,7 +9,7 @@ import {
 import { notFound } from "@/server/errors/next"
 import { getPublicNextURL } from "@/server/utils"

-import { fetchAndCacheEntry } from "@/services/cms/fetchAndCacheEntry"
+import { resolve as resolveEntry } from "@/utils/entry"
 import { findLang } from "@/utils/languages"

 import { getDefaultRequestHeaders } from "./utils"
@@ -32,10 +32,7 @@ export const middleware: NextMiddleware = async (request) => {
   }

   const pathNameWithoutLang = nextUrl.pathname.replace(`/${lang}`, "")
-  const { uid, contentType } = await fetchAndCacheEntry(
-    pathNameWithoutLang,
-    lang
-  )
+  const { uid, contentType } = await resolveEntry(pathNameWithoutLang, lang)
   if (!uid || !contentType) {
     throw notFound(
       `Unable to resolve CMS entry for locale "${lang}": ${pathNameWithoutLang}`
@@ -10,8 +10,8 @@ import {
 import { env } from "@/env/server"
 import { badRequest, notFound } from "@/server/errors/next"

-import { fetchAndCacheEntry } from "@/services/cms/fetchAndCacheEntry"
 import { decryptData } from "@/utils/aes"
+import { resolve as resolveEntry } from "@/utils/entry"
 import { findLang } from "@/utils/languages"

 import { getDefaultRequestHeaders } from "./utils"
@@ -147,7 +147,7 @@ async function handleWebviewRewrite({

   const pathNameWithoutLang = path.replace(`/${lang}/webview`, "")

-  const { uid } = await fetchAndCacheEntry(pathNameWithoutLang, lang)
+  const { uid } = await resolveEntry(pathNameWithoutLang, lang)
   if (uid) {
     headers.set("x-uid", uid)
   }
@@ -32,6 +32,7 @@ export async function fetchCollectionPageRefs(lang: Lang, uid: string) {
   metricsGetCollectionPageRefs.start()

+  const cacheKey = generateRefsResponseTag(lang, uid)

   const refsResponse = await request<GetCollectionPageRefsSchema>(
     GetCollectionPageRefs,
     {
@@ -1,23 +0,0 @@
-import { getCacheClient } from "@/services/dataCache"
-import { resolve as resolveEntry } from "@/utils/entry"
-
-import type { Lang } from "@/constants/languages"
-
-export const fetchAndCacheEntry = async (path: string, lang: Lang) => {
-  path = path || "/"
-  const cacheKey = `${lang}:resolveentry:${path}`
-  const cache = await getCacheClient()
-
-  return cache.cacheOrGet(
-    cacheKey,
-    async () => {
-      const { contentType, uid } = await resolveEntry(path, lang)
-
-      return {
-        contentType,
-        uid,
-      }
-    },
-    "max"
-  )
-}
@@ -1,10 +1,9 @@
 import { Lang } from "@/constants/languages"

+import { resolve as resolveEntry } from "@/utils/entry"
 import { findLang } from "@/utils/languages"
 import { removeTrailingSlash } from "@/utils/url"

-import { fetchAndCacheEntry } from "./fetchAndCacheEntry"
-
 export const getUidAndContentTypeByPath = async (pathname: string) => {
   const lang = findLang(pathname)
@@ -12,7 +11,7 @@ export const getUidAndContentTypeByPath = async (pathname: string) => {

   const contentTypePathName = pathWithoutTrailingSlash.replace(`/${lang}`, "")

-  const { contentType, uid } = await fetchAndCacheEntry(
+  const { contentType, uid } = await resolveEntry(
     contentTypePathName,
     lang ?? Lang.en
   )
@@ -15,7 +15,9 @@ export const cacheOrGet: DataCache["cacheOrGet"] = async <T>(
   ttl: CacheTime,
   opts?: CacheOrGetOptions
 ) => {
-  const cacheKey = generateCacheKey(key)
+  const cacheKey = generateCacheKey(key, {
+    includeGitHashInKey: opts?.includeGitHashInKey ?? true,
+  })

   let cachedValue: Awaited<T> | undefined = undefined
   if (shouldGetFromCache(opts)) {
@@ -32,8 +34,12 @@
   const perf = performance.now()
   const data = await callback(overrideTTL)

+  const size = JSON.stringify(data).length / (1024 * 1024)
+  if (size >= 5) {
+    cacheLogger.warn(`'${key}' is larger than 5MB!`)
+  }
   cacheLogger.debug(
-    `Getting data '${cacheKey}' took ${(performance.now() - perf).toFixed(2)}ms`
+    `Fetching data took ${(performance.now() - perf).toFixed(2)}ms ${size.toFixed(4)}MB for '${key}'`
   )

   await set<T>(cacheKey, data, realTTL)
@@ -2,28 +2,41 @@ import { env } from "@/env/server"

 import { getBranchPrefix } from "./getBranchPrefix"

-export function getPrefix(): string {
+export function getPrefix(options: {
+  includeGitHashInKey: boolean
+  includeBranchPrefix: boolean
+}): string {
+  const prefixTokens = []
+
+  const includeGitHashInKey = options.includeGitHashInKey
+  const includeBranchPrefix = options.includeBranchPrefix
+
   if (process.env.NODE_ENV === "development") {
     const devPrefix = process.env.USER || process.env.USERNAME || "dev"
     return `${devPrefix}`
   }

-  const branch = env.BRANCH.trim()
-  const gitSha = env.GIT_SHA?.trim().substring(0, 7)
-
-  if (!branch && !gitSha) {
-    throw new Error("Unable to getPrefix, BRANCH and GIT_SHA must be set")
-  }
-
-  if (!branch) {
-    throw new Error("Unable to getPrefix, BRANCH must be set")
-  }
-
-  if (!gitSha) {
-    throw new Error("Unable to getPrefix, GIT_SHA must be set")
-  }
-
-  const prefixTokens = [getBranchPrefix(branch), gitSha].filter(Boolean)
+  if (includeGitHashInKey) {
+    const gitSha = env.GIT_SHA?.trim().substring(0, 7)
+
+    if (!gitSha) {
+      throw new Error("Unable to getPrefix, GIT_SHA must be set")
+    }
+
+    prefixTokens.push(gitSha)
+  }
+
+  if (includeBranchPrefix) {
+    const branch = env.BRANCH?.trim()
+
+    if (!branch) {
+      throw new Error("Unable to getPrefix, BRANCH must be set")
+    }
+    const branchPrefix = getBranchPrefix(branch)
+    if (branchPrefix) {
+      prefixTokens.push(branchPrefix)
+    }
+  }

   return prefixTokens.join(":")
 }
@@ -1,13 +1,20 @@
 import { getPrefix } from "./getPrefix"

-export function generateCacheKey(key: string | string[]): string {
+export function generateCacheKey(
+  key: string | string[],
+  options?: { includeGitHashInKey?: boolean }
+): string {
+  const includeGitHashInKey = options?.includeGitHashInKey ?? true
   const keyArray = (Array.isArray(key) ? key : [key]).filter(Boolean)

   if (keyArray.length === 0) {
     throw new Error("No keys provided")
   }

-  const prefix = getPrefix()
+  const prefix = getPrefix({
+    includeGitHashInKey,
+    includeBranchPrefix: true,
+  })

   const keyTokens = [prefix, keyArray.join("_")].filter(Boolean).join(":")
@@ -7,6 +7,7 @@
 export type CacheStrategy = "cache-first" | "fetch-then-cache"
 export type CacheOrGetOptions = {
   cacheStrategy?: CacheStrategy
+  includeGitHashInKey?: boolean
 }

 export function defaultCacheOrGetOptions(
@@ -1,5 +1,5 @@
 import { Lang } from "@/constants/languages"
-import { batchEdgeRequest } from "@/lib/graphql/batchEdgeRequest"
+import { batchRequest } from "@/lib/graphql/batchRequest"
 import {
   EntryByUrlBatch1,
   EntryByUrlBatch2,
@@ -13,14 +13,23 @@ export async function resolve(url: string, lang = Lang.en) {

   // The maximum number of content types you can query is 6, therefore anything
   // beyond that is batched
-  const response = await batchEdgeRequest([
+  const cacheKey = `${lang}:${url}:resolveentry`
+  const response = await batchRequest([
     {
       document: EntryByUrlBatch1,
       variables,
+      cacheOptions: {
+        ttl: "max",
+        key: cacheKey,
+      },
     },
     {
       document: EntryByUrlBatch2,
       variables,
+      cacheOptions: {
+        ttl: "max",
+        key: cacheKey,
+      },
     },
   ])