Merged in feat/redis-fix (pull request #3207)
Feat/redis fix

* feat(redis): delete multiple keys in one partition scan
* fix(BOOK-603): make it possible to do multiple deletes in redis at once using one partition scan

Approved-by: Linus Flood
@@ -1,15 +1,20 @@
 import * as Sentry from "@sentry/bun";
 import { Elysia, t } from "elysia";

 import { ModelValidationError } from "@/errors/ModelValidationError";
 import { redis } from "@/services/redis";
-import { queueDelete } from "@/services/redis/queueDelete";
+import { queueDelete, queueDeleteMultiple } from "@/services/redis/queueDelete";
 import { loggerModule } from "@/utils/logger";
 import { truncate } from "@/utils/truncate";
+import { validateKey } from "@/utils/validateKey";

 const MIN_LENGTH = 1;

 const QUERY_TYPE = t.Object({ key: t.String({ minLength: MIN_LENGTH }) });
+const DELETEMULTIPLE_BODY_TYPE = t.Object({
+  keys: t.Array(t.String({ minLength: MIN_LENGTH })),
+  fuzzy: t.Optional(t.Boolean({ default: false })),
+});

 const cacheRouteLogger = loggerModule("cacheRoute");
 export const cacheRoutes = new Elysia({ prefix: "/cache" })
   .get(
@@ -68,6 +73,53 @@ export const cacheRoutes = new Elysia({ prefix: "/cache" })
       response: { 204: t.Undefined(), 400: t.String() },
     },
   )
+  .delete(
+    "/multiple",
+    async ({ body: { keys, fuzzy = false } }) => {
+      const validatedKeys = keys.map(validateKey);
+
+      cacheRouteLogger.debug(
+        `DELETE /multiple keys=${validatedKeys.join(",")} ${fuzzy ? "(fuzzy)" : ""}`,
+      );
+
+      // 1. Fuzzy deletes → Single SCAN pass
+      if (fuzzy) {
+        const patterns = validatedKeys.map((k) => `*${k}*`);
+
+        await queueDeleteMultiple({ patterns });
+        return { status: "queued" };
+      }
+
+      // 2. Exact deletes → Batch unlink
+      const now = performance.now();
+
+      // Use UNLINK for async deletes
+      const deletedKeys = await redis.unlink(...validatedKeys);
+
+      const elapsed = performance.now() - now;
+
+      cacheRouteLogger.info(
+        `Deleted ${deletedKeys} keys in ${elapsed}ms`,
+        {
+          deletedKeys,
+          keys: validatedKeys,
+          elapsed,
+        },
+      );
+
+      return { deletedKeys };
+    },
+    {
+      body: DELETEMULTIPLE_BODY_TYPE,
+      response: {
+        200: t.Union([
+          t.Object({ deletedKeys: t.Number() }),
+          t.Object({ status: t.Literal("queued") }),
+        ]),
+        400: t.String(),
+      },
+    },
+  )
   .delete(
     "/",
     async ({ query: { key, fuzzy } }) => {
@@ -105,19 +157,3 @@ export const cacheRoutes = new Elysia({ prefix: "/cache" })
       },
     },
   );
-
-function validateKey(key: string) {
-  const parsedKey = decodeURIComponent(key);
-
-  if (parsedKey.length < MIN_LENGTH) {
-    throw new ModelValidationError(
-      "Key has to be at least 1 character long",
-    );
-  }
-
-  if (parsedKey.includes("*")) {
-    throw new ModelValidationError("Key cannot contain wildcards");
-  }
-
-  return parsedKey;
-}
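For orientation, a client call to the new endpoint might look like the sketch below. It is illustrative and not part of the diff; the host and port are assumptions, the body follows DELETEMULTIPLE_BODY_TYPE, and the response is { deletedKeys } for exact deletes or { status: "queued" } when fuzzy is true.

// Illustrative only; the base URL is an assumption, the body/response shapes come from the diff above.
const res = await fetch("http://localhost:3000/cache/multiple", {
  method: "DELETE",
  headers: { "Content-Type": "application/json" },
  // fuzzy: false -> keys are UNLINKed immediately and the deleted count is returned.
  // fuzzy: true  -> keys are wrapped as *key* patterns and queued for a background SCAN pass.
  body: JSON.stringify({ keys: ["user:123", "session:abc"], fuzzy: false }),
});
const result = await res.json(); // { deletedKeys: number } | { status: "queued" }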
@@ -11,30 +11,52 @@ import { bullmqredis, redis } from ".";
 const DELETE_JOB = "deleteQueueJob";
 const deleteQueueLogger = loggerModule("deleteQueue");

-const deleteQueueSchema = z.object({
+const deleteSingleSchema = z.object({
   pattern: z.string().min(1, "Pattern must be at least 1 character long"),
 });

+const deleteMultipleSchema = z.object({
+  patterns: z
+    .array(z.string().min(1, "Pattern must be at least 1 character long"))
+    .min(1, "At least one pattern is required"),
+});
+
 const deleteQueue = new Queue(DELETE_JOB, { connection: bullmqredis });
 const worker = new Worker(
   DELETE_JOB,
   async (job) => {
+    let patterns: string[] = [];
+
+    // 1. Normalize job input into patterns[]
+    if (job.name === "delete") {
-    const { pattern } = deleteQueueSchema.parse(job.data);
-    deleteQueueLogger.info(
-      `Job: ${job.id} processing. With pattern: ${pattern}`,
-      { pattern, jobId: job.id },
-    );
-
-    const now = performance.now();
-    const deletedCount = await deleteWithPattern(pattern);
-    const elapsed = performance.now() - now;
-
-    deleteQueueLogger.info(
-      `Job: ${job.id} completed. Deleted ${deletedCount} keys for pattern '${pattern}' in ${elapsed.toFixed(2)}ms.`,
-      { deletedCount, pattern, elapsed, jobId: job.id },
-    );
+      const { pattern } = deleteSingleSchema.parse(job.data);
+      patterns = [pattern];
+    }
+
+    if (job.name === "deleteMultiple") {
+      const { patterns: parsedPatterns } = deleteMultipleSchema.parse(
+        job.data,
+      );
+      patterns = parsedPatterns;
+    }
+
+    if (!patterns.length) {
+      throw new Error(`Unknown job name or invalid data: ${job.name}`);
+    }
+
+    deleteQueueLogger.info(`Job: ${job.id} processing.`, {
+      patterns,
+      jobId: job.id,
+    });
+
+    const now = performance.now();
+    const deletedCount = await deleteWithPatterns(patterns);
+    const elapsed = performance.now() - now;
+
+    deleteQueueLogger.info(
+      `Job: ${job.id} completed. Deleted ${deletedCount} keys in ${elapsed.toFixed(2)}ms`,
+      { deletedCount, patterns, elapsed, jobId: job.id },
+    );
   },
   { connection: bullmqredis },
 );
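As a rough illustration of the worker's input normalization (a sketch, not part of the diff; the schemas are module-private, so calling them directly like this is hypothetical), the two job names correspond to these payload shapes, and the zod schemas reject anything else:

// Hypothetical job payloads; schema names and job names are the ones defined above.
deleteSingleSchema.parse({ pattern: "session:*" });                  // "delete" job -> patterns = ["session:*"]
deleteMultipleSchema.parse({ patterns: ["*user:1*", "*cart:9*"] });  // "deleteMultiple" job
deleteMultipleSchema.parse({ patterns: [] });                        // throws: "At least one pattern is required"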
@@ -43,7 +65,7 @@ worker.on("failed", (job, error) => {
   deleteQueueLogger.error(`Job failed: ${job?.id} with ${error.message}`, {
     error,
     jobId: job?.id,
-    pattern: job?.data?.pattern,
+    patterns: job?.data?.patterns,
   });

   sentry.captureException(error);
@@ -53,33 +75,68 @@ export async function queueDelete({ pattern }: { pattern: string }) {
   deleteQueue.add("delete", { pattern });
 }

-async function deleteWithPattern(pattern: string) {
+export async function queueDeleteMultiple({
+  patterns,
+}: {
+  patterns: string[];
+}) {
+  deleteQueue.add("deleteMultiple", { patterns });
+}
+
+async function deleteWithPatterns(patterns: string[]) {
   let cursor = "0";
   const SCAN_SIZE = env.DELETE_BATCH_SIZE;

   let totalDeleteCount = 0;

   do {
-    const [newCursor, foundKeys] = await redis.scan(
+    const [newCursor, keys] = await redis.scan(
       cursor,
       "MATCH",
-      pattern,
+      "*",
       "COUNT",
       SCAN_SIZE,
     );

     cursor = newCursor;
-    if (foundKeys.length === 0) {
-      continue;
-    }
-
-    const deleteCount = await redis.unlink(foundKeys);
+    if (!keys.length) continue;

-    totalDeleteCount += deleteCount;
+    const matchedKeys = keys.filter((key) =>
+      patterns.some((pattern) => matchKey(key, pattern)),
+    );
+
+    if (!matchedKeys.length) continue;
+
+    const deleted = await redis.unlink(...matchedKeys);
+    totalDeleteCount += deleted;

     // Rate limiting to avoid overwhelming the Redis server
     await timeout(100);
   } while (cursor !== "0");

   return totalDeleteCount;
 }
+
+function matchKey(key: string, pattern: string): boolean {
+  const startsWithWildcard = pattern.startsWith("*");
+  const endsWithWildcard = pattern.endsWith("*");
+
+  const cleanPattern = pattern.replace(/^\*|\*$/g, ""); // remove outer *
+
+  if (!startsWithWildcard && !endsWithWildcard) {
+    return key === pattern;
+  }
+
+  if (startsWithWildcard && endsWithWildcard) {
+    return key.includes(cleanPattern);
+  }
+
+  if (startsWithWildcard) {
+    return key.endsWith(cleanPattern);
+  }
+
+  if (endsWithWildcard) {
+    return key.startsWith(cleanPattern);
+  }
+
+  return false;
+}
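As a quick sanity check on matchKey (illustrative, not part of the diff): it only understands exact matches and leading/trailing * wildcards, which is the subset of Redis MATCH globbing the route actually produces (*key*):

// Expected behaviour of matchKey as written above (assumed examples, not committed tests).
matchKey("user:123", "user:123");       // true  (exact match)
matchKey("user:123", "user:*");         // true  (trailing wildcard -> startsWith)
matchKey("abc:user:123", "*user:123");  // true  (leading wildcard -> endsWith)
matchKey("abc:user:123:x", "*user*");   // true  (both wildcards -> includes)
matchKey("user:123", "u*er:123");       // false (inner wildcards are not interpreted)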
apps/redis-api/src/utils/validateKey.ts (new file, 17 lines added)
@@ -0,0 +1,17 @@
+import { ModelValidationError } from "@/errors/ModelValidationError";
+const MIN_LENGTH = 1;
+export function validateKey(key: string) {
+  const parsedKey = decodeURIComponent(key);
+
+  if (parsedKey.length < MIN_LENGTH) {
+    throw new ModelValidationError(
+      "Key has to be at least 1 character long",
+    );
+  }
+
+  if (parsedKey.includes("*")) {
+    throw new ModelValidationError("Key cannot contain wildcards");
+  }
+
+  return parsedKey;
+}
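For illustration (not part of the diff), the extracted helper decodes URL-encoded keys and rejects anything that could behave as a glob when passed to Redis:

// Assumed examples of the validateKey behaviour shown above.
validateKey("user%3A123"); // returns "user:123" (decodeURIComponent is applied first)
validateKey("");           // throws ModelValidationError: key must be at least 1 character long
validateKey("user:*");     // throws ModelValidationError: keys cannot contain wildcards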