Merged in feat/redis-fix (pull request #3209)

Feat/redis fix

* feat(redis): delete multiple keys in one partition scan
* fix(BOOK-603): make it possible to do multiple deletes in redis at once using one partition scan
* filter out invalid keys
* fix: if no valid keys are present return early
* fix: do redis deletes after scanning through all keys

Approved-by: Linus Flood

Committed by: Linus Flood
Parent: 1ecbca40a3
Commit: c3381e8100
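For orientation before the diff: the change adds a bulk-delete route under the /cache prefix (DELETE /multiple) whose body carries a list of keys plus an optional fuzzy flag. A rough sketch of how a client might call it follows; the base URL, the happy-path response shape, and the reading of fuzzy as "treat keys as patterns" are assumptions for illustration only (only the { deletedKeys: 0 } early-return shape appears in the diff below).

// Hypothetical caller for the new endpoint. BASE_URL and the happy-path
// response shape are assumptions; the route path and body fields come from
// the diff below.
const BASE_URL = "http://localhost:3000";

async function deleteCacheKeys(keys: string[], fuzzy = false): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/cache/multiple`, {
    method: "DELETE",
    headers: { "Content-Type": "application/json" },
    // Matches DELETEMULTIPLE_BODY_TYPE: { keys: string[], fuzzy?: boolean }
    body: JSON.stringify({ keys, fuzzy }),
  });
  return res.json();
}

// Example: fuzzy delete, assuming fuzzy makes the server treat keys as patterns.
await deleteCacheKeys(["user:*"], true);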
@@ -11,7 +11,7 @@ const MIN_LENGTH = 1;
 
 const QUERY_TYPE = t.Object({ key: t.String({ minLength: MIN_LENGTH }) });
 const DELETEMULTIPLE_BODY_TYPE = t.Object({
-  keys: t.Array(t.String({ minLength: MIN_LENGTH })),
+  keys: t.Array(t.String()),
   fuzzy: t.Optional(t.Boolean({ default: false })),
 });
 
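The hunk above relaxes the request schema from t.String({ minLength: MIN_LENGTH }) to plain t.String(), moving the "no empty keys" rule out of validation and into the handler (next hunk). A minimal sketch of the practical difference, assuming Elysia's re-exported TypeBox t and the standard @sinclair/typebox/value helper:

import { t } from "elysia";
import { Value } from "@sinclair/typebox/value";

// Relaxed schema from this PR: empty strings are accepted at the edge...
const DELETEMULTIPLE_BODY_TYPE = t.Object({
  keys: t.Array(t.String()),
  fuzzy: t.Optional(t.Boolean({ default: false })),
});

console.log(Value.Check(DELETEMULTIPLE_BODY_TYPE, { keys: ["user:1", ""] })); // true

// ...and dropped inside the handler instead of failing request validation.
const keys = ["user:1", ""].filter((x) => !!x); // ["user:1"]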
@@ -76,7 +76,10 @@ export const cacheRoutes = new Elysia({ prefix: "/cache" })
   .delete(
     "/multiple",
     async ({ body: { keys, fuzzy = false } }) => {
-      const validatedKeys = keys.map(validateKey);
+      const validatedKeys = keys.filter((x) => !!x).map(validateKey);
+      if (validatedKeys.length === 0) {
+        return { deletedKeys: 0 };
+      }
 
       cacheRouteLogger.debug(
         `DELETE /multiple keys=${validatedKeys.join(",")} ${fuzzy ? "(fuzzy)" : ""}`,
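Isolated from the framework, the handler change above amounts to: drop falsy keys, validate what remains, and short-circuit with { deletedKeys: 0 } when nothing is left. A self-contained sketch follows; validateKey is not shown in this diff, so the trim-based stub is purely a placeholder.

// Placeholder stand-in for the project's validateKey (not shown in this diff).
const validateKey = (key: string): string => key.trim();

function prepareKeys(keys: string[]): string[] {
  // Drop empty/falsy entries before validation, as the new handler does.
  return keys.filter((x) => !!x).map(validateKey);
}

// Mirrors the handler: nothing valid left -> report zero deletions and stop.
function handleDeleteMultiple(keys: string[]) {
  const validatedKeys = prepareKeys(keys);
  if (validatedKeys.length === 0) {
    return { deletedKeys: 0 };
  }
  return { queued: validatedKeys }; // assumption: the real handler queues the delete
}

handleDeleteMultiple(["", ""]);       // { deletedKeys: 0 }
handleDeleteMultiple(["user:1", ""]); // { queued: ["user:1"] }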
@@ -86,8 +86,8 @@ export async function queueDeleteMultiple({
 async function deleteWithPatterns(patterns: string[]) {
   let cursor = "0";
   const SCAN_SIZE = env.DELETE_BATCH_SIZE;
-  let totalDeleteCount = 0;
-
+  let matchedKeys: string[] = [];
+  let totalKeys = 0;
   do {
     const [newCursor, keys] = await redis.scan(
       cursor,
@@ -101,19 +101,29 @@ async function deleteWithPatterns(patterns: string[]) {
 
     if (!keys.length) continue;
 
-    const matchedKeys = keys.filter((key) =>
-      patterns.some((pattern) => matchKey(key, pattern)),
-    );
-
-    if (!matchedKeys.length) continue;
-
-    const deleted = await redis.unlink(...matchedKeys);
-    totalDeleteCount += deleted;
+    totalKeys += keys.length;
+
+    matchedKeys = [
+      ...matchedKeys,
+      ...keys.filter((key) =>
+        patterns.some((pattern) => matchKey(key, pattern)),
+      ),
+    ];
 
     await timeout(100);
   } while (cursor !== "0");
 
-  return totalDeleteCount;
+  let deleted = 0;
+  if (matchedKeys.length > 0) {
+    deleted = await redis.unlink(...matchedKeys);
+  }
+
+  deleteQueueLogger.info(
+    `Scanned ${totalKeys} keys, matched ${matchedKeys.length}, deleted ${deleted} keys.`,
+    { totalKeys, matchedKeys: matchedKeys.length, deleted },
+  );
+
+  return deleted;
 }
 
 function matchKey(key: string, pattern: string): boolean {
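Taken together, the two hunks above switch deleteWithPatterns from unlinking matches inside every SCAN iteration to collecting matches across the whole scan and issuing one UNLINK at the end, plus a summary log line. Below is a standalone sketch of that pattern against an ioredis-style client; it is simplified to a single server-side MATCH pattern, whereas the real function matches multiple patterns client-side via matchKey (whose body is outside this diff), and the SCAN_SIZE constant stands in for env.DELETE_BATCH_SIZE.

import Redis from "ioredis";

const redis = new Redis();

// Stand-in for env.DELETE_BATCH_SIZE; SCAN's COUNT is only a hint to Redis
// about how many keys to examine per iteration.
const SCAN_SIZE = 500;

// Sketch of the scan-first, delete-once pattern the PR introduces, reduced to
// one server-side MATCH pattern for brevity.
async function deleteByPattern(pattern: string): Promise<number> {
  let cursor = "0";
  const matched: string[] = [];

  do {
    const [nextCursor, keys] = await redis.scan(
      cursor,
      "MATCH",
      pattern,
      "COUNT",
      SCAN_SIZE,
    );
    cursor = nextCursor;
    matched.push(...keys);
  } while (cursor !== "0");

  // UNLINK reclaims memory asynchronously, so one large call after the scan is
  // cheap; skip it entirely when nothing matched.
  return matched.length > 0 ? await redis.unlink(...matched) : 0;
}

Deferring the UNLINK until the cursor returns to "0" keeps the keyspace stable while SCAN is still iterating, which appears to be the intent of the "do redis deletes after scanning through all keys" commits.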