Merged in feature/redis-api-get-all-keys-endopoint (pull request #3306)
feature: Add getAllKeys endpoint

* feature: Add getAllKeys endpoint
* rename DELETE_BATCH_SIZE to SCAN_BATCH_SIZE

Approved-by: Anton Gunnarsson
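For reference, a minimal sketch of how a client might call the new `GET /cache/all` endpoint. The base URL and the absence of auth headers are assumptions; the `key` query parameter and the `{ data, totalKeys }` response shape come from the route definition in the diff below.

```typescript
// Hypothetical client call: BASE_URL and the lack of auth headers are assumptions;
// the query parameter and response shape follow the new GET /cache/all route.
const BASE_URL = "http://localhost:3000"; // assumption: local dev instance

async function listMatchingKeys(key: string): Promise<string[]> {
  const res = await fetch(`${BASE_URL}/cache/all?key=${encodeURIComponent(key)}`);
  if (!res.ok) {
    throw new Error(`Key lookup failed: ${res.status} ${await res.text()}`);
  }
  // 200 response: { data: string[]; totalKeys: number }
  const { data, totalKeys } = (await res.json()) as { data: string[]; totalKeys: number };
  console.log(`Matched ${data.length} of ${totalKeys} scanned keys`);
  return data;
}

listMatchingKeys("user").catch(console.error);
```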
@@ -13,7 +13,7 @@ param sentryEnvironment string
 param sentryDSN string
 param sentryEnabled string
 param sentryTraceSampleRate string
-param deleteBatchSize string
+param scanBatchSize string

 param timestamp string = utcNow()

@@ -48,7 +48,7 @@ module containerApp 'containerApp.bicep' = {
       { name: 'SENTRY_ENABLED', value: sentryEnabled }
       { name: 'SENTRY_TRACE_SAMPLE_RATE', value: sentryTraceSampleRate }
       { name: 'VERSION', value: version }
-      { name: 'DELETE_BATCH_SIZE', value: deleteBatchSize }
+      { name: 'SCAN_BATCH_SIZE', value: scanBatchSize }

       { name: 'timestamp', value: timestamp }
     ]

@@ -1,4 +1,4 @@
-import { Environment, EnvironmentVar } from 'types.bicep'
+import { Environment } from 'types.bicep'

 targetScope = 'subscription'

@@ -12,7 +12,7 @@ param secondaryApiKey string
 param sentryDSN string
 param sentryEnabled string
 param sentryTraceSampleRate string
-param deleteBatchSize int = 2000
+param scanBatchSize int = 2000

 @description('The location for the resource group')
 param location string = 'westeurope'
@@ -54,6 +54,6 @@ module containerApp 'app/main.bicep' = {
     sentryEnabled: sentryEnabled
     sentryTraceSampleRate: sentryTraceSampleRate
     version: version
-    deleteBatchSize: string(deleteBatchSize)
+    scanBatchSize: string(scanBatchSize)
   }
 }

@@ -44,7 +44,7 @@ export const env = createEnv({
       .refine((s) => s === "true" || s === "false")
       .transform((s) => s === "true"),
     SENTRY_TRACE_SAMPLE_RATE: z.coerce.number().default(0.001),
-    DELETE_BATCH_SIZE: z.coerce.number().default(2000),
+    SCAN_BATCH_SIZE: z.coerce.number().default(2000),
   },
   createFinalSchema: (shape) => {
     return z.object(shape).transform((env, ctx) => {

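A small aside on the renamed env entry: `z.coerce.number().default(2000)` turns the `SCAN_BATCH_SIZE` string from the environment into a number and falls back to 2000 when it is unset. A minimal standalone sketch of that behaviour, independent of the surrounding `createEnv` setup:

```typescript
import { z } from "zod";

// Same schema shape as the renamed SCAN_BATCH_SIZE entry: coerce the env
// string to a number, defaulting to 2000 when the variable is not set.
const scanBatchSize = z.coerce.number().default(2000);

console.log(scanBatchSize.parse("500"));     // 500  (string coerced to a number)
console.log(scanBatchSize.parse(undefined)); // 2000 (default applied)
```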
@@ -6,6 +6,7 @@ import { queueDelete, queueDeleteMultiple } from "@/services/redis/queueDelete";
 import { loggerModule } from "@/utils/logger";
 import { truncate } from "@/utils/truncate";
 import { validateKey } from "@/utils/validateKey";
+import { getAllKeys } from "@/services/redis/getAllKeys";

 const QUERY_TYPE = t.Object({ key: t.String({}) });
 const DELETEMULTIPLE_BODY_TYPE = t.Object({
@@ -47,6 +48,27 @@ export const cacheRoutes = new Elysia({ prefix: "/cache" })
       response: { 200: t.Object({ data: t.Any() }), 404: t.String() },
     },
   )
+  .get(
+    "/all",
+    async ({ query: { key } }) => {
+      key = validateKey(key);
+      const { matchedKeys, totalKeys } = await getAllKeys([`*${key}*`], {
+        caseInsensitive: true,
+      });
+
+      return { data: matchedKeys.toSorted(), totalKeys };
+    },
+    {
+      query: QUERY_TYPE,
+      response: {
+        200: t.Object({
+          data: t.Array(t.String()),
+          totalKeys: t.Number(),
+        }),
+        400: t.String(),
+      },
+    },
+  )
   .put(
     "/",
     async ({ query: { key }, body, status, set }) => {

apps/redis-api/src/services/redis/getAllKeys.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
+import { env } from "@/env";
+import { redis } from ".";
+import { timeout } from "@/utils/timeout";
+
+/**
+ * Retrieves all Redis keys that match any of the provided patterns.
+ *
+ * This function performs an iterative SCAN operation on the Redis database to retrieve keys
+ * in batches, avoiding blocking the Redis server. It filters the keys against the provided
+ * patterns and returns only those that match at least one pattern.
+ *
+ * @param patterns - An array of pattern strings to match against Redis keys.
+ *                   Keys matching any of these patterns will be included in the result.
+ * @returns A promise that resolves to an object containing `matchedKeys` (the keys that
+ *          matched at least one pattern) and `totalKeys` (the total number of keys scanned).
+ *
+ * @remarks
+ * - Uses Redis SCAN command with a cursor-based iteration to handle large keyspaces efficiently
+ * - Batch size is controlled by `env.SCAN_BATCH_SIZE`
+ * - Includes a 100ms timeout between batches to prevent overwhelming the Redis server
+ * - The SCAN operation uses a wildcard "*" match, with additional filtering applied via `matchKey`
+ *
+ * @example
+ * ```typescript
+ * const { matchedKeys } = await getAllKeys(['user:*', 'session:*']);
+ * console.log(matchedKeys); // ['user:123', 'user:456', 'session:abc']
+ * ```
+ */
+export async function getAllKeys(
+  patterns: string[],
+  options?: { caseInsensitive?: boolean },
+) {
+  let cursor = "0";
+  const SCAN_SIZE = env.SCAN_BATCH_SIZE;
+  let matchedKeys: string[] = [];
+  let totalKeys = 0;
+  do {
+    const [newCursor, keys] = await redis.scan(
+      cursor,
+      "MATCH",
+      "*",
+      "COUNT",
+      SCAN_SIZE,
+    );
+
+    cursor = newCursor;
+
+    if (!keys.length) continue;
+
+    totalKeys += keys.length;
+
+    matchedKeys = [
+      ...matchedKeys,
+      ...keys.filter((key) =>
+        patterns.some((pattern) =>
+          matchKey(key, pattern, options?.caseInsensitive),
+        ),
+      ),
+    ];
+
+    await timeout(100);
+  } while (cursor !== "0");
+
+  return { totalKeys, matchedKeys };
+}
+
+function matchKey(
+  key: string,
+  pattern: string,
+  caseInsensitive = false,
+): boolean {
+  if (caseInsensitive) {
+    key = key.toLowerCase();
+    pattern = pattern.toLowerCase();
+  }
+
+  const startsWithWildcard = pattern.startsWith("*");
+  const endsWithWildcard = pattern.endsWith("*");
+
+  const cleanPattern = pattern.replace(/^\*|\*$/g, ""); // remove outer *
+
+  if (!startsWithWildcard && !endsWithWildcard) {
+    return key === pattern;
+  }
+
+  if (startsWithWildcard && endsWithWildcard) {
+    return key.includes(cleanPattern);
+  }
+
+  if (startsWithWildcard) {
+    return key.endsWith(cleanPattern);
+  }
+
+  if (endsWithWildcard) {
+    return key.startsWith(cleanPattern);
+  }
+
+  return false;
+}

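Since `matchKey` is not exported, here is an illustrative restatement of the wildcard rules it implements (exact, contains, suffix, prefix, optionally case-insensitive). The `wildcardMatch` name and the sample keys are mine, not part of the PR:

```typescript
// Illustration of the matching rules used by matchKey above:
// "name" = exact, "*name*" = contains, "*name" = ends with, "name*" = starts with.
function wildcardMatch(key: string, pattern: string, caseInsensitive = false): boolean {
  if (caseInsensitive) {
    key = key.toLowerCase();
    pattern = pattern.toLowerCase();
  }
  const starts = pattern.startsWith("*");
  const ends = pattern.endsWith("*");
  const clean = pattern.replace(/^\*|\*$/g, ""); // strip the outer wildcards
  if (!starts && !ends) return key === pattern;
  if (starts && ends) return key.includes(clean);
  if (starts) return key.endsWith(clean);
  return key.startsWith(clean);
}

console.log(wildcardMatch("user:123", "user:*"));       // true  (prefix)
console.log(wildcardMatch("session:abc", "*abc"));      // true  (suffix)
console.log(wildcardMatch("User:123", "*user*", true)); // true  (contains, case-insensitive)
console.log(wildcardMatch("user:123", "user:999"));     // false (no wildcard, exact match required)
```

Because the `GET /all` route builds its pattern as `` `*${key}*` `` with `caseInsensitive: true`, it effectively performs a case-insensitive substring search over the keyspace.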
@@ -7,6 +7,7 @@ import { loggerModule } from "@/utils/logger";
 import { timeout } from "@/utils/timeout";

 import { bullmqredis, redis } from ".";
+import { getAllKeys } from "./getAllKeys";

 const DELETE_JOB = "deleteQueueJob";
 const deleteQueueLogger = loggerModule("deleteQueue");
@@ -84,34 +85,7 @@ export async function queueDeleteMultiple({
 }

 async function deleteWithPatterns(patterns: string[]) {
-  let cursor = "0";
-  const SCAN_SIZE = env.DELETE_BATCH_SIZE;
-  let matchedKeys: string[] = [];
-  let totalKeys = 0;
-  do {
-    const [newCursor, keys] = await redis.scan(
-      cursor,
-      "MATCH",
-      "*",
-      "COUNT",
-      SCAN_SIZE,
-    );
-
-    cursor = newCursor;
-
-    if (!keys.length) continue;
-
-    totalKeys += keys.length;
-
-    matchedKeys = [
-      ...matchedKeys,
-      ...keys.filter((key) =>
-        patterns.some((pattern) => matchKey(key, pattern)),
-      ),
-    ];
-
-    await timeout(100);
-  } while (cursor !== "0");
+  const { totalKeys, matchedKeys } = await getAllKeys(patterns);

   let deleted = 0;
   if (matchedKeys.length > 0) {
@@ -125,28 +99,3 @@ async function deleteWithPatterns(patterns: string[]) {

   return deleted;
 }
-
-function matchKey(key: string, pattern: string): boolean {
-  const startsWithWildcard = pattern.startsWith("*");
-  const endsWithWildcard = pattern.endsWith("*");
-
-  const cleanPattern = pattern.replace(/^\*|\*$/g, ""); // remove outer *
-
-  if (!startsWithWildcard && !endsWithWildcard) {
-    return key === pattern;
-  }
-
-  if (startsWithWildcard && endsWithWildcard) {
-    return key.includes(cleanPattern);
-  }
-
-  if (startsWithWildcard) {
-    return key.endsWith(cleanPattern);
-  }
-
-  if (endsWithWildcard) {
-    return key.startsWith(cleanPattern);
-  }
-
-  return false;
-}
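The deletion step between the `matchedKeys.length` check and `return deleted` is not shown in this diff. Purely as a hedged sketch of how the refactored `deleteWithPatterns` sits on top of `getAllKeys`; the batched `UNLINK` calls and the batch size here are assumptions, not the committed implementation:

```typescript
import { redis } from ".";
import { getAllKeys } from "./getAllKeys";

// Sketch only: scanning is delegated to getAllKeys as in the PR, but the
// actual deletion logic is elided from the diff, so batched UNLINK calls
// are an assumption rather than the committed code.
async function deleteWithPatternsSketch(patterns: string[]): Promise<number> {
  const { totalKeys, matchedKeys } = await getAllKeys(patterns);

  let deleted = 0;
  if (matchedKeys.length > 0) {
    const BATCH = 1000; // assumed batch size
    for (let i = 0; i < matchedKeys.length; i += BATCH) {
      // UNLINK removes the keys asynchronously on the server side.
      deleted += await redis.unlink(...matchedKeys.slice(i, i + BATCH));
    }
  }

  console.log(`Scanned ${totalKeys} keys, matched ${matchedKeys.length}, deleted ${deleted}`);
  return deleted;
}
```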