Merged in feature/SW-2538-redis-api-sentry (pull request #1973)
Feature/SW-2538 redis api sentry

* Add fingerprint to be able to differentiate JSON.parse errors
* Update deploy script
* Use `status` over the deprecated `error` function in Elysia
* Use t3-env `createFinalSchema` for extra validation

Approved-by: Anton Gunnarsson
````diff
@@ -35,5 +35,9 @@ az deployment sub create \
   --parameters environment={{ENVIRONMENT}} \
     containerImageTag=latest \
     primaryApiKey={{PRIMARY API KEY}} \ # API keys are used for communicating with the api
-    secondaryApiKey={{SECONDARY API KEY}}
+    secondaryApiKey={{SECONDARY API KEY}} \
+    sentryDSN={{SENTRY DSN}} \
+    sentryEnabled={{true/false}} \
+    sentryTraceSampleRate=0.01
 ```
````
```diff
@@ -7,6 +7,9 @@ param containerImageTag string
 param redisConnection string
 param primaryApiKey string
 param secondaryApiKey string
+param sentryDSN string
+param sentryEnabled string
+param sentryTraceSampleRate string

 param timestamp string = utcNow()

@@ -36,6 +39,10 @@ module containerApp 'containerApp.bicep' = {
       { name: 'REDIS_CONNECTION', value: redisConnection }
       { name: 'PRIMARY_API_KEY', value: primaryApiKey }
       { name: 'SECONDARY_API_KEY', value: secondaryApiKey }
+      { name: 'SENTRY_DSN', value: sentryDSN }
+      { name: 'SENTRY_ENABLED', value: sentryEnabled }
+      { name: 'SENTRY_TRACE_SAMPLE_RATE', value: sentryTraceSampleRate }
+
       { name: 'timestamp', value: timestamp }
     ]
   }
```
apps/redis-api/docker-compose.yaml (new file, 27 lines)

```yaml
services:
  redis-api:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "3001:3001"
    depends_on:
      - redis
    environment:
      - REDIS_CONNECTION=redis:6379
      - PRIMARY_API_KEY=
      - SECONDARY_API_KEY=
      - NODE_ENV=development
      - SENTRY_ENABLED=false

  redis:
    image: redis:6
    ports:
      - "6379:6379"

  redisinsight:
    image: redis/redisinsight:latest
    ports:
      - "5540:5540"
    depends_on:
      - redis
```
```diff
@@ -7,12 +7,14 @@
     "dev": "bun --watch src/index.ts | pino-pretty -o '{if module}[{module}] {end}{msg}' -i pid,hostname"
   },
   "dependencies": {
-    "@elysiajs/server-timing": "1.2.1",
-    "@elysiajs/swagger": "1.2.2",
-    "@t3-oss/env-core": "0.12.0",
-    "elysia": "1.2.25",
-    "ioredis": "5.6.0",
-    "pino": "9.6.0"
+    "@elysiajs/server-timing": "^1.3.0",
+    "@elysiajs/swagger": "^1.3.0",
+    "@sentry/bun": "^9.15.0",
+    "@sentry/tracing": "^7.120.3",
+    "@t3-oss/env-core": "^0.13.0",
+    "elysia": "^1.3.0",
+    "ioredis": "^5.6.1",
+    "pino": "^9.6.0"
   },
   "devDependencies": {
     "@biomejs/biome": "^1.9.4",
```
```diff
@@ -3,6 +3,7 @@ import { z } from "zod";

 const redisConnectionRegex =
   /^((?<username>.*?):(?<password>.*?)@)?(?<host>.*?):(?<port>\d+)$/;

 export const env = createEnv({
   server: {
     IS_PROD: z
@@ -34,6 +35,33 @@ export const env = createEnv({
       process.env.NODE_ENV === "development"
         ? z.string().optional()
         : z.string().min(10),
+    SENTRY_DSN: z.string().min(1).optional(),
+    SENTRY_ENVIRONMENT: z
+      .enum(["development", "test", "stage", "pre-prod", "production"])
+      .default("development"),
+    SENTRY_ENABLED: z
+      .string()
+      .refine((s) => s === "true" || s === "false")
+      .transform((s) => s === "true"),
+    SENTRY_TRACE_SAMPLE_RATE: z.coerce.number().default(0.001),
   },
+  createFinalSchema: (shape) => {
+    return z.object(shape).transform((env, ctx) => {
+      if (!env.SENTRY_ENABLED || !env.SENTRY_DSN) {
+        ctx.addIssue({
+          code: z.ZodIssueCode.custom,
+          message:
+            "SENTRY_DSN is required when SENTRY_ENABLED is true",
+        });
+
+        return z.NEVER;
+      }
+
+      return {
+        ...env,
+        SENTRY_ENABLED: env.SENTRY_ENABLED && !!env.SENTRY_DSN,
+      };
+    });
+  },
   runtimeEnv: {
     ...process.env,
```
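The `createFinalSchema` hook is where the cross-field rule lives: per the error message, the intent is that a DSN must be present whenever Sentry is enabled. As a rough, standalone illustration of that rule in plain zod (not the app's actual module; the schema and values below are made up for the example):

```ts
import { z } from "zod";

// Standalone sketch of the cross-field rule expressed by the error message:
// SENTRY_DSN is required when SENTRY_ENABLED is true.
const sentrySchema = z
  .object({
    SENTRY_ENABLED: z.boolean(),
    SENTRY_DSN: z.string().min(1).optional(),
  })
  .superRefine((val, ctx) => {
    if (val.SENTRY_ENABLED && !val.SENTRY_DSN) {
      ctx.addIssue({
        code: z.ZodIssueCode.custom,
        message: "SENTRY_DSN is required when SENTRY_ENABLED is true",
      });
    }
  });

console.log(sentrySchema.safeParse({ SENTRY_ENABLED: true }).success); // false: DSN missing
console.log(sentrySchema.safeParse({ SENTRY_ENABLED: false }).success); // true: DSN only needed when enabled
```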
```diff
@@ -1,3 +1,5 @@
+import "@/server/sentry.server.config";
+import * as Sentry from "@sentry/bun";
 import { Elysia } from "elysia";

 import { swagger } from "@elysiajs/swagger";
@@ -27,6 +29,8 @@ const app = new Elysia()
       return getErrorReturn(error);
     case "INTERNAL_SERVER_ERROR":
       set.status = 500;
       baseLogger.error("Internal server error", error);
+      Sentry.captureException(error);
       return getErrorReturn(error);
   }
 });
```
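The side-effect import at the top of the entry file matters: it runs `Sentry.init` (see `sentry.server.config.ts` further down) before the app is constructed, so the `Sentry.captureException` call in the error hook reports to an already initialised client. As a rough sketch of the pattern, not the app's actual handler (which switches over more error codes and uses its own helpers):

```ts
// Illustrative only: initialise Sentry via a side-effect import first,
// then report unexpected errors from Elysia's error hook.
import "@/server/sentry.server.config";
import * as Sentry from "@sentry/bun";
import { Elysia } from "elysia";

const app = new Elysia()
  .onError(({ code, error, set }) => {
    if (code === "INTERNAL_SERVER_ERROR" || code === "UNKNOWN") {
      set.status = 500;
      Sentry.captureException(error); // reaches Sentry because init already ran
      return { message: "Internal server error" };
    }
  })
  .get("/", () => "ok");

app.listen(3001);
```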
```diff
@@ -1,114 +1,126 @@
-import { Elysia, t, ValidationError } from "elysia";
+import * as Sentry from "@sentry/bun";
+
+import { Elysia, t } from "elysia";
 import { redis } from "@/services/redis";
 import { ModelValidationError } from "@/errors/ModelValidationError";
+import { loggerModule } from "@/utils/logger";
+import { truncate } from "@/utils/truncate";

 const MIN_LENGTH = 1;

 const QUERY_TYPE = t.Object({ key: t.String({ minLength: MIN_LENGTH }) });

+const cacheRouteLogger = loggerModule("cacheRoute");
+
 export const cacheRoutes = new Elysia({ prefix: "/cache" })
-  .get(
-    "/",
-    async ({ query: { key }, error }) => {
-      key = validateKey(key);
-      console.log("GET /cache", key);
+  .get(
+    "/",
+    async ({ query: { key }, status }) => {
+      key = validateKey(key);
+      cacheRouteLogger.info("GET /cache", key);

-      const value = await redis.get(key);
-      if (!value) {
-        return error("Not Found", "Not Found");
-      }
+      const value = await redis.get(key);
+      if (!value) {
+        return status("Not Found", "Not Found");
+      }

-      try {
-        const output = JSON.parse(value);
-        return { data: output };
-      } catch (e) {
-        redis.del(key);
-        throw e;
-      }
-    },
-    {
-      query: QUERY_TYPE,
-      response: { 200: t.Object({ data: t.Any() }), 404: t.String() },
-    }
-  )
-  .put(
-    "/",
-    async ({ query: { key }, body, error, set }) => {
-      key = validateKey(key);
-      console.log("PUT /cache", key);
+      try {
+        const output = JSON.parse(value);
+        return { data: output };
+      } catch (e) {
+        redis.del(key);
+        cacheRouteLogger.error(`Invalid JSON in cache for '${key}'`, e);

-      if (!body.ttl || body.ttl < 0) {
-        return error("Bad Request", "ttl is required");
-      }
+        Sentry.captureException(e, {
+          tags: { cacheKey: key },
+          extra: { cacheKey: key, value: `${truncate(value, 100)}` },
+          fingerprint: ["get-route_invalid-json-in-cache"],
+        });

-      await redis.set(key, JSON.stringify(body.data), "EX", body.ttl);
+        return status("Not Found", "Not Found");
+      }
+    },
+    {
+      query: QUERY_TYPE,
+      response: { 200: t.Object({ data: t.Any() }), 404: t.String() },
+    }
+  )
+  .put(
+    "/",
+    async ({ query: { key }, body, status, set }) => {
+      key = validateKey(key);
+      cacheRouteLogger.info("PUT /cache", key);

-      set.status = 204;
-      return;
-    },
-    {
-      body: t.Object({ data: t.Any(), ttl: t.Number() }),
-      query: QUERY_TYPE,
-      response: { 204: t.Void(), 400: t.String() },
-    }
-  )
-  .delete(
-    "/",
-    async ({ query: { key, fuzzy }, set }) => {
-      key = validateKey(key);
-      console.log("DELETE /cache", key, { fuzzy });
+      if (!body.ttl || body.ttl < 0) {
+        return status("Bad Request", "ttl is required");
+      }

-      if (fuzzy) {
-        await deleteWithPattern(`*${key}*`);
-      } else {
-        await redis.del(key);
-        console.log("Deleted key: ", key);
-      }
+      await redis.set(key, JSON.stringify(body.data), "EX", body.ttl);

-      set.status = 204;
-      return;
-    },
-    {
-      query: t.Object({
-        ...QUERY_TYPE.properties,
-        ...t.Object({ fuzzy: t.Optional(t.Boolean()) }).properties,
-      }),
-      response: { 204: t.Void(), 400: t.String() },
-    }
-  );
+      return status(204, void 0);
+    },
+    {
+      body: t.Object({ data: t.Any(), ttl: t.Number() }),
+      query: QUERY_TYPE,
+      response: { 204: t.Void(), 400: t.String() },
+    }
+  )
+  .delete(
+    "/",
+    async ({ query: { key, fuzzy }, status }) => {
+      key = validateKey(key);
+      cacheRouteLogger.info("DELETE /cache", key, { fuzzy });

+      if (fuzzy) {
+        await deleteWithPattern(`*${key}*`);
+      } else {
+        await redis.del(key);
+        cacheRouteLogger.info("Deleted key: ", key);
+      }

+      return status(204, void 0);
+    },
+    {
+      query: t.Object({
+        ...QUERY_TYPE.properties,
+        ...t.Object({ fuzzy: t.Optional(t.Boolean()) }).properties,
+      }),
+      response: { 204: t.Void(), 400: t.String() },
+    }
+  );

 function validateKey(key: string) {
-  const parsedKey = decodeURIComponent(key);
+  const parsedKey = decodeURIComponent(key);

-  if (parsedKey.length < MIN_LENGTH) {
-    throw new ModelValidationError("Key has to be atleast 1 character long");
-  }
+  if (parsedKey.length < MIN_LENGTH) {
+    throw new ModelValidationError(
+      "Key has to be atleast 1 character long"
+    );
+  }

-  if (parsedKey.includes("*")) {
-    throw new ModelValidationError("Key cannot contain wildcards");
-  }
+  if (parsedKey.includes("*")) {
+    throw new ModelValidationError("Key cannot contain wildcards");
+  }

-  return parsedKey;
+  return parsedKey;
 }

 async function deleteWithPattern(pattern: string) {
-  let cursor = "0";
-  let keys: string[] = [];
+  let cursor = "0";
+  const keys: string[] = [];

-  do {
-    const [newCursor, foundKeys] = await redis.scan(
-      cursor,
-      "MATCH",
-      pattern,
-      "COUNT",
-      5000
-    );
-    cursor = newCursor;
-    keys.push(...foundKeys);
-  } while (cursor !== "0");
+  do {
+    const [newCursor, foundKeys] = await redis.scan(
+      cursor,
+      "MATCH",
+      pattern,
+      "COUNT",
+      5000
+    );
+    cursor = newCursor;
+    keys.push(...foundKeys);
+  } while (cursor !== "0");

-  if (keys.length > 0) {
-    await redis.del(...keys);
-  }
-  console.log("Deleted number of keys: ", keys.length);
+  if (keys.length > 0) {
+    await redis.del(...keys);
+  }
+  cacheRouteLogger.info("Deleted number of keys: ", keys.length);
 }
```
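For reference, a minimal sketch of how a client might exercise these routes. The base URL, key, and payload are made-up values for illustration (port 3001 matches the docker-compose file), and any API-key auth the service enforces via PRIMARY/SECONDARY_API_KEY is omitted here:

```ts
// Hypothetical client calls against the cache API described by this diff.
const BASE_URL = "http://localhost:3001/cache";
const key = encodeURIComponent("user:42:profile");

// PUT: store a JSON-serialisable payload with a TTL in seconds; expects 204.
await fetch(`${BASE_URL}?key=${key}`, {
  method: "PUT",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ data: { name: "Ada" }, ttl: 60 }),
});

// GET: read it back; 404 if the key is missing or the stored value was not valid JSON.
const res = await fetch(`${BASE_URL}?key=${key}`);
if (res.ok) {
  const { data } = await res.json();
  console.log(data);
}

// DELETE: remove by exact key, or pattern-match related keys with ?fuzzy=true.
await fetch(`${BASE_URL}?key=${key}&fuzzy=true`, { method: "DELETE" });
```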
```diff
@@ -5,7 +5,7 @@ import { baseLogger } from "@/utils/logger";
 export const healthRoutes = new Elysia().get(
   "/health",
-  async ({ set, error }) => {
+  async ({ set, status }) => {
     const perf = performance.now();
     try {
       await redis.ping();
@@ -13,7 +13,7 @@ export const healthRoutes = new Elysia().get(
       baseLogger.error("Redis connection error:", e);
       console.log("Redis connection error:", e);

-      return error(503, { healthy: false });
+      return status(503, { healthy: false });
     }

     const duration = performance.now() - perf;
```
apps/redis-api/src/server/sentry.server.config.ts (new file, 11 lines)

```ts
import "@sentry/tracing";
import { env } from "@/env";
import * as Sentry from "@sentry/bun";

Sentry.init({
  dsn: env.SENTRY_DSN,
  enabled: env.SENTRY_ENABLED,
  environment: env.SENTRY_ENVIRONMENT,
  tracesSampleRate: env.SENTRY_TRACE_SAMPLE_RATE,
  release: env.VERSION,
});
```
apps/redis-api/src/utils/truncate.ts (new file, 3 lines)

```ts
export function truncate(value: string, maxLength = 10): string {
  return value.length > maxLength ? `${value.slice(0, maxLength)}...` : value;
}
```
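The helper keeps the `extra.value` payload attached to Sentry events small; a quick illustration with made-up inputs:

```ts
import { truncate } from "@/utils/truncate";

truncate("short"); // "short" (under the default cap of 10)
truncate("this string is longer than ten characters"); // "this strin..."
truncate(invalidCachedValue, 100); // the cache route caps reported values at 100 characters
```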