Merged in feature/redis (pull request #1478)

Distributed cache

* cache deleteKey now uses an options object instead of a lone `fuzzy` argument

* merge

* remove debug logs and cleanup

* cleanup

* add fault handling

* add fault handling

* add pid when logging redis client creation

* add identifier when logging redis client creation

* cleanup

* feat: add redis-api as its own app

* feature: use http wrapper for redis

* feat: add the possibility to fallback to unstable_cache

* Add error handling if redis cache is unresponsive

* add logging for unstable_cache

* merge

* don't cache errors

* fix: metadataBase on branch deploys

* Handle when /en/destinations throws
add ErrorBoundary

* Add sentry-logging when ErrorBoundary catches exception

* Fix error handling for distributed cache

* cleanup code

* Added Application Insights back

* Update generateApiKeys script and remove duplicate

* Merge branch 'feature/redis' of bitbucket.org:scandic-swap/web into feature/redis

* merge


Approved-by: Linus Flood
Authored by Joakim Jäderberg on 2025-03-14 07:54:21 +00:00, committed by Linus Flood
parent a8304e543e
commit fa63b20ed0
141 changed files with 4404 additions and 1941 deletions


@@ -0,0 +1 @@
.env.local

apps/redis-api/.gitignore

@@ -0,0 +1,175 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

apps/redis-api/Dockerfile

@@ -0,0 +1,21 @@
# Use the official Bun image
FROM oven/bun:latest
ENV CI=true
# Set the working directory
WORKDIR /app
COPY package.json ./
# Install dependencies
RUN bun install --production
# Copy the rest of the application code
COPY . .
ENV NODE_ENV=production
# Expose the port the app runs on
EXPOSE 3000
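# NOTE: the service defaults to port 3001 (see src/env.ts) and the container app targets 3001;
# EXPOSE is informational only.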
# Start the Bun server
CMD ["bun", "./src/index.ts"]

apps/redis-api/README.md

@@ -0,0 +1,39 @@
# Redis API
A thin wrapper around Redis so that we can communicate with it over HTTP instead of raw TCP.
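## Example usage
A minimal usage sketch (the base URL and API key are placeholders; the `/api/cache` routes and the `x-api-key` header come from `src/routes` and `src/middleware` in this app):
```ts
const BASE_URL = "http://localhost:3001"; // default PORT, see src/env.ts
const headers = { "x-api-key": "<api-key>", "content-type": "application/json" };
const key = encodeURIComponent("hotel:123"); // example key

// Store a value for 60 seconds (responds with 204 No Content).
await fetch(`${BASE_URL}/api/cache?key=${key}`, {
  method: "PUT",
  headers,
  body: JSON.stringify({ data: { name: "example" }, ttl: 60 }),
});

// Read it back; a 404 means the key is missing or has expired.
const res = await fetch(`${BASE_URL}/api/cache?key=${key}`, { headers });
if (res.ok) {
  const { data } = await res.json();
  console.log(data);
}

// Delete it (append `&fuzzy=true` to target keys containing the value).
await fetch(`${BASE_URL}/api/cache?key=${key}`, { method: "DELETE", headers });
```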
## Deployment
Make sure you have access to Azure and have PIMed yourself into:
- `Web-App-Frontend prod`, where the ACR is located
- `Web Components Prod` or `Web Components Test`, depending on where you want to deploy
Log in with `az login` and select `Web-App-Frontend prod`.
### Build container image
From `/apps/redis-api`, run:
```bash
az acr build . --image redis-api:latest -r acrscandicfrontend
```
### Deploy container image
| Subscription | Environment | SubscriptionId |
| ------------------- | ----------- | ------------------------------------ |
| Web Components Prod | prod | 799cbffe-5209-41fd-adf9-4ffa3d1feead |
| Web Components Test | test | 3b657fc5-85b0-4a43-aba2-e77618ef98c4 |
```bash
# Replace with appropriate values. The API keys are used for communicating with the API.
az deployment sub create \
  --location westeurope \
  --template-file ci/bicep/main.bicep \
  --subscription {{SUBSCRIPTION_ID}} \
  --parameters environment={{ENVIRONMENT}} \
    containerImageTag=latest \
    primaryApiKey={{PRIMARY API KEY}} \
    secondaryApiKey={{SECONDARY API KEY}}
```
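The `{{PRIMARY API KEY}}` and `{{SECONDARY API KEY}}` values are plain random strings; this PR ships a small `generateApiKey` helper (the generateApiKeys script) that produces them with `node:crypto`. A minimal equivalent sketch:
```ts
import crypto from "node:crypto";

// Same approach as the bundled helper: 32 random bytes, base64-encoded.
const primaryApiKey = crypto.randomBytes(32).toString("base64");
const secondaryApiKey = crypto.randomBytes(32).toString("base64");
console.log({ primaryApiKey, secondaryApiKey });
```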


@@ -0,0 +1,38 @@
{
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
"vcs": {
"enabled": false,
"clientKind": "git",
"useIgnoreFile": true,
},
"files": {
"ignoreUnknown": false,
"ignore": ["node_modules"],
},
"formatter": {
"enabled": true,
"indentStyle": "tab",
},
"organizeImports": {
"enabled": true,
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"performance": {
"noBarrelFile": "error",
},
"style": {
"useImportType": "error",
"useExportType": "error",
},
},
},
"javascript": {
"formatter": {
"quoteStyle": "double",
"trailingCommas": "all",
},
},
}


@@ -0,0 +1,103 @@
# Docker
# Build a Docker image
# https://docs.microsoft.com/azure/devops/pipelines/languages/docker
name: 1.0.0-$(SourceBranchName)-$(Rev:r)
trigger:
- main
parameters:
- name: forcePush
displayName: Force push
type: boolean
default: false
resources:
- repo: self
variables:
tag: "$(Build.BuildNumber)"
imageName: "redis-api"
isMaster: $[eq(variables['Build.SourceBranchName'], 'master')]
shouldPush: $[or(eq(${{parameters.forcePush}}, True), eq(variables['isMaster'], True))]
tags: |
stages:
- stage: Build
displayName: Set version
jobs:
- job: CreateArtifact
displayName: Create version artifact
steps:
- task: Bash@3
displayName: Write buildnumber
inputs:
targetType: "inline"
script: |
echo '$(Build.BuildNumber)' > $(Pipeline.Workspace)/.version
- task: PublishPipelineArtifact@1
inputs:
targetPath: "$(Pipeline.Workspace)/.version"
artifact: "Version"
publishLocation: "pipeline"
- task: Bash@3
displayName: Add tag main-latest if main branch
inputs:
targetType: "inline"
script: |
localTags="$(tag)"
localTags+="\nlatest"
if [ "$(isMaster)" == "True" ]; then
localTags+="\nlatest-main"
echo -e "##vso[task.setvariable variable=tags;]$localTags"
fi
echo -e $localTags
- job: Build
displayName: Build
pool:
vmImage: ubuntu-latest
steps:
- task: Bash@3
inputs:
targetType: "inline"
script: |
echo "VERSION=$(tag)" >> .env.production
echo "ShouldPush=$(shouldPush)"
echo "ForcePush=${{ parameters.forcePush }}"
echo "isMaster=$(isMaster)"
- task: AzureCLI@2
displayName: Login to ACR
inputs:
azureSubscription: "mi-devops"
scriptType: "bash"
scriptLocation: "inlineScript"
workingDirectory: "$(build.sourcesDirectory)"
inlineScript: az acr login --name acrscandicfrontend
- task: AzureCLI@2
displayName: Build and push to ACR
inputs:
azureSubscription: "mi-devops"
scriptType: "bash"
scriptLocation: "inlineScript"
workingDirectory: "$(build.sourcesDirectory)"
inlineScript: |
if [ "$(shouldPush)" != "True" ]; then
echo "Not pushing to ACR"
noPush="--no-push"
else
echo "Pushing to ACR"
noPush=""
fi
echo "isMaster: $(isMaster)"
if [ "$(isMaster)" == "True" ]; then
echo "Building with latest tag"
az acr build . --image $(imageName):latest -r acrscandicfrontend $noPush
fi
echo "Building with $(tag) tag"
az acr build . --image $(imageName):$(tag) -r acrscandicfrontend $noPush


@@ -0,0 +1,44 @@
trigger: none
pr: none
resources:
pipelines:
- pipeline: buildPipeline
source: "Build App BFF"
trigger:
branches:
include:
- main
pool:
vmImage: ubuntu-latest
parameters:
- name: containerTag
displayName: Select tag to deploy
type: string
default: "latest"
variables:
- name: containerTag
value: ${{ parameters.containerTag }}
stages:
- stage: Deploy_test
variables:
- group: "BFF test"
jobs:
- template: ./azure-pipelines.deploywebapptemplate.yml
parameters:
environment: test
subscriptionId: 1a126a59-4703-4e36-ad7b-2503d36526c0
containerTag: $(containerTag)
# - stage: Deploy_prod
# variables:
# - group: 'BFF prod'
# jobs:
# - template: ./azure-pipelines.deploywebapptemplate.yml
# parameters:
# environment: prod
# subscriptionId: 1e6ef69e-8719-4924-a311-e66fe00399c7
# containerTag: $(containerTag)


@@ -0,0 +1,75 @@
import { Environment, EnvironmentVar } from '../types.bicep'
param environment Environment
param location string
param containerAppName string
param containerImage string
param containerPort int
param minReplicas int = 1
param maxReplicas int = 3
param envVars EnvironmentVar[] = []
param userAssignedIdentityId string
resource acr 'Microsoft.ContainerRegistry/registries@2023-07-01' existing = {
name: 'acrscandicfrontend'
scope: resourceGroup('1e6ef69e-8719-4924-a311-e66fe00399c7', 'rg-shared')
}
resource containerApp 'Microsoft.App/containerApps@2024-10-02-preview' = {
name: containerAppName
location: location
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedIdentityId}': {}
}
}
properties: {
environmentId: resourceId('Microsoft.App/managedEnvironments', 'cae-redis-api-${environment}')
configuration: {
activeRevisionsMode: 'Single'
registries: [
{
identity: userAssignedIdentityId
server: acr.properties.loginServer
}
]
ingress: {
external: true
targetPort: containerPort
}
}
template: {
containers: [
{
name: containerAppName
image: containerImage
imageType: 'ContainerImage'
env: [
for envVar in envVars: {
name: envVar.name
value: envVar.value
}
]
probes: [
{
type: 'Liveness'
httpGet: {
port: containerPort
path: '/health'
}
}
]
resources: {
cpu: json('0.25')
memory: '0.5Gi'
}
}
]
scale: {
minReplicas: minReplicas
maxReplicas: maxReplicas
}
}
}
}


@@ -0,0 +1,42 @@
import { Environment, EnvironmentVar } from '../types.bicep'
targetScope = 'subscription'
param environment Environment
param containerImageTag string
param redisConnection string
param primaryApiKey string
param secondaryApiKey string
param timestamp string = utcNow()
@description('The location for the resource group')
param location string = 'westeurope'
resource rgRedisApi 'Microsoft.Resources/resourceGroups@2021-04-01' existing = {
name: 'rg-redis-api-${environment}'
}
resource mi 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-31' existing = {
name: 'mi-redis-api-${environment}'
scope: rgRedisApi
}
module containerApp 'containerApp.bicep' = {
name: 'containerApp'
scope: rgRedisApi
params: {
location: location
environment: environment
userAssignedIdentityId: mi.id
containerAppName: 'ca-redis-api-${environment}'
containerImage: 'acrscandicfrontend.azurecr.io/redis-api:${containerImageTag}'
containerPort: 3001
envVars: [
{ name: 'REDIS_CONNECTION', value: redisConnection }
{ name: 'PRIMARY_API_KEY', value: primaryApiKey }
{ name: 'SECONDARY_API_KEY', value: secondaryApiKey }
{ name: 'timestamp', value: timestamp }
]
}
}


@@ -0,0 +1,49 @@
import { Environment } from '../types.bicep'
param environment Environment
@description('The location for the resource group')
param location string = 'westeurope'
var testSKU = {
name: 'Basic'
family: 'C'
capacity: 0
}
var prodSKU = {
name: 'Standard'
family: 'C'
capacity: 1
}
var sku = environment == 'prod' ? prodSKU : testSKU
resource redisResource 'Microsoft.Cache/Redis@2024-11-01' = {
name: 'redis-scandic-frontend-${environment}'
location: location
properties: {
redisVersion: '6.0'
sku: {
name: sku.name
family: sku.family
capacity: sku.capacity
}
enableNonSslPort: false
minimumTlsVersion: '1.2'
publicNetworkAccess: 'Enabled'
redisConfiguration: {
'aad-enabled': 'false'
'maxmemory-reserved': '30'
'maxfragmentationmemory-reserved': '30'
'maxmemory-delta': '30'
}
updateChannel: 'Stable'
disableAccessKeyAuthentication: false
}
}
output hostname string = redisResource.properties.hostName
output connectionString string = '${redisResource.properties.hostName}:6380,password=${redisResource.properties.accessKeys.primaryKey},ssl=True,abortConnect=False'
output primaryAccessKey string = redisResource.properties.accessKeys.primaryKey


@@ -0,0 +1,19 @@
param principalId string
module acrPull '../roles/acr-pull.bicep' = {
name: 'acrPull'
}
resource registry 'Microsoft.ContainerRegistry/registries@2023-07-01' existing = {
name: 'acrscandicfrontend'
}
resource rbac 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
name: guid(registry.name, 'ServicePrincipal', principalId, acrPull.name)
scope: registry
properties: {
principalType: 'ServicePrincipal'
principalId: principalId
roleDefinitionId: acrPull.outputs.id
}
}


@@ -0,0 +1,26 @@
import { Environment } from '../types.bicep'
param location string = 'westeurope'
param environment Environment
param userAssignedIdentityId string
resource containerEnv 'Microsoft.App/managedEnvironments@2024-02-02-preview' = {
name: 'cae-redis-api-${environment}'
location: location
identity: {
type: 'UserAssigned'
userAssignedIdentities: {
'${userAssignedIdentityId}': {}
}
}
properties: {
publicNetworkAccess: 'Enabled'
workloadProfiles: [
{
name: 'Consumption'
workloadProfileType: 'Consumption'
}
]
zoneRedundant: false
}
}


@@ -0,0 +1,40 @@
import { Environment } from '../types.bicep'
targetScope = 'subscription'
param environment Environment
var location = deployment().location
var productionSubscriptionId = '799cbffe-5209-41fd-adf9-4ffa3d1feead'
resource rgBff 'Microsoft.Resources/resourceGroups@2021-04-01' = {
name: 'rg-redis-api-${environment}'
location: location
}
module mi '../managedIdentity.bicep' = {
name: 'mi-redis-api-${environment}'
scope: rgBff
params: {
principalName: 'mi-redis-api-${environment}'
location: location
}
}
module allowAcrPull 'allow-acr-pull.bicep' = {
name: 'allowAcrPull'
scope: resourceGroup('1e6ef69e-8719-4924-a311-e66fe00399c7', 'rg-shared')
params: {
principalId: mi.outputs.principalId
}
}
module containerEnv 'containerEnvironment.bicep' = {
name: 'containerEnv'
scope: rgBff
params: {
location: location
environment: environment
userAssignedIdentityId: mi.outputs.id
}
}


@@ -0,0 +1,47 @@
import { Environment, EnvironmentVar } from 'types.bicep'
targetScope = 'subscription'
param environment Environment
param containerImageTag string = 'latest'
param primaryApiKey string
param secondaryApiKey string
@description('The location for the resource group')
param location string = 'westeurope'
resource rgRedisApi 'Microsoft.Resources/resourceGroups@2021-04-01' = {
name: 'rg-redis-api-${environment}'
location: location
}
module mi 'managedIdentity.bicep' = {
name: 'mi-redis-api-${environment}'
scope: rgRedisApi
params: {
principalName: 'mi-redis-api-${environment}'
location: location
}
}
module redis 'cache/redis.bicep' = {
name: 'redisCache'
scope: rgRedisApi
params: {
location: location
environment: environment
}
}
module containerApp 'app/main.bicep' = {
name: 'containerApp'
params: {
location: location
environment: environment
containerImageTag: containerImageTag
redisConnection: 'default:${redis.outputs.primaryAccessKey}@${redis.outputs.hostname}:6380'
primaryApiKey: primaryApiKey
secondaryApiKey: secondaryApiKey
}
}


@@ -0,0 +1,10 @@
param location string = 'westeurope'
param principalName string
resource mi 'Microsoft.ManagedIdentity/userAssignedIdentities@2023-01-31' = {
name: principalName
location: location
}
output principalId string = mi.properties.principalId
output id string = mi.id


@@ -0,0 +1,5 @@
@description('Pull artifacts from a container registry. Ref: https://learn.microsoft.com/en-us/azure/role-based-access-control/built-in-roles/containers#acrpull')
resource rd 'Microsoft.Authorization/roleDefinitions@2022-05-01-preview' existing = {
name: '7f951dda-4ed3-4680-a7ca-43fe172d538d'
}
output id string = rd.id


@@ -0,0 +1,9 @@
@export()
@description('Type with allowed environments.')
type Environment = 'test' | 'prod'
@export()
type EnvironmentVar = {
name: string
value: string
}


@@ -0,0 +1,23 @@
{
"name": "redis-api",
"module": "index.ts",
"type": "module",
"private": true,
"scripts": {
"dev": "bun --watch src/index.ts | pino-pretty -o '{if module}[{module}] {end}{msg}' -i pid,hostname"
},
"dependencies": {
"@elysiajs/server-timing": "1.2.1",
"@elysiajs/swagger": "1.2.2",
"@t3-oss/env-core": "0.12.0",
"elysia": "1.2.25",
"ioredis": "5.6.0",
"pino": "9.6.0"
},
"devDependencies": {
"@biomejs/biome": "^1.9.4",
"@types/bun": "latest",
"pino-pretty": "^13.0.0",
"typescript": "^5.7.2"
}
}


@@ -0,0 +1,13 @@
import crypto from "node:crypto";
function generateApiKey(length = 32): string {
return crypto.randomBytes(length).toString("base64");
}
// If this file is run directly, generate and log an API key.
if (require.main === module) {
console.log("Primary API Key:", generateApiKey());
console.log("Secondary API Key:", generateApiKey());
}
export { generateApiKey };

apps/redis-api/src/env.ts

@@ -0,0 +1,56 @@
import { createEnv } from "@t3-oss/env-core";
import { z } from "zod";
const redisConnectionRegex =
/^((?<username>.*?):(?<password>.*?)@)?(?<host>.*?):(?<port>\d+)$/;
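// Example values this regex accepts (the "username:password@" prefix is optional):
//   "default:<password>@my-cache.redis.cache.windows.net:6380"
//   "localhost:6379"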
export const env = createEnv({
server: {
IS_PROD: z
.boolean()
.default(false)
.transform(
() =>
process.env.BUN_ENV === "production" ||
process.env.NODE_ENV === "production"
),
IS_DEV: z
.boolean()
.default(false)
.transform(
() =>
process.env.BUN_ENV === "development" ||
process.env.NODE_ENV === "development"
),
VERSION: z.string().min(1).default("development"),
PORT: z.coerce.number().default(3001),
REDIS_CONNECTION: z.string().regex(redisConnectionRegex),
PRIMARY_API_KEY:
process.env.NODE_ENV === "development"
? z.string().optional()
: z.string().min(10),
SECONDARY_API_KEY:
process.env.NODE_ENV === "development"
? z.string().optional()
: z.string().min(10),
},
runtimeEnv: {
...process.env,
},
});
const redisMatch = env.REDIS_CONNECTION.match(redisConnectionRegex);
if (!redisMatch?.groups) {
throw new Error("Invalid REDIS_CONNECTION format");
}
export const redisConfig = {
host: redisMatch.groups.host,
port: Number(redisMatch.groups.port),
username: redisMatch.groups.username,
password: redisMatch.groups.password,
};
console.log("env", env);
console.log("redisConfig", redisConfig);


@@ -0,0 +1,6 @@
export class AuthenticationError extends Error {
constructor(public message: string) {
super(message);
this.name = "AuthenticationError";
}
}


@@ -0,0 +1,6 @@
export class ModelValidationError extends Error {
constructor(public message: string) {
super(message);
this.name = "ModelValidationError";
}
}


@@ -0,0 +1,59 @@
import { Elysia } from "elysia";
import { swagger } from "@elysiajs/swagger";
import { apiRoutes } from "@/routes/api";
import { healthRoutes } from "@/routes/health";
import { baseLogger } from "@/utils/logger";
import { env } from "@/env";
import serverTiming from "@elysiajs/server-timing";
import { AuthenticationError } from "@/errors/AuthenticationError";
import { ModelValidationError } from "@/errors/ModelValidationError";
const app = new Elysia()
.use(serverTiming())
.error("AUTHENTICATION_ERROR", AuthenticationError)
.error("MODEL_VALIDATION_ERROR", ModelValidationError)
.onError(({ code, error, set }) => {
switch (code) {
case "MODEL_VALIDATION_ERROR":
set.status = 400;
return getErrorReturn(error);
case "AUTHENTICATION_ERROR":
set.status = 401;
return getErrorReturn(error);
case "NOT_FOUND":
set.status = 404;
return getErrorReturn(error);
case "INTERNAL_SERVER_ERROR":
set.status = 500;
return getErrorReturn(error);
}
});
if (env.IS_DEV) {
app.use(
swagger({
documentation: {
info: {
title: "Redis API",
version: "1.0.0",
},
},
})
);
}
app.use(apiRoutes);
app.use(healthRoutes);
app.listen(env.PORT, (server) => {
baseLogger.info(`🦊 REDISAPI@${env.VERSION} running on ${server.url}`);
});
function getErrorReturn(error: Error) {
return {
status: "error",
message: error.toString(),
};
}


@@ -0,0 +1,28 @@
import { AuthenticationError } from "@/errors/AuthenticationError";
import type { Context } from "elysia";
import { env } from "@/env";
const API_KEY_HEADER = "x-api-key";
export const apiKeyMiddleware = ({ headers }: Context) => {
if (!isApiKeyRequired()) {
return;
}
const apiKey = headers[API_KEY_HEADER];
if (!apiKey) {
throw new AuthenticationError("No API KEY provided");
}
if (!validateApiKey(apiKey)) {
throw new AuthenticationError("Invalid API key");
}
};
function isApiKeyRequired(): boolean {
return Boolean(env.PRIMARY_API_KEY) || Boolean(env.SECONDARY_API_KEY);
}
function validateApiKey(apiKey: string): boolean {
return apiKey === env.PRIMARY_API_KEY || apiKey === env.SECONDARY_API_KEY;
}


@@ -0,0 +1,93 @@
import { Elysia, t, ValidationError } from "elysia";
import { redis } from "@/services/redis";
import { ModelValidationError } from "@/errors/ModelValidationError";
const MIN_LENGTH = 1;
const QUERY_TYPE = t.Object({ key: t.String({ minLength: MIN_LENGTH }) });
export const cacheRoutes = new Elysia({ prefix: "/cache" })
.get(
"/",
async ({ query: { key }, error }) => {
key = validateKey(key);
console.log("GET /cache", key);
const value = await redis.get(key);
if (!value) {
return error("Not Found", "Not Found");
}
try {
const output = JSON.parse(value);
return { data: output };
} catch (e) {
redis.del(key);
throw e;
}
},
{
query: QUERY_TYPE,
response: { 200: t.Object({ data: t.Any() }), 404: t.String() },
}
)
.put(
"/",
async ({ query: { key }, body, error, set }) => {
key = validateKey(key);
console.log("PUT /cache", key);
if (!body.ttl || body.ttl < 0) {
return error("Bad Request", "ttl is required");
}
await redis.set(key, JSON.stringify(body.data), "EX", body.ttl);
set.status = 204;
return;
},
{
body: t.Object({ data: t.Any(), ttl: t.Number() }),
query: QUERY_TYPE,
response: { 204: t.Void(), 400: t.String() },
}
)
.delete(
"/",
async ({ query: { key, fuzzy }, set }) => {
key = validateKey(key);
console.log("DELETE /cache", key);
if (fuzzy) {
key = `*${key}*`;
}
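// Note: DEL takes literal key names and does not expand glob patterns, so the
// fuzzy pattern above only removes a key literally named "*<key>*"
// (see the SCAN-based sketch after this file).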
await redis.del(key);
set.status = 204;
return;
},
{
query: t.Object({
...QUERY_TYPE.properties,
...t.Object({ fuzzy: t.Optional(t.Boolean()) }).properties,
}),
response: { 204: t.Void(), 400: t.String() },
}
);
function validateKey(key: string) {
const parsedKey = decodeURIComponent(key);
if (parsedKey.length < MIN_LENGTH) {
throw new ModelValidationError(
"Key has to be atleast 1 character long"
);
}
if (parsedKey.includes("*")) {
throw new ModelValidationError("Key cannot contain wildcards");
}
return parsedKey;
}
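A note on the fuzzy delete above: Redis `DEL` does not expand glob patterns, so a `*key*` pattern has to be resolved to concrete key names first. Below is a minimal sketch of a pattern-aware delete using ioredis' `scanStream` and `UNLINK` — an assumption about how the fuzzy branch could be implemented, not what this PR does:
```ts
import { redis } from "@/services/redis";

// Resolve a glob pattern to concrete keys with SCAN, then remove them in batches.
// UNLINK frees memory asynchronously, which is gentler than DEL on large keyspaces.
export async function deleteByPattern(pattern: string): Promise<number> {
  let deleted = 0;
  const stream = redis.scanStream({ match: pattern, count: 100 });
  for await (const keys of stream) {
    if (keys.length > 0) {
      deleted += await redis.unlink(...keys);
    }
  }
  return deleted;
}
```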


@@ -0,0 +1,7 @@
import { Elysia } from "elysia";
import { cacheRoutes } from "./cache";
import { apiKeyMiddleware } from "@/middleware/apiKeyMiddleware";
export const apiRoutes = new Elysia({ prefix: "/api" })
.guard({ beforeHandle: apiKeyMiddleware })
.use(cacheRoutes);


@@ -0,0 +1,34 @@
import Elysia, { t } from "elysia";
import { redis } from "@/services/redis";
import { baseLogger } from "@/utils/logger";
export const healthRoutes = new Elysia().get(
"/health",
async ({ set, error }) => {
const perf = performance.now();
try {
await redis.ping();
} catch (e) {
baseLogger.error("Redis connection error:", e);
console.log("Redis connection error:", e);
return error(503, { healthy: false });
}
const duration = performance.now() - perf;
baseLogger.info(`Service healthy: ${duration.toFixed(2)} ms`);
return { healthy: true };
},
{
response: {
200: t.Object({
healthy: t.Boolean(),
}),
503: t.Object({
healthy: t.Boolean(),
}),
},
}
);


@@ -0,0 +1,19 @@
import { redisConfig, env } from "@/env";
import ioredis from "ioredis";
const redis = new ioredis({
host: redisConfig.host,
port: redisConfig.port,
username: redisConfig.username,
password: redisConfig.password,
maxRetriesPerRequest: 1, // Avoid excessive retries,
tls: !env.IS_DEV
? {
rejectUnauthorized: true,
}
: undefined,
lazyConnect: true,
connectTimeout: 10_000,
});
export { redis };


@@ -0,0 +1,34 @@
import pino from "pino";
import { mask } from "./mask";
import { env } from "@/env";
const serializers: { [key: string]: pino.SerializerFn } = {
password: (payload) => {
if (payload) {
return env.IS_DEV
? mask(payload)
: mask(payload, {
visibleStart: 0,
visibleEnd: 0,
});
}
return payload;
},
email: (payload) => {
if (payload) {
return env.IS_DEV ? payload : mask(payload);
}
return payload;
},
};
export const baseLogger = pino({
level: process.env.LOG_LEVEL || "info",
timestamp: pino.stdTimeFunctions.isoTime,
serializers,
});
export const loggerModule = (loggerName: string) => {
return baseLogger.child({ module: loggerName });
};


@@ -0,0 +1,42 @@
import { describe, it, expect } from "bun:test";
import { mask } from "./mask";
describe("mask", () => {
it("should return empty string for empty input", () => {
expect(mask("")).toBe("");
});
it("should mask string with default parameters", () => {
expect(mask("1234567890")).toBe("12******90");
});
it("should show custom number of characters at start", () => {
expect(mask("1234567890", { visibleStart: 3 })).toBe("123*****90");
});
it("should show custom number of characters at end", () => {
expect(mask("1234567890", { visibleStart: 2, visibleEnd: 3 })).toBe(
"12*****890",
);
});
it("should mask entire string when visible parts exceed length", () => {
expect(mask("123", { visibleStart: 2, visibleEnd: 2 })).toBe("***");
});
it("should handle undefined end part", () => {
expect(mask("1234567890", { visibleStart: 2, visibleEnd: 0 })).toBe(
"12********",
);
});
it("should handle long strings", () => {
expect(mask("12345678901234567890")).toBe("12**********90");
});
it("should handle emails", () => {
expect(mask("test.testsson@scandichotels.com")).toBe(
"te*********on@sc*********ls.com",
);
});
});


@@ -0,0 +1,42 @@
/**
* Masks a string by replacing characters with the mask character "*".
* Email addresses are masked per part (local and subdomain) while the TLD is kept.
* @param value - The string to mask
* @param options.visibleStart - Number of characters to show at the start (default: 2)
* @param options.visibleEnd - Number of characters to show at the end (default: 2)
* @param options.maxLength - Maximum number of mask characters in the middle (default: 10)
* @returns The masked string
*/
const maskChar = "*";
export function mask(
value: string,
options?: { visibleStart?: number; visibleEnd?: number; maxLength?: number },
): string {
if (!value) return "";
const { visibleStart = 2, visibleEnd = 2, maxLength = 10 } = options ?? {};
if (isEmail(value)) {
return maskEmail(value);
}
const totalVisible = visibleStart + visibleEnd;
if (value.length <= totalVisible) {
return maskChar.repeat(value.length);
}
const start = value.slice(0, visibleStart);
const middle = value.slice(visibleStart, -visibleEnd || undefined);
const end = visibleEnd ? value.slice(-visibleEnd) : "";
const maskedLength = Math.min(middle.length, maxLength);
return start + maskChar.repeat(maskedLength) + end;
}
function maskEmail(email: string): string {
const [local, domain] = email.split("@");
if (!domain || !local) return mask(email);
const [subDomain, tld] = domain.split(/\.(?=[^.]+$)/);
return `${mask(local)}@${mask(subDomain ?? "")}.${tld}`;
}
const isEmail = (value: string) => /^[^\s@]+@[^\s@]+\.[^\s@]+$/.test(value);


@@ -0,0 +1,30 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false,
"paths": {
"@/*": ["./src/*"]
}
}
}