Compare commits
33 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
7910beb5cb
|
|||
|
1be8836df1
|
|||
|
70d6091a6a
|
|||
|
1d9c451dfb
|
|||
|
3197498ab3
|
|||
|
a1a2c2747c
|
|||
|
80e7e7939c
|
|||
|
6caa1850e3
|
|||
|
7aaac65af4
|
|||
|
13e0c81957
|
|||
|
329a29aca7
|
|||
|
abdadb8e64
|
|||
|
abce517d86
|
|||
|
a1e697acb2
|
|||
|
c9b8614f53
|
|||
|
cbf1da31c9
|
|||
|
fd18e56251
|
|||
|
3bb8b202b0
|
|||
|
d1c4744231
|
|||
|
fe90414dd9
|
|||
|
21ceb9fa26
|
|||
|
5081819281
|
|||
|
240bd9cba1
|
|||
|
53fb0389cd
|
|||
|
d230350027
|
|||
|
024e647295
|
|||
|
d3e0206a3c
|
|||
|
b0c6759813
|
|||
|
526738e487
|
|||
|
778f159405
|
|||
|
2da8247978
|
|||
|
bbf6ea6c0f
|
|||
|
3584b3facf
|
@@ -8,4 +8,7 @@ DB_NAME=./test.sqlite
|
|||||||
NODE_ENV=production
|
NODE_ENV=production
|
||||||
POSTALCODE_COUNTRYCODE=DE
|
POSTALCODE_COUNTRYCODE=DE
|
||||||
SEED_TEST_DATA=false
|
SEED_TEST_DATA=false
|
||||||
SELFSERVICE_URL=bla
|
SELFSERVICE_URL=bla
|
||||||
|
STATION_TOKEN_SECRET=<replace-with-random-secret-min-32-chars>
|
||||||
|
NATS_URL=nats://localhost:4222
|
||||||
|
NATS_PREWARM=false
|
||||||
@@ -10,12 +10,9 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
- name: Set up Node.js
|
- uses: oven-sh/setup-bun@v2
|
||||||
uses: actions/setup-node@v4
|
- run: bun install --frozen-lockfile
|
||||||
with:
|
- run: bun licenses:export
|
||||||
node-version: 19
|
|
||||||
- run: npm i -g pnpm@10.7 && pnpm i
|
|
||||||
- run: pnpm licenses:export
|
|
||||||
- name: Login to registry
|
- name: Login to registry
|
||||||
uses: docker/login-action@v3
|
uses: docker/login-action@v3
|
||||||
with:
|
with:
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -126,8 +126,12 @@ dist
|
|||||||
.yarn/build-state.yml
|
.yarn/build-state.yml
|
||||||
.yarn/install-state.gz
|
.yarn/install-state.gz
|
||||||
.pnp.*
|
.pnp.*
|
||||||
|
|
||||||
|
# Old package manager lockfiles (Bun migration - keep bun.lock)
|
||||||
yarn.lock
|
yarn.lock
|
||||||
package-lock.json
|
package-lock.json
|
||||||
|
pnpm-lock.yaml
|
||||||
|
|
||||||
build
|
build
|
||||||
|
|
||||||
*.sqlite
|
*.sqlite
|
||||||
|
|||||||
282
AGENTS.md
Normal file
282
AGENTS.md
Normal file
@@ -0,0 +1,282 @@
|
|||||||
|
# AGENTS.md — LfK Backend
|
||||||
|
|
||||||
|
Guidance for agentic coding agents working in this repository.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
Express + [`routing-controllers`](https://github.com/typestack/routing-controllers) REST API written in TypeScript. Uses TypeORM for database access (SQLite in dev/test, PostgreSQL or MySQL in production). OpenAPI docs are auto-generated from decorators at startup.
|
||||||
|
|
||||||
|
**Runtime & Package Manager**: Bun (replaces Node.js + npm/pnpm).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Build / Run / Test Commands
|
||||||
|
|
||||||
|
### Development
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bun run dev # Start dev server with auto-reload (uses Bun's --watch)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Auto-reload**: The `dev` script uses Bun's built-in `--watch` flag, which automatically restarts the server when TypeScript files in `src/` change. Bun runs TypeScript directly - no build step needed.
|
||||||
|
|
||||||
|
**Performance**: Bun delivers 8-15% better latency under concurrent load compared to Node.js. See `BUN_BENCHMARK_RESULTS.md` for details.
|
||||||
|
|
||||||
|
### Build
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bun run build # rimraf dist && tsc && copy static assets → dist/
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: The build script exists for legacy compatibility and type-checking, but is **not required** for development or production. Bun runs TypeScript source files directly.
|
||||||
|
|
||||||
|
### Production
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bun start # bun src/app.ts (runs TypeScript directly)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Tests
|
||||||
|
|
||||||
|
Tests are **integration tests** that hit a live running server via HTTP. The server must be started before Jest is invoked.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# Full CI test flow (generates .env, starts server, runs jest):
|
||||||
|
bun run test:ci
|
||||||
|
|
||||||
|
# Run Jest directly (server must already be running):
|
||||||
|
bun test
|
||||||
|
|
||||||
|
# Watch mode:
|
||||||
|
bun run test:watch
|
||||||
|
|
||||||
|
# Run a single test file:
|
||||||
|
bunx jest src/tests/runners/runner_add.spec.ts
|
||||||
|
|
||||||
|
# Run tests matching a name pattern:
|
||||||
|
bunx jest --testNamePattern="POST /api/runners"
|
||||||
|
|
||||||
|
# Run all tests in a subdirectory:
|
||||||
|
bunx jest src/tests/runners/
|
||||||
|
```
|
||||||
|
|
||||||
|
# Run all tests in a subdirectory:
|
||||||
|
bunx jest src/tests/runners/
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Important:** `bun test` alone will fail unless the dev server is already running on `http://localhost:<config.internal_port>`. In CI, `start-server-and-test` handles this automatically via `bun run test:ci`.
|
||||||
|
|
||||||
|
### Other Utilities
|
||||||
|
|
||||||
|
```sh
|
||||||
|
bun run seed # Sync DB schema and run seeders
|
||||||
|
bun run openapi:export # Export OpenAPI spec to file
|
||||||
|
bun run docs # Generate TypeDoc documentation
|
||||||
|
bun run licenses:export # Export third-party license report
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## TypeScript Configuration
|
||||||
|
|
||||||
|
- **Target:** ES2020, **Module:** CommonJS
|
||||||
|
- **`strict: false`** — TypeScript strictness is disabled; types are used but not exhaustively enforced
|
||||||
|
- **`experimentalDecorators: true`** and **`emitDecoratorMetadata: true`** — required by `routing-controllers`, `TypeORM`, and `class-validator`
|
||||||
|
- Spec files (`**/*.spec.ts`) are excluded from compilation
|
||||||
|
- Source root: `src/`, output: `dist/`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Code Style Guidelines
|
||||||
|
|
||||||
|
### No Linter / Formatter Configured
|
||||||
|
|
||||||
|
There is no ESLint or Prettier configuration. Follow the patterns already established in the codebase rather than introducing new tooling.
|
||||||
|
|
||||||
|
### Imports
|
||||||
|
|
||||||
|
- Use named imports for decorator packages: `import { Get, JsonController, Param } from 'routing-controllers'`
|
||||||
|
- Use named imports for TypeORM: `import { Column, Entity, getConnectionManager } from 'typeorm'`
|
||||||
|
- Use named imports for class-validator: `import { IsInt, IsOptional, IsString } from 'class-validator'`
|
||||||
|
- Use `import * as X from 'module'` for modules without clean default exports (e.g., `import * as jwt from 'jsonwebtoken'`)
|
||||||
|
- Use default imports for simple modules (e.g., `import cookie from 'cookie'`)
|
||||||
|
- `reflect-metadata` is imported once at the top of `src/app.ts` — do not re-import it
|
||||||
|
- No barrel/index re-export files; import source files directly by path
|
||||||
|
|
||||||
|
### Naming Conventions
|
||||||
|
|
||||||
|
| Construct | Convention | Example |
|
||||||
|
|---|---|---|
|
||||||
|
| Classes | `PascalCase` | `RunnerController`, `CreateRunner` |
|
||||||
|
| Files | `PascalCase.ts` matching class name | `RunnerController.ts` |
|
||||||
|
| Local variables | `camelCase` (some `snake_case` in tests) | `accessToken`, `access_token` |
|
||||||
|
| DB entity fields | `snake_case` preferred | `created_at`, `updated_at` |
|
||||||
|
| Controller methods | REST-conventional | `getAll`, `getOne`, `post`, `put`, `remove` |
|
||||||
|
| Custom errors | `{Entity}{Issue}Error` | `RunnerNotFoundError`, `RunnerIdsNotMatchingError` |
|
||||||
|
| Response DTOs | `Response{Entity}` | `ResponseRunner`, `ResponseAuth` |
|
||||||
|
| Create DTOs | `Create{Entity}` | `CreateRunner` |
|
||||||
|
| Update DTOs | `Update{Entity}` | `UpdateRunner` |
|
||||||
|
| Enums | `PascalCase` | `ResponseObjectType`, `PermissionAction` |
|
||||||
|
|
||||||
|
### Formatting
|
||||||
|
|
||||||
|
- 4-space indentation (observed throughout the codebase)
|
||||||
|
- Single quotes for string literals in most files
|
||||||
|
- No trailing semicolons style inconsistency — follow what's already in the file you're editing
|
||||||
|
|
||||||
|
### Types
|
||||||
|
|
||||||
|
- Add TypeScript types to all function parameters and return values
|
||||||
|
- Use `class-validator` decorators (`@IsString`, `@IsInt`, `@IsOptional`, `@IsUUID`, etc.) on every DTO and response class field — these drive both runtime validation and OpenAPI schema generation
|
||||||
|
- Use abstract classes for shared entity base types (e.g., `abstract class Participant`)
|
||||||
|
- Use interfaces for response contracts (e.g., `interface IResponse`)
|
||||||
|
- Use enums for typed string/number constants
|
||||||
|
- Avoid `any` where possible; when unavoidable, keep it localised
|
||||||
|
- `strict` is off — but still annotate types explicitly rather than relying on inference
|
||||||
|
|
||||||
|
### Controller Pattern
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { Authorized, Body, Delete, Get, JsonController, Param, Post, Put } from 'routing-controllers';
|
||||||
|
import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
|
||||||
|
|
||||||
|
@JsonController('/runners')
|
||||||
|
@Authorized()
|
||||||
|
export class RunnerController {
|
||||||
|
@Get('/')
|
||||||
|
@OpenAPI({ description: 'Returns all runners' })
|
||||||
|
@ResponseSchema(ResponseRunner, { isArray: true })
|
||||||
|
async getAll() { ... }
|
||||||
|
|
||||||
|
@Get('/:id')
|
||||||
|
@ResponseSchema(ResponseRunner)
|
||||||
|
async getOne(@Param('id') id: number) { ... }
|
||||||
|
|
||||||
|
@Post('/')
|
||||||
|
@ResponseSchema(ResponseRunner)
|
||||||
|
async post(@Body({ validate: true }) createRunner: CreateRunner) { ... }
|
||||||
|
|
||||||
|
@Put('/:id')
|
||||||
|
@ResponseSchema(ResponseRunner)
|
||||||
|
async put(@Param('id') id: number, @Body({ validate: true }) updateRunner: UpdateRunner) { ... }
|
||||||
|
|
||||||
|
@Delete('/:id')
|
||||||
|
@ResponseSchema(ResponseRunner)
|
||||||
|
async remove(@Param('id') id: number) { ... }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Error Handling
|
||||||
|
|
||||||
|
- Define custom error classes in `src/errors/` extending `routing-controllers` error types (`NotFoundError`, `NotAcceptableError`, etc.)
|
||||||
|
- Every custom error class must include `@IsString()` decorated `name` and `message` fields for OpenAPI schema generation
|
||||||
|
- Throw custom errors directly in controllers: `throw new RunnerNotFoundError()`
|
||||||
|
- Use try/catch in controllers and re-throw meaningful errors; do not swallow errors silently
|
||||||
|
- The global `ErrorHandler` middleware (registered in `src/middlewares/`) catches all unhandled errors and serialises them as JSON — do not duplicate this logic in controllers
|
||||||
|
- Auth errors are thrown from `src/middlewares/authchecker.ts`, not from individual controllers
|
||||||
|
|
||||||
|
### Entity Pattern (TypeORM)
|
||||||
|
|
||||||
|
- Entities live in `src/models/entities/`
|
||||||
|
- Decorate every entity with `@Entity()` and every column with the appropriate `@Column`, `@PrimaryGeneratedColumn`, etc.
|
||||||
|
- Use `@CreateDateColumn()` / `@UpdateDateColumn()` for timestamp fields
|
||||||
|
- Use table inheritance (`@TableInheritance` + `@ChildEntity`) for polymorphic entities
|
||||||
|
- Access repositories via `getConnectionManager().get().getRepository(EntityClass)` — do not inject repositories as constructor dependencies
|
||||||
|
- Database schema is synchronised automatically on startup (`connection.synchronize()`) — no manual migration files
|
||||||
|
|
||||||
|
### DTO Pattern (Create / Update)
|
||||||
|
|
||||||
|
- Create DTOs in `src/models/actions/create/` and `src/models/actions/update/`
|
||||||
|
- Use `class-validator` decorators for every field
|
||||||
|
- `@IsOptional()` for fields that are not required on update; all fields on create DTOs should be mandatory unless explicitly optional in the API contract
|
||||||
|
- Response DTOs live in `src/models/responses/` and follow the `Response{Entity}` naming pattern
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Test Style Guidelines
|
||||||
|
|
||||||
|
> **IMPORTANT: Do not run existing tests and do not create new tests.** The existing test suite in `src/tests/` is outdated and no longer reflects the current state of the codebase. Ignore all test files when working in this repository. Do not write new tests for any changes or additions.
|
||||||
|
|
||||||
|
All tests are integration tests in `src/tests/` organised by domain entity:
|
||||||
|
|
||||||
|
```
|
||||||
|
src/tests/
|
||||||
|
auth/
|
||||||
|
auth_login.spec.ts
|
||||||
|
auth_refresh.spec.ts
|
||||||
|
runners/
|
||||||
|
runner_add.spec.ts
|
||||||
|
runner_get.spec.ts
|
||||||
|
runner_update.spec.ts
|
||||||
|
runner_delete.spec.ts
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test File Template
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import axios from 'axios';
|
||||||
|
import { config } from '../../config';
|
||||||
|
const base = "http://localhost:" + config.internal_port;
|
||||||
|
|
||||||
|
let access_token: string;
|
||||||
|
let axios_config: object;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
jest.setTimeout(20000);
|
||||||
|
const res = await axios.post(base + '/api/auth/login', { username: "demo", password: "demo" });
|
||||||
|
access_token = res.data["access_token"];
|
||||||
|
axios_config = {
|
||||||
|
headers: { "authorization": "Bearer " + access_token },
|
||||||
|
validateStatus: undefined // prevents axios from throwing on non-2xx responses
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('POST /api/runners working', () => {
|
||||||
|
it('creating a runner with required params should return 200', async () => {
|
||||||
|
const res = await axios.post(base + '/api/runners', { ... }, axios_config);
|
||||||
|
expect(res.status).toEqual(200);
|
||||||
|
expect(res.headers['content-type']).toContain("application/json");
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('POST /api/runners failing', () => {
|
||||||
|
it('creating a runner without required params should return 400', async () => {
|
||||||
|
const res = await axios.post(base + '/api/runners', {}, axios_config);
|
||||||
|
expect(res.status).toEqual(400);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
- Always set `validateStatus: undefined` in `axios_config` to prevent axios throwing on error responses
|
||||||
|
- Group tests by HTTP verb + route in `describe()` blocks; separate "working" and "failing" cases
|
||||||
|
- Use `jest.setTimeout(20000)` in `beforeAll` for slow integration tests
|
||||||
|
- Assert both `res.status` and `res.headers['content-type']` on success paths
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Environment Configuration
|
||||||
|
|
||||||
|
- Copy `.env.example` to `.env` and fill in values before running locally
|
||||||
|
- Database type is set via `DB_TYPE` env var (`sqlite`, `postgres`, or `mysql`)
|
||||||
|
- Server port is set via `INTERNAL_PORT` (accessed as `config.internal_port` in code)
|
||||||
|
- All config values are validated at startup in `src/config.ts`
|
||||||
|
- CI env is generated by `bun run test:ci:generate_env` (`scripts/create_testenv.ts`)
|
||||||
|
|
||||||
|
### NATS Configuration
|
||||||
|
|
||||||
|
The backend uses **NATS JetStream** as a KV cache for scan intake performance optimization.
|
||||||
|
|
||||||
|
- `NATS_URL` — connection URL for NATS server (default: `nats://localhost:4222`)
|
||||||
|
- `NATS_PREWARM` — if `true`, preloads all runner state into the KV cache at startup to eliminate DB reads from the first scan onward (default: `false`)
|
||||||
|
|
||||||
|
**KV buckets** (auto-created by `NatsClient` at startup):
|
||||||
|
- `station_state` — station token cache (1-hour TTL)
|
||||||
|
- `card_state` — card→runner mapping cache (1-hour TTL)
|
||||||
|
- `runner_state` — runner display name, total distance, latest scan timestamp (no TTL, CAS-based updates)
|
||||||
|
|
||||||
|
**Development**: NATS runs in Docker via `docker-compose.yml` (port 4222). The JetStream volume is persisted to `./nats-data/` to survive container restarts.
|
||||||
|
|
||||||
|
**Station intake hot path**: `POST /api/scans/trackscans` from scan stations uses a KV-first flow that eliminates DB reads on cache hits and prevents race conditions via compare-and-swap (CAS) updates. See `SCAN_NATS_PLAN.md` for full architecture details.
|
||||||
73
CHANGELOG.md
73
CHANGELOG.md
@@ -2,9 +2,82 @@
|
|||||||
|
|
||||||
All notable changes to this project will be documented in this file. Dates are displayed in UTC.
|
All notable changes to this project will be documented in this file. Dates are displayed in UTC.
|
||||||
|
|
||||||
|
#### [1.8.3](https://git.odit.services/lfk/backend/compare/1.8.2...1.8.3)
|
||||||
|
|
||||||
|
- fix(RunnerSelfServiceController): Update getStationMe method to use req.stationId instead of header [`70d6091`](https://git.odit.services/lfk/backend/commit/70d6091a6a8a1a3d532c7648d9af51cdf8735c94)
|
||||||
|
- refactor(ScanController, ResponseScanIntake, RunnerKV): Rename totalDistance to distance for consistency [`1be8836`](https://git.odit.services/lfk/backend/commit/1be8836df1a7dec7261356faac8d23bc9558df56)
|
||||||
|
|
||||||
|
#### [1.8.2](https://git.odit.services/lfk/backend/compare/1.8.1...1.8.2)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- chore(PERFORMANCE_IDEAS): Remove outdated performance optimization ideas document [`a1a2c27`](https://git.odit.services/lfk/backend/commit/a1a2c2747cda8ad4c049d0d3b188e993daa67a01)
|
||||||
|
- chore(release): 1.8.2 [`1d9c451`](https://git.odit.services/lfk/backend/commit/1d9c451dfbcac105d7440797c32080e30252fd18)
|
||||||
|
- refactor(Dockerfile): Update build process and entry point for TypeScript application [`3197498`](https://git.odit.services/lfk/backend/commit/3197498ab37221156eef42311e58a4038c3309d1)
|
||||||
|
- fix(deps): Add @types/bun dependency to devDependencies [`6caa185`](https://git.odit.services/lfk/backend/commit/6caa1850e3668bbf72912121d2d1923a5e22d6e8)
|
||||||
|
- fix(CreateUser): Await password hashing in toEntity method [`80e7e79`](https://git.odit.services/lfk/backend/commit/80e7e7939c1ab35da9ece1cd9e6e1002e4e50d3a)
|
||||||
|
|
||||||
|
#### [1.8.1](https://git.odit.services/lfk/backend/compare/1.8.0...1.8.1)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- perf(stats): Cache stats results for 60 seconds [`13e0c81`](https://git.odit.services/lfk/backend/commit/13e0c81957768c1b380914a0b93d3617c60e08a0)
|
||||||
|
- chore(release): 1.8.1 [`7aaac65`](https://git.odit.services/lfk/backend/commit/7aaac65af4e2d04653645adcf859ca69449e2332)
|
||||||
|
|
||||||
|
#### [1.8.0](https://git.odit.services/lfk/backend/compare/1.7.2...1.8.0)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- chore(release): 1.8.0 [`329a29a`](https://git.odit.services/lfk/backend/commit/329a29aca70b8c779c592149dc1cfe197ab62463)
|
||||||
|
- refactor: Switch from official argon2 to Bun's implementation [`a1e697a`](https://git.odit.services/lfk/backend/commit/a1e697acb264a753534c5ff8f5f43357cbc287da)
|
||||||
|
- refactor: Replace uuid and dotenv with bun primitives [`abce517`](https://git.odit.services/lfk/backend/commit/abce517d86daa00d76d691081907cb832494cb91)
|
||||||
|
- refactor(deps): Remove unused glob dependency from package.json and bun.lock [`abdadb8`](https://git.odit.services/lfk/backend/commit/abdadb8e6419c5ec9f8cc0a9e5ebf68671d84a94)
|
||||||
|
|
||||||
|
#### [1.7.2](https://git.odit.services/lfk/backend/compare/1.7.1...1.7.2)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- fix(dev): We did it funky bun dev workarounds are no more [`3bb8b20`](https://git.odit.services/lfk/backend/commit/3bb8b202b00f8b7c52c700373ed09a92714528be)
|
||||||
|
- docs: Added agents file to support ai assisted coding [`cbf1da3`](https://git.odit.services/lfk/backend/commit/cbf1da31c9f02a810d8c85caae60ab9483f826c2)
|
||||||
|
- refactor(dev): Yeet the funky dev script out of this codebase [`fd18e56`](https://git.odit.services/lfk/backend/commit/fd18e562518f5b3437f11ceb68e69e50f042891e)
|
||||||
|
- chore(release): 1.7.2 [`c9b8614`](https://git.odit.services/lfk/backend/commit/c9b8614f53619ec76ccf76875c138c986699c746)
|
||||||
|
|
||||||
|
#### [1.7.1](https://git.odit.services/lfk/backend/compare/1.7.0...1.7.1)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- fix(ci): Switch to bun in ci [`fe90414`](https://git.odit.services/lfk/backend/commit/fe90414dd910baff8107197408575b6af0cc4cbf)
|
||||||
|
- perf(db): Added indexes [`21ceb9f`](https://git.odit.services/lfk/backend/commit/21ceb9fa265df2f2193a6c4fb58080ead9c72bf8)
|
||||||
|
- chore(release): 1.7.1 [`d1c4744`](https://git.odit.services/lfk/backend/commit/d1c47442314508a95bfa66b83740c957b75f152a)
|
||||||
|
|
||||||
|
#### [1.7.0](https://git.odit.services/lfk/backend/compare/1.6.0...1.7.0)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- refactor: Bun by default [`240bd9c`](https://git.odit.services/lfk/backend/commit/240bd9cba10636bfc100ea2732508d805639f105)
|
||||||
|
- chore(release): 1.7.0 [`5081819`](https://git.odit.services/lfk/backend/commit/5081819281eacd6beb8d4876f0a9df71c901e84e)
|
||||||
|
|
||||||
|
#### [1.6.0](https://git.odit.services/lfk/backend/compare/1.5.2...1.6.0)
|
||||||
|
|
||||||
|
> 20 February 2026
|
||||||
|
|
||||||
|
- feat(data): Added nats jetstream dependency [`bbf6ea6`](https://git.odit.services/lfk/backend/commit/bbf6ea6c0fdffa11dacdf4b9afb6160ce54e197d)
|
||||||
|
- chore(deps): Bump typescript and get rid of now legacy imports [`2da8247`](https://git.odit.services/lfk/backend/commit/2da8247978c5142eec194651a7520fa53396d762)
|
||||||
|
- chore(release): 1.6.0 [`53fb038`](https://git.odit.services/lfk/backend/commit/53fb0389cd1da2b71b82102e82fc3d30f0be3820)
|
||||||
|
- feat(nats): Implement caching for card, runner, and station entries with improved key management [`b0c6759`](https://git.odit.services/lfk/backend/commit/b0c67598132deffce697f19c83bd4826420abe76)
|
||||||
|
- feat(auth): Implement caching for scanauth [`526738e`](https://git.odit.services/lfk/backend/commit/526738e48722fffe4493102fad69f65b40fc3b49)
|
||||||
|
- refactor(scan): Implement KV-backed scan station submissions and response model [`d3e0206`](https://git.odit.services/lfk/backend/commit/d3e0206a3ccbff0e69024426bb2bf266cde30eeb)
|
||||||
|
- fix(types): Add custom Express request types for station authentication [`778f159`](https://git.odit.services/lfk/backend/commit/778f15940594d5c2e423ef001eddd2d505ebd5f5)
|
||||||
|
- perf(nats): Implement bulk cache prewarming for runners to optimize startup performance [`024e647`](https://git.odit.services/lfk/backend/commit/024e64729594237773f3819646bdbc806ee985bc)
|
||||||
|
- feat(auth): Switch scanstation auth from argon2 to sha256 to improve performance [`3584b3f`](https://git.odit.services/lfk/backend/commit/3584b3facf7641f18db6eafe7035f17de8c5086c)
|
||||||
|
- perf(nats): Implement bulk cache prewarming for runners to optimize startup performance [`d230350`](https://git.odit.services/lfk/backend/commit/d230350027dea4dcdad9feddd9408a866ed787df)
|
||||||
|
|
||||||
#### [1.5.2](https://git.odit.services/lfk/backend/compare/1.5.1...1.5.2)
|
#### [1.5.2](https://git.odit.services/lfk/backend/compare/1.5.1...1.5.2)
|
||||||
|
|
||||||
|
> 26 May 2025
|
||||||
|
|
||||||
- feat(mailer): Add logging for selfservice forgotten mail requests [`eebcc2e`](https://git.odit.services/lfk/backend/commit/eebcc2e3284230135e3911b4edaecd1a9cfd2100)
|
- feat(mailer): Add logging for selfservice forgotten mail requests [`eebcc2e`](https://git.odit.services/lfk/backend/commit/eebcc2e3284230135e3911b4edaecd1a9cfd2100)
|
||||||
|
- chore(release): 1.5.2 [`e27e819`](https://git.odit.services/lfk/backend/commit/e27e8196097da19e24af22368ca8be5a8d9ef6b9)
|
||||||
- feat(mailer): Log error message when sending selfservice forgotten mail fails [`0f532b1`](https://git.odit.services/lfk/backend/commit/0f532b139c2bc5cd89ca2dbff0867825a9363250)
|
- feat(mailer): Log error message when sending selfservice forgotten mail fails [`0f532b1`](https://git.odit.services/lfk/backend/commit/0f532b139c2bc5cd89ca2dbff0867825a9363250)
|
||||||
|
|
||||||
#### [1.5.1](https://git.odit.services/lfk/backend/compare/1.5.0...1.5.1)
|
#### [1.5.1](https://git.odit.services/lfk/backend/compare/1.5.0...1.5.1)
|
||||||
|
|||||||
36
Dockerfile
36
Dockerfile
@@ -1,27 +1,23 @@
|
|||||||
# Typescript Build
|
# Build stage - install dependencies
|
||||||
FROM registry.odit.services/hub/library/node:23.10.0-alpine3.21 AS build
|
FROM registry.odit.services/hub/oven/bun:1.3.9-alpine AS build
|
||||||
ARG NPM_REGISTRY_URL=https://registry.npmjs.org
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY package.json ./
|
COPY package.json bun.lockb* ./
|
||||||
COPY pnpm-workspace.yaml ./
|
RUN bun install --frozen-lockfile
|
||||||
COPY pnpm-lock.yaml ./
|
|
||||||
RUN npm config set registry $NPM_REGISTRY_URL && npm i -g pnpm@10.7
|
|
||||||
RUN mkdir /pnpm && pnpm config set store-dir /pnpm && pnpm i
|
|
||||||
|
|
||||||
COPY tsconfig.json ormconfig.js ./
|
# Production dependencies only
|
||||||
COPY src ./src
|
RUN rm -rf /app/node_modules \
|
||||||
RUN pnpm run build \
|
&& bun install --production --frozen-lockfile
|
||||||
&& rm -rf /app/node_modules \
|
|
||||||
&& pnpm i --production --prefer-offline
|
|
||||||
|
|
||||||
# final image
|
# Final image - run TypeScript directly
|
||||||
FROM registry.odit.services/hub/library/node:23.10.0-alpine3.21 AS final
|
FROM registry.odit.services/hub/oven/bun:1.3.9-alpine AS final
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --from=build /app/package.json /app/package.json
|
COPY --from=build /app/package.json /app/package.json
|
||||||
COPY --from=build /app/pnpm-lock.yaml /app/pnpm-lock.yaml
|
COPY --from=build /app/bun.lockb* /app/
|
||||||
COPY --from=build /app/pnpm-workspace.yaml /app/pnpm-workspace.yaml
|
|
||||||
COPY --from=build /app/ormconfig.js /app/ormconfig.js
|
|
||||||
COPY --from=build /app/dist /app/dist
|
|
||||||
COPY --from=build /app/node_modules /app/node_modules
|
COPY --from=build /app/node_modules /app/node_modules
|
||||||
ENTRYPOINT ["node", "/app/dist/app.js"]
|
|
||||||
|
COPY ormconfig.js bunfig.toml tsconfig.json ./
|
||||||
|
COPY src ./src
|
||||||
|
|
||||||
|
ENTRYPOINT ["bun", "/app/src/app.ts"]
|
||||||
123
README.md
123
README.md
@@ -2,66 +2,119 @@
|
|||||||
|
|
||||||
Backend Server
|
Backend Server
|
||||||
|
|
||||||
## Quickstart 🐳
|
## Prerequisites
|
||||||
> Use this to run the backend with a postgresql db in docker
|
|
||||||
|
|
||||||
1. Clone the repo or copy the docker-compose
|
This project uses **Bun** as the runtime and package manager. Install Bun first:
|
||||||
2. Run in toe folder that contains the docker-compose file: `docker-compose up -d`
|
|
||||||
|
```bash
|
||||||
|
# macOS/Linux
|
||||||
|
curl -fsSL https://bun.sh/install | bash
|
||||||
|
|
||||||
|
# Windows
|
||||||
|
powershell -c "irm bun.sh/install.ps1 | iex"
|
||||||
|
```
|
||||||
|
|
||||||
|
Or visit [bun.sh](https://bun.sh) for other installation methods.
|
||||||
|
|
||||||
|
## Quickstart 🐳
|
||||||
|
> Use this to run the backend with a PostgreSQL db in Docker
|
||||||
|
|
||||||
|
1. Clone the repo or copy the docker-compose
|
||||||
|
2. Run in the folder that contains the docker-compose file: `docker-compose up -d`
|
||||||
3. Visit http://127.0.0.1:4010/api/docs to check if the server is running
|
3. Visit http://127.0.0.1:4010/api/docs to check if the server is running
|
||||||
4. You can now use the default admin user (`demo:demo`)
|
4. You can now use the default admin user (`demo:demo`)
|
||||||
|
|
||||||
## Dev Setup 🛠
|
## Dev Setup 🛠
|
||||||
> Local dev setup utilizing sqlite3 as the database.
|
> Local dev setup utilizing SQLite3 as the database and NATS for caching.
|
||||||
|
|
||||||
1. Rename the .env.example file to .env (you can adjust app port and other settings, if needed)
|
1. Rename the `.env.example` file to `.env` (you can adjust app port and other settings if needed)
|
||||||
2. Install Dependencies
|
2. Start NATS (required for KV cache):
|
||||||
```bash
|
```bash
|
||||||
pnpm i
|
docker-compose up -d nats
|
||||||
```
|
```
|
||||||
3. Start the server
|
3. Install dependencies:
|
||||||
```bash
|
```bash
|
||||||
pnpm dev
|
bun install
|
||||||
```
|
```
|
||||||
|
4. Start the server:
|
||||||
|
```bash
|
||||||
|
bun run dev
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: Bun cannot run TypeScript source files directly due to circular TypeORM dependencies. The `dev` script automatically builds and runs the compiled output. For hot-reload during development, you may need to rebuild manually after code changes.
|
||||||
|
|
||||||
### Run Tests
|
### Run Tests
|
||||||
```bash
|
```bash
|
||||||
# Run tests once (server has to run)
|
# Run tests once (server has to be running)
|
||||||
pnpm test
|
bun test
|
||||||
|
|
||||||
# Run test in watch mode (reruns on change)
|
# Run test in watch mode (reruns on change)
|
||||||
pnpm test:watch
|
bun run test:watch
|
||||||
|
|
||||||
# Run test in ci mode (automaticly starts the dev server)
|
# Run test in CI mode (automatically starts the dev server)
|
||||||
pnpm test:ci
|
bun run test:ci
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run Benchmarks
|
||||||
|
```bash
|
||||||
|
# Start the server first
|
||||||
|
bun run dev
|
||||||
|
|
||||||
|
# In another terminal:
|
||||||
|
bun run benchmark
|
||||||
```
|
```
|
||||||
|
|
||||||
### Generate Docs
|
### Generate Docs
|
||||||
```bash
|
```bash
|
||||||
pnpm docs
|
bun run docs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Other Commands
|
||||||
|
```bash
|
||||||
|
# Build for production
|
||||||
|
bun run build
|
||||||
|
|
||||||
|
# Start production server
|
||||||
|
bun start
|
||||||
|
|
||||||
|
# Seed database with test data
|
||||||
|
bun run seed
|
||||||
|
|
||||||
|
# Export OpenAPI spec
|
||||||
|
bun run openapi:export
|
||||||
|
|
||||||
|
# Generate license report
|
||||||
|
bun run licenses:export
|
||||||
|
|
||||||
|
# Generate changelog
|
||||||
|
bun run changelog:export
|
||||||
```
|
```
|
||||||
|
|
||||||
## ENV Vars
|
## ENV Vars
|
||||||
> You can provide them via .env file or docker env vars.
|
> You can provide them via .env file or docker env vars.
|
||||||
> You can use the `test:ci:generate_env` package script to generate a example env (uses bs data as test server and ignores the errors).
|
> You can use the `test:ci:generate_env` package script to generate an example env (uses placeholder data for test server and ignores the errors).
|
||||||
|
|
||||||
| Name | Type | Default | Description |
|
| Name | Type | Default | Description |
|
||||||
| ---------------------- | ------------------ | -------------------- | -------------------------------------------------------------------------------------------------------------- |
|
| ------------------------- | ------------------ | -------------------------- | ---------------------------------------------------------------------------------------------------------------- |
|
||||||
| APP_PORT | Number | 4010 | The port the backend server listens on. Is optional. |
|
| APP_PORT | Number | 4010 | The port the backend server listens on. Is optional. |
|
||||||
| DB_TYPE | String | N/A | The type of the db u want to use. It has to be supported by typeorm. Possible: `sqlite`, `mysql`, `postgresql` |
|
| DB_TYPE | String | N/A | The type of the db you want to use. Supported by TypeORM. Possible: `sqlite`, `mysql`, `postgresql` |
|
||||||
| DB_HOST | String | N/A | The db's host's ip-address/fqdn or file path for sqlite |
|
| DB_HOST | String | N/A | The db's host IP address/FQDN or file path for sqlite |
|
||||||
| DB_PORT | String | N/A | The db's port |
|
| DB_PORT | String | N/A | The db's port |
|
||||||
| DB_USER | String | N/A | The user for accessing the db |
|
| DB_USER | String | N/A | The user for accessing the db |
|
||||||
| DB_PASSWORD | String | N/A | The user's password for accessing the db |
|
| DB_PASSWORD | String | N/A | The user's password for accessing the db |
|
||||||
| DB_NAME | String | N/A | The db's name |
|
| DB_NAME | String | N/A | The db's name |
|
||||||
| NODE_ENV | String | dev | The apps env - influences debug info. Also when the env is set to "test", mailing errors get ignored. |
|
| NODE_ENV | String | dev | The app's env - influences debug info. When set to "test", mailing errors get ignored. |
|
||||||
| POSTALCODE_COUNTRYCODE | String/CountryCode | N/A | The countrycode used to validate address's postal codes |
|
| POSTALCODE_COUNTRYCODE | String/CountryCode | N/A | The country code used to validate address postal codes |
|
||||||
| PHONE_COUNTRYCODE | String/CountryCode | null (international) | The countrycode used to validate phone numers |
|
| PHONE_COUNTRYCODE | String/CountryCode | null (international) | The country code used to validate phone numbers |
|
||||||
| SEED_TEST_DATA | Boolean | False | If you want the app to seed some example data set this to true |
|
| SEED_TEST_DATA | Boolean | false | If you want the app to seed example data, set this to true |
|
||||||
| MAILER_URL | String(Url) | N/A | The mailer's base url (no trailing slash) |
|
| STATION_TOKEN_SECRET | String | N/A | Secret key for HMAC-SHA256 station token generation (min 32 chars). **Required.** |
|
||||||
| MAILER_KEY | String | N/A | The mailer's api key. |
|
| NATS_URL | String(URL) | nats://localhost:4222 | NATS server connection URL for KV cache |
|
||||||
| SELFSERVICE_URL | String(Url) | N/A | The link to selfservice (no trailing slash) |
|
| NATS_PREWARM | Boolean | false | Preload all runner state into NATS cache at startup (eliminates DB reads on first scan) |
|
||||||
| IMPRINT_URL | String(Url) | /imprint | The link to a imprint page for the system (Defaults to the frontend's imprint) |
|
| MAILER_URL | String(URL) | N/A | The mailer's base URL (no trailing slash) |
|
||||||
| PRIVACY_URL | String(Url) | /privacy | The link to a privacy page for the system (Defaults to the frontend's privacy page) |
|
| MAILER_KEY | String | N/A | The mailer's API key |
|
||||||
|
| SELFSERVICE_URL | String(URL) | N/A | The link to selfservice (no trailing slash) |
|
||||||
|
| IMPRINT_URL | String(URL) | /imprint | The link to an imprint page for the system (defaults to the frontend's imprint) |
|
||||||
|
| PRIVACY_URL | String(URL) | /privacy | The link to a privacy page for the system (defaults to the frontend's privacy page) |
|
||||||
|
|
||||||
|
|
||||||
## Recommended Editor
|
## Recommended Editor
|
||||||
|
|||||||
13
bunfig.toml
Normal file
13
bunfig.toml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Bun configuration
|
||||||
|
# See: https://bun.sh/docs/runtime/bunfig
|
||||||
|
|
||||||
|
[runtime]
|
||||||
|
# Enable Node.js compatibility mode
|
||||||
|
bun = true
|
||||||
|
|
||||||
|
# TypeScript transpiler settings
|
||||||
|
# Required for TypeORM decorators
|
||||||
|
[transpiler]
|
||||||
|
tsconfig = "tsconfig.json"
|
||||||
|
emitDecoratorMetadata = true
|
||||||
|
experimentalDecorators = true
|
||||||
@@ -1,21 +1,30 @@
|
|||||||
services:
|
services:
|
||||||
backend_server:
|
nats:
|
||||||
build: .
|
image: mirror.gcr.io/library/nats:alpine
|
||||||
|
command: ["--jetstream", "--store_dir", "/data"]
|
||||||
ports:
|
ports:
|
||||||
- 4010:4010
|
- "4222:4222"
|
||||||
environment:
|
- "8222:8222"
|
||||||
APP_PORT: 4010
|
volumes:
|
||||||
DB_TYPE: sqlite
|
- nats_data:/data
|
||||||
DB_HOST: bla
|
|
||||||
DB_PORT: bla
|
# backend_server:
|
||||||
DB_USER: bla
|
# build: .
|
||||||
DB_PASSWORD: bla
|
# ports:
|
||||||
DB_NAME: ./db.sqlite
|
# - 4010:4010
|
||||||
NODE_ENV: production
|
# environment:
|
||||||
POSTALCODE_COUNTRYCODE: DE
|
# APP_PORT: 4010
|
||||||
SEED_TEST_DATA: "true"
|
# DB_TYPE: sqlite
|
||||||
MAILER_URL: https://dev.lauf-fuer-kaya.de/mailer
|
# DB_HOST: bla
|
||||||
MAILER_KEY: asdasd
|
# DB_PORT: bla
|
||||||
|
# DB_USER: bla
|
||||||
|
# DB_PASSWORD: bla
|
||||||
|
# DB_NAME: ./db.sqlite
|
||||||
|
# NODE_ENV: production
|
||||||
|
# POSTALCODE_COUNTRYCODE: DE
|
||||||
|
# SEED_TEST_DATA: "true"
|
||||||
|
# MAILER_URL: https://dev.lauf-fuer-kaya.de/mailer
|
||||||
|
# MAILER_KEY: asdasd
|
||||||
# APP_PORT: 4010
|
# APP_PORT: 4010
|
||||||
# DB_TYPE: postgres
|
# DB_TYPE: postgres
|
||||||
# DB_HOST: backend_db
|
# DB_HOST: backend_db
|
||||||
@@ -32,3 +41,6 @@ services:
|
|||||||
# POSTGRES_USER: lfk
|
# POSTGRES_USER: lfk
|
||||||
# ports:
|
# ports:
|
||||||
# - 5432:5432
|
# - 5432:5432
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
nats_data:
|
||||||
|
|||||||
476
licenses.md
476
licenses.md
@@ -1,32 +1,3 @@
|
|||||||
# @node-rs/argon2
|
|
||||||
**Author**: undefined
|
|
||||||
**Repo**: [object Object]
|
|
||||||
**License**: MIT
|
|
||||||
**Description**: RustCrypto: Argon2 binding for Node.js
|
|
||||||
## License Text
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2020-present LongYinan
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
|
|
||||||
|
|
||||||
# @odit/class-validator-jsonschema
|
# @odit/class-validator-jsonschema
|
||||||
**Author**: Aleksi Pekkala <aleksipekkala@gmail.com>
|
**Author**: Aleksi Pekkala <aleksipekkala@gmail.com>
|
||||||
**Repo**: git@github.com:epiphone/class-validator-jsonschema.git
|
**Repo**: git@github.com:epiphone/class-validator-jsonschema.git
|
||||||
@@ -316,37 +287,6 @@ The above copyright notice and this permission notice shall be included in all c
|
|||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
# dotenv
|
|
||||||
**Author**: undefined
|
|
||||||
**Repo**: [object Object]
|
|
||||||
**License**: BSD-2-Clause
|
|
||||||
**Description**: Loads environment variables from .env file
|
|
||||||
## License Text
|
|
||||||
Copyright (c) 2015, Scott Motte
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
* Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
* Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
|
|
||||||
# express
|
# express
|
||||||
**Author**: TJ Holowaychuk <tj@vision-media.ca>
|
**Author**: TJ Holowaychuk <tj@vision-media.ca>
|
||||||
**Repo**: expressjs/express
|
**Repo**: expressjs/express
|
||||||
@@ -464,6 +404,215 @@ Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors
|
|||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
|
|
||||||
|
|
||||||
|
# nats
|
||||||
|
**Author**: [object Object]
|
||||||
|
**Repo**: [object Object]
|
||||||
|
**License**: Apache-2.0
|
||||||
|
**Description**: Node.js client for NATS, a lightweight, high-performance cloud native messaging system
|
||||||
|
## License Text
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
||||||
|
or as an addendum to the NOTICE text from the Work, provided
|
||||||
|
that such additional attribution notices cannot be construed
|
||||||
|
as modifying the License.
|
||||||
|
|
||||||
|
You may add Your own copyright statement to Your modifications and
|
||||||
|
may provide additional or different license terms and conditions
|
||||||
|
for use, reproduction, or distribution of Your modifications, or
|
||||||
|
for any such Derivative Works as a whole, provided Your use,
|
||||||
|
reproduction, and distribution of the Work otherwise complies with
|
||||||
|
the conditions stated in this License.
|
||||||
|
|
||||||
|
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||||
|
any Contribution intentionally submitted for inclusion in the Work
|
||||||
|
by You to the Licensor shall be under the terms and conditions of
|
||||||
|
this License, without any additional terms or conditions.
|
||||||
|
Notwithstanding the above, nothing herein shall supersede or modify
|
||||||
|
the terms of any separate license agreement you may have executed
|
||||||
|
with Licensor regarding such Contributions.
|
||||||
|
|
||||||
|
6. Trademarks. This License does not grant permission to use the trade
|
||||||
|
names, trademarks, service marks, or product names of the Licensor,
|
||||||
|
except as required for reasonable and customary use in describing the
|
||||||
|
origin of the Work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||||
|
agreed to in writing, Licensor provides the Work (and each
|
||||||
|
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||||
|
implied, including, without limitation, any warranties or conditions
|
||||||
|
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||||
|
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||||
|
appropriateness of using or redistributing the Work and assume any
|
||||||
|
risks associated with Your exercise of permissions under this License.
|
||||||
|
|
||||||
|
8. Limitation of Liability. In no event and under no legal theory,
|
||||||
|
whether in tort (including negligence), contract, or otherwise,
|
||||||
|
unless required by applicable law (such as deliberate and grossly
|
||||||
|
negligent acts) or agreed to in writing, shall any Contributor be
|
||||||
|
liable to You for damages, including any direct, indirect, special,
|
||||||
|
incidental, or consequential damages of any character arising as a
|
||||||
|
result of this License or out of the use or inability to use the
|
||||||
|
Work (including but not limited to damages for loss of goodwill,
|
||||||
|
work stoppage, computer failure or malfunction, or any and all
|
||||||
|
other commercial damages or losses), even if such Contributor
|
||||||
|
has been advised of the possibility of such damages.
|
||||||
|
|
||||||
|
9. Accepting Warranty or Additional Liability. While redistributing
|
||||||
|
the Work or Derivative Works thereof, You may choose to offer,
|
||||||
|
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||||
|
or other liability obligations and/or rights consistent with this
|
||||||
|
License. However, in accepting such obligations, You may act only
|
||||||
|
on Your own behalf and on Your sole responsibility, not on behalf
|
||||||
|
+of any other Contributor, and only if You agree to indemnify,
+defend, and hold each Contributor harmless for any liability
+incurred by, or claims asserted against, such Contributor by reason
+of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+To apply the Apache License to your work, attach the following
+boilerplate notice, with the fields enclosed by brackets "[]"
+replaced with your own identifying information. (Don't include
+the brackets!) The text should be enclosed in the appropriate
+comment syntax for the file format. We also recommend that a
+file or class name and description of purpose be included on the
+same "printed page" as the copyright notice for easier
+identification within third-party archives.
+
+Copyright 2013-2018 The NATS Authors
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+
# pg
**Author**: Brian Carlson <brian.m.carlson@gmail.com>
**Repo**: [object Object]
@@ -669,23 +818,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


-# uuid
-**Author**: undefined
-**Repo**: [object Object]
-**License**: MIT
-**Description**: RFC4122 (v1, v4, and v5) UUIDs
-## License Text
-The MIT License (MIT)
-
-Copyright (c) 2010-2020 Robert Kieffer and other contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-
# validator
**Author**: Chris O'Hara <cohara87@gmail.com>
**Repo**: [object Object]
@@ -810,6 +942,35 @@ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


+# @types/bun
+**Author**: undefined
+**Repo**: [object Object]
+**License**: MIT
+**Description**: TypeScript definitions for bun
+## License Text
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE
+
+
# @types/cors
**Author**: undefined
**Repo**: [object Object]
@@ -872,7 +1033,7 @@ OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**Author**: undefined
**Repo**: [object Object]
**License**: MIT
-**Description**: TypeScript definitions for Express
+**Description**: TypeScript definitions for express
## License Text
MIT License

@@ -901,7 +1062,7 @@ OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**Author**: undefined
**Repo**: [object Object]
**License**: MIT
-**Description**: TypeScript definitions for Jest
+**Description**: TypeScript definitions for jest
## License Text
MIT License

@@ -959,36 +1120,7 @@ OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**Author**: undefined
**Repo**: [object Object]
**License**: MIT
-**Description**: TypeScript definitions for Node.js
+**Description**: TypeScript definitions for node
-## License Text
-MIT License
-
-Copyright (c) Microsoft Corporation.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE
-
-
-# @types/uuid
-**Author**: undefined
-**Repo**: [object Object]
-**License**: MIT
-**Description**: TypeScript definitions for uuid
## License Text
MIT License

@@ -1100,35 +1232,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


-# nodemon
-**Author**: [object Object]
-**Repo**: [object Object]
-**License**: MIT
-**Description**: Simple monitor script for use during development of a node.js app.
-## License Text
-MIT License
-
-Copyright (c) 2010 - present, Remy Sharp, https://remysharp.com <remy@remysharp.com>
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
-
# release-it
**Author**: [object Object]
**Repo**: [object Object]
@@ -1160,25 +1263,65 @@ SOFTWARE.

# rimraf
**Author**: Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)
-**Repo**: git://github.com/isaacs/rimraf.git
+**Repo**: git@github.com:isaacs/rimraf.git
-**License**: ISC
+**License**: BlueOak-1.0.0
**Description**: A deep deletion module for node (like `rm -rf`)
## License Text
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***


# start-server-and-test
@@ -1218,35 +1361,6 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


-# ts-node
-**Author**: [object Object]
-**Repo**: [object Object]
-**License**: MIT
-**Description**: TypeScript execution environment and REPL for node.js, with source map support
-## License Text
-The MIT License (MIT)
-
-Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
-
# typedoc
**Author**: undefined
**Repo**: [object Object]

ormconfig.js (11 changed lines)
@@ -1,7 +1,3 @@
-const dotenv = require('dotenv');
-dotenv.config();
-//
-const SOURCE_PATH = process.env.NODE_ENV === 'production' ? 'dist' : 'src';
module.exports = {
    type: process.env.DB_TYPE,
    host: process.env.DB_HOST,
@@ -9,8 +5,7 @@ module.exports = {
    username: process.env.DB_USER,
    password: process.env.DB_PASSWORD,
    database: process.env.DB_NAME,
-    // entities: ["src/**/entities/*.ts"],
+    // Run directly from TypeScript source (Bun workflow)
-    entities: [ `${SOURCE_PATH}/**/entities/*{.ts,.js}` ],
+    entities: ["src/models/entities/**/*.ts"],
-    seeds: [ `${SOURCE_PATH}/**/seeds/*{.ts,.js}` ]
+    seeds: ["src/seeds/**/*.ts"]
-    // seeds: ['src/seeds/*.ts'],
};

package.json (45 changed lines)
@@ -1,6 +1,6 @@
{
  "name": "@odit/lfk-backend",
-  "version": "1.5.2",
+  "version": "1.8.3",
  "main": "src/app.ts",
  "repository": "https://git.odit.services/lfk/backend",
  "author": {
@@ -22,7 +22,6 @@
  ],
  "license": "CC-BY-NC-SA-4.0",
  "dependencies": {
-    "@node-rs/argon2": "^2.0.2",
    "@odit/class-validator-jsonschema": "2.1.1",
    "axios": "0.21.1",
    "body-parser": "1.19.0",
@@ -34,11 +33,11 @@
    "cookie-parser": "1.4.5",
    "cors": "2.8.5",
    "csvtojson": "2.0.10",
-    "dotenv": "8.2.0",
    "express": "4.17.1",
    "jsonwebtoken": "8.5.1",
    "libphonenumber-js": "1.9.9",
    "mysql": "2.18.1",
+    "nats": "^2.29.3",
    "pg": "8.5.1",
    "reflect-metadata": "0.1.13",
    "routing-controllers": "0.9.0-alpha.6",
@@ -47,42 +46,40 @@
    "typeorm": "0.2.30",
    "typeorm-routing-controllers-extensions": "0.2.0",
    "typeorm-seeding": "1.6.1",
-    "uuid": "8.3.2",
    "validator": "13.5.2"
  },
  "devDependencies": {
    "@faker-js/faker": "7.6.0",
    "@odit/license-exporter": "0.0.9",
-    "@types/cors": "2.8.9",
+    "@types/bun": "^1.3.9",
+    "@types/cors": "2.8.19",
    "@types/csvtojson": "1.1.5",
-    "@types/express": "4.17.11",
+    "@types/express": "5.0.6",
-    "@types/jest": "26.0.20",
+    "@types/jest": "30.0.0",
-    "@types/jsonwebtoken": "8.5.0",
+    "@types/jsonwebtoken": "9.0.10",
-    "@types/node": "14.14.22",
+    "@types/node": "25.3.0",
-    "@types/uuid": "8.3.0",
    "auto-changelog": "2.4.0",
    "cp-cli": "2.0.0",
    "jest": "26.6.3",
-    "nodemon": "2.0.7",
    "release-it": "14.2.2",
-    "rimraf": "3.0.2",
+    "rimraf": "^6.1.3",
    "start-server-and-test": "1.11.7",
    "ts-jest": "26.5.0",
-    "ts-node": "9.1.1",
    "typedoc": "0.20.19",
-    "typescript": "4.1.3"
+    "typescript": "5.9.3"
  },
  "scripts": {
-    "dev": "nodemon src/app.ts",
+    "dev": "bun --watch src/app.ts",
-    "build": "rimraf ./dist && tsc && cp-cli ./src/static ./dist/static",
+    "start": "bun src/app.ts",
    "docs": "typedoc --out docs src",
    "test": "jest",
    "test:watch": "jest --watchAll",
-    "test:ci:generate_env": "ts-node scripts/create_testenv.ts",
+    "test:ci:generate_env": "bun scripts/create_testenv.ts",
    "test:ci:run": "start-server-and-test dev http://localhost:4010/api/docs/openapi.json test",
-    "test:ci": "npm run test:ci:generate_env && npm run test:ci:run",
+    "test:ci": "bun run test:ci:generate_env && bun run test:ci:run",
-    "seed": "ts-node ./node_modules/typeorm/cli.js schema:sync && ts-node ./node_modules/typeorm-seeding/dist/cli.js seed",
+    "benchmark": "bun scripts/benchmark_scan_intake.ts",
+    "seed": "bun ./node_modules/typeorm/cli.js schema:sync && bun ./node_modules/typeorm-seeding/dist/cli.js seed",
-    "openapi:export": "ts-node scripts/openapi_export.ts",
+    "openapi:export": "bun scripts/openapi_export.ts",
    "licenses:export": "license-exporter --markdown",
    "changelog:export": "auto-changelog --commit-limit false -p -u --hide-credit",
    "release": "release-it --only-version"
@@ -102,13 +99,7 @@
    "publish": false
  },
  "hooks": {
-    "after:bump": "npm run changelog:export && npm run licenses:export && git add CHANGELOG.md && git add licenses.md"
+    "after:bump": "bun run changelog:export && bun run licenses:export && git add CHANGELOG.md && git add licenses.md"
  }
-  },
-  "nodemonConfig": {
-    "ignore": [
-      "src/tests/*",
-      "docs/*"
-    ]
  }
}

pnpm-lock.yaml (521 changed lines, generated): file diff suppressed because it is too large.

scripts/benchmark_scan_intake.ts (new file, 367 lines)
@@ -0,0 +1,367 @@
/**
 * Scan Intake Benchmark Script
 *
 * Measures TrackScan creation performance before and after each optimisation phase.
 * Run against a live dev server: bun run dev
 *
 * Usage:
 *   bun run benchmark
 *   bun scripts/benchmark_scan_intake.ts --base http://localhost:4010
 *
 * What it measures:
 *   1. Single sequential scans — baseline latency per request (p50/p95/p99/max)
 *   2. Parallel scans (10 stations) — simulates 10 concurrent stations each submitting
 *      one scan at a time at the expected event rate
 *      (~1 scan/3s per station = ~3.3 scans/s total)
 *
 * The script self-provisions all required data (org, runners, cards, track, stations)
 * and cleans up after itself. It authenticates via the station token, matching the
 * real production auth path exactly.
 *
 * Output is printed to stdout in a copy-paste-friendly table format so results can
 * be compared across phases.
 */

import axios, { AxiosInstance } from 'axios';

// ---------------------------------------------------------------------------
// Config
// ---------------------------------------------------------------------------

const BASE = (() => {
  const idx = process.argv.indexOf('--base');
  return idx !== -1 ? process.argv[idx + 1] : 'http://localhost:4010';
})();

const API = `${BASE}/api`;

// Number of simulated scan stations
const STATION_COUNT = 10;

// Sequential benchmark: total number of scans to send, one at a time
const SEQUENTIAL_SCAN_COUNT = 50;

// Parallel benchmark: number of rounds. Each round fires STATION_COUNT scans concurrently.
// 20 rounds × 10 stations = 200 total scans, matching the expected event throughput pattern.
const PARALLEL_ROUNDS = 20;

// Minimum lap time on the test track (seconds). Set low so most scans are valid.
// The benchmark measures submission speed, not business logic.
const TRACK_MINIMUM_LAP_TIME = 1;

// Track distance (metres)
const TRACK_DISTANCE = 400;

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

interface StationHandle {
  id: number;
  key: string; // cleartext token, used as Bearer token
  cardCode: number; // EAN-13 barcode of the card assigned to this station's runner
  axiosInstance: AxiosInstance;
}

interface Percentiles {
  p50: number;
  p95: number;
  p99: number;
  max: number;
  min: number;
  mean: number;
}

interface BenchmarkResult {
  label: string;
  totalScans: number;
  totalTimeMs: number;
  scansPerSecond: number;
  latencies: Percentiles;
  errors: number;
}

// ---------------------------------------------------------------------------
// HTTP helpers
// ---------------------------------------------------------------------------

const adminClient = axios.create({
  baseURL: API,
  validateStatus: () => true,
});

async function adminLogin(): Promise<string> {
  const res = await adminClient.post('/auth/login', { username: 'demo', password: 'demo' });
  if (res.status !== 200) {
    throw new Error(`Login failed: ${res.status} ${JSON.stringify(res.data)}`);
  }
  return res.data.access_token;
}

function authedClient(token: string): AxiosInstance {
  return axios.create({
    baseURL: API,
    validateStatus: () => true,
    headers: { authorization: `Bearer ${token}` },
  });
}

// ---------------------------------------------------------------------------
// Data provisioning
// ---------------------------------------------------------------------------

async function provision(adminToken: string): Promise<{
  stations: StationHandle[];
  trackId: number;
  orgId: number;
  cleanup: () => Promise<void>;
}> {
  const client = authedClient(adminToken);
  const createdIds: { type: string; id: number }[] = [];

  const create = async (path: string, body: object): Promise<any> => {
    const res = await client.post(path, body);
    if (res.status !== 200) {
      throw new Error(`POST ${path} failed: ${res.status} ${JSON.stringify(res.data)}`);
    }
    return res.data;
  };

  process.stdout.write('Provisioning test data... ');

  // Organisation
  const org = await create('/organizations', { name: 'benchmark-org' });
  createdIds.push({ type: 'organizations', id: org.id });

  // Track with a low minimumLapTime so re-scans within the benchmark are mostly valid
  const track = await create('/tracks', {
    name: 'benchmark-track',
    distance: TRACK_DISTANCE,
    minimumLapTime: TRACK_MINIMUM_LAP_TIME,
  });
  createdIds.push({ type: 'tracks', id: track.id });

  // One runner + card + station per simulated scan station
  const stations: StationHandle[] = [];

  for (let i = 0; i < STATION_COUNT; i++) {
    const runner = await create('/runners', {
      firstname: `Bench`,
      lastname: `Runner${i}`,
      group: org.id,
    });
    createdIds.push({ type: 'runners', id: runner.id });

    const card = await create('/cards', { runner: runner.id });
    createdIds.push({ type: 'cards', id: card.id });

    const station = await create('/stations', {
      track: track.id,
      description: `bench-station-${i}`,
    });
    createdIds.push({ type: 'stations', id: station.id });

    stations.push({
      id: station.id,
      key: station.key,
      cardCode: card.id, // the test spec uses card.id directly as the barcode value
      axiosInstance: axios.create({
        baseURL: API,
        validateStatus: () => true,
        headers: { authorization: `Bearer ${station.key}` },
      }),
    });
  }

  console.log(`done. (${STATION_COUNT} stations, ${STATION_COUNT} runners, ${STATION_COUNT} cards)`);

  const cleanup = async () => {
    process.stdout.write('Cleaning up test data... ');
    // Delete in reverse-dependency order
    for (const item of [...createdIds].reverse()) {
      await client.delete(`/${item.type}/${item.id}?force=true`);
    }
    console.log('done.');
  };

  return { stations, trackId: track.id, orgId: org.id, cleanup };
}

// ---------------------------------------------------------------------------
// Single scan submission (returns latency in ms)
// ---------------------------------------------------------------------------

async function submitScan(station: StationHandle): Promise<{ latencyMs: number; ok: boolean }> {
  const start = performance.now();
  const res = await station.axiosInstance.post('/scans/trackscans', {
    card: station.cardCode,
    station: station.id,
  });
  const latencyMs = performance.now() - start;
  const ok = res.status === 200;
  return { latencyMs, ok };
}

// ---------------------------------------------------------------------------
// Statistics
// ---------------------------------------------------------------------------

function percentiles(latencies: number[]): Percentiles {
  const sorted = [...latencies].sort((a, b) => a - b);
  const at = (pct: number) => sorted[Math.floor((pct / 100) * sorted.length)] ?? sorted[sorted.length - 1];
  const mean = sorted.reduce((s, v) => s + v, 0) / sorted.length;
  return {
    p50: Math.round(at(50)),
    p95: Math.round(at(95)),
    p99: Math.round(at(99)),
    max: Math.round(sorted[sorted.length - 1]),
    min: Math.round(sorted[0]),
    mean: Math.round(mean),
  };
}

// ---------------------------------------------------------------------------
// Benchmark 1 — Sequential (single station, one scan at a time)
// ---------------------------------------------------------------------------

async function benchmarkSequential(station: StationHandle): Promise<BenchmarkResult> {
  const latencies: number[] = [];
  let errors = 0;

  process.stdout.write(`  Running ${SEQUENTIAL_SCAN_COUNT} sequential scans`);
  const wallStart = performance.now();

  for (let i = 0; i < SEQUENTIAL_SCAN_COUNT; i++) {
    const { latencyMs, ok } = await submitScan(station);
    latencies.push(latencyMs);
    if (!ok) errors++;
    if ((i + 1) % 10 === 0) process.stdout.write('.');
  }

  const totalTimeMs = performance.now() - wallStart;
  console.log(' done.');

  return {
    label: 'Sequential (1 station)',
    totalScans: SEQUENTIAL_SCAN_COUNT,
    totalTimeMs,
    scansPerSecond: (SEQUENTIAL_SCAN_COUNT / totalTimeMs) * 1000,
    latencies: percentiles(latencies),
    errors,
  };
}

// ---------------------------------------------------------------------------
// Benchmark 2 — Parallel (10 stations, concurrent rounds)
//
// Models the real event scenario: every ~3 seconds each station submits one scan.
// We don't actually sleep between rounds — we fire each round as fast as the
// previous one completes, which gives us the worst-case sustained throughput
// (all stations submitting at maximum rate simultaneously).
// ---------------------------------------------------------------------------

async function benchmarkParallel(stations: StationHandle[]): Promise<BenchmarkResult> {
  const latencies: number[] = [];
  let errors = 0;

  process.stdout.write(`  Running ${PARALLEL_ROUNDS} rounds × ${STATION_COUNT} concurrent stations`);
  const wallStart = performance.now();

  for (let round = 0; round < PARALLEL_ROUNDS; round++) {
    const results = await Promise.all(stations.map(s => submitScan(s)));
    for (const { latencyMs, ok } of results) {
      latencies.push(latencyMs);
      if (!ok) errors++;
    }
    if ((round + 1) % 4 === 0) process.stdout.write('.');
  }

  const totalTimeMs = performance.now() - wallStart;
  const totalScans = PARALLEL_ROUNDS * STATION_COUNT;
  console.log(' done.');

  return {
    label: `Parallel (${STATION_COUNT} stations concurrent)`,
    totalScans,
    totalTimeMs,
    scansPerSecond: (totalScans / totalTimeMs) * 1000,
    latencies: percentiles(latencies),
    errors,
  };
}

// ---------------------------------------------------------------------------
// Output formatting
// ---------------------------------------------------------------------------

function printResult(result: BenchmarkResult) {
  const { label, totalScans, totalTimeMs, scansPerSecond, latencies, errors } = result;
  console.log(`\n  ${label}`);
  console.log(`  ${'─'.repeat(52)}`);
  console.log(`  Total scans   : ${totalScans}`);
  console.log(`  Total time    : ${totalTimeMs.toFixed(0)} ms`);
  console.log(`  Throughput    : ${scansPerSecond.toFixed(2)} scans/sec`);
  console.log(`  Latency min   : ${latencies.min} ms`);
  console.log(`  Latency mean  : ${latencies.mean} ms`);
  console.log(`  Latency p50   : ${latencies.p50} ms`);
  console.log(`  Latency p95   : ${latencies.p95} ms`);
  console.log(`  Latency p99   : ${latencies.p99} ms`);
  console.log(`  Latency max   : ${latencies.max} ms`);
  console.log(`  Errors        : ${errors}`);
}

function printSummary(results: BenchmarkResult[]) {
  const now = new Date().toISOString();
  console.log('\n');
  console.log('═'.repeat(60));
  console.log(`  SCAN INTAKE BENCHMARK RESULTS — ${now}`);
  console.log(`  Server: ${BASE}`);
  console.log('═'.repeat(60));
  for (const r of results) {
    printResult(r);
  }
  console.log('\n' + '═'.repeat(60));
  console.log('  Copy the block above to compare across phases.');
  console.log('═'.repeat(60) + '\n');
}

// ---------------------------------------------------------------------------
// Entry point
// ---------------------------------------------------------------------------

async function main() {
  console.log(`\nScan Intake Benchmark — target: ${BASE}\n`);

  let adminToken: string;
  try {
    adminToken = await adminLogin();
  } catch (err) {
    console.error(`Could not authenticate. Is the server running at ${BASE}?\n`, err.message);
    process.exit(1);
  }

  const { stations, cleanup } = await provision(adminToken);

  const results: BenchmarkResult[] = [];

  try {
    console.log('\nBenchmark 1 — Sequential');
    results.push(await benchmarkSequential(stations[0]));

    // Brief pause between benchmarks so the sequential scans don't skew
    // the parallel benchmark's first-scan latency (minimumLapTime window)
    await new Promise(r => setTimeout(r, (TRACK_MINIMUM_LAP_TIME + 1) * 1000));

    console.log('\nBenchmark 2 — Parallel');
    results.push(await benchmarkParallel(stations));
  } finally {
    await cleanup();
  }

  printSummary(results);
}

main().catch(err => {
  console.error('Benchmark failed:', err);
  process.exit(1);
});

src/app.ts (46 changed lines)
@@ -7,7 +7,28 @@ import authchecker from "./middlewares/authchecker";
import { ErrorHandler } from './middlewares/ErrorHandler';
import UserChecker from './middlewares/UserChecker';

-const CONTROLLERS_FILE_EXTENSION = process.env.NODE_ENV === 'production' ? 'js' : 'ts';
+// Import all controllers directly to avoid Bun + routing-controllers glob/require issues
+import { AuthController } from './controllers/AuthController';
+import { DonationController } from './controllers/DonationController';
+import { DonorController } from './controllers/DonorController';
+import { GroupContactController } from './controllers/GroupContactController';
+import { ImportController } from './controllers/ImportController';
+import { MeController } from './controllers/MeController';
+import { PermissionController } from './controllers/PermissionController';
+import { RunnerCardController } from './controllers/RunnerCardController';
+import { RunnerController } from './controllers/RunnerController';
+import { RunnerOrganizationController } from './controllers/RunnerOrganizationController';
+import { RunnerSelfServiceController } from './controllers/RunnerSelfServiceController';
+import { RunnerTeamController } from './controllers/RunnerTeamController';
+import { ScanController } from './controllers/ScanController';
+import { ScanStationController } from './controllers/ScanStationController';
+import { StatsClientController } from './controllers/StatsClientController';
+import { StatsController } from './controllers/StatsController';
+import { StatusController } from './controllers/StatusController';
+import { TrackController } from './controllers/TrackController';
+import { UserController } from './controllers/UserController';
+import { UserGroupController } from './controllers/UserGroupController';
+
const app = createExpressServer({
    authorizationChecker: authchecker,
    currentUserChecker: UserChecker,
@@ -15,7 +36,28 @@ const app = createExpressServer({
    development: config.development,
    cors: true,
    routePrefix: "/api",
-    controllers: [`${__dirname}/controllers/*.${CONTROLLERS_FILE_EXTENSION}`],
+    controllers: [
+        AuthController,
+        DonationController,
+        DonorController,
+        GroupContactController,
+        ImportController,
+        MeController,
+        PermissionController,
+        RunnerCardController,
+        RunnerController,
+        RunnerOrganizationController,
+        RunnerSelfServiceController,
+        RunnerTeamController,
+        ScanController,
+        ScanStationController,
+        StatsClientController,
+        StatsController,
+        StatusController,
+        TrackController,
+        UserController,
+        UserGroupController,
+    ],
});

async function main() {

src/config.ts (111 changed lines)
@@ -1,54 +1,59 @@
import consola from 'consola';
-import { config as configDotenv } from 'dotenv';
import { CountryCode } from 'libphonenumber-js';
import ValidatorJS from 'validator';

-configDotenv();
export const config = {
    internal_port: parseInt(process.env.APP_PORT) || 4010,
    development: process.env.NODE_ENV === "production",
    testing: process.env.NODE_ENV === "test",
    jwt_secret: process.env.JWT_SECRET || "secretjwtsecret",
+    station_token_secret: process.env.STATION_TOKEN_SECRET || "",
+    nats_url: process.env.NATS_URL || "nats://localhost:4222",
+    nats_prewarm: process.env.NATS_PREWARM === "true",
    phone_validation_countrycode: getPhoneCodeLocale(),
    postalcode_validation_countrycode: getPostalCodeLocale(),
    version: process.env.VERSION || require('../package.json').version,
    seedTestData: getDataSeeding(),
    app_url: process.env.APP_URL || "http://localhost:8080",
    privacy_url: process.env.PRIVACY_URL || "/privacy",
    imprint_url: process.env.IMPRINT_URL || "/imprint",
    mailer_url: process.env.MAILER_URL || "",
    mailer_key: process.env.MAILER_KEY || ""
}
let errors = 0
if (typeof config.internal_port !== "number") {
    consola.error("Error: APP_PORT is not a number")
    errors++
}
if (typeof config.development !== "boolean") {
    consola.error("Error: NODE_ENV is not a boolean")
    errors++
}
if (config.mailer_url == "" || config.mailer_key == "") {
    consola.error("Error: invalid mailer config")
    errors++;
}
+if (config.station_token_secret.length < 32) {
+    consola.error("Error: STATION_TOKEN_SECRET must be set and at least 32 characters long")
+    errors++;
+}
function getPhoneCodeLocale(): CountryCode {
    return (process.env.PHONE_COUNTRYCODE as CountryCode);
}
function getPostalCodeLocale(): any {
    try {
        const stringArray: String[] = ValidatorJS.isPostalCodeLocales;
        let index = stringArray.indexOf(process.env.POSTALCODE_COUNTRYCODE);
        return ValidatorJS.isPostalCodeLocales[index];
    } catch (error) {
        return null;
    }
}
function getDataSeeding(): Boolean {
    try {
        return JSON.parse(process.env.SEED_TEST_DATA);
    } catch (error) {
        return false;
    }
}
export let e = errors
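
The config block above only validates the new NATS and station-token settings; as a quick orientation, here is a minimal sketch of consuming them with the nats.js client. The import path, the logging, and the secret-generation comment are illustrative assumptions, not part of this changeset.

```ts
// Illustrative sketch only (not part of the diff above).
// Generate a STATION_TOKEN_SECRET that passes the >= 32 character check, e.g.:
//   openssl rand -hex 32
import { connect } from "nats";
import { config } from "../src/config"; // assumed relative path for this sketch

// Connect to the server named by NATS_URL (defaults to nats://localhost:4222).
const nc = await connect({ servers: config.nats_url });
console.log(`connected to ${nc.getServer()}`);
await nc.drain();
```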

src/controllers/RunnerCardController.ts
@@ -1,8 +1,9 @@
import { Authorized, Body, Delete, Get, JsonController, OnUndefined, Param, Post, Put, QueryParam } from 'routing-controllers';
import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
import { Repository, getConnectionManager } from 'typeorm';
import { RunnerCardHasScansError, RunnerCardIdsNotMatchingError, RunnerCardNotFoundError } from '../errors/RunnerCardErrors';
import { RunnerNotFoundError } from '../errors/RunnerErrors';
+import { deleteCardEntry } from '../nats/CardKV';
import { CreateRunnerCard } from '../models/actions/create/CreateRunnerCard';
import { UpdateRunnerCard } from '../models/actions/update/UpdateRunnerCard';
import { UpdateRunnerCardByCode } from '../models/actions/update/UpdateRunnerCardByCode';
@@ -109,8 +110,9 @@ export class RunnerCardController {
            throw new RunnerCardIdsNotMatchingError();
        }

        await this.cardRepository.save(await card.update(oldCard));
+        await deleteCardEntry(id);
        return (await this.cardRepository.findOne({ id: id }, { relations: ['runner', 'runner.group', 'runner.group.parentGroup'] })).toResponse();
    }

    @Put('/:code')
@@ -151,11 +153,12 @@ export class RunnerCardController {
            throw new RunnerCardHasScansError();
        }
        const scanController = new ScanController;
        for (let scan of cardScans) {
            await scanController.remove(scan.id, force);
        }

+        await deleteCardEntry(id);
        await this.cardRepository.delete(card);
        return card.toResponse();
    }
}

src/controllers/RunnerController.ts
@@ -1,18 +1,19 @@
import { Authorized, Body, Delete, Get, JsonController, OnUndefined, Param, Post, Put, QueryParam } from 'routing-controllers';
import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
import { Repository, getConnectionManager } from 'typeorm';
import { RunnerGroupNeededError, RunnerHasDistanceDonationsError, RunnerIdsNotMatchingError, RunnerNotFoundError } from '../errors/RunnerErrors';
import { RunnerGroupNotFoundError } from '../errors/RunnerGroupErrors';
+import { deleteRunnerEntry } from '../nats/RunnerKV';
import { CreateRunner } from '../models/actions/create/CreateRunner';
import { UpdateRunner } from '../models/actions/update/UpdateRunner';
import { Runner } from '../models/entities/Runner';
import { ResponseEmpty } from '../models/responses/ResponseEmpty';
import { ResponseRunner } from '../models/responses/ResponseRunner';
import { ResponseScan } from '../models/responses/ResponseScan';
import { ResponseTrackScan } from '../models/responses/ResponseTrackScan';
import { DonationController } from './DonationController';
import { RunnerCardController } from './RunnerCardController';
import { ScanController } from './ScanController';

@JsonController('/runners')
@OpenAPI({ security: [{ "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
@@ -125,8 +126,9 @@ export class RunnerController {
            throw new RunnerIdsNotMatchingError();
        }

        await this.runnerRepository.save(await runner.update(oldRunner));
+        await deleteRunnerEntry(id);
        return new ResponseRunner(await this.runnerRepository.findOne({ id: id }, { relations: ['scans', 'group', 'group.parentGroup', 'scans.track', 'cards'] }), true);
    }

    @Delete('/:id')

src/controllers/RunnerSelfServiceController.ts
@@ -1,4 +1,4 @@
-import { Request } from "express";
+import type { Request } from "express";
import * as jwt from "jsonwebtoken";
import { BadRequestError, Body, Delete, Get, JsonController, OnUndefined, Param, Post, QueryParam, Req, UseBefore } from 'routing-controllers';
import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
@@ -104,17 +104,21 @@ export class RunnerSelfServiceController {
        return responseScans;
    }

    @Get('/stations/me')
    @UseBefore(ScanAuth)
    @ResponseSchema(ResponseScanStation)
    @ResponseSchema(ScanStationNotFoundError, { statusCode: 404 })
    @OnUndefined(ScanStationNotFoundError)
    @OpenAPI({ description: 'Lists basic information about the station whose token got provided. <br> This includes it\'s associated track.', security: [{ "StationApiToken": [] }] })
    async getStationMe(@Req() req: Request) {
-        let scan = await this.stationRepository.findOne({ id: parseInt(req.headers["station_id"].toString()) }, { relations: ['track'] })
-        if (!scan) { throw new ScanStationNotFoundError(); }
-        return scan.toResponse();
+        // ScanAuth middleware sets req.stationId (not a header)
+        if (!req.stationId) {
+            throw new ScanStationNotFoundError();
+        }
+        let scan = await this.stationRepository.findOne({ id: req.stationId }, { relations: ['track'] })
+        if (!scan) { throw new ScanStationNotFoundError(); }
+        return scan.toResponse();
    }

    @Post('/runners/login')
    @ResponseSchema(RunnerNotFoundError, { statusCode: 404 })
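
The controllers above and below read req.stationId and req.isStationAuth, which the ScanAuth middleware attaches to the request. For those property accesses to type-check, an Express Request augmentation along the following lines is presumably declared alongside the middleware; this is a sketch of an assumed declaration, not a line from this changeset.

```ts
// Assumed module augmentation so req.stationId / req.isStationAuth type-check.
// The real declaration would live with the ScanAuth middleware or a shared typings file.
declare global {
  namespace Express {
    interface Request {
      stationId?: number;
      isStationAuth?: boolean;
    }
  }
}
export {}; // make this file a module so the augmentation applies
```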

src/controllers/ScanController.ts
@@ -1,7 +1,7 @@
-import { Request } from "express";
+import type { Request } from "express";
-import { Authorized, Body, Delete, Get, JsonController, OnUndefined, Param, Post, Put, QueryParam, Req, UseBefore } from 'routing-controllers';
+import { Authorized, Body, Delete, Get, HttpError, JsonController, OnUndefined, Param, Post, Put, QueryParam, Req, UseBefore } from 'routing-controllers';
import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
-import { Repository, getConnectionManager } from 'typeorm';
+import { getConnection, getConnectionManager, Repository } from 'typeorm';
import { RunnerNotFoundError } from '../errors/RunnerErrors';
import { ScanIdsNotMatchingError, ScanNotFoundError } from '../errors/ScanErrors';
import { ScanStationNotFoundError } from '../errors/ScanStationErrors';
@@ -10,11 +10,16 @@ import { CreateScan } from '../models/actions/create/CreateScan';
import { CreateTrackScan } from '../models/actions/create/CreateTrackScan';
import { UpdateScan } from '../models/actions/update/UpdateScan';
import { UpdateTrackScan } from '../models/actions/update/UpdateTrackScan';
+import { RunnerCard } from '../models/entities/RunnerCard';
import { Scan } from '../models/entities/Scan';
import { TrackScan } from '../models/entities/TrackScan';
import { ResponseEmpty } from '../models/responses/ResponseEmpty';
import { ResponseScan } from '../models/responses/ResponseScan';
+import { ResponseScanIntake, ResponseScanIntakeRunner } from '../models/responses/ResponseScanIntake';
import { ResponseTrackScan } from '../models/responses/ResponseTrackScan';
+import { getCardEntry, setCardEntry } from '../nats/CardKV';
+import { deleteRunnerEntry, getRunnerEntry, RunnerKVEntry, setRunnerEntry, warmRunner } from '../nats/RunnerKV';
+import { getStationEntryById } from '../nats/StationKV';
@JsonController('/scans')
@OpenAPI({ security: [{ "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
export class ScanController {
@@ -77,18 +82,112 @@ export class ScanController {
    @Post("/trackscans")
    @UseBefore(ScanAuth)
    @ResponseSchema(ResponseTrackScan)
+    @ResponseSchema(ResponseScanIntake)
    @ResponseSchema(RunnerNotFoundError, { statusCode: 404 })
    @OpenAPI({ description: 'Create a new track scan (for "normal" scans use /scans instead). <br> Please remember that to provide the scan\'s card\'s station\'s id.', security: [{ "StationApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
    async postTrackScans(@Body({ validate: true }) createScan: CreateTrackScan, @Req() req: Request) {
-        const station_id = req.headers["station_id"];
-        if (station_id) {
-            createScan.station = parseInt(station_id.toString());
+        // Station token path — KV-backed intake flow
+        if (req.isStationAuth) {
+            return this.stationIntake(createScan.card, req.stationId);
        }
+        // JWT path — existing full flow, unchanged
+        createScan.station = createScan.station;
        let scan = await createScan.toEntity();
        scan = await this.trackScanRepository.save(scan);
        return (await this.scanRepository.findOne({ id: scan.id }, { relations: ['runner', 'track', 'runner.scans', 'runner.group', 'runner.scans.track', 'card', 'station'] })).toResponse();
    }

+    /**
+     * KV-backed hot path for scan station submissions.
+     * Zero DB reads on a fully warm cache. Fixes the race condition via CAS on the runner KV entry.
+     */
+    private async stationIntake(rawCard: number, stationId: number): Promise<ResponseScanIntake> {
+        const MAX_RETRIES = 3;
+        const cardId = rawCard % 200000000000;
+
+        // --- Station (already verified by ScanAuth, just need track data) ---
+        const stationEntry = await getStationEntryById(stationId);
+        // stationEntry is always populated here — ScanAuth wrote it on the cold path
+        const trackDistance = stationEntry.trackDistance;
+        const minimumLapTime = stationEntry.minimumLapTime;
+
+        // --- Card ---
+        let cardEntry = await getCardEntry(cardId);
+        if (!cardEntry) {
+            // Cold path: load from DB and cache
+            const card = await getConnection().getRepository(RunnerCard).findOne({ id: cardId }, { relations: ['runner'] });
+            if (!card) throw new ScanNotFoundError();
+            if (!card.runner) throw new RunnerNotFoundError();
+            cardEntry = {
+                runnerId: card.runner.id,
+                runnerDisplayName: `${card.runner.firstname} ${card.runner.lastname}`,
+                enabled: card.enabled,
+            };
+            await setCardEntry(cardId, cardEntry);
+        }
+        if (!cardEntry.enabled) throw new HttpError(400, 'Card is disabled.');
+        const runnerId = cardEntry.runnerId;
+
+        // --- Runner state + CAS update (fixes race condition) ---
+        const now = Math.round(Date.now() / 1000);
+        let retries = 0;
+        let response: ResponseScanIntake;
+
+        while (retries < MAX_RETRIES) {
+            // Get current runner state (warm or cold)
+            let result = await getRunnerEntry(runnerId);
+            if (!result) {
+                const warmed = await warmRunner(runnerId);
+                result = { entry: warmed, revision: undefined };
+            }
+            const { entry, revision } = result;
+
+            // Compute
+            const lapTime = entry.latestTimestamp === 0 ? 0 : now - entry.latestTimestamp;
+            const valid = minimumLapTime === 0 || lapTime > minimumLapTime;
+            const newDistance = entry.distance + (valid ? trackDistance : 0);
+            const newTimestamp = valid ? now : entry.latestTimestamp;
+
+            const updated: RunnerKVEntry = {
+                displayName: entry.displayName,
+                distance: newDistance,
+                latestTimestamp: newTimestamp,
+            };
+
+            // CAS write — if revision is undefined (warmed this request), plain put
+            const success = await setRunnerEntry(runnerId, updated, revision);
+            if (!success) {
+                retries++;
+                continue;
+            }
+
+            // DB insert — synchronous, keeps DB as source of truth
+            const newScan = new TrackScan();
+            newScan.runner = { id: runnerId } as any;
+            newScan.card = { id: cardId } as any;
+            newScan.station = { id: stationId } as any;
+            newScan.track = { id: stationEntry.trackId } as any;
+            newScan.timestamp = now;
+            newScan.lapTime = lapTime;
+            newScan.valid = valid;
+            await this.trackScanRepository.save(newScan);
+
+            const runnerInfo = new ResponseScanIntakeRunner();
+            runnerInfo.displayName = entry.displayName;
|
||||||
|
runnerInfo.distance = newDistance;
|
||||||
|
|
||||||
|
response = new ResponseScanIntake();
|
||||||
|
response.accepted = true;
|
||||||
|
response.valid = valid;
|
||||||
|
response.lapTime = lapTime;
|
||||||
|
response.runner = runnerInfo;
|
||||||
|
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new HttpError(409, 'Scan rejected: too many concurrent scans for this runner. Please retry.');
|
||||||
|
}
|
||||||
|
|
||||||
@Put('/:id')
|
@Put('/:id')
|
||||||
@Authorized("SCAN:UPDATE")
|
@Authorized("SCAN:UPDATE")
|
||||||
@ResponseSchema(ResponseScan)
|
@ResponseSchema(ResponseScan)
|
||||||
@@ -97,7 +196,7 @@ export class ScanController {
|
|||||||
@ResponseSchema(ScanIdsNotMatchingError, { statusCode: 406 })
|
@ResponseSchema(ScanIdsNotMatchingError, { statusCode: 406 })
|
||||||
@OpenAPI({ description: "Update the scan (not track scan use /scans/trackscans/:id instead) whose id you provided. <br> Please remember that ids can't be changed and distances must be positive." })
|
@OpenAPI({ description: "Update the scan (not track scan use /scans/trackscans/:id instead) whose id you provided. <br> Please remember that ids can't be changed and distances must be positive." })
|
||||||
async put(@Param('id') id: number, @Body({ validate: true }) scan: UpdateScan) {
|
async put(@Param('id') id: number, @Body({ validate: true }) scan: UpdateScan) {
|
||||||
let oldScan = await this.scanRepository.findOne({ id: id });
|
let oldScan = await this.scanRepository.findOne({ id: id }, { relations: ['runner'] });
|
||||||
|
|
||||||
if (!oldScan) {
|
if (!oldScan) {
|
||||||
throw new ScanNotFoundError();
|
throw new ScanNotFoundError();
|
||||||
@@ -107,7 +206,9 @@ export class ScanController {
|
|||||||
throw new ScanIdsNotMatchingError();
|
throw new ScanIdsNotMatchingError();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const runnerId = oldScan.runner?.id;
|
||||||
await this.scanRepository.save(await scan.update(oldScan));
|
await this.scanRepository.save(await scan.update(oldScan));
|
||||||
|
if (runnerId) await deleteRunnerEntry(runnerId);
|
||||||
return (await this.scanRepository.findOne({ id: id }, { relations: ['runner', 'track', 'runner.scans', 'runner.group', 'runner.scans.track', 'card', 'station'] })).toResponse();
|
return (await this.scanRepository.findOne({ id: id }, { relations: ['runner', 'track', 'runner.scans', 'runner.group', 'runner.scans.track', 'card', 'station'] })).toResponse();
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -120,7 +221,7 @@ export class ScanController {
|
|||||||
@ResponseSchema(ScanIdsNotMatchingError, { statusCode: 406 })
|
@ResponseSchema(ScanIdsNotMatchingError, { statusCode: 406 })
|
||||||
@OpenAPI({ description: 'Update the track scan (not "normal" scan use /scans/trackscans/:id instead) whose id you provided. <br> Please remember that only the validity, runner and track can be changed.' })
|
@OpenAPI({ description: 'Update the track scan (not "normal" scan use /scans/trackscans/:id instead) whose id you provided. <br> Please remember that only the validity, runner and track can be changed.' })
|
||||||
async putTrackScan(@Param('id') id: number, @Body({ validate: true }) scan: UpdateTrackScan) {
|
async putTrackScan(@Param('id') id: number, @Body({ validate: true }) scan: UpdateTrackScan) {
|
||||||
let oldScan = await this.trackScanRepository.findOne({ id: id });
|
let oldScan = await this.trackScanRepository.findOne({ id: id }, { relations: ['runner'] });
|
||||||
|
|
||||||
if (!oldScan) {
|
if (!oldScan) {
|
||||||
throw new ScanNotFoundError();
|
throw new ScanNotFoundError();
|
||||||
@@ -130,7 +231,9 @@ export class ScanController {
|
|||||||
throw new ScanIdsNotMatchingError();
|
throw new ScanIdsNotMatchingError();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const runnerId = oldScan.runner?.id;
|
||||||
await this.trackScanRepository.save(await scan.update(oldScan));
|
await this.trackScanRepository.save(await scan.update(oldScan));
|
||||||
|
if (runnerId) await deleteRunnerEntry(runnerId);
|
||||||
return (await this.scanRepository.findOne({ id: id }, { relations: ['runner', 'track', 'runner.scans', 'runner.group', 'runner.scans.track', 'card', 'station'] })).toResponse();
|
return (await this.scanRepository.findOne({ id: id }, { relations: ['runner', 'track', 'runner.scans', 'runner.group', 'runner.scans.track', 'card', 'station'] })).toResponse();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
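
The `stationIntake()` hot path above relies on `getRunnerEntry`/`setRunnerEntry` helpers from `../nats/RunnerKV` that expose a KV revision for compare-and-swap; that module is not part of the hunks shown here. A minimal sketch of how such helpers could be built on the nats.js KV API follows; the bucket name (`runners`) and the entry shape are assumptions for illustration, not the repository's actual code.

```ts
// Hypothetical sketch of the RunnerKV helpers assumed by stationIntake().
// Only illustrates the CAS idea; the real module lives in src/nats/RunnerKV.ts.
import { connect, type KV } from 'nats';

export interface RunnerKVEntry {
    displayName: string;
    distance: number;
    latestTimestamp: number;
}

let kv: KV;

export async function initRunnerKV(natsUrl: string): Promise<void> {
    const nc = await connect({ servers: natsUrl });
    const js = nc.jetstream();
    kv = await js.views.kv('runners'); // bucket name is an assumption
}

export async function getRunnerEntry(runnerId: number): Promise<{ entry: RunnerKVEntry; revision: number } | null> {
    const res = await kv.get(String(runnerId));
    if (!res || res.operation !== 'PUT') return null; // missing or deleted key counts as a cache miss
    return { entry: JSON.parse(res.string()) as RunnerKVEntry, revision: res.revision };
}

// Returns false when the compare-and-swap loses the race, so the caller can retry.
export async function setRunnerEntry(runnerId: number, entry: RunnerKVEntry, revision?: number): Promise<boolean> {
    const data = JSON.stringify(entry);
    try {
        if (revision === undefined) {
            await kv.put(String(runnerId), data); // first write for this key: plain put
        } else {
            await kv.update(String(runnerId), data, revision); // CAS against the last seen revision
        }
        return true;
    } catch {
        return false; // revision mismatch, a concurrent writer won
    }
}
```

The retry loop in the controller treats a failed `setRunnerEntry` as a lost race and re-reads the entry, which is what closes the double-scan race without a database lock.
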
@@ -3,6 +3,7 @@ import { OpenAPI, ResponseSchema } from 'routing-controllers-openapi';
 import { Repository, getConnectionManager } from 'typeorm';
 import { ScanStationHasScansError, ScanStationIdsNotMatchingError, ScanStationNotFoundError } from '../errors/ScanStationErrors';
 import { TrackNotFoundError } from '../errors/TrackErrors';
+import { deleteStationEntry } from '../nats/StationKV';
 import { CreateScanStation } from '../models/actions/create/CreateScanStation';
 import { UpdateScanStation } from '../models/actions/update/UpdateScanStation';
 import { ScanStation } from '../models/entities/ScanStation';
@@ -85,6 +86,7 @@ export class ScanStationController {
 }

 await this.stationRepository.save(await station.update(oldStation));
+await deleteStationEntry(oldStation.prefix);
 return (await this.stationRepository.findOne({ id: id }, { relations: ['track'] })).toResponse();
 }

@@ -109,6 +111,7 @@ export class ScanStationController {
 }

 const responseStation = await this.stationRepository.findOne({ id: station.id }, { relations: ["track"] });
+await deleteStationEntry(station.prefix);
 await this.stationRepository.delete(station);
 return responseStation.toResponse();
 }
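
The controller invalidates the station cache by prefix after updates and deletes. `deleteStationEntry` comes from `../nats/StationKV`, which is not shown in these hunks; a sketch under the assumption that it is a thin wrapper around a NATS KV bucket keyed by station prefix (bucket name assumed).

```ts
// Hypothetical sketch of the StationKV invalidation helper (assumed shape).
import type { JetStreamClient, KV } from 'nats';

let kv: KV;

export async function initStationKV(js: JetStreamClient): Promise<void> {
    kv = await js.views.kv('stations'); // bucket name is an assumption
}

// Called after a station is updated or deleted so the next ScanAuth request hits the cold path again.
export async function deleteStationEntry(prefix: string): Promise<void> {
    await kv.purge(prefix); // purge drops the value and its history
}
```
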
@@ -14,6 +14,7 @@ import { ResponseStats } from '../models/responses/ResponseStats';
 import { ResponseStatsOrgnisation } from '../models/responses/ResponseStatsOrganization';
 import { ResponseStatsRunner } from '../models/responses/ResponseStatsRunner';
 import { ResponseStatsTeam } from '../models/responses/ResponseStatsTeam';
+import { getStatsCache, setStatsCache } from '../nats/StatsKV';

 @JsonController('/stats')
 export class StatsController {
@@ -22,6 +23,13 @@ export class StatsController {
 @ResponseSchema(ResponseStats)
 @OpenAPI({ description: "A very basic stats endpoint providing basic counters for a dashboard or simmilar" })
 async get() {
+// Try cache first
+const cached = await getStatsCache<ResponseStats>('overview');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 const connection = getConnection();
 const runnersViaSelfservice = await connection.getRepository(Runner).count({ where: { created_via: "selfservice" } });
 const runnersViaKiosk = await connection.getRepository(Runner).count({ where: { created_via: "kiosk" } });
@@ -43,7 +51,12 @@ export class StatsController {
 let donations = await connection.getRepository(Donation).find({ relations: ['runner', 'runner.scans', 'runner.scans.track'] });
 const donors = await connection.getRepository(Donor).count();

-return new ResponseStats(runnersViaSelfservice, runners, teams, orgs, users, scans, donations, distace, donors, runnersViaKiosk)
+const result = new ResponseStats(runnersViaSelfservice, runners, teams, orgs, users, scans, donations, distace, donors, runnersViaKiosk);
+
+// Store in cache for 60 seconds
+await setStatsCache('overview', result);
+
+return result;
 }

 @Get("/runners/distance")
@@ -51,6 +64,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsRunner, { isArray: true })
 @OpenAPI({ description: "Returns the top ten runners by distance.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopRunnersByDistance() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsRunner[]>('runners.distance');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let runners = await getConnection().getRepository(Runner).find({ relations: ['scans', 'group', 'distanceDonations', 'scans.track'] });
 if (!runners || runners.length == 0) {
 return [];
@@ -60,6 +80,10 @@ export class StatsController {
 topRunners.forEach(runner => {
 responseRunners.push(new ResponseStatsRunner(runner));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('runners.distance', responseRunners);
+
 return responseRunners;
 }

@@ -68,6 +92,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsRunner, { isArray: true })
 @OpenAPI({ description: "Returns the top ten runners by donations.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopRunnersByDonations() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsRunner[]>('runners.donations');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let runners = await getConnection().getRepository(Runner).find({ relations: ['group', 'distanceDonations', 'distanceDonations.runner', 'distanceDonations.runner.scans', 'distanceDonations.runner.scans.track'] });
 if (!runners || runners.length == 0) {
 return [];
@@ -77,6 +108,10 @@ export class StatsController {
 topRunners.forEach(runner => {
 responseRunners.push(new ResponseStatsRunner(runner));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('runners.donations', responseRunners);
+
 return responseRunners;
 }

@@ -85,6 +120,14 @@ export class StatsController {
 @ResponseSchema(ResponseStatsRunner, { isArray: true })
 @OpenAPI({ description: "Returns the top ten runners by fastest laptime on your selected track (track by id).", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopRunnersByLaptime(@QueryParam("track") track: number) {
+// Try cache first (cache key includes track id, using dots for NATS KV compatibility)
+const cacheKey = `runners.laptime.${track}`;
+const cached = await getStatsCache<ResponseStatsRunner[]>(cacheKey);
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let scans = await getConnection().getRepository(TrackScan).find({ relations: ['track', 'runner', 'runner.group', 'runner.scans', 'runner.scans.track', 'runner.distanceDonations'] });
 if (!scans || scans.length == 0) {
 return [];
@@ -105,6 +148,10 @@ export class StatsController {
 topScans.forEach(scan => {
 responseRunners.push(new ResponseStatsRunner(scan.runner, scan.lapTime));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache(cacheKey, responseRunners);
+
 return responseRunners;
 }

@@ -121,6 +168,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsTeam, { isArray: true })
 @OpenAPI({ description: "Returns the top ten teams by distance.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopTeamsByDistance() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsTeam[]>('teams.distance');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let teams = await getConnection().getRepository(RunnerTeam).find({ relations: ['parentGroup', 'runners', 'runners.scans', 'runners.scans.track'] });
 if (!teams || teams.length == 0) {
 return [];
@@ -130,6 +184,10 @@ export class StatsController {
 topTeams.forEach(team => {
 responseTeams.push(new ResponseStatsTeam(team));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('teams.distance', responseTeams);
+
 return responseTeams;
 }

@@ -138,6 +196,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsTeam, { isArray: true })
 @OpenAPI({ description: "Returns the top ten teams by donations.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopTeamsByDonations() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsTeam[]>('teams.donations');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let teams = await getConnection().getRepository(RunnerTeam).find({ relations: ['parentGroup', 'runners', 'runners.scans', 'runners.distanceDonations', 'runners.scans.track'] });
 if (!teams || teams.length == 0) {
 return [];
@@ -147,6 +212,10 @@ export class StatsController {
 topTeams.forEach(team => {
 responseTeams.push(new ResponseStatsTeam(team));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('teams.donations', responseTeams);
+
 return responseTeams;
 }

@@ -155,6 +224,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsOrgnisation, { isArray: true })
 @OpenAPI({ description: "Returns the top ten organizations by distance.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopOrgsByDistance() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsOrgnisation[]>('organizations.distance');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let orgs = await getConnection().getRepository(RunnerOrganization).find({ relations: ['runners', 'runners.scans', 'runners.distanceDonations', 'runners.scans.track', 'teams', 'teams.runners', 'teams.runners.scans', 'teams.runners.distanceDonations', 'teams.runners.scans.track'] });
 if (!orgs || orgs.length == 0) {
 return [];
@@ -164,6 +240,10 @@ export class StatsController {
 topOrgs.forEach(org => {
 responseOrgs.push(new ResponseStatsOrgnisation(org));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('organizations.distance', responseOrgs);
+
 return responseOrgs;
 }

@@ -172,6 +252,13 @@ export class StatsController {
 @ResponseSchema(ResponseStatsOrgnisation, { isArray: true })
 @OpenAPI({ description: "Returns the top ten organizations by donations.", security: [{ "StatsApiToken": [] }, { "AuthToken": [] }, { "RefreshTokenCookie": [] }] })
 async getTopOrgsByDonations() {
+// Try cache first
+const cached = await getStatsCache<ResponseStatsOrgnisation[]>('organizations.donations');
+if (cached) {
+return cached;
+}
+
+// Cache miss - compute fresh stats
 let orgs = await getConnection().getRepository(RunnerOrganization).find({ relations: ['runners', 'runners.distanceDonations', 'runners.distanceDonations.runner', 'runners.distanceDonations.runner.scans', 'runners.distanceDonations.runner.scans.track', 'teams', 'teams.runners', 'teams.runners.distanceDonations', 'teams.runners.distanceDonations.runner', 'teams.runners.distanceDonations.runner.scans', 'teams.runners.distanceDonations.runner.scans.track'] });
 if (!orgs || orgs.length == 0) {
 return [];
@@ -181,6 +268,10 @@ export class StatsController {
 topOrgs.forEach(org => {
 responseOrgs.push(new ResponseStatsOrgnisation(org));
 });
+
+// Store in cache for 60 seconds
+await setStatsCache('organizations.donations', responseOrgs);
+
 return responseOrgs;
 }
 }
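
Every stats endpoint now follows the same try-cache / compute / store pattern with a 60-second lifetime. The `getStatsCache`/`setStatsCache` helpers come from `../nats/StatsKV` and are not part of these hunks; one plausible way to implement them is to store the payload together with a timestamp and treat stale entries as misses. The bucket name and TTL handling below are assumptions, not the repository's actual code.

```ts
// Hypothetical sketch of the StatsKV helpers used above (assumed shape).
import type { KV } from 'nats';

const CACHE_TTL_MS = 60_000; // matches the "Store in cache for 60 seconds" comments
let kv: KV; // a NATS KV bucket, e.g. await js.views.kv('stats'), initialised at startup

interface CachedStats<T> {
    storedAt: number;
    payload: T;
}

export async function getStatsCache<T>(key: string): Promise<T | null> {
    const entry = await kv.get(key);
    if (!entry || entry.operation !== 'PUT') return null;
    const cached = JSON.parse(entry.string()) as CachedStats<T>;
    if (Date.now() - cached.storedAt > CACHE_TTL_MS) return null; // stale entry counts as a miss
    return cached.payload;
}

export async function setStatsCache<T>(key: string, payload: T): Promise<void> {
    const cached: CachedStats<T> = { storedAt: Date.now(), payload };
    await kv.put(key, JSON.stringify(cached));
}
```

Note that the lap-time cache key uses dots (`runners.laptime.<track>`) because NATS KV key segments are dot-separated; slashes would not be valid key characters.
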
@@ -1,5 +1,6 @@
 import { createConnection } from "typeorm";
 import { runSeeder } from 'typeorm-seeding';
+import consola from 'consola';
 import { config } from '../config';
 import { ConfigFlag } from '../models/entities/ConfigFlags';
 import SeedPublicOrg from '../seeds/SeedPublicOrg';
@@ -11,6 +12,11 @@ import SeedUsers from '../seeds/SeedUsers';
 */
 export default async () => {
 const connection = await createConnection();
+
+// Log discovered entities for debugging
+consola.info(`TypeORM discovered ${connection.entityMetadatas.length} entities:`);
+consola.info(connection.entityMetadatas.map(m => m.name).sort().join(', '));
+
 await connection.synchronize();

 //The data seeding part

@@ -1,4 +1,8 @@
 import { Application } from "express";
+import consola from "consola";
+import { config } from "../config";
+import NatsClient from "../nats/NatsClient";
+import { warmAll } from "../nats/RunnerKV";
 import databaseLoader from "./database";
 import expressLoader from "./express";
 import openapiLoader from "./openapi";
@@ -9,6 +13,16 @@ import openapiLoader from "./openapi";
 */
 export default async (app: Application) => {
 await databaseLoader();
+await NatsClient.connect();
+
+if (config.nats_prewarm) {
+consola.info("Prewarming NATS runner cache...");
+const startTime = Date.now();
+await warmAll();
+const duration = Date.now() - startTime;
+consola.success(`NATS runner cache prewarmed in ${duration}ms`);
+}
+
 await openapiLoader(app);
 await expressLoader(app);
 return app;
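
The loader now connects to NATS before the OpenAPI and Express loaders run and optionally prewarms the runner cache. `NatsClient` from `../nats/NatsClient` is a small singleton that is not shown in this diff; a minimal sketch of what it might look like, with `config.nats_url`/`config.nats_prewarm` assumed to map to the new `NATS_URL` and `NATS_PREWARM` environment variables.

```ts
// Hypothetical sketch of src/nats/NatsClient.ts (assumed shape).
import { connect, type NatsConnection } from 'nats';
import { config } from '../config';

class NatsClient {
    private connection: NatsConnection | null = null;

    async connect(): Promise<NatsConnection> {
        if (!this.connection) {
            // config.nats_url is assumed to come from the NATS_URL env var added in this changeset
            this.connection = await connect({ servers: config.nats_url });
        }
        return this.connection;
    }

    async close(): Promise<void> {
        await this.connection?.drain(); // flush pending messages before shutting down
        this.connection = null;
    }
}

export default new NatsClient();
```
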
@@ -1,69 +1,129 @@
-import { verify } from '@node-rs/argon2';
+import crypto from 'crypto';
 import { Request, Response } from 'express';
 import { getConnectionManager } from 'typeorm';
+import { config } from '../config';
+import { deleteStationEntry, getStationEntry, setStationEntry, StationKVEntry } from '../nats/StationKV';
 import { ScanStation } from '../models/entities/ScanStation';
 import authchecker from './authchecker';
+
+/**
+ * Computes the HMAC-SHA256 of the provided token using the station token secret.
+ */
+function computeHmac(token: string): string {
+return crypto.createHmac('sha256', config.station_token_secret).update(token).digest('hex');
+}
+
+/**
+ * Constant-time comparison of two hex HMAC strings.
+ * Returns true if they match.
+ */
+function verifyHmac(provided_token: string, storedHash: string): boolean {
+const expectedHash = computeHmac(provided_token);
+const expectedBuf = Buffer.from(expectedHash);
+const storedBuf = Buffer.from(storedHash);
+return expectedBuf.length === storedBuf.length && crypto.timingSafeEqual(expectedBuf, storedBuf);
+}
+
 /**
 * This middleware handles the authentication of scan station api tokens.
 * The tokens have to be provided via Bearer authorization header.
+*
+* Auth flow:
+* 1. Extract prefix from token (PREFIX.KEY format)
+* 2. Try NATS KV cache lookup by prefix — warm path: HMAC verify, no DB
+* 3. On cache miss: DB lookup → HMAC verify → write to KV cache
+* 4. On no station match at all: fall back to JWT auth (SCAN:CREATE permission)
+*
+* On success sets req.isStationAuth = true and req.stationId on the request object.
+* These are internal server-side properties — not HTTP headers, not spoofable by clients.
+*
 * You have to manually use this middleware via @UseBefore(ScanAuth) instead of using @Authorized().
 * @param req Express request object.
 * @param res Express response object.
 * @param next Next function to call on success.
 */
 const ScanAuth = async (req: Request, res: Response, next: () => void) => {
-let provided_token: string = req.headers["authorization"];
+let provided_token: string = req.headers['authorization'];
-if (provided_token == "" || provided_token === undefined || provided_token === null) {
+if (!provided_token) {
-res.status(401).send({ http_code: 401, short: "no_token", message: "No api token provided." });
+res.status(401).send({ http_code: 401, short: 'no_token', message: 'No api token provided.' });
 return;
 }

-try {
+provided_token = provided_token.replace('Bearer ', '');
-provided_token = provided_token.replace("Bearer ", "");
-} catch (error) {
+const prefix = provided_token.split('.')[0];
-res.status(401).send({ http_code: 401, short: "no_token", message: "No valid jwt or api token provided." });
+if (!prefix) {
+res.status(401).send({ http_code: 401, short: 'invalid_token', message: 'Api token non-existent or invalid syntax.' });
 return;
 }

-let prefix = "";
+// --- KV cache lookup (warm path) ---
-try {
+const cached = await getStationEntry(prefix);
-prefix = provided_token.split(".")[0];
+if (cached) {
-}
+if (!cached.enabled) {
-finally {
+res.status(401).send({ http_code: 401, short: 'station_disabled', message: 'Station is disabled.' });
-if (prefix == "" || prefix == undefined || prefix == null) {
-res.status(401).send({ http_code: 401, short: "invalid_token", message: "Api token non-existent or invalid syntax." });
 return;
 }
+if (!verifyHmac(provided_token, cached.tokenHash)) {
+res.status(401).send({ http_code: 401, short: 'invalid_token', message: 'Api token non-existent or invalid syntax.' });
+return;
+}
+req.isStationAuth = true;
+req.stationId = cached.id;
+next();
+return;
 }

-const station = await getConnectionManager().get().getRepository(ScanStation).findOne({ prefix: prefix });
+// --- DB lookup (cold path) ---
+const station = await getConnectionManager().get().getRepository(ScanStation).findOne({ prefix }, { relations: ['track'] });
+
 if (!station) {
+// No station with this prefix — fall back to JWT auth
 let user_authorized = false;
 try {
-let action = { request: req, response: res, context: null, next: next }
+const action = { request: req, response: res, context: null, next };
-user_authorized = await authchecker(action, ["SCAN:CREATE"]);
+user_authorized = await authchecker(action, ['SCAN:CREATE']);
-}
+} finally {
-finally {
+if (!user_authorized) {
-if (user_authorized == false) {
+res.status(401).send({ http_code: 401, short: 'invalid_token', message: 'Api token non-existent or invalid syntax.' });
-res.status(401).send({ http_code: 401, short: "invalid_token", message: "Api token non-existent or invalid syntax." });
 return;
 }
-else {
+next();
-next();
-}
 }
+return;
 }
-else {
+
-if (station.enabled == false) {
+// Station found — verify token before caching
-res.status(401).send({ http_code: 401, short: "station_disabled", message: "Station is disabled." });
+const tokenHash = computeHmac(provided_token);
-}
+const storedBuf = Buffer.from(station.key);
-if (!(await verify(station.key, provided_token))) {
+const computedBuf = Buffer.from(tokenHash);
-res.status(401).send({ http_code: 401, short: "invalid_token", message: "Api token non-existent or invalid syntax." });
+const valid = computedBuf.length === storedBuf.length && crypto.timingSafeEqual(computedBuf, storedBuf);
-return;
+
-}
+if (!valid) {
-req.headers["station_id"] = station.id.toString();
+res.status(401).send({ http_code: 401, short: 'invalid_token', message: 'Api token non-existent or invalid syntax.' });
-next();
+return;
 }
-}
+
-export default ScanAuth;
+if (!station.enabled) {
+res.status(401).send({ http_code: 401, short: 'station_disabled', message: 'Station is disabled.' });
+return;
+}
+
+// Write to KV cache for subsequent requests
+const entry: StationKVEntry = {
+id: station.id,
+enabled: station.enabled,
+tokenHash,
+trackId: station.track.id,
+trackDistance: station.track.distance,
+minimumLapTime: station.track.minimumLapTime ?? 0,
+};
+await setStationEntry(prefix, entry);
+
+req.isStationAuth = true;
+req.stationId = station.id;
+next();
+};
+
+export default ScanAuth;
+export { deleteStationEntry };
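
ScanAuth now sets `req.isStationAuth` and `req.stationId`, and `ScanController.postTrackScans` reads them. TypeScript only accepts those property accesses if the Express `Request` type is augmented somewhere in the project; that declaration is not part of this hunk, so the following is a minimal sketch of what it presumably looks like (file name and location are assumptions).

```ts
// Minimal sketch of the Express Request augmentation this middleware relies on.
// The real declaration presumably lives in a *.d.ts file in the project.
declare global {
    namespace Express {
        interface Request {
            /** Set by ScanAuth when a station api token authenticated the request. */
            isStationAuth?: boolean;
            /** Database id of the authenticated scan station. */
            stationId?: number;
        }
    }
}

export {}; // keep this file a module so the global augmentation applies
```

Because these are server-side request properties rather than the old `station_id` header, a client can no longer spoof the station id by sending its own header.
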
@@ -1,4 +1,4 @@
-import { verify } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { Request, Response } from 'express';
 import { getConnectionManager } from 'typeorm';
 import { StatsClient } from '../models/entities/StatsClient';
@@ -55,7 +55,7 @@ const StatsAuth = async (req: Request, res: Response, next: () => void) => {
 }
 }
 else {
-if (!(await verify(client.key, provided_token))) {
+if (!(await Bun.password.verify(provided_token, client.key))) {
 res.status(401).send("Api token invalid.");
 return;
 }
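
Note the argument order changes with the move from `@node-rs/argon2` to Bun's built-in hasher: argon2's `verify(hash, plaintext)` becomes `Bun.password.verify(plaintext, hash)`. A small usage example (Bun runtime only):

```ts
// Bun.password usage as applied throughout this changeset (run with `bun run`).
const hash = await Bun.password.hash('secret' + 'user-uuid'); // argon2id by default

console.log(await Bun.password.verify('secret' + 'user-uuid', hash)); // true
console.log(await Bun.password.verify('wrong' + 'user-uuid', hash));  // false
```
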
@@ -1,4 +1,4 @@
-import { hash } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { IsNotEmpty, IsOptional, IsString } from 'class-validator';
 import * as jsonwebtoken from 'jsonwebtoken';
 import { getConnectionManager } from 'typeorm';
@@ -49,7 +49,7 @@ export class ResetPassword {
 if (found_user.refreshTokenCount !== decoded["refreshTokenCount"]) { throw new RefreshTokenCountInvalidError(); }

 found_user.refreshTokenCount = found_user.refreshTokenCount + 1;
-found_user.password = await hash(this.password + found_user.uuid);
+found_user.password = await Bun.password.hash(this.password + found_user.uuid);
 await getConnectionManager().get().getRepository(User).save(found_user);

 return "password reset successfull";

@@ -1,4 +1,4 @@
-import { verify } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { IsEmail, IsNotEmpty, IsOptional, IsString } from 'class-validator';
 import { getConnectionManager } from 'typeorm';
 import { InvalidCredentialsError, PasswordNeededError, UserDisabledError, UserNotFoundError } from '../../../errors/AuthError';
@@ -56,7 +56,7 @@ export class CreateAuth {
 throw new UserNotFoundError();
 }
 if (found_user.enabled == false) { throw new UserDisabledError(); }
-if (!(await verify(found_user.password, this.password + found_user.uuid))) {
+if (!(await Bun.password.verify(this.password + found_user.uuid, found_user.password))) {
 throw new InvalidCredentialsError();
 }

@@ -1,5 +1,4 @@
 import { IsBoolean, IsObject, IsOptional } from 'class-validator';
-import * as uuid from 'uuid';
 import { Address } from '../../entities/Address';
 import { RunnerOrganization } from '../../entities/RunnerOrganization';
 import { CreateRunnerGroup } from './CreateRunnerGroup';
@@ -35,7 +34,7 @@ export class CreateRunnerOrganization extends CreateRunnerGroup {
 Address.validate(newRunnerOrganization.address);

 if (this.registrationEnabled) {
-newRunnerOrganization.key = uuid.v4().toUpperCase();
+newRunnerOrganization.key = crypto.randomUUID()
 }

 return newRunnerOrganization;
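
The `uuid` dependency is dropped in favour of the built-in `crypto.randomUUID()` (a global in Bun and recent Node.js). One behavioural detail worth keeping in mind when reviewing these call sites: `randomUUID()` always returns lowercase hex, so code that previously relied on `uuid.v4().toUpperCase()` still needs an explicit `.toUpperCase()` wherever the casing matters.

```ts
// crypto.randomUUID() is available as a global in Bun and modern Node.js.
const key = crypto.randomUUID();                 // lowercase, e.g. "9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d"
const upper = crypto.randomUUID().toUpperCase(); // matches the old uuid.v4().toUpperCase() behaviour
console.log(key, upper);
```
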
@@ -1,8 +1,7 @@
-import { hash } from '@node-rs/argon2';
 import { IsBoolean, IsInt, IsOptional, IsPositive, IsString } from 'class-validator';
 import crypto from 'crypto';
 import { getConnection } from 'typeorm';
-import * as uuid from 'uuid';
+import { config } from '../../../config';
 import { TrackNotFoundError } from '../../../errors/TrackErrors';
 import { ScanStation } from '../../entities/ScanStation';
 import { Track } from '../../entities/Track';
@@ -42,10 +41,10 @@ export class CreateScanStation {
 newStation.enabled = this.enabled;
 newStation.track = await this.getTrack();

-let newUUID = uuid.v4().toUpperCase();
+let newUUID = crypto.randomUUID().toUpperCase();
 newStation.prefix = crypto.createHash("sha3-512").update(newUUID).digest('hex').substring(0, 7).toUpperCase();
-newStation.key = await hash(newStation.prefix + "." + newUUID);
 newStation.cleartextkey = newStation.prefix + "." + newUUID;
+newStation.key = crypto.createHmac("sha256", config.station_token_secret).update(newStation.cleartextkey).digest('hex');

 return newStation;
 }
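
Station keys are no longer argon2 hashes but deterministic HMAC-SHA256 digests of the cleartext token, which is what lets ScanAuth verify a Bearer token against the cached hash without an argon2 run or a database read. A short standalone check of that relationship (the secret and token values below are placeholders, not real credentials):

```ts
// Demonstrates that the stored station key equals the HMAC of the cleartext token.
import crypto from 'crypto';

const stationTokenSecret = 'replace-with-random-secret-min-32-chars'; // placeholder for STATION_TOKEN_SECRET
const cleartextkey = 'ABCDEF0.9B1DEB4D-3B7D-4BAD-9BDD-2B0D7B3DCB6D'; // PREFIX.KEY format

// What CreateScanStation stores as newStation.key:
const storedKey = crypto.createHmac('sha256', stationTokenSecret).update(cleartextkey).digest('hex');

// What ScanAuth recomputes on every request from the provided Bearer token:
const recomputed = crypto.createHmac('sha256', stationTokenSecret).update(cleartextkey).digest('hex');

console.log(storedKey === recomputed); // true, so no argon2 verify (and no DB read on the warm path) is needed
```
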
@@ -1,7 +1,6 @@
-import { hash } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { IsOptional, IsString } from 'class-validator';
 import crypto from 'crypto';
-import * as uuid from 'uuid';
 import { StatsClient } from '../../entities/StatsClient';

 /**
@@ -23,9 +22,9 @@ export class CreateStatsClient {

 newClient.description = this.description;

-let newUUID = uuid.v4().toUpperCase();
+let newUUID = crypto.randomUUID().toUpperCase();
 newClient.prefix = crypto.createHash("sha3-512").update(newUUID).digest('hex').substring(0, 7).toUpperCase();
-newClient.key = await hash(newClient.prefix + "." + newUUID);
+newClient.key = await Bun.password.hash(newClient.prefix + "." + newUUID);
 newClient.cleartextkey = newClient.prefix + "." + newUUID;

 return newClient;

@@ -1,8 +1,7 @@
-import { hash } from "@node-rs/argon2";
+import * as Bun from 'bun';
 import { passwordStrength } from "check-password-strength";
 import { IsBoolean, IsEmail, IsNotEmpty, IsOptional, IsPhoneNumber, IsString, IsUrl } from 'class-validator';
 import { getConnectionManager } from 'typeorm';
-import * as uuid from 'uuid';
 import { config } from '../../../config';
 import { PasswordMustContainLowercaseLetterError, PasswordMustContainNumberError, PasswordMustContainUppercaseLetterError, PasswordTooShortError, UserEmailNeededError, UsernameContainsIllegalCharacterError } from '../../../errors/UserErrors';
 import { UserGroupNotFoundError } from '../../../errors/UserGroupErrors';
@@ -108,9 +107,9 @@ export class CreateUser {
 newUser.firstname = this.firstname
 newUser.middlename = this.middlename
 newUser.lastname = this.lastname
-newUser.uuid = uuid.v4()
+newUser.uuid = crypto.randomUUID()
 newUser.phone = this.phone
-newUser.password = await hash(this.password + newUser.uuid);
+newUser.password = await Bun.password.hash(this.password + newUser.uuid);
 newUser.groups = await this.getGroups();
 newUser.enabled = this.enabled;

@@ -1,5 +1,4 @@
 import { IsBoolean, IsInt, IsObject, IsOptional } from 'class-validator';
-import * as uuid from 'uuid';
 import { Address } from '../../entities/Address';
 import { RunnerOrganization } from '../../entities/RunnerOrganization';
 import { CreateRunnerGroup } from '../create/CreateRunnerGroup';
@@ -42,7 +41,7 @@ export class UpdateRunnerOrganization extends CreateRunnerGroup {
 Address.validate(organization.address);

 if (this.registrationEnabled && !organization.key) {
-organization.key = uuid.v4().toUpperCase();
+organization.key = crypto.randomUUID().toUpperCase();
 }
 else {
 organization.key = null;

@@ -1,4 +1,4 @@
-import { hash } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { passwordStrength } from "check-password-strength";
 import { IsBoolean, IsEmail, IsInt, IsNotEmpty, IsOptional, IsPhoneNumber, IsString, IsUrl } from 'class-validator';
 import { getConnectionManager } from 'typeorm';
@@ -111,7 +111,7 @@ export class UpdateUser {
 if (!password_strength.contains.includes("lowercase")) { throw new PasswordMustContainLowercaseLetterError(); }
 if (!password_strength.contains.includes("number")) { throw new PasswordMustContainNumberError(); }
 if (!(password_strength.length > 9)) { throw new PasswordTooShortError(); }
-user.password = await hash(this.password + user.uuid);
+user.password = await Bun.password.hash(this.password + user.uuid);
 user.refreshTokenCount = user.refreshTokenCount + 1;
 }

@@ -1,22 +1,23 @@
 import { IsInt, IsNotEmpty, IsPositive } from "class-validator";
-import { ChildEntity, Column, ManyToOne } from "typeorm";
+import { ChildEntity, Column, Index, ManyToOne } from "typeorm";
 import { ResponseDistanceDonation } from '../responses/ResponseDistanceDonation';
 import { Donation } from "./Donation";
-import { Runner } from "./Runner";
+import type { Runner } from "./Runner";

 /**
 * Defines the DistanceDonation entity.
 * For distanceDonations a donor pledges to donate a certain amount for each kilometer ran by a runner.
 */
 @ChildEntity()
+@Index(['runner'])
 export class DistanceDonation extends Donation {
 /**
 * The donation's associated runner.
 * Used as the source of the donation's distance.
 */
 @IsNotEmpty()
-@ManyToOne(() => Runner, runner => runner.distanceDonations)
+@ManyToOne(() => require("./Runner").Runner, (runner: Runner) => runner.distanceDonations)
-runner: Runner;
+runner!: Runner;

 /**
 * The donation's amount donated per distance.
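
This entity hunk and the ones that follow switch relation imports to `import type` and resolve the related class lazily with `require()` inside the decorator factory. The type-only import is erased at compile time and the `require()` call is deferred until TypeORM evaluates the decorator callback, which breaks circular import cycles at module-load time while keeping full compile-time typing. A minimal illustration of the pattern with two hypothetical entities (`A`/`B` are placeholders, not classes from this repository):

```ts
// Minimal illustration of the "import type + lazy require" pattern used in these entity hunks.
// A and B reference each other; './B' is a hypothetical sibling entity file.
import { Entity, OneToMany, PrimaryGeneratedColumn } from 'typeorm';
import type { B } from './B'; // compile-time only, creates no runtime dependency cycle

@Entity()
export class A {
    @PrimaryGeneratedColumn()
    id!: number;

    // require() runs only when TypeORM calls the factory, after both modules are loaded
    @OneToMany(() => require('./B').B, (b: B) => b.a)
    bs!: B[];
}
```

The `!` added to the relation properties (`runner!`, `donor!`, and so on) tells TypeScript the field is definitely assigned by TypeORM rather than in the constructor.
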
@@ -2,17 +2,18 @@ import {
 IsInt,
 IsPositive
 } from "class-validator";
-import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
+import { BeforeInsert, BeforeUpdate, Column, Entity, Index, ManyToOne, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
 import { ResponseDonation } from '../responses/ResponseDonation';
-import { Donor } from './Donor';
+import type { Donor } from './Donor';

 /**
 * Defines the Donation entity.
 * A donation just associates a donor with a donation amount.
 * The specifics of the amoun's determination has to be implemented in child classes.
 */
 @Entity()
 @TableInheritance({ column: { name: "type", type: "varchar" } })
+@Index(['donor'])
 export abstract class Donation {
 /**
 * Autogenerated unique id (primary key).
@@ -24,8 +25,8 @@ export abstract class Donation {
 /**
 * The donations's donor.
 */
-@ManyToOne(() => Donor, donor => donor.donations)
+@ManyToOne(() => require("./Donor").Donor, (donor: Donor) => donor.donations)
-donor: Donor;
+donor!: Donor;

 /**
 * The donation's amount in cents (or whatever your currency's smallest unit is.).

@@ -1,7 +1,7 @@
 import { IsBoolean, IsInt } from "class-validator";
 import { ChildEntity, Column, OneToMany } from "typeorm";
 import { ResponseDonor } from '../responses/ResponseDonor';
-import { Donation } from './Donation';
+import type { Donation } from './Donation';
 import { Participant } from "./Participant";

 /**
@@ -21,8 +21,8 @@ export class Donor extends Participant {
 * Used to link the participant as the donor of a donation.
 * Attention: Only runner's can be associated as a distanceDonations distance source.
 */
-@OneToMany(() => Donation, donation => donation.donor, { nullable: true })
+@OneToMany(() => require("./Donation").Donation, (donation: Donation) => donation.donor, { nullable: true })
-donations: Donation[];
+donations!: Donation[];

 /**
 * Returns the total donations of a donor based on his linked donations.

@@ -1,19 +1,19 @@
 import {
 IsEmail,
 IsInt,
 IsNotEmpty,
 IsOptional,
 IsPhoneNumber,

 IsPositive,

 IsString
 } from "class-validator";
 import { BeforeInsert, BeforeUpdate, Column, Entity, OneToMany, PrimaryGeneratedColumn } from "typeorm";
 import { config } from '../../config';
 import { ResponseGroupContact } from '../responses/ResponseGroupContact';
 import { Address } from "./Address";
-import { RunnerGroup } from "./RunnerGroup";
+import type { RunnerGroup } from "./RunnerGroup";

 /**
 * Defines the GroupContact entity.
@@ -77,11 +77,11 @@ export class GroupContact {
 @IsEmail()
 email?: string;

 /**
 * Used to link contacts to groups.
 */
-@OneToMany(() => RunnerGroup, group => group.contact, { nullable: true })
+@OneToMany(() => require("./RunnerGroup").RunnerGroup, (group: RunnerGroup) => group.contact, { nullable: true })
-groups: RunnerGroup[];
+groups!: RunnerGroup[];

 @Column({ type: 'bigint', nullable: true, readonly: true })
 @IsInt()

@@ -9,18 +9,19 @@ import {

    IsString
} from "class-validator";
-import { BeforeInsert, BeforeUpdate, Column, Entity, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
+import { BeforeInsert, BeforeUpdate, Column, Entity, Index, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
import { config } from '../../config';
import { ResponseParticipant } from '../responses/ResponseParticipant';
import { Address } from "./Address";

/**
 * Defines the Participant entity.
 * Participans can donate and therefor be associated with donation entities.
 */
@Entity()
@TableInheritance({ column: { name: "type", type: "varchar" } })
+@Index(['email'])
export abstract class Participant {
    /**
     * Autogenerated unique id (primary key).
     */
@@ -8,7 +8,7 @@ import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, PrimaryGenerated
import { PermissionAction } from '../enums/PermissionAction';
import { PermissionTarget } from '../enums/PermissionTargets';
import { ResponsePermission } from '../responses/ResponsePermission';
-import { Principal } from './Principal';
+import type { Principal } from './Principal';
/**
 * Defines the Permission entity.
 * Permissions can be granted to principals.
@@ -26,8 +26,8 @@ export class Permission {
    /**
     * The permission's principal.
     */
-    @ManyToOne(() => Principal, principal => principal.permissions)
-    principal: Principal;
+    @ManyToOne(() => require("./Principal").Principal, (principal: Principal) => principal.permissions)
+    principal!: Principal;

    /**
     * The permission's target.
@@ -1,7 +1,7 @@
import { IsInt, IsPositive } from 'class-validator';
import { BeforeInsert, BeforeUpdate, Column, Entity, OneToMany, PrimaryGeneratedColumn, TableInheritance } from 'typeorm';
import { ResponsePrincipal } from '../responses/ResponsePrincipal';
-import { Permission } from './Permission';
+import type { Permission } from './Permission';

/**
 * Defines the principal entity.
@@ -20,8 +20,8 @@ export abstract class Principal {
    /**
     * The participant's permissions.
     */
-    @OneToMany(() => Permission, permission => permission.principal, { nullable: true })
-    permissions: Permission[];
+    @OneToMany(() => require("./Permission").Permission, (permission: Permission) => permission.principal, { nullable: true })
+    permissions!: Permission[];

    @Column({ type: 'bigint', nullable: true, readonly: true })
    @IsInt()
@@ -1,18 +1,19 @@
import { IsInt, IsNotEmpty, IsOptional, IsString } from "class-validator";
-import { ChildEntity, Column, ManyToOne, OneToMany } from "typeorm";
+import { ChildEntity, Column, Index, ManyToOne, OneToMany } from "typeorm";
import { ResponseRunner } from '../responses/ResponseRunner';
-import { DistanceDonation } from "./DistanceDonation";
+import type { DistanceDonation } from "./DistanceDonation";
import { Participant } from "./Participant";
-import { RunnerCard } from "./RunnerCard";
+import type { RunnerCard } from "./RunnerCard";
import { RunnerGroup } from "./RunnerGroup";
-import { Scan } from "./Scan";
+import type { Scan } from "./Scan";

/**
 * Defines the runner entity.
 * Runners differ from participants in being able to actually accumulate a ran distance through scans.
 * Runner's get organized in groups.
 */
@ChildEntity()
+@Index(['group'])
export class Runner extends Participant {
    /**
     * The runner's associated group.
@@ -26,22 +27,22 @@ export class Runner extends Participant {
     * The runner's associated distanceDonations.
     * Used to link runners to distanceDonations in order to calculate the donation's amount based on the distance the runner ran.
     */
-    @OneToMany(() => DistanceDonation, distanceDonation => distanceDonation.runner, { nullable: true })
-    distanceDonations: DistanceDonation[];
+    @OneToMany(() => require("./DistanceDonation").DistanceDonation, (distanceDonation: DistanceDonation) => distanceDonation.runner, { nullable: true })
+    distanceDonations!: DistanceDonation[];

    /**
     * The runner's associated cards.
     * Used to link runners to cards - yes a runner be associated with multiple cards this came in handy in the past.
     */
-    @OneToMany(() => RunnerCard, card => card.runner, { nullable: true })
-    cards: RunnerCard[];
+    @OneToMany(() => require("./RunnerCard").RunnerCard, (card: RunnerCard) => card.runner, { nullable: true })
+    cards!: RunnerCard[];

    /**
     * The runner's associated scans.
     * Used to link runners to scans (valid and fraudulant).
     */
-    @OneToMany(() => Scan, scan => scan.runner, { nullable: true })
-    scans: Scan[];
+    @OneToMany(() => require("./Scan").Scan, (scan: Scan) => scan.runner, { nullable: true })
+    scans!: Scan[];

    /**
     * The last time the runner requested a selfservice link.
@@ -6,18 +6,20 @@ import {
    IsOptional,
    IsPositive
} from "class-validator";
-import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from "typeorm";
+import { BeforeInsert, BeforeUpdate, Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn } from "typeorm";
import { RunnerCardIdOutOfRangeError } from '../../errors/RunnerCardErrors';
import { ResponseRunnerCard } from '../responses/ResponseRunnerCard';
-import { Runner } from "./Runner";
+import type { Runner } from "./Runner";
-import { TrackScan } from "./TrackScan";
+import type { TrackScan } from "./TrackScan";

/**
 * Defines the RunnerCard entity.
 * A runnerCard is a physical representation for a runner.
 * It can be associated with a runner to create scans via the scan station's.
 */
@Entity()
+@Index(['runner'])
+@Index(['enabled'])
export class RunnerCard {
    /**
     * Autogenerated unique id (primary key).
@@ -31,8 +33,8 @@ export class RunnerCard {
     * To increase reusability a card can be reassigned.
     */
    @IsOptional()
-    @ManyToOne(() => Runner, runner => runner.cards, { nullable: true })
-    runner: Runner;
+    @ManyToOne(() => require("./Runner").Runner, (runner: Runner) => runner.cards, { nullable: true })
+    runner!: Runner;

    /**
     * Is the card enabled (for fraud reasons)?
@@ -46,8 +48,8 @@ export class RunnerCard {
     * The card's associated scans.
     * Used to link cards to track scans.
     */
-    @OneToMany(() => TrackScan, scan => scan.track, { nullable: true })
-    scans: TrackScan[];
+    @OneToMany(() => require("./TrackScan").TrackScan, (scan: TrackScan) => scan.card, { nullable: true })
+    scans!: TrackScan[];

    @Column({ type: 'bigint', nullable: true, readonly: true })
    @IsInt()
@@ -8,7 +8,7 @@ import {
} from "class-validator";
import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
import { ResponseRunnerGroup } from '../responses/ResponseRunnerGroup';
import { GroupContact } from "./GroupContact";
-import { Runner } from "./Runner";
+import type { Runner } from "./Runner";

/**
 * Defines the RunnerGroup entity.
@@ -44,8 +44,8 @@ export abstract class RunnerGroup {
     * The group's associated runners.
     * Used to link runners to a runner group.
     */
-    @OneToMany(() => Runner, runner => runner.group, { nullable: true })
-    runners: Runner[];
+    @OneToMany(() => require("./Runner").Runner, (runner: Runner) => runner.group, { nullable: true })
+    runners!: Runner[];

    @Column({ type: 'bigint', nullable: true, readonly: true })
    @IsInt()
@@ -4,7 +4,7 @@ import { ResponseRunnerOrganization } from '../responses/ResponseRunnerOrganizat
import { Address } from './Address';
import { Runner } from './Runner';
import { RunnerGroup } from "./RunnerGroup";
-import { RunnerTeam } from "./RunnerTeam";
+import type { RunnerTeam } from "./RunnerTeam";

/**
 * Defines the RunnerOrganization entity.
@@ -24,8 +24,8 @@ export class RunnerOrganization extends RunnerGroup {
     * The organization's teams.
     * Used to link teams to a organization.
     */
-    @OneToMany(() => RunnerTeam, team => team.parentGroup, { nullable: true })
-    teams: RunnerTeam[];
+    @OneToMany(() => require("./RunnerTeam").RunnerTeam, (team: RunnerTeam) => team.parentGroup, { nullable: true })
+    teams!: RunnerTeam[];

    /**
     * The organization's api key for self-service registration.
@@ -1,14 +1,15 @@
import { IsNotEmpty } from "class-validator";
-import { ChildEntity, ManyToOne } from "typeorm";
+import { ChildEntity, Index, ManyToOne } from "typeorm";
import { ResponseRunnerTeam } from '../responses/ResponseRunnerTeam';
import { RunnerGroup } from "./RunnerGroup";
-import { RunnerOrganization } from "./RunnerOrganization";
+import type { RunnerOrganization } from "./RunnerOrganization";

/**
 * Defines the RunnerTeam entity.
 * This usually is a school class or department in a company.
 */
@ChildEntity()
+@Index(['parentGroup'])
export class RunnerTeam extends RunnerGroup {

    /**
@@ -16,7 +17,7 @@ export class RunnerTeam extends RunnerGroup {
     * Every team has to be part of a runnerOrganization - this get's checked on creation and update.
     */
    @IsNotEmpty()
-    @ManyToOne(() => RunnerOrganization, org => org.teams, { nullable: true })
+    @ManyToOne(() => require("./RunnerOrganization").RunnerOrganization, (org: RunnerOrganization) => org.teams, { nullable: true })
    parentGroup?: RunnerOrganization;

    /**
@@ -5,16 +5,19 @@ import {

    IsPositive
} from "class-validator";
-import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
+import { BeforeInsert, BeforeUpdate, Column, Entity, Index, ManyToOne, PrimaryGeneratedColumn, TableInheritance } from "typeorm";
import { ResponseScan } from '../responses/ResponseScan';
-import { Runner } from "./Runner";
+import type { Runner } from "./Runner";

/**
 * Defines the Scan entity.
 * A scan basicly adds a certain distance to a runner's total ran distance.
 */
@Entity()
@TableInheritance({ column: { name: "type", type: "varchar" } })
+@Index(['runner'])
+@Index(['runner', 'created_at'])
+@Index(['valid'])
export class Scan {
    /**
     * Autogenerated unique id (primary key).
@@ -28,8 +31,8 @@ export class Scan {
     * This is important to link ran distances to runners.
     */
    @IsNotEmpty()
-    @ManyToOne(() => Runner, runner => runner.scans, { nullable: false })
-    runner: Runner;
+    @ManyToOne(() => require("./Runner").Runner, (runner: Runner) => runner.scans, { nullable: false })
+    runner!: Runner;

    /**
     * Is the scan valid (for fraud reasons).
@@ -6,16 +6,19 @@ import {
    IsPositive,
    IsString
} from "class-validator";
-import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, OneToMany, PrimaryGeneratedColumn } from "typeorm";
+import { BeforeInsert, BeforeUpdate, Column, Entity, Index, ManyToOne, OneToMany, PrimaryGeneratedColumn } from "typeorm";
import { ResponseScanStation } from '../responses/ResponseScanStation';
-import { Track } from "./Track";
+import type { Track } from "./Track";
-import { TrackScan } from "./TrackScan";
+import type { TrackScan } from "./TrackScan";

/**
 * Defines the ScanStation entity.
 * ScanStations get used to create TrackScans for runners based on a scan of their runnerCard.
 */
@Entity()
+@Index(['track'])
+@Index(['prefix'])
+@Index(['enabled'])
export class ScanStation {
    /**
     * Autogenerated unique id (primary key).
@@ -38,8 +41,8 @@ export class ScanStation {
     * All scans created by this station will also be associated with this track.
     */
    @IsNotEmpty()
-    @ManyToOne(() => Track, track => track.stations, { nullable: false })
-    track: Track;
+    @ManyToOne(() => require("./Track").Track, (track: Track) => track.stations, { nullable: false })
+    track!: Track;

    /**
     * The client's api key prefix.
@@ -69,8 +72,8 @@ export class ScanStation {
    /**
     * Used to link track scans to a scan station.
     */
-    @OneToMany(() => TrackScan, scan => scan.track, { nullable: true })
-    scans: TrackScan[];
+    @OneToMany(() => require("./TrackScan").TrackScan, (scan: TrackScan) => scan.station, { nullable: true })
+    scans!: TrackScan[];

    /**
     * Is this station enabled?
@@ -7,8 +7,8 @@ import {
} from "class-validator";
import { BeforeInsert, BeforeUpdate, Column, Entity, OneToMany, PrimaryGeneratedColumn } from "typeorm";
import { ResponseTrack } from '../responses/ResponseTrack';
-import { ScanStation } from "./ScanStation";
+import type { ScanStation } from "./ScanStation";
-import { TrackScan } from "./TrackScan";
+import type { TrackScan } from "./TrackScan";

/**
 * Defines the Track entity.
@@ -53,15 +53,15 @@ export class Track {
     * Used to link scan stations to a certain track.
     * This makes the configuration of the scan stations easier.
     */
-    @OneToMany(() => ScanStation, station => station.track, { nullable: true })
-    stations: ScanStation[];
+    @OneToMany(() => require("./ScanStation").ScanStation, (station: ScanStation) => station.track, { nullable: true })
+    stations!: ScanStation[];

    /**
     * Used to link track scans to a track.
     * The scan will derive it's distance from the track's distance.
     */
-    @OneToMany(() => TrackScan, scan => scan.track, { nullable: true })
-    scans: TrackScan[];
+    @OneToMany(() => require("./TrackScan").TrackScan, (scan: TrackScan) => scan.track, { nullable: true })
+    scans!: TrackScan[];

    @Column({ type: 'bigint', nullable: true, readonly: true })
    @IsInt()
@@ -6,42 +6,47 @@ import {

    IsPositive
} from "class-validator";
-import { ChildEntity, Column, ManyToOne } from "typeorm";
+import { ChildEntity, Column, Index, ManyToOne } from "typeorm";
import { ResponseTrackScan } from '../responses/ResponseTrackScan';
-import { RunnerCard } from "./RunnerCard";
+import type { RunnerCard } from "./RunnerCard";
import { Scan } from "./Scan";
-import { ScanStation } from "./ScanStation";
+import type { ScanStation } from "./ScanStation";
-import { Track } from "./Track";
+import type { Track } from "./Track";

/**
 * Defines the TrackScan entity.
 * A track scan usaually get's generated by a scan station.
 */
@ChildEntity()
+@Index(['track'])
+@Index(['card'])
+@Index(['station'])
+@Index(['timestamp'])
+@Index(['station', 'timestamp'])
export class TrackScan extends Scan {
    /**
     * The scan's associated track.
     * This is used to determine the scan's distance.
     */
    @IsNotEmpty()
-    @ManyToOne(() => Track, track => track.scans, { nullable: true })
-    track: Track;
+    @ManyToOne(() => require("./Track").Track, (track: Track) => track.scans, { nullable: true })
+    track!: Track;

    /**
     * The runnerCard associated with the scan.
     * This get's saved for documentation and management purposes.
     */
    @IsNotEmpty()
-    @ManyToOne(() => RunnerCard, card => card.scans, { nullable: true })
-    card: RunnerCard;
+    @ManyToOne(() => require("./RunnerCard").RunnerCard, (card: RunnerCard) => card.scans, { nullable: true })
+    card!: RunnerCard;

    /**
     * The scanning station that created the scan.
     * Mainly used for logging and traceing back scans (or errors)
     */
    @IsNotEmpty()
-    @ManyToOne(() => ScanStation, station => station.scans, { nullable: true })
-    station: ScanStation;
+    @ManyToOne(() => require("./ScanStation").ScanStation, (station: ScanStation) => station.scans, { nullable: true })
+    station!: ScanStation;

    /**
     * The scan's distance in meters.
@@ -1,19 +1,20 @@
import { IsBoolean, IsEmail, IsInt, IsNotEmpty, IsOptional, IsPhoneNumber, IsString, IsUrl, IsUUID } from "class-validator";
-import { ChildEntity, Column, JoinTable, ManyToMany, OneToMany } from "typeorm";
+import { ChildEntity, Column, Index, JoinTable, ManyToMany, OneToMany } from "typeorm";
import { config } from '../../config';
import { ResponsePrincipal } from '../responses/ResponsePrincipal';
import { ResponseUser } from '../responses/ResponseUser';
import { Permission } from './Permission';
import { Principal } from './Principal';
-import { UserAction } from './UserAction';
+import type { UserAction } from './UserAction';
import { UserGroup } from './UserGroup';

/**
 * Defines the User entity.
 * Users are the ones that can use the "admin" webui and do stuff in the backend.
 */
@ChildEntity()
+@Index(['enabled'])
export class User extends Principal {
    /**
     * The user's uuid.
     * Mainly gets used as a per-user salt for the password hash.
@@ -124,11 +125,11 @@ export class User extends Principal {

    /**
     * The actions performed by this user.
     * For documentation purposes only, will be implemented later.
     */
    @IsOptional()
-    @OneToMany(() => UserAction, action => action.user, { nullable: true })
-    actions: UserAction[]
+    @OneToMany(() => require("./UserAction").UserAction, (action: UserAction) => action.user, { nullable: true })
+    actions!: UserAction[]

    /**
     * Resolves all permissions granted to this user through groups.
@@ -8,7 +8,7 @@ import {
} from "class-validator";
import { BeforeInsert, BeforeUpdate, Column, Entity, ManyToOne, PrimaryGeneratedColumn } from "typeorm";
import { PermissionAction } from '../enums/PermissionAction';
-import { User } from './User';
+import type { User } from './User';

/**
 * Defines the UserAction entity.
@@ -26,8 +26,8 @@ export class UserAction {
    /**
     * The user that performed the action.
     */
-    @ManyToOne(() => User, user => user.actions)
-    user: User
+    @ManyToOne(() => require("./User").User, (user: User) => user.actions)
+    user!: User

    /**
     * The actions's target (e.g. Track#2)
src/models/entities/index.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
/**
 * Entity barrel file for Bun compatibility.
 * Imports all entities in the correct order to resolve circular dependencies.
 */

// Base/parent entities first
export * from './Participant';
export * from './Donation';
export * from './Scan';

// Child entities that depend on the above
export * from './Runner';
export * from './DistanceDonation';
export * from './FixedDonation';
export * from './TrackScan';

// Entities with cross-references
export * from './RunnerCard';
export * from './RunnerGroup';
export * from './RunnerOrganization';
export * from './RunnerTeam';
export * from './ScanStation';
export * from './Track';

// Independent entities
export * from './Address';
export * from './ConfigFlags';
export * from './Donor';
export * from './GroupContact';
export * from './Permission';
export * from './Principal';
export * from './StatsClient';
export * from './User';
export * from './UserAction';
export * from './UserGroup';
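A usage sketch for the barrel (the loader/ormconfig wiring is not shown in this excerpt, so the consumer below is hypothetical): importing through the barrel fixes the module evaluation order once, instead of depending on which entity file happens to be loaded first.

// Hypothetical consumer, e.g. wherever the TypeORM connection is built:
import * as entities from '../models/entities';

// Collect the exported entity classes in the order the barrel defines them.
const entityClasses = Object.values(entities).filter((e) => typeof e === 'function');
// ...then pass entityClasses to createConnection({ ..., entities: entityClasses })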
@@ -3,7 +3,7 @@ import { Donation } from '../entities/Donation';
import { DonationStatus } from '../enums/DonationStatus';
import { ResponseObjectType } from '../enums/ResponseObjectType';
import { IResponse } from './IResponse';
-import { ResponseDonor } from './ResponseDonor';
+import type { ResponseDonor } from './ResponseDonor';

/**
 * Defines the donation response.
@@ -33,7 +33,7 @@ export class ResponseDonation implements IResponse {
     * The donation's donor.
     */
    @IsNotEmpty()
-    donor: ResponseDonor;
+    donor?: ResponseDonor;

    /**
     * The donation's amount in the smalles unit of your currency (default: euro cent).
@@ -4,7 +4,7 @@ import {
import { Donor } from '../entities/Donor';
import { ResponseObjectType } from '../enums/ResponseObjectType';
import { IResponse } from './IResponse';
-import { ResponseDonation } from './ResponseDonation';
+import type { ResponseDonation } from './ResponseDonation';
import { ResponseParticipant } from './ResponseParticipant';

/**
@@ -35,7 +35,7 @@ export class ResponseDonor extends ResponseParticipant implements IResponse {
    @IsInt()
    paidDonationAmount: number;

-    donations: Array<ResponseDonation>;
+    donations?: Array<ResponseDonation>;

    /**
     * Creates a ResponseRunner object from a runner.
@@ -46,6 +46,7 @@ export class ResponseDonor extends ResponseParticipant implements IResponse {
        this.receiptNeeded = donor.receiptNeeded;
        this.donationAmount = donor.donationAmount;
        this.paidDonationAmount = donor.paidDonationAmount;
+        const ResponseDonation = require('./ResponseDonation').ResponseDonation;
        this.donations = new Array<ResponseDonation>();
        if (donor.donations?.length > 0) {
            for (const donation of donor.donations) {
@@ -14,7 +14,7 @@ import { RunnerOrganization } from '../entities/RunnerOrganization';
import { ResponseObjectType } from '../enums/ResponseObjectType';
import { IResponse } from './IResponse';
import { ResponseRunnerGroup } from './ResponseRunnerGroup';
-import { ResponseRunnerTeam } from './ResponseRunnerTeam';
+import type { ResponseRunnerTeam } from './ResponseRunnerTeam';

/**
 * Defines the runnerOrganization response.
@@ -37,7 +37,7 @@ export class ResponseRunnerOrganization extends ResponseRunnerGroup implements I
     * The runnerOrganization associated teams.
     */
    @IsArray()
-    teams: ResponseRunnerTeam[];
+    teams?: ResponseRunnerTeam[];

    /**
     * The organization's registration key.
@@ -62,6 +62,7 @@ export class ResponseRunnerOrganization extends ResponseRunnerGroup implements I
    public constructor(org: RunnerOrganization) {
        super(org);
        this.address = org.address;
+        const ResponseRunnerTeam = require('./ResponseRunnerTeam').ResponseRunnerTeam;
        this.teams = new Array<ResponseRunnerTeam>();
        if (org.teams) {
            for (let team of org.teams) {
@@ -3,7 +3,7 @@ import { RunnerTeam } from '../entities/RunnerTeam';
import { ResponseObjectType } from '../enums/ResponseObjectType';
import { IResponse } from './IResponse';
import { ResponseRunnerGroup } from './ResponseRunnerGroup';
-import { ResponseRunnerOrganization } from './ResponseRunnerOrganization';
+import type { ResponseRunnerOrganization } from './ResponseRunnerOrganization';

/**
 * Defines the runnerTeam response.
@@ -20,7 +20,7 @@ export class ResponseRunnerTeam extends ResponseRunnerGroup implements IResponse
     */
    @IsObject()
    @IsNotEmpty()
-    parentGroup: ResponseRunnerOrganization;
+    parentGroup?: ResponseRunnerOrganization;

    /**
     * Creates a ResponseRunnerTeam object from a runnerTeam.
src/models/responses/ResponseScanIntake.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import { IsBoolean, IsInt, IsNotEmpty, IsObject, IsString } from 'class-validator';

/**
 * Lightweight response returned to scan stations after a TrackScan submission.
 * Contains only what the scan display needs — validity, lap time, and runner info.
 * Full ResponseTrackScan is still returned to JWT-authenticated admin/UI callers.
 */
export class ResponseScanIntakeRunner {
    @IsString()
    @IsNotEmpty()
    displayName: string;

    @IsInt()
    distance: number;
}

export class ResponseScanIntake {
    @IsBoolean()
    accepted: boolean;

    @IsBoolean()
    valid: boolean;

    @IsInt()
    lapTime: number;

    @IsObject()
    @IsNotEmpty()
    runner: ResponseScanIntakeRunner;
}
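For illustration only, a station might receive a payload like the one below. The values are invented, and the lap-time unit (seconds) is an assumption based on the Unix-second timestamps used elsewhere in this changeset.

import { ResponseScanIntake } from './ResponseScanIntake';

const example: ResponseScanIntake = {
    accepted: true,
    valid: true,
    lapTime: 312,                                               // assumed unit: seconds since the previous valid scan
    runner: { displayName: "Lisa Musterfrau", distance: 2800 }, // metres accumulated so far
};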
src/nats/CardKV.ts (new file, 67 lines)
@@ -0,0 +1,67 @@
import { KvEntry } from 'nats';
import NatsClient from './NatsClient';

const BUCKET = 'card_state';
/** 1 hour TTL in milliseconds — sliding window, reset on each access. */
const TTL_MS = 60 * 60 * 1000;

/**
 * Cached card data stored in NATS KV.
 * Keyed by the stripped card id (rawBarcode % 200000000000).
 * TTL of 1 hour of inactivity — re-put on each access to slide the window.
 */
export interface CardKVEntry {
    runnerId: number;
    runnerDisplayName: string;
    enabled: boolean;
}

async function getBucket() {
    return NatsClient.getKV(BUCKET, { ttl: TTL_MS });
}

function entryKey(cardId: number): string {
    return `card.${cardId}`;
}

/**
 * Returns the cached CardKVEntry for the given stripped card id, or null on a miss.
 * On a cache hit the entry is re-put with a fresh TTL to slide the inactivity window.
 */
export async function getCardEntry(cardId: number): Promise<CardKVEntry | null> {
    const bucket = await getBucket();
    let entry: KvEntry | null = null;
    try {
        entry = await bucket.get(entryKey(cardId));
    } catch {
        return null;
    }
    if (!entry || entry.operation === 'DEL' || entry.operation === 'PURGE') {
        return null;
    }
    const value = JSON.parse(entry.string()) as CardKVEntry;
    // Re-put to slide the TTL window
    await bucket.put(entryKey(cardId), JSON.stringify(value));
    return value;
}

/**
 * Writes a CardKVEntry for the given stripped card id with a 1-hour TTL.
 */
export async function setCardEntry(cardId: number, entry: CardKVEntry): Promise<void> {
    const bucket = await getBucket();
    await bucket.put(entryKey(cardId), JSON.stringify(entry));
}

/**
 * Removes the cached entry for the given stripped card id.
 * Call on card update (runner reassignment, enable/disable change) or delete.
 */
export async function deleteCardEntry(cardId: number): Promise<void> {
    const bucket = await getBucket();
    try {
        await bucket.delete(entryKey(cardId));
    } catch {
        // Entry may not exist in KV yet — that's fine
    }
}
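A hedged sketch of the intended cache-aside use during scan intake (the intake controller is not part of this excerpt, and loadCardFromDb stands in for the real TypeORM fallback):

import { CardKVEntry, getCardEntry, setCardEntry } from './CardKV';

async function resolveCard(cardId: number, loadCardFromDb: (id: number) => Promise<any | null>): Promise<CardKVEntry | null> {
    const cached = await getCardEntry(cardId);      // a hit also slides the 1 h TTL window
    if (cached) return cached;

    const card = await loadCardFromDb(cardId);      // hypothetical DB fallback on a miss
    if (!card) return null;

    const entry: CardKVEntry = {
        runnerId: card.runner.id,
        runnerDisplayName: `${card.runner.firstname} ${card.runner.lastname}`,
        enabled: card.enabled,
    };
    await setCardEntry(cardId, entry);              // cache for the next scan of this card
    return entry;
}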
src/nats/NatsClient.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import consola from 'consola';
import { connect, JetStreamClient, KV, KvOptions, NatsConnection } from 'nats';
import { config } from '../config';

/**
 * Singleton NATS client.
 * Call connect() once during app startup (after the DB loader).
 * All other modules obtain the connection via getKV().
 */
class NatsClient {
    private connection: NatsConnection | null = null;
    private js: JetStreamClient | null = null;
    private kvBuckets: Map<string, KV> = new Map();

    /**
     * Establishes the NATS connection and JetStream context.
     * Must be called once before any KV operations.
     */
    public async connect(): Promise<void> {
        this.connection = await connect({ servers: config.nats_url });
        this.js = this.connection.jetstream();
        consola.success(`NATS connected to ${config.nats_url}`);
    }

    /**
     * Returns a KV bucket by name, creating it if it doesn't exist yet.
     * Results are cached — repeated calls with the same name return the same instance.
     */
    public async getKV(bucketName: string, options?: Partial<KvOptions>): Promise<KV> {
        if (this.kvBuckets.has(bucketName)) {
            return this.kvBuckets.get(bucketName);
        }
        if (!this.js) {
            throw new Error('NATS not connected. Call NatsClient.connect() first.');
        }
        const kv = await this.js.views.kv(bucketName, options);
        this.kvBuckets.set(bucketName, kv);
        return kv;
    }

    /**
     * Gracefully closes the NATS connection.
     * Call during app shutdown if needed.
     */
    public async disconnect(): Promise<void> {
        if (this.connection) {
            await this.connection.drain();
            this.connection = null;
            this.js = null;
            this.kvBuckets.clear();
            consola.info('NATS disconnected.');
        }
    }

    public isConnected(): boolean {
        return this.connection !== null;
    }
}

export default new NatsClient();
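A startup sketch (the loader changes themselves are not in this excerpt; only the NATS_PREWARM flag and the connect-after-DB ordering are taken from the comments above and in RunnerKV.ts):

import NatsClient from './nats/NatsClient';
import { warmAll } from './nats/RunnerKV';

export default async function natsLoader(): Promise<void> {
    await NatsClient.connect();                      // after the DB loader, reads config.nats_url
    if (process.env.NATS_PREWARM === 'true') {
        await warmAll();                             // populate runner_state before the first scan arrives
    }
}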
src/nats/RunnerKV.ts (new file, 190 lines)
@@ -0,0 +1,190 @@
import { KvEntry } from 'nats';
import { getConnection } from 'typeorm';
import { Runner } from '../models/entities/Runner';
import { TrackScan } from '../models/entities/TrackScan';
import NatsClient from './NatsClient';

const BUCKET = 'runner_state';

/**
 * Cached runner state stored in NATS KV.
 * Keyed by runner id. No TTL — entries are permanent until explicitly deleted.
 */
export interface RunnerKVEntry {
    /** "Firstname Lastname" — middlename omitted. */
    displayName: string;
    /** Sum of all valid scan distances in metres. */
    distance: number;
    /** Unix seconds timestamp of the last valid scan. 0 if none. */
    latestTimestamp: number;
}

/** Returned from getRunnerEntry — includes the KV revision for CAS updates. */
export interface RunnerKVResult {
    entry: RunnerKVEntry;
    revision: number;
}

async function getBucket() {
    return NatsClient.getKV(BUCKET);
}

function entryKey(runnerId: number): string {
    return `runner.${runnerId}`;
}

/**
 * Returns the cached RunnerKVEntry + revision for the given runner id, or null on a miss.
 * The revision is required for CAS (compare-and-swap) updates.
 */
export async function getRunnerEntry(runnerId: number): Promise<RunnerKVResult | null> {
    const bucket = await getBucket();
    let entry: KvEntry | null = null;
    try {
        entry = await bucket.get(entryKey(runnerId));
    } catch {
        return null;
    }
    if (!entry || entry.operation === 'DEL' || entry.operation === 'PURGE') {
        return null;
    }
    return {
        entry: JSON.parse(entry.string()) as RunnerKVEntry,
        revision: entry.revision,
    };
}

/**
 * Writes a RunnerKVEntry for the given runner id.
 * If revision is provided, performs a CAS update — returns false if the revision
 * has changed (concurrent write), true on success.
 * Without a revision, performs an unconditional put.
 */
export async function setRunnerEntry(runnerId: number, entry: RunnerKVEntry, revision?: number): Promise<boolean> {
    const bucket = await getBucket();
    try {
        if (revision !== undefined) {
            await bucket.update(entryKey(runnerId), JSON.stringify(entry), revision);
        } else {
            await bucket.put(entryKey(runnerId), JSON.stringify(entry));
        }
        return true;
    } catch {
        // CAS conflict — revision has changed
        return false;
    }
}

/**
 * Removes the cached entry for the given runner id.
 * Call on runner name update or when a scan's valid flag is changed via PUT /scans/:id.
 */
export async function deleteRunnerEntry(runnerId: number): Promise<void> {
    const bucket = await getBucket();
    try {
        await bucket.delete(entryKey(runnerId));
    } catch {
        // Entry may not exist in KV yet — that's fine
    }
}

/**
 * DB fallback: loads a runner's display name, total valid distance, and latest valid
 * scan timestamp from the database, writes the result to KV, and returns it.
 *
 * Called on any KV cache miss during the scan intake flow.
 * Also handles the first-scan-ever case — latestTimestamp=0, distance=0.
 */
export async function warmRunner(runnerId: number): Promise<RunnerKVEntry> {
    const connection = getConnection();

    const runner = await connection.getRepository(Runner).findOne({ id: runnerId });
    const displayName = runner ? `${runner.firstname} ${runner.lastname}` : 'Unknown Runner';

    const distanceResult = await connection
        .getRepository(TrackScan)
        .createQueryBuilder('scan')
        .select('COALESCE(SUM(track.distance), 0)', 'total')
        .innerJoin('scan.track', 'track')
        .where('scan.runner = :runnerId', { runnerId })
        .andWhere('scan.valid = :valid', { valid: true })
        .getRawOne();

    const latestScan = await connection
        .getRepository(TrackScan)
        .findOne({
            where: { runner: { id: runnerId }, valid: true },
            order: { timestamp: 'DESC' },
        });

    const entry: RunnerKVEntry = {
        displayName,
        distance: parseInt(distanceResult?.total ?? '0', 10),
        latestTimestamp: latestScan?.timestamp ?? 0,
    };

    await setRunnerEntry(runnerId, entry);
    return entry;
}

/**
 * Bulk cache prewarming: loads all runners from the database and populates the KV cache.
 * Uses 3 efficient queries and parallel KV writes to minimize startup time.
 *
 * Call from loader during startup (if NATS_PREWARM=true) to eliminate DB reads on the hot
 * path from the very first scan.
 */
export async function warmAll(): Promise<void> {
    const connection = getConnection();

    // Query 1: All runners
    const runners = await connection
        .getRepository(Runner)
        .createQueryBuilder('runner')
        .select(['runner.id', 'runner.firstname', 'runner.lastname'])
        .getMany();

    // Query 2: Total valid distance per runner
    const distanceResults = await connection
        .getRepository(TrackScan)
        .createQueryBuilder('scan')
        .select('scan.runner', 'runnerId')
        .addSelect('COALESCE(SUM(track.distance), 0)', 'total')
        .innerJoin('scan.track', 'track')
        .where('scan.valid = :valid', { valid: true })
        .groupBy('scan.runner')
        .getRawMany();

    // Query 3: Latest valid scan timestamp per runner
    const latestResults = await connection
        .getRepository(TrackScan)
        .createQueryBuilder('scan')
        .select('scan.runner', 'runnerId')
        .addSelect('MAX(scan.timestamp)', 'latestTimestamp')
        .where('scan.valid = :valid', { valid: true })
        .groupBy('scan.runner')
        .getRawMany();

    // Build lookup maps
    const distanceMap = new Map<number, number>();
    distanceResults.forEach((row: any) => {
        distanceMap.set(parseInt(row.runnerId, 10), parseInt(row.total, 10));
    });

    const latestMap = new Map<number, number>();
    latestResults.forEach((row: any) => {
        latestMap.set(parseInt(row.runnerId, 10), parseInt(row.latestTimestamp, 10));
    });

    // Write all entries in parallel
    const writePromises = runners.map((runner) => {
        const entry: RunnerKVEntry = {
            displayName: `${runner.firstname} ${runner.lastname}`,
            distance: distanceMap.get(runner.id) ?? 0,
            latestTimestamp: latestMap.get(runner.id) ?? 0,
        };
        return setRunnerEntry(runner.id, entry);
    });

    await Promise.all(writePromises);
}
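A hedged sketch of the compare-and-swap loop these helpers enable (the real intake flow is not shown here): read the entry plus its revision, compute the new state, and retry if a concurrent scan for the same runner won the race.

import { getRunnerEntry, RunnerKVEntry, setRunnerEntry, warmRunner } from './RunnerKV';

async function applyScan(runnerId: number, metres: number, scanTime: number): Promise<boolean> {
    for (let attempt = 0; attempt < 3; attempt++) {
        let cached = await getRunnerEntry(runnerId);
        if (!cached) {
            await warmRunner(runnerId);              // DB fallback populates the KV entry
            cached = await getRunnerEntry(runnerId);
            if (!cached) return false;
        }
        const updated: RunnerKVEntry = {
            displayName: cached.entry.displayName,
            distance: cached.entry.distance + metres,
            latestTimestamp: scanTime,
        };
        if (await setRunnerEntry(runnerId, updated, cached.revision)) return true;
        // false means the revision moved under us; loop and re-read
    }
    return false;
}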
src/nats/StationKV.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
import { KvEntry } from 'nats';
import NatsClient from './NatsClient';

const BUCKET = 'station_state';

/**
 * Cached station data stored in NATS KV.
 * Keyed by station prefix — the same prefix embedded in the station token.
 * Carries all fields needed for auth and scan validation so no DB read
 * is required on the hot path after the first request from a station.
 */
export interface StationKVEntry {
    id: number;
    enabled: boolean;
    /** HMAC-SHA256 of the full station token, for re-verification on cache hit. */
    tokenHash: string;
    trackId: number;
    /** Track distance in metres. */
    trackDistance: number;
    /** Minimum lap time in seconds. 0 means no minimum (DB null mapped to 0). */
    minimumLapTime: number;
}

async function getBucket() {
    return NatsClient.getKV(BUCKET);
}

function prefixKey(prefix: string): string {
    return `station.prefix.${prefix}`;
}

function idKey(id: number): string {
    return `station.id.${id}`;
}

async function getEntry(key: string): Promise<StationKVEntry | null> {
    const bucket = await getBucket();
    let raw: KvEntry | null = null;
    try {
        raw = await bucket.get(key);
    } catch {
        return null;
    }
    if (!raw || raw.operation === 'DEL' || raw.operation === 'PURGE') {
        return null;
    }
    return JSON.parse(raw.string()) as StationKVEntry;
}

/**
 * Returns the cached StationKVEntry for the given token prefix, or null on a cache miss.
 */
export async function getStationEntry(prefix: string): Promise<StationKVEntry | null> {
    return getEntry(prefixKey(prefix));
}

/**
 * Returns the cached StationKVEntry for the given station DB id, or null on a cache miss.
 * Used by the intake flow where only stationId is available after ScanAuth.
 */
export async function getStationEntryById(id: number): Promise<StationKVEntry | null> {
    return getEntry(idKey(id));
}

/**
 * Writes a StationKVEntry under both the prefix key and the id key.
 * No TTL — entries are permanent until explicitly deleted.
 */
export async function setStationEntry(prefix: string, entry: StationKVEntry): Promise<void> {
    const bucket = await getBucket();
    const serialised = JSON.stringify(entry);
    await bucket.put(prefixKey(prefix), serialised);
    await bucket.put(idKey(entry.id), serialised);
}

/**
 * Removes the cached entries for the given prefix (and its id mirror).
 * Call this on station update or delete so the next request re-fetches from DB.
 */
export async function deleteStationEntry(prefix: string): Promise<void> {
    const bucket = await getBucket();
    // Fetch the entry first so we can also delete the id-keyed mirror
    const entry = await getEntry(prefixKey(prefix));
    try { await bucket.delete(prefixKey(prefix)); } catch { /* not cached yet */ }
    if (entry) {
        try { await bucket.delete(idKey(entry.id)); } catch { /* not cached yet */ }
    }
}
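A hedged sketch of the cache-hit path in station auth (ScanAuth itself is not in this excerpt; the HMAC check mirrors the tokenHash comment above, and the secret/key handling is an assumption):

import { createHmac, timingSafeEqual } from 'crypto';
import { getStationEntry, StationKVEntry } from './StationKV';

async function checkStationToken(prefix: string, fullToken: string, secret: string): Promise<StationKVEntry | null> {
    const cached = await getStationEntry(prefix);
    if (!cached || !cached.enabled) return null;     // miss or disabled: caller falls back to the DB path
    const expected = Buffer.from(cached.tokenHash, 'hex');
    const actual = Buffer.from(createHmac('sha256', secret).update(fullToken).digest('hex'), 'hex');
    return expected.length === actual.length && timingSafeEqual(expected, actual) ? cached : null;
}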
86
src/nats/StatsKV.ts
Normal file
86
src/nats/StatsKV.ts
Normal file
@@ -0,0 +1,86 @@
import { KvEntry } from 'nats';
import NatsClient from './NatsClient';

const BUCKET = 'stats_cache';
const TTL_SECONDS = 60; // 60 second TTL

/**
 * Stats cache stored in NATS KV with 60 second TTL.
 * Used to cache expensive aggregation queries from the stats endpoints.
 */

async function getBucket() {
    return NatsClient.getKV(BUCKET, { ttl: TTL_SECONDS * 1000 }); // TTL in milliseconds
}

/**
 * Cache key patterns (using dots instead of colons for NATS KV compatibility):
 * - "stats.overview" - main stats endpoint (GET /stats)
 * - "stats.runners.distance" - top runners by distance
 * - "stats.runners.donations" - top runners by donations
 * - "stats.runners.laptime.{trackId}" - top runners by laptime for specific track
 * - "stats.teams.distance" - top teams by distance
 * - "stats.teams.donations" - top teams by donations
 * - "stats.organizations.distance" - top organizations by distance
 * - "stats.organizations.donations" - top organizations by donations
 */

function cacheKey(path: string): string {
    // Replace colons with dots for NATS KV compatibility
    return `stats.${path.replace(/:/g, '.')}`;
}

/**
 * Returns the cached value for the given stats cache key, or null on a miss.
 */
export async function getStatsCache<T>(path: string): Promise<T | null> {
    const bucket = await getBucket();
    let entry: KvEntry | null = null;
    try {
        entry = await bucket.get(cacheKey(path));
    } catch {
        return null;
    }
    if (!entry || entry.operation === 'DEL' || entry.operation === 'PURGE') {
        return null;
    }
    return JSON.parse(entry.string()) as T;
}

/**
 * Stores a value in the stats cache with 60 second TTL.
 * The TTL is applied at the bucket level, so all entries expire automatically.
 */
export async function setStatsCache<T>(path: string, value: T): Promise<void> {
    const bucket = await getBucket();
    await bucket.put(cacheKey(path), JSON.stringify(value));
}

/**
 * Removes the cached entry for the given stats path.
 * Useful for cache invalidation when data changes.
 */
export async function deleteStatsCache(path: string): Promise<void> {
    const bucket = await getBucket();
    try {
        await bucket.delete(cacheKey(path));
    } catch {
        // Entry doesn't exist or already deleted - ignore
    }
}

/**
 * Removes all cached stats entries.
 * Call this when runners, scans, or donations are modified to ensure fresh data.
 */
export async function invalidateAllStats(): Promise<void> {
    const bucket = await getBucket();
    try {
        // Purge the entire bucket to clear all cached stats
        await bucket.destroy();
        // Recreate the bucket for future use
        await NatsClient.getKV(BUCKET, { ttl: TTL_SECONDS * 1000 });
    } catch {
        // Bucket operations can fail if bucket doesn't exist - ignore
    }
}
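Not part of the diff: a minimal cache-aside sketch for a stats endpoint built on the helpers above. The `buildOverviewStats` aggregation is a hypothetical placeholder; the helper names and the `"overview"` key come from the file itself.

```ts
import { getStatsCache, setStatsCache } from './StatsKV';

// Hypothetical endpoint body: serve the cached overview if present, otherwise
// run the expensive aggregation and cache it for the bucket's 60 second TTL.
export async function getOverview(buildOverviewStats: () => Promise<unknown>): Promise<unknown> {
    const cached = await getStatsCache<unknown>('overview'); // stored under the key "stats.overview"
    if (cached !== null) return cached;

    const fresh = await buildOverviewStats();
    await setStatsCache('overview', fresh); // expires automatically via the bucket-level TTL
    return fresh;
}
```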
@@ -1,7 +1,6 @@
-import { hash } from '@node-rs/argon2';
+import * as Bun from 'bun';
 import { Connection } from 'typeorm';
 import { Factory, Seeder } from 'typeorm-seeding';
-import * as uuid from 'uuid';
 import { CreatePermission } from '../models/actions/create/CreatePermission';
 import { CreateUserGroup } from '../models/actions/create/CreateUserGroup';
 import { Permission } from '../models/entities/Permission';
@@ -32,8 +31,8 @@ export default class SeedUsers implements Seeder {
         initialUser.firstname = "demo";
         initialUser.lastname = "demo";
         initialUser.username = "demo";
-        initialUser.uuid = uuid.v4();
-        initialUser.password = await hash("demo" + initialUser.uuid);
+        initialUser.uuid = crypto.randomUUID();
+        initialUser.password = await Bun.password.hash("demo" + initialUser.uuid);
         initialUser.email = "demo@dev.lauf-fuer-kaya.de"
         initialUser.groups = [group];
         return await connection.getRepository(User).save(initialUser);
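A side note on the seeder change (not part of the diff): `Bun.password.hash` produces an argon2id hash by default, and the matching check at login would use `Bun.password.verify`. A minimal round-trip, using the same calls as the seeder, looks like this:

```ts
// Round-trip sketch with Bun's built-in password API.
const userUuid = crypto.randomUUID();
const stored = await Bun.password.hash("demo" + userUuid); // argon2id by default

// Wherever credentials are checked, the stored hash is verified like so:
const ok = await Bun.password.verify("demo" + userUuid, stored);
console.log(ok); // true
```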
8
src/types/express.d.ts
vendored
Normal file
@@ -0,0 +1,8 @@
declare namespace Express {
    interface Request {
        /** Set by ScanAuth when the request was authenticated via a station token. Not a header — not spoofable by clients. */
        isStationAuth?: boolean;
        /** The authenticated station's DB id. Only present when isStationAuth === true. */
        stationId?: number;
    }
}
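Not part of the diff: with this ambient declaration merged into `Express.Request`, handlers can read the flags with full typing. The guard below is an illustrative sketch (the name `requireStationAuth` is an assumption, not repository code).

```ts
import { Request } from 'express';

// Illustrative guard: only requests authenticated via a station token may proceed.
export function requireStationAuth(req: Request): number {
    if (!req.isStationAuth || req.stationId === undefined) {
        throw new Error('Request was not authenticated via a station token');
    }
    return req.stationId; // typed as number thanks to the declaration merge above
}
```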
@@ -13,6 +13,9 @@
   "include": [
     "src/**/*"
   ],
+  "files": [
+    "src/types/express.d.ts"
+  ],
   "exclude": [
     "node_modules",
     "**/*.spec.ts"