Merge branch 'dev' into Inconsistent-Shift-Booking-Status

This commit is contained in:
Achintha Isuru
2026-02-26 10:38:02 -05:00
committed by GitHub
204 changed files with 15504 additions and 1061 deletions

View File

@@ -0,0 +1,13 @@
# Minimal production image for the command-api Node 20 service.
FROM node:20-alpine
WORKDIR /app
# Copy manifests first so the dependency layer is cached across src changes.
COPY package*.json ./
# Reproducible install from the lockfile, production dependencies only.
RUN npm ci --omit=dev
COPY src ./src
# The server reads PORT at startup (src/server.js).
ENV PORT=8080
EXPOSE 8080
CMD ["node", "src/server.js"]

3035
backend/command-api/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,25 @@
{
"name": "@krow/command-api",
"version": "0.1.0",
"private": true,
"type": "module",
"engines": {
"node": ">=20"
},
"scripts": {
"start": "node src/server.js",
"test": "node --test",
"migrate:idempotency": "node scripts/migrate-idempotency.mjs"
},
"dependencies": {
"express": "^4.21.2",
"firebase-admin": "^13.0.2",
"pg": "^8.16.3",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"
},
"devDependencies": {
"supertest": "^7.0.0"
}
}

View File

@@ -0,0 +1,29 @@
import { readFileSync } from 'node:fs';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { Pool } from 'pg';
// One-shot migration runner: applies sql/001_command_idempotency.sql to the
// database named by IDEMPOTENCY_DATABASE_URL, then exits. A failed query
// rejects the top-level await, so the process exits non-zero on error.
const databaseUrl = process.env.IDEMPOTENCY_DATABASE_URL;
if (!databaseUrl) {
  // eslint-disable-next-line no-console
  console.error('IDEMPOTENCY_DATABASE_URL is required');
  process.exit(1);
}
// Resolve the SQL file relative to this script, not the working directory.
const scriptDir = resolve(fileURLToPath(new URL('.', import.meta.url)));
const migrationPath = resolve(scriptDir, '../sql/001_command_idempotency.sql');
const sql = readFileSync(migrationPath, 'utf8');
const pool = new Pool({
  connectionString: databaseUrl,
  max: Number.parseInt(process.env.IDEMPOTENCY_DB_POOL_MAX || '5', 10),
});
try {
  await pool.query(sql);
  // eslint-disable-next-line no-console
  console.log('Idempotency migration applied successfully');
} finally {
  // Always release connections so the process can exit cleanly.
  await pool.end();
}

View File

@@ -0,0 +1,13 @@
-- Response cache for idempotent command handling.
-- composite_key is "<user_id>:<route>:<idempotency_key>" (see
-- buildIdempotencyKey in src/services/idempotency-store.js); a replayed
-- request returns the stored status_code/response_payload instead of
-- re-executing the command.
CREATE TABLE IF NOT EXISTS command_idempotency (
  composite_key TEXT PRIMARY KEY,
  user_id TEXT NOT NULL,
  route TEXT NOT NULL,
  idempotency_key TEXT NOT NULL,
  status_code INTEGER NOT NULL,
  response_payload JSONB NOT NULL,
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  expires_at TIMESTAMPTZ NOT NULL
);
-- Supports the periodic DELETE ... WHERE expires_at <= NOW() cleanup.
CREATE INDEX IF NOT EXISTS idx_command_idempotency_expires_at
  ON command_idempotency (expires_at);

View File

@@ -0,0 +1,30 @@
import express from 'express';
import pino from 'pino';
import pinoHttp from 'pino-http';
import { requestContext } from './middleware/request-context.js';
import { errorHandler, notFoundHandler } from './middleware/error-handler.js';
import { healthRouter } from './routes/health.js';
import { createCommandsRouter } from './routes/commands.js';
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

/**
 * Assemble the command-api Express app.
 *
 * Middleware order matters: requestContext runs before pino-http so every
 * log line carries the request id; JSON parsing precedes the routers; the
 * 404 handler and error handler terminate the chain.
 */
export function createApp() {
  const app = express();
  app.use(requestContext);
  app.use(
    pinoHttp({
      logger,
      customProps: (req) => ({ requestId: req.requestId }),
    })
  );
  app.use(express.json({ limit: '2mb' }));
  app.use(healthRouter);
  app.use('/commands', createCommandsRouter());
  app.use(notFoundHandler);
  app.use(errorHandler);
  return app;
}

View File

@@ -0,0 +1,6 @@
import { z } from 'zod';
// Minimal envelope accepted by every command route: both fields are optional
// free-form objects; the current handlers validate but do not inspect them.
export const commandBaseSchema = z.object({
  payload: z.record(z.any()).optional(),
  metadata: z.record(z.any()).optional(),
});

View File

@@ -0,0 +1,26 @@
/**
 * Application error carrying a machine-readable code, an HTTP status, and
 * structured details for the standard error envelope.
 */
export class AppError extends Error {
  /**
   * @param {string} code - Machine-readable error code (e.g. 'VALIDATION_ERROR').
   * @param {string} message - Human-readable description.
   * @param {number} [status=400] - HTTP status to respond with.
   * @param {object} [details={}] - Structured context included in the envelope.
   */
  constructor(code, message, status = 400, details = {}) {
    super(message);
    this.name = 'AppError';
    Object.assign(this, { code, status, details });
  }
}
/**
 * Normalize any thrown value into the standard { status, body } error
 * envelope. Anything without a usable integer status (or with status 0)
 * maps to 500 INTERNAL_ERROR.
 */
export function toErrorEnvelope(error, requestId) {
  let status = 500;
  if (Number.isInteger(error?.status) && error.status !== 0) {
    status = error.status;
  }
  const body = {
    code: error?.code || 'INTERNAL_ERROR',
    message: error?.message || 'Unexpected error',
    details: error?.details || {},
    requestId,
  };
  return { status, body };
}

View File

@@ -0,0 +1,45 @@
import { AppError } from '../lib/errors.js';
import { can } from '../services/policy.js';
import { verifyFirebaseToken } from '../services/firebase-auth.js';
/**
 * Extract the token from an `Authorization: Bearer <token>` header
 * (RFC 6750). Tolerates surrounding and repeated whitespace; returns null
 * when the header is absent, uses another scheme, or is malformed (no token,
 * or more than one token part), instead of silently truncating it.
 */
function getBearerToken(header) {
  if (!header) return null;
  const parts = header.trim().split(/\s+/);
  if (parts.length !== 2) return null;
  const [scheme, token] = parts;
  if (scheme.toLowerCase() !== 'bearer') return null;
  return token;
}
/**
 * Express middleware: authenticates the request from its bearer token and
 * attaches `req.actor` ({ uid, email, role }) for downstream policy checks.
 *
 * NOTE(review): when AUTH_BYPASS === 'true' any non-empty bearer token is
 * accepted and a fixed test actor is attached without touching Firebase —
 * this must never be enabled in production environments.
 */
export async function requireAuth(req, _res, next) {
  try {
    const token = getBearerToken(req.get('Authorization'));
    if (!token) {
      throw new AppError('UNAUTHENTICATED', 'Missing bearer token', 401);
    }
    // Test/bypass mode short-circuits before any Firebase call.
    if (process.env.AUTH_BYPASS === 'true') {
      req.actor = { uid: 'test-user', email: 'test@krow.local', role: 'TEST' };
      return next();
    }
    const decoded = await verifyFirebaseToken(token);
    // Normalize absent claims to null so downstream checks stay explicit.
    req.actor = {
      uid: decoded.uid,
      email: decoded.email || null,
      role: decoded.role || null,
    };
    return next();
  } catch (error) {
    // Preserve deliberate AppErrors; map everything else (expired/invalid
    // token, SDK failures) to a generic 401 so internals are not leaked.
    if (error instanceof AppError) return next(error);
    return next(new AppError('UNAUTHENTICATED', 'Token verification failed', 401));
  }
}
/**
 * Build middleware that rejects the request with 403 FORBIDDEN unless the
 * policy engine allows `action` on `resource` for the authenticated actor.
 */
export function requirePolicy(action, resource) {
  return (req, _res, next) => {
    const allowed = can(action, resource, req.actor);
    if (allowed) {
      return next();
    }
    return next(new AppError('FORBIDDEN', 'Not allowed to perform this action', 403));
  };
}

View File

@@ -0,0 +1,25 @@
import { toErrorEnvelope } from '../lib/errors.js';
/**
 * Terminal 404 handler; emits the same envelope shape as errorHandler so
 * clients can parse every failure uniformly.
 */
export function notFoundHandler(req, res) {
  const body = {
    code: 'NOT_FOUND',
    message: `Route not found: ${req.method} ${req.path}`,
    details: {},
    requestId: req.requestId,
  };
  res.status(404).json(body);
}
/**
 * Final Express error handler: converts any error into the standard
 * envelope, logs it on the per-request logger when present, and responds.
 * The unused fourth parameter is required — Express only treats a
 * four-argument function as error middleware.
 */
export function errorHandler(error, req, res, _next) {
  const envelope = toErrorEnvelope(error, req.requestId);
  // req.log is attached by pino-http; guard for direct handler invocation.
  if (req.log) {
    req.log.error(
      {
        errCode: envelope.body.code,
        status: envelope.status,
        details: envelope.body.details,
      },
      envelope.body.message
    );
  }
  res.status(envelope.status).json(envelope.body);
}

View File

@@ -0,0 +1,10 @@
import { AppError } from '../lib/errors.js';
/**
 * Require an Idempotency-Key header on command routes and expose it as
 * `req.idempotencyKey` for the downstream handler.
 */
export function requireIdempotencyKey(req, _res, next) {
  const key = req.get('Idempotency-Key');
  if (key) {
    req.idempotencyKey = key;
    return next();
  }
  return next(new AppError('MISSING_IDEMPOTENCY_KEY', 'Missing Idempotency-Key header', 400));
}

View File

@@ -0,0 +1,9 @@
import { randomUUID } from 'node:crypto';
// Accept caller-supplied request ids only when they look like sane tokens so
// a hostile X-Request-Id cannot inject log/header content or oversized values.
const REQUEST_ID_PATTERN = /^[A-Za-z0-9._-]{1,128}$/;

/**
 * Attach a request id to req/res (propagated from a well-formed incoming
 * X-Request-Id, otherwise freshly generated) and record the request start
 * time for latency bookkeeping.
 */
export function requestContext(req, res, next) {
  const incoming = req.get('X-Request-Id');
  req.requestId =
    incoming && REQUEST_ID_PATTERN.test(incoming) ? incoming : randomUUID();
  res.setHeader('X-Request-Id', req.requestId);
  res.locals.startedAt = Date.now();
  next();
}

View File

@@ -0,0 +1,113 @@
import { randomUUID } from 'node:crypto';
import { Router } from 'express';
import { AppError } from '../lib/errors.js';
import { requireAuth, requirePolicy } from '../middleware/auth.js';
import { requireIdempotencyKey } from '../middleware/idempotency.js';
import { buildIdempotencyKey, readIdempotentResult, writeIdempotentResult } from '../services/idempotency-store.js';
import { commandBaseSchema } from '../contracts/commands/command-base.js';
/**
 * Validate a command request body against the shared base schema.
 * A null/undefined body is treated as {} (both fields are optional).
 * @throws {AppError} 400 VALIDATION_ERROR with the zod issues in details.
 */
function parseBody(body) {
  const parsed = commandBaseSchema.safeParse(body || {});
  if (!parsed.success) {
    throw new AppError('VALIDATION_ERROR', 'Invalid command payload', 400, {
      issues: parsed.error.issues,
    });
  }
  return parsed.data;
}
/**
 * Build the acknowledgement payload returned for a newly accepted command.
 * The commandId is suffixed with a UUID instead of Date.now(): two commands
 * accepted on the same route in the same millisecond previously received
 * identical ids, which broke their usefulness as unique references.
 */
function createCommandResponse(route, requestId, idempotencyKey) {
  return {
    accepted: true,
    route,
    commandId: `${route}:${randomUUID()}`,
    idempotencyKey,
    requestId,
  };
}
/**
 * Generic accept-handler for a command route.
 *
 * Flow: validate body -> derive the composite idempotency key -> return the
 * cached response when this (user, route, key) was seen before -> otherwise
 * build, persist, and return a fresh acknowledgement.
 *
 * NOTE(review): policyAction/policyResource are unused in this body — policy
 * is enforced by the requirePolicy middleware on each route; these look
 * reserved for future per-command logic. Confirm before removing.
 */
function buildCommandHandler(policyAction, policyResource) {
  return async (req, res, next) => {
    try {
      parseBody(req.body);
      // req.route.path keeps the parameter placeholder (e.g. ':orderId'), so
      // the key is per-route, not per-entity.
      const route = `${req.baseUrl}${req.route.path}`;
      const compositeKey = buildIdempotencyKey({
        userId: req.actor.uid,
        route,
        idempotencyKey: req.idempotencyKey,
      });
      const existing = await readIdempotentResult(compositeKey);
      if (existing) {
        // Replay: echo the stored response without re-executing the command.
        return res.status(existing.statusCode).json(existing.payload);
      }
      const payload = createCommandResponse(route, req.requestId, req.idempotencyKey);
      // Concurrent duplicates can both miss the read above; the store
      // resolves that race (first writer wins) and returns the winner.
      const persisted = await writeIdempotentResult({
        compositeKey,
        userId: req.actor.uid,
        route,
        idempotencyKey: req.idempotencyKey,
        payload,
        statusCode: 200,
      });
      return res.status(persisted.statusCode).json(persisted.payload);
    } catch (error) {
      return next(error);
    }
  };
}
/**
 * Command routes. Every route shares the same middleware pipeline
 * (auth -> idempotency key -> policy -> generic accept handler), so the
 * table below is the single place to add a new command route.
 */
export function createCommandsRouter() {
  const router = Router();
  // [path, policy action, policy resource]
  const commandRoutes = [
    ['/orders/create', 'orders.create', 'order'],
    ['/orders/:orderId/update', 'orders.update', 'order'],
    ['/orders/:orderId/cancel', 'orders.cancel', 'order'],
    ['/shifts/:shiftId/change-status', 'shifts.change-status', 'shift'],
    ['/shifts/:shiftId/assign-staff', 'shifts.assign-staff', 'shift'],
    ['/shifts/:shiftId/accept', 'shifts.accept', 'shift'],
  ];
  for (const [path, action, resource] of commandRoutes) {
    router.post(
      path,
      requireAuth,
      requireIdempotencyKey,
      requirePolicy(action, resource),
      buildCommandHandler(action, resource)
    );
  }
  return router;
}

View File

@@ -0,0 +1,15 @@
import { Router } from 'express';
export const healthRouter = Router();

/**
 * Liveness probe payload; exposed on both /health and /healthz so either
 * convention works for load balancers and Kubernetes probes.
 */
function healthHandler(req, res) {
  res.status(200).json({
    ok: true,
    service: 'krow-command-api',
    version: process.env.SERVICE_VERSION || 'dev',
    requestId: req.requestId,
  });
}

for (const probePath of ['/health', '/healthz']) {
  healthRouter.get(probePath, healthHandler);
}

View File

@@ -0,0 +1,9 @@
// Process entry point: build the app and bind the HTTP listener.
// PORT is injected by the container runtime (see the Dockerfile's ENV PORT).
import { createApp } from './app.js';
const port = Number(process.env.PORT || 8080);
const app = createApp();
app.listen(port, () => {
  // eslint-disable-next-line no-console
  console.log(`krow-command-api listening on port ${port}`);
});

View File

@@ -0,0 +1,13 @@
import { applicationDefault, getApps, initializeApp } from 'firebase-admin/app';
import { getAuth } from 'firebase-admin/auth';

// Lazily initialize firebase-admin with Application Default Credentials;
// the getApps() guard prevents double initialization across imports.
function ensureAdminApp() {
  if (getApps().length === 0) {
    initializeApp({ credential: applicationDefault() });
  }
}

/**
 * Verify a Firebase ID token and resolve with its decoded claims.
 * Any verification failure from the Admin SDK propagates to the caller
 * (requireAuth maps it to a 401).
 */
export async function verifyFirebaseToken(token) {
  ensureAdminApp();
  return getAuth().verifyIdToken(token);
}

View File

@@ -0,0 +1,208 @@
import { Pool } from 'pg';
// TTL for stored command responses, in seconds (default 24h).
const DEFAULT_TTL_SECONDS = Number.parseInt(process.env.IDEMPOTENCY_TTL_SECONDS || '86400', 10);
// In SQL mode, delete expired rows once every N read/write operations.
const CLEANUP_EVERY_OPS = Number.parseInt(process.env.IDEMPOTENCY_CLEANUP_EVERY_OPS || '100', 10);
// Process-local fallback store: compositeKey -> { payload, statusCode, createdAt, expiresAt }.
const memoryStore = new Map();
// Memoized adapter promise; reset via __resetIdempotencyStoreForTests().
let adapterPromise = null;
/**
 * Decide which persistence adapter to use. An explicit IDEMPOTENCY_STORE
 * value ('memory' | 'sql', case-insensitive) wins; otherwise SQL is chosen
 * whenever a database URL is configured.
 */
function shouldUseSqlStore() {
  const mode = (process.env.IDEMPOTENCY_STORE || '').toLowerCase();
  switch (mode) {
    case 'memory':
      return false;
    case 'sql':
      return true;
    default:
      return Boolean(process.env.IDEMPOTENCY_DATABASE_URL);
  }
}
/**
 * Drop expired records from the in-memory store; invoked opportunistically
 * on every read. Keys are collected first so the Map is not mutated while
 * iterating.
 */
function gcExpiredMemoryRecords(now = Date.now()) {
  const expiredKeys = [];
  for (const [key, record] of memoryStore) {
    if (record.expiresAt <= now) {
      expiredKeys.push(key);
    }
  }
  for (const key of expiredKeys) {
    memoryStore.delete(key);
  }
}
/**
 * Build the in-process fallback adapter. Suitable only for a single
 * instance and tests: all state lives in this module's memoryStore Map.
 */
function createMemoryAdapter() {
  return {
    // Return the cached record for compositeKey, or null when absent/expired.
    async read(compositeKey) {
      gcExpiredMemoryRecords();
      return memoryStore.get(compositeKey) || null;
    },
    // First-writer-wins: an unexpired existing record is returned untouched,
    // mirroring the SQL adapter's ON CONFLICT DO NOTHING semantics.
    async write({
      compositeKey,
      payload,
      statusCode = 200,
    }) {
      const now = Date.now();
      const existing = memoryStore.get(compositeKey);
      if (existing && existing.expiresAt > now) {
        return existing;
      }
      const record = {
        payload,
        statusCode,
        createdAt: now,
        expiresAt: now + (DEFAULT_TTL_SECONDS * 1000),
      };
      memoryStore.set(compositeKey, record);
      return record;
    },
  };
}
/**
 * Build the PostgreSQL-backed adapter. Connects via
 * IDEMPOTENCY_DATABASE_URL, self-migrates the command_idempotency table and
 * index on startup, and garbage-collects expired rows every
 * CLEANUP_EVERY_OPS operations.
 */
async function createSqlAdapter() {
  const connectionString = process.env.IDEMPOTENCY_DATABASE_URL;
  if (!connectionString) {
    throw new Error('IDEMPOTENCY_DATABASE_URL is required for sql idempotency store');
  }
  const pool = new Pool({
    connectionString,
    max: Number.parseInt(process.env.IDEMPOTENCY_DB_POOL_MAX || '5', 10),
  });
  // Idempotent DDL; mirrors sql/001_command_idempotency.sql.
  await pool.query(`
    CREATE TABLE IF NOT EXISTS command_idempotency (
      composite_key TEXT PRIMARY KEY,
      user_id TEXT NOT NULL,
      route TEXT NOT NULL,
      idempotency_key TEXT NOT NULL,
      status_code INTEGER NOT NULL,
      response_payload JSONB NOT NULL,
      created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
      expires_at TIMESTAMPTZ NOT NULL
    );
  `);
  await pool.query(`
    CREATE INDEX IF NOT EXISTS idx_command_idempotency_expires_at
      ON command_idempotency (expires_at);
  `);
  // Amortized cleanup: every Nth read/write deletes all expired rows.
  let opCount = 0;
  async function maybeCleanupExpiredRows() {
    opCount += 1;
    if (CLEANUP_EVERY_OPS <= 0 || opCount % CLEANUP_EVERY_OPS !== 0) {
      return;
    }
    await pool.query('DELETE FROM command_idempotency WHERE expires_at <= NOW()');
  }
  // Shape a DB row like the memory adapter's records.
  function mapRow(row) {
    return {
      statusCode: row.status_code,
      payload: row.response_payload,
    };
  }
  return {
    // Fetch a live (unexpired) record, or null.
    async read(compositeKey) {
      await maybeCleanupExpiredRows();
      const result = await pool.query(
        `
        SELECT status_code, response_payload
        FROM command_idempotency
        WHERE composite_key = $1
          AND expires_at > NOW()
        `,
        [compositeKey]
      );
      if (result.rowCount === 0) {
        return null;
      }
      return mapRow(result.rows[0]);
    },
    // First-writer-wins insert: ON CONFLICT DO NOTHING, then re-select so
    // the caller always receives the record that actually won the race.
    async write({
      compositeKey,
      userId,
      route,
      idempotencyKey,
      payload,
      statusCode = 200,
    }) {
      await maybeCleanupExpiredRows();
      const expiresAt = new Date(Date.now() + (DEFAULT_TTL_SECONDS * 1000));
      const payloadJson = JSON.stringify(payload);
      await pool.query(
        `
        INSERT INTO command_idempotency (
          composite_key,
          user_id,
          route,
          idempotency_key,
          status_code,
          response_payload,
          expires_at
        )
        VALUES ($1, $2, $3, $4, $5, $6::jsonb, $7)
        ON CONFLICT (composite_key) DO NOTHING
        `,
        [compositeKey, userId, route, idempotencyKey, statusCode, payloadJson, expiresAt]
      );
      const existingResult = await pool.query(
        `
        SELECT status_code, response_payload
        FROM command_idempotency
        WHERE composite_key = $1
          AND expires_at > NOW()
        `,
        [compositeKey]
      );
      if (existingResult.rowCount === 0) {
        // Only reachable if the winning row expired between insert and select.
        throw new Error('Idempotency write failed to persist or recover existing record');
      }
      return mapRow(existingResult.rows[0]);
    },
  };
}
/**
 * Lazily create and memoize the active store adapter; the promise itself is
 * cached so concurrent callers share a single initialization.
 */
async function getAdapter() {
  if (!adapterPromise) {
    const useSql = shouldUseSqlStore();
    adapterPromise = useSql
      ? createSqlAdapter()
      : Promise.resolve(createMemoryAdapter());
  }
  return adapterPromise;
}
/**
 * Compose the storage key for one (user, route, client idempotency key)
 * triple. Matches the composite_key column of command_idempotency.
 */
export function buildIdempotencyKey({ userId, route, idempotencyKey }) {
  return [userId, route, idempotencyKey].join(':');
}
/**
 * Look up a previously stored response for compositeKey; resolves with
 * { statusCode, payload } or null when nothing (live) is stored.
 */
export async function readIdempotentResult(compositeKey) {
  const store = await getAdapter();
  return store.read(compositeKey);
}
/**
 * Persist (or recover) the response for a command execution. First writer
 * wins: when a live record already exists for compositeKey, the stored
 * record is returned unchanged instead of being overwritten.
 */
export async function writeIdempotentResult(record) {
  const {
    compositeKey,
    userId,
    route,
    idempotencyKey,
    payload,
    statusCode = 200,
  } = record;
  const store = await getAdapter();
  return store.write({
    compositeKey,
    userId,
    route,
    idempotencyKey,
    payload,
    statusCode,
  });
}
/**
 * Test hook: clears the in-memory store and forgets the memoized adapter so
 * the next call re-evaluates the environment. Not for production use.
 */
export function __resetIdempotencyStoreForTests() {
  memoryStore.clear();
  adapterPromise = null;
}

View File

@@ -0,0 +1,5 @@
/**
 * Policy stub: permits every action/resource pair for any authenticated
 * actor (anything with a truthy uid). Replace with real RBAC before
 * exposing sensitive commands.
 */
export function can(action, resource, actor) {
  // Parameters intentionally unused until real policy rules land.
  void action;
  void resource;
  const uid = actor?.uid;
  return !!uid;
}

View File

@@ -0,0 +1,54 @@
import test, { beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import request from 'supertest';
import { createApp } from '../src/app.js';
import { __resetIdempotencyStoreForTests } from '../src/services/idempotency-store.js';

// Skip real Firebase verification for the whole suite (see middleware/auth.js);
// any non-empty bearer token is then accepted.
process.env.AUTH_BYPASS = 'true';

// Each test runs against a fresh in-memory idempotency store.
beforeEach(() => {
  process.env.IDEMPOTENCY_STORE = 'memory';
  delete process.env.IDEMPOTENCY_DATABASE_URL;
  __resetIdempotencyStoreForTests();
});

test('GET /healthz returns healthy response', async () => {
  const app = createApp();
  const res = await request(app).get('/healthz');
  assert.equal(res.status, 200);
  assert.equal(res.body.ok, true);
  assert.equal(typeof res.body.requestId, 'string');
});

test('command route requires idempotency key', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .send({ payload: {} });
  assert.equal(res.status, 400);
  assert.equal(res.body.code, 'MISSING_IDEMPOTENCY_KEY');
});

// Replaying the same Idempotency-Key must return the originally generated
// command (identical commandId), not a new acknowledgement.
test('command route is idempotent by key', async () => {
  const app = createApp();
  const first = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .set('Idempotency-Key', 'abc-123')
    .send({ payload: { order: 'x' } });
  const second = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .set('Idempotency-Key', 'abc-123')
    .send({ payload: { order: 'x' } });
  assert.equal(first.status, 200);
  assert.equal(second.status, 200);
  assert.equal(first.body.commandId, second.body.commandId);
  assert.equal(first.body.idempotencyKey, 'abc-123');
});

View File

@@ -0,0 +1,56 @@
import test, { beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import {
  __resetIdempotencyStoreForTests,
  buildIdempotencyKey,
  readIdempotentResult,
  writeIdempotentResult,
} from '../src/services/idempotency-store.js';

// Force the in-memory adapter and reset its state before every test.
beforeEach(() => {
  process.env.IDEMPOTENCY_STORE = 'memory';
  delete process.env.IDEMPOTENCY_DATABASE_URL;
  __resetIdempotencyStoreForTests();
});

test('buildIdempotencyKey composes user route and client key', () => {
  const key = buildIdempotencyKey({
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
  });
  assert.equal(key, 'user-1:/commands/orders/create:req-abc');
});

// First-writer-wins: a second write with the same composite key must not
// replace the stored payload, and reads must see the first writer's record.
test('memory idempotency store returns existing payload for duplicate key', async () => {
  const compositeKey = buildIdempotencyKey({
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
  });
  const first = await writeIdempotentResult({
    compositeKey,
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
    payload: { accepted: true, commandId: 'c-1' },
    statusCode: 200,
  });
  const second = await writeIdempotentResult({
    compositeKey,
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
    payload: { accepted: true, commandId: 'c-2' },
    statusCode: 200,
  });
  const read = await readIdempotentResult(compositeKey);
  assert.equal(first.payload.commandId, 'c-1');
  assert.equal(second.payload.commandId, 'c-1');
  assert.equal(read.payload.commandId, 'c-1');
});

View File

@@ -0,0 +1,13 @@
# Minimal production image for the core-api Node 20 service.
FROM node:20-alpine
WORKDIR /app
# Copy manifests first so the dependency layer is cached across src changes.
COPY package*.json ./
# Reproducible install from the lockfile, production dependencies only.
RUN npm ci --omit=dev
COPY src ./src
# The server reads PORT at startup (src/server.js).
ENV PORT=8080
EXPOSE 8080
CMD ["node", "src/server.js"]

2962
backend/core-api/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,26 @@
{
"name": "@krow/core-api",
"version": "0.1.0",
"private": true,
"type": "module",
"engines": {
"node": ">=20"
},
"scripts": {
"start": "node src/server.js",
"test": "node --test"
},
"dependencies": {
"@google-cloud/storage": "^7.16.0",
"express": "^4.21.2",
"firebase-admin": "^13.0.2",
"google-auth-library": "^9.15.1",
"multer": "^2.0.2",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"
},
"devDependencies": {
"supertest": "^7.0.0"
}
}

View File

@@ -0,0 +1,31 @@
import express from 'express';
import pino from 'pino';
import pinoHttp from 'pino-http';
import { requestContext } from './middleware/request-context.js';
import { errorHandler, notFoundHandler } from './middleware/error-handler.js';
import { healthRouter } from './routes/health.js';
import { createCoreRouter, createLegacyCoreRouter } from './routes/core.js';
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

/**
 * Assemble the core-api Express app.
 *
 * Middleware order matters: requestContext runs before pino-http so every
 * log line carries the request id; JSON parsing precedes the routers; the
 * 404 handler and error handler terminate the chain.
 */
export function createApp() {
  const app = express();
  app.use(requestContext);
  app.use(
    pinoHttp({
      logger,
      customProps: (req) => ({ requestId: req.requestId }),
    })
  );
  app.use(express.json({ limit: '2mb' }));
  app.use(healthRouter);
  app.use('/core', createCoreRouter());
  // Legacy camelCase endpoints mounted at the app root for older clients.
  app.use('/', createLegacyCoreRouter());
  app.use(notFoundHandler);
  app.use(errorHandler);
  return app;
}

View File

@@ -0,0 +1,6 @@
import { z } from 'zod';
// Request body for POST /core/create-signed-url. expiresInSeconds is further
// capped at runtime by MAX_SIGNED_URL_SECONDS (see routes/core.js).
export const createSignedUrlSchema = z.object({
  fileUri: z.string().startsWith('gs://', 'fileUri must start with gs://'),
  expiresInSeconds: z.number().int().min(60).max(3600).optional(),
});

View File

@@ -0,0 +1,10 @@
import { z } from 'zod';
// Request body for POST /core/verifications. rules/metadata default to {}
// so downstream code can rely on them always being objects.
export const createVerificationSchema = z.object({
  type: z.enum(['attire', 'government_id', 'certification']),
  subjectType: z.string().min(1).max(80).optional(),
  subjectId: z.string().min(1).max(120).optional(),
  fileUri: z.string().startsWith('gs://', 'fileUri must start with gs://'),
  rules: z.record(z.any()).optional().default({}),
  metadata: z.record(z.any()).optional().default({}),
});

View File

@@ -0,0 +1,7 @@
import { z } from 'zod';
// Request body for POST /core/invoke-llm; responseJsonSchema is required so
// callers always declare the expected output shape up front.
export const invokeLlmSchema = z.object({
  prompt: z.string().min(1).max(12000),
  responseJsonSchema: z.record(z.any()),
  fileUrls: z.array(z.string().url()).optional(),
});

View File

@@ -0,0 +1,7 @@
import { z } from 'zod';
// Request body for POST /core/verifications/:id/review (manual review
// decision); note and reasonCode get stable defaults.
export const reviewVerificationSchema = z.object({
  decision: z.enum(['APPROVED', 'REJECTED']),
  note: z.string().max(1000).optional().default(''),
  reasonCode: z.string().max(100).optional().default('MANUAL_REVIEW'),
});

View File

@@ -0,0 +1,26 @@
/**
 * Application error carrying a machine-readable code, an HTTP status, and
 * structured details for the standard error envelope.
 */
export class AppError extends Error {
  /**
   * @param {string} code - Machine-readable error code (e.g. 'VALIDATION_ERROR').
   * @param {string} message - Human-readable description.
   * @param {number} [status=400] - HTTP status to respond with.
   * @param {object} [details={}] - Structured context included in the envelope.
   */
  constructor(code, message, status = 400, details = {}) {
    super(message);
    this.name = 'AppError';
    Object.assign(this, { code, status, details });
  }
}
/**
 * Normalize any thrown value into the standard { status, body } error
 * envelope. Anything without a usable integer status (or with status 0)
 * maps to 500 INTERNAL_ERROR.
 */
export function toErrorEnvelope(error, requestId) {
  let status = 500;
  if (Number.isInteger(error?.status) && error.status !== 0) {
    status = error.status;
  }
  const body = {
    code: error?.code || 'INTERNAL_ERROR',
    message: error?.message || 'Unexpected error',
    details: error?.details || {},
    requestId,
  };
  return { status, body };
}

View File

@@ -0,0 +1,45 @@
import { AppError } from '../lib/errors.js';
import { can } from '../services/policy.js';
import { verifyFirebaseToken } from '../services/firebase-auth.js';
/**
 * Extract the token from an `Authorization: Bearer <token>` header
 * (RFC 6750). Tolerates surrounding and repeated whitespace; returns null
 * when the header is absent, uses another scheme, or is malformed (no token,
 * or more than one token part), instead of silently truncating it.
 */
function getBearerToken(header) {
  if (!header) return null;
  const parts = header.trim().split(/\s+/);
  if (parts.length !== 2) return null;
  const [scheme, token] = parts;
  if (scheme.toLowerCase() !== 'bearer') return null;
  return token;
}
/**
 * Express middleware: authenticates the request from its bearer token and
 * attaches `req.actor` ({ uid, email, role }) for downstream policy checks.
 *
 * NOTE(review): when AUTH_BYPASS === 'true' any non-empty bearer token is
 * accepted and a fixed test actor is attached without touching Firebase —
 * this must never be enabled in production environments.
 */
export async function requireAuth(req, _res, next) {
  try {
    const token = getBearerToken(req.get('Authorization'));
    if (!token) {
      throw new AppError('UNAUTHENTICATED', 'Missing bearer token', 401);
    }
    // Test/bypass mode short-circuits before any Firebase call.
    if (process.env.AUTH_BYPASS === 'true') {
      req.actor = { uid: 'test-user', email: 'test@krow.local', role: 'TEST' };
      return next();
    }
    const decoded = await verifyFirebaseToken(token);
    // Normalize absent claims to null so downstream checks stay explicit.
    req.actor = {
      uid: decoded.uid,
      email: decoded.email || null,
      role: decoded.role || null,
    };
    return next();
  } catch (error) {
    // Preserve deliberate AppErrors; map everything else (expired/invalid
    // token, SDK failures) to a generic 401 so internals are not leaked.
    if (error instanceof AppError) return next(error);
    return next(new AppError('UNAUTHENTICATED', 'Token verification failed', 401));
  }
}
/**
 * Build middleware that rejects the request with 403 FORBIDDEN unless the
 * policy engine allows `action` on `resource` for the authenticated actor.
 */
export function requirePolicy(action, resource) {
  return (req, _res, next) => {
    const allowed = can(action, resource, req.actor);
    if (allowed) {
      return next();
    }
    return next(new AppError('FORBIDDEN', 'Not allowed to perform this action', 403));
  };
}

View File

@@ -0,0 +1,28 @@
import { toErrorEnvelope } from '../lib/errors.js';
/**
 * Terminal 404 handler; emits the same envelope shape as errorHandler so
 * clients can parse every failure uniformly.
 */
export function notFoundHandler(req, res) {
  const body = {
    code: 'NOT_FOUND',
    message: `Route not found: ${req.method} ${req.path}`,
    details: {},
    requestId: req.requestId,
  };
  res.status(404).json(body);
}
/**
 * Final Express error handler: builds the standard envelope, mirrors 429
 * rate-limit hints into the Retry-After header, logs on the per-request
 * logger when present, and responds. The unused fourth parameter is
 * required — Express only treats a four-argument function as error
 * middleware.
 */
export function errorHandler(error, req, res, _next) {
  const envelope = toErrorEnvelope(error, req.requestId);
  // Surface rate-limit retry hints to standard HTTP clients.
  if (envelope.status === 429 && envelope.body.details?.retryAfterSeconds) {
    res.set('Retry-After', String(envelope.body.details.retryAfterSeconds));
  }
  // req.log is attached by pino-http; guard for direct handler invocation.
  if (req.log) {
    req.log.error(
      {
        errCode: envelope.body.code,
        status: envelope.status,
        details: envelope.body.details,
      },
      envelope.body.message
    );
  }
  res.status(envelope.status).json(envelope.body);
}

View File

@@ -0,0 +1,9 @@
import { randomUUID } from 'node:crypto';
// Accept caller-supplied request ids only when they look like sane tokens so
// a hostile X-Request-Id cannot inject log/header content or oversized values.
const REQUEST_ID_PATTERN = /^[A-Za-z0-9._-]{1,128}$/;

/**
 * Attach a request id to req/res (propagated from a well-formed incoming
 * X-Request-Id, otherwise freshly generated) and record the request start
 * time for latency bookkeeping.
 */
export function requestContext(req, res, next) {
  const incoming = req.get('X-Request-Id');
  req.requestId =
    incoming && REQUEST_ID_PATTERN.test(incoming) ? incoming : randomUUID();
  res.setHeader('X-Request-Id', req.requestId);
  res.locals.startedAt = Date.now();
  next();
}

View File

@@ -0,0 +1,287 @@
import { Router } from 'express';
import multer from 'multer';
import { z } from 'zod';
import { AppError } from '../lib/errors.js';
import { requireAuth, requirePolicy } from '../middleware/auth.js';
import { createSignedUrlSchema } from '../contracts/core/create-signed-url.js';
import { createVerificationSchema } from '../contracts/core/create-verification.js';
import { invokeLlmSchema } from '../contracts/core/invoke-llm.js';
import { reviewVerificationSchema } from '../contracts/core/review-verification.js';
import { invokeVertexModel } from '../services/llm.js';
import { checkLlmRateLimit } from '../services/llm-rate-limit.js';
import {
ensureFileExistsForActor,
generateReadSignedUrl,
uploadToGcs,
validateFileUriAccess,
} from '../services/storage.js';
import {
createVerificationJob,
getVerificationJob,
retryVerificationJob,
reviewVerificationJob,
} from '../services/verification-jobs.js';
// Upload guardrails.
const DEFAULT_MAX_FILE_BYTES = 10 * 1024 * 1024; // 10 MiB
const DEFAULT_MAX_SIGNED_URL_SECONDS = 900; // 15 minutes
// image/jpg is non-standard but sent by some clients; accepted alongside image/jpeg.
const ALLOWED_FILE_TYPES = new Set(['application/pdf', 'image/jpeg', 'image/jpg', 'image/png']);
// Files are buffered in memory (no temp files); multer enforces the size cap.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: Number(process.env.MAX_UPLOAD_BYTES || DEFAULT_MAX_FILE_BYTES),
  },
});
// Optional multipart form fields accompanying an upload.
const uploadMetaSchema = z.object({
  category: z.string().max(80).optional(),
  visibility: z.enum(['public', 'private']).optional(),
});
/**
 * Deterministic stand-in for a GCS signed URL, used while SIGNED_URL_MOCK
 * is enabled (the default). Encodes the gs:// URI into a fake
 * storage.googleapis.com URL and reports the matching expiry timestamp.
 */
function mockSignedUrl(fileUri, expiresInSeconds) {
  const expiresAt = new Date(Date.now() + expiresInSeconds * 1000).toISOString();
  const encodedUri = encodeURIComponent(fileUri);
  const signedUrl = `https://storage.googleapis.com/mock-signed-url/${encodedUri}?expires=${expiresInSeconds}`;
  return { signedUrl, expiresAt };
}
// Environment toggles: every mock defaults ON and is disabled only by the
// explicit string 'false', so real GCP integrations are always opt-in.
function useMockSignedUrl() {
  return process.env.SIGNED_URL_MOCK !== 'false';
}
function useMockUpload() {
  return process.env.UPLOAD_MOCK !== 'false';
}
function requireVerificationFileExists() {
  return process.env.VERIFICATION_REQUIRE_FILE_EXISTS !== 'false';
}
/**
 * Validate a request body against the given zod schema.
 * @throws {AppError} 400 VALIDATION_ERROR with the zod issues in details.
 */
function parseBody(schema, body) {
  const parsed = schema.safeParse(body);
  if (!parsed.success) {
    throw new AppError('VALIDATION_ERROR', 'Invalid request payload', 400, {
      issues: parsed.error.issues,
    });
  }
  return parsed.data;
}
/**
 * POST /core/upload-file — accept a single multipart file (field 'file'),
 * validate its type and size, and store it under the actor's uploads/
 * prefix in the public or private bucket. With UPLOAD_MOCK on (the default)
 * no GCS write happens; only the would-be gs:// URI is returned.
 */
async function handleUploadFile(req, res, next) {
  try {
    const file = req.file;
    if (!file) {
      throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
    }
    if (!ALLOWED_FILE_TYPES.has(file.mimetype)) {
      throw new AppError('INVALID_FILE_TYPE', `Unsupported file type: ${file.mimetype}`, 400);
    }
    // Defense in depth: multer already caps fileSize, but re-check explicitly.
    const maxFileSize = Number(process.env.MAX_UPLOAD_BYTES || DEFAULT_MAX_FILE_BYTES);
    if (file.size > maxFileSize) {
      throw new AppError('FILE_TOO_LARGE', `File exceeds ${maxFileSize} bytes`, 400);
    }
    const meta = parseBody(uploadMetaSchema, req.body || {});
    const visibility = meta.visibility || 'private';
    const bucket = visibility === 'public'
      ? process.env.PUBLIC_BUCKET || 'krow-workforce-dev-public'
      : process.env.PRIVATE_BUCKET || 'krow-workforce-dev-private';
    // Sanitize the client-supplied filename (strips path separators etc.);
    // the actor-uid prefix scopes objects per user.
    const safeName = file.originalname.replace(/[^a-zA-Z0-9._-]/g, '_');
    const objectPath = `uploads/${req.actor.uid}/${Date.now()}_${safeName}`;
    const fileUri = `gs://${bucket}/${objectPath}`;
    if (!useMockUpload()) {
      await uploadToGcs({
        bucket,
        objectPath,
        contentType: file.mimetype,
        buffer: file.buffer,
      });
    }
    res.status(200).json({
      fileUri,
      contentType: file.mimetype,
      size: file.size,
      bucket,
      path: objectPath,
      requestId: req.requestId,
    });
  } catch (error) {
    // Multer signals an oversized body with code LIMIT_FILE_SIZE; normalize
    // it into the service's error envelope.
    if (error?.code === 'LIMIT_FILE_SIZE') {
      return next(new AppError('FILE_TOO_LARGE', 'File exceeds upload limit', 400));
    }
    return next(error);
  }
}
/**
 * POST /core/create-signed-url — exchange a gs:// URI the actor may access
 * for a short-lived read URL. TTL defaults to 300s and is capped by
 * MAX_SIGNED_URL_SECONDS (default 900).
 */
async function handleCreateSignedUrl(req, res, next) {
  try {
    const payload = parseBody(createSignedUrlSchema, req.body || {});
    const expiresInSeconds = payload.expiresInSeconds || 300;
    const maxSignedUrlSeconds = Number.parseInt(
      process.env.MAX_SIGNED_URL_SECONDS || `${DEFAULT_MAX_SIGNED_URL_SECONDS}`,
      10
    );
    if (expiresInSeconds > maxSignedUrlSeconds) {
      throw new AppError('VALIDATION_ERROR', `expiresInSeconds must be <= ${maxSignedUrlSeconds}`, 400);
    }
    let signed;
    if (useMockSignedUrl()) {
      // Mock mode still enforces ownership of the target object.
      validateFileUriAccess({
        fileUri: payload.fileUri,
        actorUid: req.actor.uid,
      });
      signed = mockSignedUrl(payload.fileUri, expiresInSeconds);
    } else {
      signed = await generateReadSignedUrl({
        fileUri: payload.fileUri,
        actorUid: req.actor.uid,
        expiresInSeconds,
      });
    }
    res.status(200).json({
      ...signed,
      requestId: req.requestId,
    });
  } catch (error) {
    return next(error);
  }
}
/**
 * POST /core/invoke-llm — rate-limited model invocation.
 *
 * NOTE(review): the real Vertex call runs only when LLM_MOCK === 'false';
 * any other value (including unset) returns the mock response — confirm
 * this default is intended for each environment.
 */
async function handleInvokeLlm(req, res, next) {
  try {
    const payload = parseBody(invokeLlmSchema, req.body || {});
    // Per-uid fixed-window limit; the 429 details carry retryAfterSeconds,
    // which the error handler mirrors into a Retry-After header.
    const rateLimit = checkLlmRateLimit({ uid: req.actor.uid });
    if (!rateLimit.allowed) {
      throw new AppError('RATE_LIMITED', 'Too many model requests. Please retry shortly.', 429, {
        retryAfterSeconds: rateLimit.retryAfterSeconds,
      });
    }
    const startedAt = Date.now();
    if (process.env.LLM_MOCK === 'false') {
      const llmResult = await invokeVertexModel({
        prompt: payload.prompt,
        responseJsonSchema: payload.responseJsonSchema,
        fileUrls: payload.fileUrls,
      });
      return res.status(200).json({
        result: llmResult.result,
        model: llmResult.model,
        latencyMs: Date.now() - startedAt,
        requestId: req.requestId,
      });
    }
    return res.status(200).json({
      result: {
        summary: 'Mock model response. Replace with Vertex AI integration.',
        inputPromptSize: payload.prompt.length,
      },
      model: process.env.LLM_MODEL || 'vertexai/gemini-mock',
      latencyMs: Date.now() - startedAt,
      requestId: req.requestId,
    });
  } catch (error) {
    return next(error);
  }
}
/**
 * POST /core/verifications — queue a verification job for a file the actor
 * may access. Responds 202 because processing is asynchronous. The GCS
 * existence check runs only when both the check and real uploads are
 * enabled (mocked uploads have nothing to find).
 */
async function handleCreateVerification(req, res, next) {
  try {
    const payload = parseBody(createVerificationSchema, req.body || {});
    validateFileUriAccess({
      fileUri: payload.fileUri,
      actorUid: req.actor.uid,
    });
    if (requireVerificationFileExists() && !useMockUpload()) {
      await ensureFileExistsForActor({
        fileUri: payload.fileUri,
        actorUid: req.actor.uid,
      });
    }
    const created = createVerificationJob({
      actorUid: req.actor.uid,
      payload,
    });
    return res.status(202).json({
      ...created,
      requestId: req.requestId,
    });
  } catch (error) {
    return next(error);
  }
}
/**
 * GET /core/verifications/:verificationId — fetch one of the actor's
 * verification jobs.
 */
async function handleGetVerification(req, res, next) {
  try {
    const job = getVerificationJob(req.params.verificationId, req.actor.uid);
    return res.status(200).json({ ...job, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
/**
 * POST /core/verifications/:verificationId/review — record a manual
 * APPROVED/REJECTED decision on the actor's verification job.
 */
async function handleReviewVerification(req, res, next) {
  try {
    const decision = parseBody(reviewVerificationSchema, req.body || {});
    const updated = reviewVerificationJob(req.params.verificationId, req.actor.uid, decision);
    return res.status(200).json({ ...updated, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
/**
 * POST /core/verifications/:verificationId/retry — re-queue the actor's
 * verification job; 202 because processing is asynchronous.
 */
async function handleRetryVerification(req, res, next) {
  try {
    const updated = retryVerificationJob(req.params.verificationId, req.actor.uid);
    return res.status(202).json({ ...updated, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
/**
 * Authenticated /core routes. Each route pairs requireAuth with a
 * route-specific policy action; /upload-file additionally runs multer to
 * parse the single 'file' multipart field before the handler.
 */
export function createCoreRouter() {
  const router = Router();
  router.post('/upload-file', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleUploadFile);
  router.post('/create-signed-url', requireAuth, requirePolicy('core.sign-url', 'file'), handleCreateSignedUrl);
  router.post('/invoke-llm', requireAuth, requirePolicy('core.invoke-llm', 'model'), handleInvokeLlm);
  router.post('/verifications', requireAuth, requirePolicy('core.verification.create', 'verification'), handleCreateVerification);
  router.get('/verifications/:verificationId', requireAuth, requirePolicy('core.verification.read', 'verification'), handleGetVerification);
  router.post('/verifications/:verificationId/review', requireAuth, requirePolicy('core.verification.review', 'verification'), handleReviewVerification);
  router.post('/verifications/:verificationId/retry', requireAuth, requirePolicy('core.verification.retry', 'verification'), handleRetryVerification);
  return router;
}
/**
 * Legacy camelCase aliases (/uploadFile, /createSignedUrl, /invokeLLM)
 * mounted at the app root for pre-rename clients; same auth, policies, and
 * handlers as the /core routes.
 */
export function createLegacyCoreRouter() {
  const router = Router();
  router.post('/uploadFile', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleUploadFile);
  router.post('/createSignedUrl', requireAuth, requirePolicy('core.sign-url', 'file'), handleCreateSignedUrl);
  router.post('/invokeLLM', requireAuth, requirePolicy('core.invoke-llm', 'model'), handleInvokeLlm);
  return router;
}

View File

@@ -0,0 +1,15 @@
import { Router } from 'express';
export const healthRouter = Router();

/**
 * Liveness probe payload; exposed on both /health and /healthz so either
 * convention works for load balancers and Kubernetes probes.
 */
function healthHandler(req, res) {
  res.status(200).json({
    ok: true,
    service: 'krow-core-api',
    version: process.env.SERVICE_VERSION || 'dev',
    requestId: req.requestId,
  });
}

for (const probePath of ['/health', '/healthz']) {
  healthRouter.get(probePath, healthHandler);
}

View File

@@ -0,0 +1,9 @@
// Process entry point: build the app and bind the HTTP listener.
// PORT is injected by the container runtime (see the Dockerfile's ENV PORT).
import { createApp } from './app.js';
const port = Number(process.env.PORT || 8080);
const app = createApp();
app.listen(port, () => {
  // eslint-disable-next-line no-console
  console.log(`krow-core-api listening on port ${port}`);
});

View File

@@ -0,0 +1,13 @@
import { applicationDefault, getApps, initializeApp } from 'firebase-admin/app';
import { getAuth } from 'firebase-admin/auth';

// Initializes the default firebase-admin app at most once, using
// Application Default Credentials from the runtime environment.
function ensureAdminApp() {
  if (getApps().length > 0) {
    return;
  }
  initializeApp({ credential: applicationDefault() });
}

/**
 * Verifies a Firebase ID token and resolves with its decoded claims.
 * Rejects (via firebase-admin) when the token is invalid or expired.
 */
export async function verifyFirebaseToken(token) {
  ensureAdminApp();
  return getAuth().verifyIdToken(token);
}

View File

@@ -0,0 +1,41 @@
// Per-user fixed-window counters: uid -> { windowKey, count }.
// Fix: entries left over from past windows are swept lazily (below) so the
// map no longer grows without bound as unique uids accumulate over time.
const counters = new Map();
let lastSweptWindow = null;

// One-minute fixed windows keyed by epoch-minute.
function currentWindowKey(now = Date.now()) {
  return Math.floor(now / 60000);
}

// Configured requests-per-minute budget; non-positive / unparsable disables limiting.
function perMinuteLimit() {
  return Number.parseInt(process.env.LLM_RATE_LIMIT_PER_MINUTE || '20', 10);
}

// Drops counters recorded for other windows. Runs at most once per window,
// so the amortized cost per request stays O(1).
function sweepStaleEntries(windowKey) {
  if (lastSweptWindow === windowKey) {
    return;
  }
  lastSweptWindow = windowKey;
  for (const [uid, record] of counters) {
    if (record.windowKey !== windowKey) {
      counters.delete(uid);
    }
  }
}

/**
 * Fixed-window per-user rate limiter for LLM invocations.
 *
 * @param {{ uid: string, now?: number }} params caller identity and optional clock override.
 * @returns {{ allowed: boolean, remaining: number|null, retryAfterSeconds: number }}
 *   `remaining` is null when limiting is disabled (limit <= 0 or unparsable).
 */
export function checkLlmRateLimit({ uid, now = Date.now() }) {
  const limit = perMinuteLimit();
  if (!Number.isFinite(limit) || limit <= 0) {
    return { allowed: true, remaining: null, retryAfterSeconds: 0 };
  }
  const windowKey = currentWindowKey(now);
  sweepStaleEntries(windowKey);
  const record = counters.get(uid);
  if (!record || record.windowKey !== windowKey) {
    // First request in this window (or the uid's record was for an old window).
    counters.set(uid, { windowKey, count: 1 });
    return { allowed: true, remaining: limit - 1, retryAfterSeconds: 0 };
  }
  if (record.count >= limit) {
    // Seconds until the next window opens; never advertise less than 1s.
    const retryAfterSeconds = (windowKey + 1) * 60 - Math.floor(now / 1000);
    return {
      allowed: false,
      remaining: 0,
      retryAfterSeconds: Math.max(1, retryAfterSeconds),
    };
  }
  record.count += 1;
  counters.set(uid, record);
  return { allowed: true, remaining: limit - record.count, retryAfterSeconds: 0 };
}

// Test hook: clears all limiter state.
export function __resetLlmRateLimitForTests() {
  counters.clear();
  lastSweptWindow = null;
}

View File

@@ -0,0 +1,145 @@
import { GoogleAuth } from 'google-auth-library';
import { AppError } from '../lib/errors.js';
// Resolves the Vertex AI project/location pair from the environment.
// Throws MODEL_FAILED (500) when no GCP project is configured.
function buildVertexConfig() {
  const project = process.env.GCP_PROJECT_ID || process.env.GOOGLE_CLOUD_PROJECT;
  if (!project) {
    throw new AppError('MODEL_FAILED', 'GCP project is required for model invocation', 500);
  }
  const location = process.env.LLM_LOCATION || process.env.BACKEND_REGION || 'us-central1';
  return { project, location };
}
/**
 * Races `promise` against a MODEL_TIMEOUT rejection after `timeoutMs`.
 * Fix: the timer is now cleared once the race settles — previously it was
 * leaked, keeping the event loop alive for up to `timeoutMs` after every
 * successful model call.
 */
function withTimeout(promise, timeoutMs) {
  let timer;
  const timeoutPromise = new Promise((_, reject) => {
    timer = setTimeout(() => {
      reject(new AppError('MODEL_TIMEOUT', `Model request exceeded ${timeoutMs}ms`, 504));
    }, timeoutMs);
  });
  // `.finally` preserves the winner's settlement while releasing the timer.
  return Promise.race([promise, timeoutPromise]).finally(() => {
    clearTimeout(timer);
  });
}
// Concatenates the text parts of a Vertex candidate and trims the result.
// Missing candidate/content/parts yields an empty string.
function toTextFromCandidate(candidate) {
  const parts = candidate?.content?.parts;
  if (!parts) {
    return '';
  }
  let combined = '';
  for (const part of parts) {
    combined += part?.text || '';
  }
  return combined.trim();
}
// Appends a strict-JSON instruction plus the serialized schema to the prompt.
function withJsonSchemaInstruction(prompt, responseJsonSchema) {
  const schemaText = JSON.stringify(responseJsonSchema);
  const instruction = `Respond with strict JSON only. Follow this schema exactly:\n${schemaText}`;
  return `${prompt}\n\n${instruction}`;
}
// Best-effort MIME type from the URI's file extension (query string ignored,
// case-insensitive). Unknown extensions fall back to application/octet-stream.
function guessMimeTypeFromUri(fileUri) {
  const path = fileUri.split('?')[0].toLowerCase();
  const byExtension = [
    ['.jpg', 'image/jpeg'],
    ['.jpeg', 'image/jpeg'],
    ['.png', 'image/png'],
    ['.pdf', 'application/pdf'],
  ];
  for (const [extension, mimeType] of byExtension) {
    if (path.endsWith(extension)) {
      return mimeType;
    }
  }
  return 'application/octet-stream';
}
// Builds the Vertex `parts` array: the text prompt first, then one
// fileData part per referenced URI with a guessed MIME type.
function buildMultimodalParts(prompt, fileUris = []) {
  const fileParts = fileUris.map((fileUri) => ({
    fileData: {
      fileUri,
      mimeType: guessMimeTypeFromUri(fileUri),
    },
  }));
  return [{ text: prompt }, ...fileParts];
}
/**
 * Calls a Vertex AI `generateContent` endpoint and returns `{ model, result }`.
 *
 * The request asks for JSON output (responseMimeType) at low temperature.
 * Errors are normalized: AppErrors (e.g. MODEL_TIMEOUT from withTimeout) are
 * rethrown as-is, everything else becomes MODEL_FAILED (502). An empty
 * candidate text is also MODEL_FAILED. If the model's text is not valid JSON,
 * the raw text is returned under `result.raw` instead of throwing.
 */
async function callVertexJsonModel({ model, timeoutMs, parts }) {
  const { project, location } = buildVertexConfig();
  const url = `https://${location}-aiplatform.googleapis.com/v1/projects/${project}/locations/${location}/publishers/google/models/${model}:generateContent`;
  // Uses Application Default Credentials with the cloud-platform scope.
  const auth = new GoogleAuth({
    scopes: ['https://www.googleapis.com/auth/cloud-platform'],
  });
  let response;
  try {
    const client = await auth.getClient();
    response = await withTimeout(
      client.request({
        url,
        method: 'POST',
        data: {
          contents: [{ role: 'user', parts }],
          generationConfig: {
            temperature: 0.2,
            responseMimeType: 'application/json',
          },
        },
      }),
      timeoutMs
    );
  } catch (error) {
    // Preserve typed errors (timeout/config); wrap transport failures generically.
    if (error instanceof AppError) {
      throw error;
    }
    throw new AppError('MODEL_FAILED', 'Model invocation failed', 502);
  }
  // Only the first candidate is considered.
  const text = toTextFromCandidate(response?.data?.candidates?.[0]);
  if (!text) {
    throw new AppError('MODEL_FAILED', 'Model returned empty response', 502);
  }
  try {
    return {
      model,
      result: JSON.parse(text),
    };
  } catch {
    // Model ignored the JSON instruction; hand the raw text to the caller.
    return {
      model,
      result: {
        raw: text,
      },
    };
  }
}
/**
 * Text-only Vertex invocation. File URLs are appended to the prompt as plain
 * text context (not sent as fileData parts — see invokeVertexMultimodalModel).
 * Model and timeout come from LLM_MODEL / LLM_TIMEOUT_MS with defaults.
 */
export async function invokeVertexModel({ prompt, responseJsonSchema, fileUrls = [] }) {
  const model = process.env.LLM_MODEL || 'gemini-2.0-flash-001';
  const timeoutMs = Number.parseInt(process.env.LLM_TIMEOUT_MS || '20000', 10);
  let fullPrompt = withJsonSchemaInstruction(prompt, responseJsonSchema);
  if (fileUrls.length > 0) {
    fullPrompt += `\nFiles:\n${fileUrls.join('\n')}`;
  }
  return callVertexJsonModel({
    model,
    timeoutMs,
    parts: [{ text: fullPrompt }],
  });
}
/**
 * Multimodal Vertex invocation: the prompt plus each fileUri as a fileData part.
 * Explicit `model` / `timeoutMs` override the LLM_MODEL / LLM_TIMEOUT_MS env
 * defaults. Note `||` deliberately treats 0/'' as "unset" for both overrides.
 */
export async function invokeVertexMultimodalModel({
  prompt,
  responseJsonSchema,
  fileUris = [],
  model,
  timeoutMs,
}) {
  const resolvedModel = model || process.env.LLM_MODEL || 'gemini-2.0-flash-001';
  const resolvedTimeoutMs = Number.parseInt(`${timeoutMs || process.env.LLM_TIMEOUT_MS || '20000'}`, 10);
  const promptWithSchema = withJsonSchemaInstruction(prompt, responseJsonSchema);
  return callVertexJsonModel({
    model: resolvedModel,
    timeoutMs: resolvedTimeoutMs,
    parts: buildMultimodalParts(promptWithSchema, fileUris),
  });
}

View File

@@ -0,0 +1,5 @@
/**
 * Coarse policy gate: any authenticated actor (one with a uid) may currently
 * perform any action on any resource. `action`/`resource` are accepted for
 * future per-action rules but intentionally unused today.
 */
export function can(action, resource, actor) {
  void action;
  void resource;
  // Placeholder policy — tighten per action/resource when real rules land.
  return Boolean(actor?.uid);
}

View File

@@ -0,0 +1,83 @@
import { Storage } from '@google-cloud/storage';
import { AppError } from '../lib/errors.js';
// Shared GCS client; authenticates via Application Default Credentials.
const storage = new Storage();
/**
 * Splits a `gs://bucket/object` URI into `{ bucket, path }`.
 * Throws VALIDATION_ERROR (400) when the scheme is missing or when either
 * the bucket or the object path is empty.
 */
export function parseGsUri(fileUri) {
  if (!fileUri.startsWith('gs://')) {
    throw new AppError('VALIDATION_ERROR', 'fileUri must start with gs://', 400);
  }
  const withoutScheme = fileUri.slice('gs://'.length);
  const separator = withoutScheme.indexOf('/');
  // Rejects a missing separator, an empty bucket, or an empty object path.
  if (separator <= 0 || separator >= withoutScheme.length - 1) {
    throw new AppError('VALIDATION_ERROR', 'fileUri must include bucket and object path', 400);
  }
  return {
    bucket: withoutScheme.slice(0, separator),
    path: withoutScheme.slice(separator + 1),
  };
}
// The only buckets signing/verification operations may touch; each tier is
// overridable via env with dev defaults.
function allowedBuckets() {
  const publicBucket = process.env.PUBLIC_BUCKET || 'krow-workforce-dev-public';
  const privateBucket = process.env.PRIVATE_BUCKET || 'krow-workforce-dev-private';
  return new Set([publicBucket, privateBucket]);
}
/**
 * Validates that `fileUri` is in an allowed bucket AND under the actor's own
 * `uploads/<uid>/` prefix. Throws FORBIDDEN (403) otherwise; returns the
 * parsed `{ bucket, path }` on success.
 */
export function validateFileUriAccess({ fileUri, actorUid }) {
  const parsed = parseGsUri(fileUri);
  if (!allowedBuckets().has(parsed.bucket)) {
    throw new AppError('FORBIDDEN', `Bucket not allowed for signing: ${parsed.bucket}`, 403);
  }
  // Per-user namespace check: actors may only touch their own upload prefix.
  if (!parsed.path.startsWith(`uploads/${actorUid}/`)) {
    throw new AppError('FORBIDDEN', 'Cannot sign URL for another user path', 403);
  }
  return { bucket: parsed.bucket, path: parsed.path };
}
/**
 * Writes `buffer` to gs://bucket/objectPath with the given contentType.
 * Non-resumable single-shot upload; cacheControl forbids shared caching.
 */
export async function uploadToGcs({ bucket, objectPath, contentType, buffer }) {
  const destination = storage.bucket(bucket).file(objectPath);
  const options = {
    resumable: false,
    contentType,
    metadata: {
      cacheControl: 'private, max-age=0',
    },
  };
  await destination.save(buffer, options);
}
/**
 * Issues a V4 read-only signed URL for a file the actor owns.
 * Throws FORBIDDEN for paths outside the actor's namespace and NOT_FOUND
 * when the object does not exist. Returns { signedUrl, expiresAt } with
 * expiresAt as an ISO-8601 timestamp.
 */
export async function generateReadSignedUrl({ fileUri, actorUid, expiresInSeconds }) {
  const { bucket, path } = validateFileUriAccess({ fileUri, actorUid });
  const file = storage.bucket(bucket).file(path);
  const [exists] = await file.exists();
  if (!exists) {
    throw new AppError('NOT_FOUND', 'File not found for signed URL', 404, { fileUri });
  }
  const expiresAtMs = Date.now() + expiresInSeconds * 1000;
  const [signedUrl] = await file.getSignedUrl({
    version: 'v4',
    action: 'read',
    expires: expiresAtMs,
  });
  return {
    signedUrl,
    expiresAt: new Date(expiresAtMs).toISOString(),
  };
}
/**
 * Asserts that an evidence file exists and belongs to the actor's namespace.
 * Throws FORBIDDEN on ownership/bucket violations, NOT_FOUND when missing.
 */
export async function ensureFileExistsForActor({ fileUri, actorUid }) {
  const { bucket, path } = validateFileUriAccess({ fileUri, actorUid });
  const [exists] = await storage.bucket(bucket).file(path).exists();
  if (!exists) {
    throw new AppError('NOT_FOUND', 'Evidence file not found', 404, { fileUri });
  }
}

View File

@@ -0,0 +1,510 @@
import crypto from 'node:crypto';
import { AppError } from '../lib/errors.js';
import { invokeVertexMultimodalModel } from './llm.js';
// In-memory job store: verificationId -> mutable job record.
// NOTE(review): state is per-process only — jobs vanish on restart and are not
// shared across instances; confirm this is acceptable for the deployment model.
const jobs = new Map();
// Lifecycle states for a verification job.
export const VerificationStatus = Object.freeze({
  PENDING: 'PENDING',
  PROCESSING: 'PROCESSING',
  AUTO_PASS: 'AUTO_PASS',
  AUTO_FAIL: 'AUTO_FAIL',
  NEEDS_REVIEW: 'NEEDS_REVIEW',
  APPROVED: 'APPROVED',
  REJECTED: 'REJECTED',
  ERROR: 'ERROR',
});
// Statuses a machine check is allowed to finish in.
const MACHINE_TERMINAL_STATUSES = new Set([
  VerificationStatus.AUTO_PASS,
  VerificationStatus.AUTO_FAIL,
  VerificationStatus.NEEDS_REVIEW,
  VerificationStatus.ERROR,
]);
// Human decisions; once a job reaches one of these, further review is rejected.
const HUMAN_TERMINAL_STATUSES = new Set([
  VerificationStatus.APPROVED,
  VerificationStatus.REJECTED,
]);
// Current time as an ISO-8601 string, used for all audit timestamps.
function nowIso() {
  return new Date().toISOString();
}
// Access-control mode for verification reads/reviews. 'authenticated'
// (the default) lets any signed-in actor through; any other value restricts
// access to the job owner (see assertAccess).
function accessMode() {
  const mode = process.env.VERIFICATION_ACCESS_MODE;
  return mode || 'authenticated';
}
// Builds one immutable audit entry describing a status transition,
// stamped with a fresh UUID and creation time.
function eventRecord({ fromStatus, toStatus, actorType, actorId, details = {} }) {
  const record = {
    id: crypto.randomUUID(),
    fromStatus,
    toStatus,
    actorType,
    actorId,
    details,
    createdAt: nowIso(),
  };
  return record;
}
// Projects an internal job record onto the public API shape; internal fields
// (ownerUid, events, rules, metadata) are deliberately omitted.
function toPublicJob(job) {
  const {
    id,
    type,
    subjectType,
    subjectId,
    fileUri,
    status,
    confidence,
    reasons,
    extracted,
    provider,
    review,
    createdAt,
    updatedAt,
  } = job;
  return {
    verificationId: id,
    type,
    subjectType,
    subjectId,
    fileUri,
    status,
    confidence,
    reasons,
    extracted,
    provider,
    review,
    createdAt,
    updatedAt,
  };
}
// Enforces access on a job. In 'authenticated' mode (the default) any
// signed-in actor may access any job; in any other mode only the owner may.
// NOTE(review): default-open looks intentional (the test suite relies on it),
// but confirm before shipping a stricter default.
function assertAccess(job, actorUid) {
  const ownerOnly = accessMode() !== 'authenticated';
  if (ownerOnly && job.ownerUid !== actorUid) {
    throw new AppError('FORBIDDEN', 'Not allowed to access this verification', 403);
  }
}
// Looks up a job by id; throws NOT_FOUND (404) when it does not exist.
function requireJob(id) {
  const job = jobs.get(id);
  if (job) {
    return job;
  }
  throw new AppError('NOT_FOUND', 'Verification not found', 404, { verificationId: id });
}
// Coerces a provider-reported status into a valid machine outcome;
// anything unrecognized degrades safely to NEEDS_REVIEW.
function normalizeMachineStatus(status) {
  switch (status) {
    case VerificationStatus.AUTO_PASS:
    case VerificationStatus.AUTO_FAIL:
    case VerificationStatus.NEEDS_REVIEW:
      return status;
    default:
      return VerificationStatus.NEEDS_REVIEW;
  }
}
// Coerces a confidence value to a number clamped to [0, 1];
// non-numeric input yields `fallback`.
function clampConfidence(value, fallback = 0.5) {
  const numeric = Number(value);
  if (!Number.isFinite(numeric)) {
    return fallback;
  }
  return Math.min(1, Math.max(0, numeric));
}
// Normalizes provider reasons to a non-empty array of strings;
// empty or non-array input yields [fallback].
function asReasonList(reasons, fallback) {
  const hasEntries = Array.isArray(reasons) && reasons.length > 0;
  if (!hasEntries) {
    return [fallback];
  }
  return reasons.map((item) => `${item}`);
}
// Timeout budget (ms) for external verification providers; env-overridable.
function providerTimeoutMs() {
  const raw = process.env.VERIFICATION_PROVIDER_TIMEOUT_MS || '8000';
  return Number.parseInt(raw, 10);
}
// Vertex model id used for attire image checks; env-overridable.
function attireModel() {
  return process.env.VERIFICATION_ATTIRE_MODEL || 'gemini-2.0-flash-lite-001';
}
/**
 * Machine check for attire evidence. Resolution order:
 *  1. VERIFICATION_ATTIRE_AUTOPASS=true  -> immediate AUTO_PASS (dev shortcut);
 *  2. provider other than 'vertex'       -> NEEDS_REVIEW (unsupported);
 *  3. otherwise ask a Vertex multimodal model to grade the photo.
 * Never throws: any model failure degrades to NEEDS_REVIEW so a human
 * reviewer becomes the fallback.
 */
async function runAttireChecks(job) {
  if (process.env.VERIFICATION_ATTIRE_AUTOPASS === 'true') {
    return {
      status: VerificationStatus.AUTO_PASS,
      confidence: 0.8,
      reasons: ['Auto-pass mode enabled for attire in dev'],
      extracted: {
        expected: job.rules,
      },
      provider: {
        name: 'attire-auto-pass',
        reference: null,
      },
    };
  }
  const attireProvider = process.env.VERIFICATION_ATTIRE_PROVIDER || 'vertex';
  if (attireProvider !== 'vertex') {
    // Unknown provider: hand off to humans rather than guessing.
    return {
      status: VerificationStatus.NEEDS_REVIEW,
      confidence: 0.45,
      reasons: [`Attire provider '${attireProvider}' is not supported`],
      extracted: {
        expected: job.rules,
      },
      provider: {
        name: attireProvider,
        reference: null,
      },
    };
  }
  try {
    // Prompt constrains the model to the three machine-terminal verdicts.
    const prompt = [
      'You are validating worker attire evidence.',
      `Rules: ${JSON.stringify(job.rules || {})}`,
      'Return AUTO_PASS only when the image clearly matches required attire.',
      'Return AUTO_FAIL when the image clearly violates required attire.',
      'Return NEEDS_REVIEW when uncertain.',
    ].join('\n');
    const schema = {
      type: 'object',
      properties: {
        status: { type: 'string' },
        confidence: { type: 'number' },
        reasons: {
          type: 'array',
          items: { type: 'string' },
        },
        extracted: {
          type: 'object',
          additionalProperties: true,
        },
      },
      required: ['status', 'confidence', 'reasons'],
    };
    const modelOutput = await invokeVertexMultimodalModel({
      prompt,
      responseJsonSchema: schema,
      fileUris: [job.fileUri],
      model: attireModel(),
      timeoutMs: providerTimeoutMs(),
    });
    const result = modelOutput?.result || {};
    // Model output is untrusted: normalize status, clamp confidence, coerce reasons.
    return {
      status: normalizeMachineStatus(result.status),
      confidence: clampConfidence(result.confidence, 0.6),
      reasons: asReasonList(result.reasons, 'Attire check completed'),
      extracted: result.extracted || {},
      provider: {
        name: 'vertex-attire',
        reference: modelOutput?.model || attireModel(),
      },
    };
  } catch (error) {
    // Model unavailable/timed out: degrade to manual review, keep the error code.
    return {
      status: VerificationStatus.NEEDS_REVIEW,
      confidence: 0.35,
      reasons: ['Automatic attire check unavailable, manual review required'],
      extracted: {},
      provider: {
        name: 'vertex-attire',
        reference: `error:${error?.code || 'unknown'}`,
      },
    };
  }
}
// Resolves the external provider endpoint/credentials for a verification type.
// 'government_id' gets its own provider; every other type falls through to
// the certification provider.
function getProviderConfig(type) {
  const isGovId = type === 'government_id';
  const envKey = isGovId ? 'GOV_ID' : 'CERT';
  return {
    name: isGovId ? 'government-id-provider' : 'certification-provider',
    url: process.env[`VERIFICATION_${envKey}_PROVIDER_URL`],
    token: process.env[`VERIFICATION_${envKey}_PROVIDER_TOKEN`],
  };
}
/**
 * Machine check delegated to an external HTTP provider (gov-id or cert).
 * Never throws: an unconfigured provider, non-2xx response, timeout, or
 * network error all degrade to NEEDS_REVIEW so a human becomes the fallback.
 * Requests are bounded by providerTimeoutMs() via AbortController.
 */
async function runThirdPartyChecks(job, type) {
  const provider = getProviderConfig(type);
  if (!provider.url) {
    return {
      status: VerificationStatus.NEEDS_REVIEW,
      confidence: 0.4,
      reasons: [`${provider.name} is not configured`],
      extracted: {},
      provider: {
        name: provider.name,
        reference: null,
      },
    };
  }
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), providerTimeoutMs());
  try {
    const response = await fetch(provider.url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        // Bearer auth only when a token is configured.
        ...(provider.token ? { Authorization: `Bearer ${provider.token}` } : {}),
      },
      body: JSON.stringify({
        type,
        subjectType: job.subjectType,
        subjectId: job.subjectId,
        fileUri: job.fileUri,
        rules: job.rules,
        metadata: job.metadata,
      }),
      signal: controller.signal,
    });
    // Body is parsed leniently: invalid/empty JSON becomes {}.
    const bodyText = await response.text();
    let body = {};
    try {
      body = bodyText ? JSON.parse(bodyText) : {};
    } catch {
      body = {};
    }
    if (!response.ok) {
      return {
        status: VerificationStatus.NEEDS_REVIEW,
        confidence: 0.35,
        reasons: [`${provider.name} returned ${response.status}`],
        extracted: {},
        provider: {
          name: provider.name,
          reference: body?.reference || null,
        },
      };
    }
    // Provider output is untrusted: normalize status, clamp confidence, coerce reasons.
    return {
      status: normalizeMachineStatus(body.status),
      confidence: clampConfidence(body.confidence, 0.6),
      reasons: asReasonList(body.reasons, `${provider.name} completed check`),
      extracted: body.extracted || {},
      provider: {
        name: provider.name,
        reference: body.reference || null,
      },
    };
  } catch (error) {
    // AbortError means our own timeout fired; anything else is transport failure.
    const isAbort = error?.name === 'AbortError';
    return {
      status: VerificationStatus.NEEDS_REVIEW,
      confidence: 0.3,
      reasons: [
        isAbort
          ? `${provider.name} timeout, manual review required`
          : `${provider.name} unavailable, manual review required`,
      ],
      extracted: {},
      provider: {
        name: provider.name,
        reference: null,
      },
    };
  } finally {
    clearTimeout(timeout);
  }
}
// Dispatches a job to the machine check matching its type; unknown types
// default to the certification provider.
async function runMachineChecks(job) {
  switch (job.type) {
    case 'attire':
      return runAttireChecks(job);
    case 'government_id':
      return runThirdPartyChecks(job, 'government_id');
    default:
      return runThirdPartyChecks(job, 'certification');
  }
}
/**
 * Worker: drives one job PENDING -> PROCESSING -> machine-terminal status.
 * A no-op unless the job is currently PENDING (guards double-queueing).
 * Every transition is appended to the job's audit `events`; failures land
 * the job in ERROR with the message recorded rather than propagating.
 */
async function processVerificationJob(id) {
  const job = requireJob(id);
  if (job.status !== VerificationStatus.PENDING) {
    return;
  }
  const beforeProcessing = job.status;
  job.status = VerificationStatus.PROCESSING;
  job.updatedAt = nowIso();
  job.events.push(
    eventRecord({
      fromStatus: beforeProcessing,
      toStatus: VerificationStatus.PROCESSING,
      actorType: 'system',
      actorId: 'verification-worker',
    })
  );
  try {
    const outcome = await runMachineChecks(job);
    // Defensive: machine checks must land in a machine-terminal status.
    if (!MACHINE_TERMINAL_STATUSES.has(outcome.status)) {
      throw new Error(`Invalid machine outcome status: ${outcome.status}`);
    }
    const fromStatus = job.status;
    job.status = outcome.status;
    job.confidence = outcome.confidence;
    job.reasons = outcome.reasons;
    job.extracted = outcome.extracted;
    job.provider = outcome.provider;
    job.updatedAt = nowIso();
    job.events.push(
      eventRecord({
        fromStatus,
        toStatus: job.status,
        actorType: 'system',
        actorId: 'verification-worker',
        details: {
          confidence: job.confidence,
          reasons: job.reasons,
          provider: job.provider,
        },
      })
    );
  } catch (error) {
    // Record the failure on the job itself; the caller never sees a rejection.
    const fromStatus = job.status;
    job.status = VerificationStatus.ERROR;
    job.confidence = null;
    job.reasons = [error?.message || 'Verification processing failed'];
    job.extracted = {};
    job.provider = {
      name: 'verification-worker',
      reference: null,
    };
    job.updatedAt = nowIso();
    job.events.push(
      eventRecord({
        fromStatus,
        toStatus: VerificationStatus.ERROR,
        actorType: 'system',
        actorId: 'verification-worker',
        details: {
          error: error?.message || 'Verification processing failed',
        },
      })
    );
  }
}
// Defers processing to the next event-loop turn so create/retry can return
// 202 immediately. The empty catch is deliberate best-effort:
// processVerificationJob records its own failures on the job; the only error
// that can escape it is NOT_FOUND for a job removed before the timer fired.
function queueVerificationProcessing(id) {
  setTimeout(() => {
    processVerificationJob(id).catch(() => {});
  }, 0);
}
/**
 * Creates a PENDING verification job owned by `actorUid`, queues async
 * machine processing, and returns the public projection immediately
 * (callers respond 202 and poll for the outcome).
 * `payload` is assumed validated upstream (type/fileUri required;
 * subject/rules/metadata optional) — confirmed by the route schema, not here.
 */
export function createVerificationJob({ actorUid, payload }) {
  const now = nowIso();
  const id = `ver_${crypto.randomUUID()}`;
  const job = {
    id,
    type: payload.type,
    subjectType: payload.subjectType || null,
    subjectId: payload.subjectId || null,
    ownerUid: actorUid,
    fileUri: payload.fileUri,
    rules: payload.rules || {},
    metadata: payload.metadata || {},
    status: VerificationStatus.PENDING,
    confidence: null,
    reasons: [],
    extracted: {},
    provider: null,
    review: null,
    createdAt: now,
    updatedAt: now,
    // Audit trail starts with the creation transition.
    events: [
      eventRecord({
        fromStatus: null,
        toStatus: VerificationStatus.PENDING,
        actorType: 'system',
        actorId: actorUid,
      }),
    ],
  };
  jobs.set(id, job);
  queueVerificationProcessing(id);
  return toPublicJob(job);
}
/**
 * Returns the public projection of a verification job.
 * Throws NOT_FOUND for unknown ids; FORBIDDEN only outside the default
 * 'authenticated' access mode (see assertAccess).
 */
export function getVerificationJob(verificationId, actorUid) {
  const job = requireJob(verificationId);
  assertAccess(job, actorUid);
  return toPublicJob(job);
}
/**
 * Applies a human review decision to a job and records it in the audit trail.
 * Throws CONFLICT (409) if the job is already APPROVED/REJECTED.
 * NOTE(review): `review.decision` is written to job.status verbatim — assumed
 * to be validated to APPROVED/REJECTED by the route schema upstream; confirm,
 * otherwise arbitrary statuses could be injected here.
 */
export function reviewVerificationJob(verificationId, actorUid, review) {
  const job = requireJob(verificationId);
  assertAccess(job, actorUid);
  if (HUMAN_TERMINAL_STATUSES.has(job.status)) {
    throw new AppError('CONFLICT', 'Verification already finalized', 409, {
      verificationId,
      status: job.status,
    });
  }
  const fromStatus = job.status;
  job.status = review.decision;
  job.review = {
    decision: review.decision,
    reviewedBy: actorUid,
    reviewedAt: nowIso(),
    note: review.note || '',
    reasonCode: review.reasonCode || 'MANUAL_REVIEW',
  };
  job.updatedAt = nowIso();
  job.events.push(
    eventRecord({
      fromStatus,
      toStatus: job.status,
      actorType: 'reviewer',
      actorId: actorUid,
      details: {
        reasonCode: job.review.reasonCode,
      },
    })
  );
  return toPublicJob(job);
}
/**
 * Resets a job to PENDING (clearing machine results and any prior review)
 * and requeues machine processing. Throws CONFLICT (409) while the job is
 * actively PROCESSING; any other status — including APPROVED/REJECTED —
 * may be retried.
 */
export function retryVerificationJob(verificationId, actorUid) {
  const job = requireJob(verificationId);
  assertAccess(job, actorUid);
  if (job.status === VerificationStatus.PROCESSING) {
    throw new AppError('CONFLICT', 'Cannot retry while verification is processing', 409, {
      verificationId,
    });
  }
  const fromStatus = job.status;
  // Wipe prior outcome so the job reruns from a clean slate.
  job.status = VerificationStatus.PENDING;
  job.confidence = null;
  job.reasons = [];
  job.extracted = {};
  job.provider = null;
  job.review = null;
  job.updatedAt = nowIso();
  job.events.push(
    eventRecord({
      fromStatus,
      toStatus: VerificationStatus.PENDING,
      actorType: 'reviewer',
      actorId: actorUid,
      details: {
        retried: true,
      },
    })
  );
  queueVerificationProcessing(verificationId);
  return toPublicJob(job);
}
// Test hook: drops all in-memory verification jobs.
export function __resetVerificationJobsForTests() {
  jobs.clear();
}

View File

@@ -0,0 +1,246 @@
import test, { beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import request from 'supertest';
import { createApp } from '../src/app.js';
import { __resetLlmRateLimitForTests } from '../src/services/llm-rate-limit.js';
import { __resetVerificationJobsForTests } from '../src/services/verification-jobs.js';
// Reset env toggles and shared in-memory state before every test so cases are
// order-independent: auth bypassed, external services mocked, limiters cleared.
beforeEach(() => {
  process.env.AUTH_BYPASS = 'true';
  process.env.LLM_MOCK = 'true';
  process.env.SIGNED_URL_MOCK = 'true';
  process.env.UPLOAD_MOCK = 'true';
  process.env.MAX_SIGNED_URL_SECONDS = '900';
  process.env.LLM_RATE_LIMIT_PER_MINUTE = '20';
  process.env.VERIFICATION_REQUIRE_FILE_EXISTS = 'false';
  process.env.VERIFICATION_ACCESS_MODE = 'authenticated';
  process.env.VERIFICATION_ATTIRE_PROVIDER = 'mock';
  __resetLlmRateLimitForTests();
  __resetVerificationJobsForTests();
});
// Polls GET /core/verifications/:id until the job reaches a machine-terminal
// status (AUTO_PASS / AUTO_FAIL / NEEDS_REVIEW / ERROR) or attempts run out,
// sleeping 10ms between polls. Returns the last response either way.
async function waitForMachineStatus(app, verificationId, maxAttempts = 30) {
  const terminal = new Set(['AUTO_PASS', 'AUTO_FAIL', 'NEEDS_REVIEW', 'ERROR']);
  let last;
  for (let attempt = 0; attempt < maxAttempts; attempt += 1) {
    // eslint-disable-next-line no-await-in-loop
    last = await request(app)
      .get(`/core/verifications/${verificationId}`)
      .set('Authorization', 'Bearer test-token');
    if (terminal.has(last.body?.status)) {
      return last;
    }
    // eslint-disable-next-line no-await-in-loop
    await new Promise((resolve) => setTimeout(resolve, 10));
  }
  return last;
}
// Liveness endpoint responds 200 with ok flag and a correlation id (body + header).
test('GET /healthz returns healthy response', async () => {
  const app = createApp();
  const res = await request(app).get('/healthz');
  assert.equal(res.status, 200);
  assert.equal(res.body.ok, true);
  assert.equal(typeof res.body.requestId, 'string');
  assert.equal(typeof res.headers['x-request-id'], 'string');
});
// With the auth bypass disabled, unauthenticated requests are rejected 401.
test('POST /core/create-signed-url requires auth', async () => {
  process.env.AUTH_BYPASS = 'false';
  const app = createApp();
  const res = await request(app).post('/core/create-signed-url').send({
    fileUri: 'gs://krow-workforce-dev-private/foo.pdf',
  });
  assert.equal(res.status, 401);
  assert.equal(res.body.code, 'UNAUTHENTICATED');
  process.env.AUTH_BYPASS = 'true';
});
// Happy path: signing a file under the caller's own uploads/<uid>/ prefix.
test('POST /core/create-signed-url returns signed URL', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/core/create-signed-url')
    .set('Authorization', 'Bearer test-token')
    .send({
      fileUri: 'gs://krow-workforce-dev-private/uploads/test-user/foo.pdf',
      expiresInSeconds: 300,
    });
  assert.equal(res.status, 200);
  assert.equal(typeof res.body.signedUrl, 'string');
  assert.equal(typeof res.body.expiresAt, 'string');
  assert.equal(typeof res.body.requestId, 'string');
});
// Ownership check: signing another user's path is 403 FORBIDDEN.
test('POST /core/create-signed-url rejects non-owned path', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/core/create-signed-url')
    .set('Authorization', 'Bearer test-token')
    .send({
      fileUri: 'gs://krow-workforce-dev-private/uploads/other-user/foo.pdf',
      expiresInSeconds: 300,
    });
  assert.equal(res.status, 403);
  assert.equal(res.body.code, 'FORBIDDEN');
});
// Requests exceeding MAX_SIGNED_URL_SECONDS fail validation (400).
test('POST /core/create-signed-url enforces expiry cap', async () => {
  process.env.MAX_SIGNED_URL_SECONDS = '300';
  const app = createApp();
  const res = await request(app)
    .post('/core/create-signed-url')
    .set('Authorization', 'Bearer test-token')
    .send({
      fileUri: 'gs://krow-workforce-dev-private/uploads/test-user/foo.pdf',
      expiresInSeconds: 301,
    });
  assert.equal(res.status, 400);
  assert.equal(res.body.code, 'VALIDATION_ERROR');
});
// Legacy camelCase alias routes to the same handler as /core/invoke-llm.
test('POST /invokeLLM legacy alias works', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/invokeLLM')
    .set('Authorization', 'Bearer test-token')
    .send({
      prompt: 'hello',
      responseJsonSchema: { type: 'object' },
      fileUrls: [],
    });
  assert.equal(res.status, 200);
  assert.equal(typeof res.body.result, 'object');
  assert.equal(typeof res.body.model, 'string');
});
// With a limit of 1/min, the second call is 429 and carries a Retry-After header.
test('POST /core/invoke-llm enforces per-user rate limit', async () => {
  process.env.LLM_RATE_LIMIT_PER_MINUTE = '1';
  const app = createApp();
  const first = await request(app)
    .post('/core/invoke-llm')
    .set('Authorization', 'Bearer test-token')
    .send({
      prompt: 'hello',
      responseJsonSchema: { type: 'object' },
      fileUrls: [],
    });
  const second = await request(app)
    .post('/core/invoke-llm')
    .set('Authorization', 'Bearer test-token')
    .send({
      prompt: 'hello again',
      responseJsonSchema: { type: 'object' },
      fileUrls: [],
    });
  assert.equal(first.status, 200);
  assert.equal(second.status, 429);
  assert.equal(second.body.code, 'RATE_LIMITED');
  assert.equal(typeof second.headers['retry-after'], 'string');
});
// Create returns 202 + PENDING; polling eventually sees a machine-terminal status.
test('POST /core/verifications creates async job and GET returns status', async () => {
  const app = createApp();
  const created = await request(app)
    .post('/core/verifications')
    .set('Authorization', 'Bearer test-token')
    .send({
      type: 'attire',
      subjectType: 'staff',
      subjectId: 'staff_1',
      fileUri: 'gs://krow-workforce-dev-private/uploads/test-user/attire.jpg',
      rules: { attireType: 'shoes', expectedColor: 'black' },
    });
  assert.equal(created.status, 202);
  assert.equal(created.body.type, 'attire');
  assert.equal(created.body.status, 'PENDING');
  assert.equal(typeof created.body.verificationId, 'string');
  const status = await waitForMachineStatus(app, created.body.verificationId);
  assert.equal(status.status, 200);
  assert.equal(status.body.verificationId, created.body.verificationId);
  assert.equal(status.body.type, 'attire');
  assert.ok(['NEEDS_REVIEW', 'AUTO_PASS', 'AUTO_FAIL', 'ERROR'].includes(status.body.status));
});
// Evidence files outside the caller's uploads/<uid>/ prefix are rejected 403.
test('POST /core/verifications rejects file paths not owned by actor', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/core/verifications')
    .set('Authorization', 'Bearer test-token')
    .send({
      type: 'attire',
      fileUri: 'gs://krow-workforce-dev-private/uploads/other-user/not-allowed.jpg',
      rules: { attireType: 'shoes' },
    });
  assert.equal(res.status, 403);
  assert.equal(res.body.code, 'FORBIDDEN');
});
// A human review transitions the job to APPROVED and records the decision.
test('POST /core/verifications/:id/review finalizes verification', async () => {
  const app = createApp();
  const created = await request(app)
    .post('/core/verifications')
    .set('Authorization', 'Bearer test-token')
    .send({
      type: 'certification',
      subjectType: 'staff',
      subjectId: 'staff_1',
      fileUri: 'gs://krow-workforce-dev-private/uploads/test-user/cert.pdf',
      rules: { certType: 'food_safety' },
    });
  const status = await waitForMachineStatus(app, created.body.verificationId);
  assert.equal(status.status, 200);
  const reviewed = await request(app)
    .post(`/core/verifications/${created.body.verificationId}/review`)
    .set('Authorization', 'Bearer test-token')
    .send({
      decision: 'APPROVED',
      note: 'Looks good',
      reasonCode: 'MANUAL_REVIEW',
    });
  assert.equal(reviewed.status, 200);
  assert.equal(reviewed.body.status, 'APPROVED');
  assert.equal(reviewed.body.review.decision, 'APPROVED');
});
// Retry resets a machine-terminal job back to PENDING with a 202.
test('POST /core/verifications/:id/retry requeues verification', async () => {
  const app = createApp();
  const created = await request(app)
    .post('/core/verifications')
    .set('Authorization', 'Bearer test-token')
    .send({
      type: 'government_id',
      subjectType: 'staff',
      subjectId: 'staff_1',
      fileUri: 'gs://krow-workforce-dev-private/uploads/test-user/id-front.jpg',
      rules: {},
    });
  const status = await waitForMachineStatus(app, created.body.verificationId);
  assert.equal(status.status, 200);
  const retried = await request(app)
    .post(`/core/verifications/${created.body.verificationId}/retry`)
    .set('Authorization', 'Bearer test-token')
    .send({});
  assert.equal(retried.status, 202);
  assert.equal(retried.body.status, 'PENDING');
});

View File

@@ -15,6 +15,7 @@ mutation createOrder(
$shifts: Any
$requested: Int
$teamHubId: UUID!
$hubManagerId: UUID
$recurringDays: [String!]
$permanentStartDate: Timestamp
$permanentDays: [String!]
@@ -40,6 +41,7 @@ mutation createOrder(
shifts: $shifts
requested: $requested
teamHubId: $teamHubId
hubManagerId: $hubManagerId
recurringDays: $recurringDays
permanentDays: $permanentDays
notes: $notes

View File

@@ -3,6 +3,7 @@ mutation upsertStaffAttire(
$attireOptionId: UUID!
$verificationPhotoUrl: String
$verificationId: String
$verificationStatus: AttireVerificationStatus
) @auth(level: USER) {
staffAttire_upsert(
data: {
@@ -10,7 +11,7 @@ mutation upsertStaffAttire(
attireOptionId: $attireOptionId
verificationPhotoUrl: $verificationPhotoUrl
verificationId: $verificationId
verificationStatus: PENDING
verificationStatus: $verificationStatus
}
)
}

View File

@@ -1,7 +1,12 @@
enum AttireVerificationStatus {
PENDING
FAILED
SUCCESS
PROCESSING
AUTO_PASS
AUTO_FAIL
NEEDS_REVIEW
APPROVED
REJECTED
ERROR
}
type StaffAttire @table(name: "staff_attires", key: ["staffId", "attireOptionId"]) {