feat(backend): add foundation services and sql idempotency

This commit is contained in:
zouantchaw
2026-02-23 22:27:40 -05:00
parent e81eab1165
commit f8f81ec77c
40 changed files with 7416 additions and 0 deletions

View File

@@ -0,0 +1,64 @@
# CI workflow for the backend foundation: validates the backend make targets
# (dry-run only) and runs each service's node --test suite.
name: Backend Foundation
on:
  pull_request:
    branches:
      - dev
      - main
  push:
    branches:
      - dev
      - main
jobs:
  # Checks that backend make targets exist and expand correctly without
  # executing any real deploy commands (make -n = dry run).
  backend-foundation-makefile:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Validate backend make targets
        run: |
          make backend-help
          make help | grep "backend-"
      - name: Dry-run backend automation targets
        run: |
          make -n backend-enable-apis ENV=dev
          make -n backend-bootstrap-dev ENV=dev
          make -n backend-deploy-core ENV=dev
          make -n backend-deploy-commands ENV=dev
          make -n backend-deploy-workers ENV=dev
          make -n backend-smoke-core ENV=dev
          make -n backend-smoke-commands ENV=dev
          make -n backend-logs-core ENV=dev
  # Runs both API test suites, once per service via the matrix, from inside
  # each service directory.
  backend-services-tests:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        service:
          - backend/core-api
          - backend/command-api
    defaults:
      run:
        working-directory: ${{ matrix.service }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: 20
          cache: npm
          cache-dependency-path: ${{ matrix.service }}/package-lock.json
      - name: Install dependencies
        run: npm ci
      - name: Run tests
        env:
          AUTH_BYPASS: "true"   # skip Firebase token verification in tests
          LLM_MOCK: "true"      # serve mocked model responses
        run: npm test

View File

@@ -7,3 +7,5 @@
| 2026-02-24 | 0.1.2 | Added API implementation contract and transition route aliases. |
| 2026-02-24 | 0.1.3 | Added auth-first security policy with deferred role-map integration hooks. |
| 2026-02-24 | 0.1.4 | Locked defaults for idempotency, validation, bucket split, model provider, and p95 objectives. |
| 2026-02-24 | 0.1.5 | Added backend makefile module and CI workflow for backend target validation. |
| 2026-02-24 | 0.1.6 | Added Cloud SQL-backed idempotency storage, migration script, and command API test coverage. |

View File

@@ -11,6 +11,7 @@ include makefiles/web.mk
include makefiles/launchpad.mk include makefiles/launchpad.mk
include makefiles/mobile.mk include makefiles/mobile.mk
include makefiles/dataconnect.mk include makefiles/dataconnect.mk
include makefiles/backend.mk
include makefiles/tools.mk include makefiles/tools.mk
# --- Main Help Command --- # --- Main Help Command ---
@@ -71,6 +72,19 @@ help:
@echo " make dataconnect-bootstrap-validation-database ONE-TIME: Setup validation database" @echo " make dataconnect-bootstrap-validation-database ONE-TIME: Setup validation database"
@echo " make dataconnect-backup-dev-to-validation Backup dev database to validation" @echo " make dataconnect-backup-dev-to-validation Backup dev database to validation"
@echo "" @echo ""
@echo " ☁️ BACKEND FOUNDATION (Cloud Run + Workers)"
@echo " ────────────────────────────────────────────────────────────────────"
@echo " make backend-help Show backend foundation commands"
@echo " make backend-enable-apis [ENV=dev] Enable backend GCP APIs"
@echo " make backend-bootstrap-dev Bootstrap backend foundation resources (dev)"
@echo " make backend-migrate-idempotency Create/upgrade command idempotency table"
@echo " make backend-deploy-core [ENV=dev] Build and deploy core API service"
@echo " make backend-deploy-commands [ENV=dev] Build and deploy command API service"
@echo " make backend-deploy-workers [ENV=dev] Deploy async worker functions scaffold"
@echo " make backend-smoke-core [ENV=dev] Run health smoke test for core service"
@echo " make backend-smoke-commands [ENV=dev] Run health smoke test for command service"
@echo " make backend-logs-core [ENV=dev] Tail/read logs for core service"
@echo ""
@echo " 🛠️ DEVELOPMENT TOOLS" @echo " 🛠️ DEVELOPMENT TOOLS"
@echo " ────────────────────────────────────────────────────────────────────" @echo " ────────────────────────────────────────────────────────────────────"
@echo " make install-melos Install Melos globally (for mobile dev)" @echo " make install-melos Install Melos globally (for mobile dev)"

View File

@@ -0,0 +1,13 @@
# Command API runtime image: production dependencies only; app listens on 8080.
FROM node:20-alpine
WORKDIR /app
# Copy manifests first so the npm ci layer stays cached until deps change.
COPY package*.json ./
RUN npm ci --omit=dev
COPY src ./src
ENV PORT=8080
EXPOSE 8080
CMD ["node", "src/server.js"]

3035
backend/command-api/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,25 @@
{
"name": "@krow/command-api",
"version": "0.1.0",
"private": true,
"type": "module",
"engines": {
"node": ">=20"
},
"scripts": {
"start": "node src/server.js",
"test": "node --test",
"migrate:idempotency": "node scripts/migrate-idempotency.mjs"
},
"dependencies": {
"express": "^4.21.2",
"firebase-admin": "^13.0.2",
"pg": "^8.16.3",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"
},
"devDependencies": {
"supertest": "^7.0.0"
}
}

View File

@@ -0,0 +1,29 @@
// One-shot migration runner: applies sql/001_command_idempotency.sql against
// the database named by IDEMPOTENCY_DATABASE_URL. Exits non-zero on failure
// (explicit exit for the missing env var; a query error rejects the
// top-level await, which Node reports with a non-zero exit code).
import { readFileSync } from 'node:fs';
import { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { Pool } from 'pg';

const databaseUrl = process.env.IDEMPOTENCY_DATABASE_URL;
if (!databaseUrl) {
  // eslint-disable-next-line no-console
  console.error('IDEMPOTENCY_DATABASE_URL is required');
  process.exit(1);
}

// Resolve the SQL file relative to this script, not the caller's CWD.
const scriptDir = resolve(fileURLToPath(new URL('.', import.meta.url)));
const migrationPath = resolve(scriptDir, '../sql/001_command_idempotency.sql');
const sql = readFileSync(migrationPath, 'utf8');

const pool = new Pool({
  connectionString: databaseUrl,
  // Small pool is plenty for a one-shot script; overridable for parity with
  // the service configuration.
  max: Number.parseInt(process.env.IDEMPOTENCY_DB_POOL_MAX || '5', 10),
});

try {
  // The migration uses CREATE ... IF NOT EXISTS, so reruns are safe.
  await pool.query(sql);
  // eslint-disable-next-line no-console
  console.log('Idempotency migration applied successfully');
} finally {
  // Always release connections so the process can exit promptly.
  await pool.end();
}

View File

@@ -0,0 +1,13 @@
-- One idempotency record per (user, route, client key); composite_key is the
-- pre-joined "userId:route:idempotencyKey" string used as the primary key.
CREATE TABLE IF NOT EXISTS command_idempotency (
  composite_key TEXT PRIMARY KEY,
  user_id TEXT NOT NULL,
  route TEXT NOT NULL,
  idempotency_key TEXT NOT NULL,
  status_code INTEGER NOT NULL,
  response_payload JSONB NOT NULL,
  created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
  expires_at TIMESTAMPTZ NOT NULL
);
-- Supports the periodic "DELETE ... WHERE expires_at <= NOW()" cleanup scans.
CREATE INDEX IF NOT EXISTS idx_command_idempotency_expires_at
ON command_idempotency (expires_at);

View File

@@ -0,0 +1,30 @@
import express from 'express';
import pino from 'pino';
import pinoHttp from 'pino-http';
import { requestContext } from './middleware/request-context.js';
import { errorHandler, notFoundHandler } from './middleware/error-handler.js';
import { healthRouter } from './routes/health.js';
import { createCommandsRouter } from './routes/commands.js';
// Shared structured logger; level tunable via LOG_LEVEL (default "info").
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

// Builds the command-api Express app. Middleware order matters: request
// context first (so every log line carries requestId), then HTTP logging,
// JSON body parsing, routes, and finally the 404 + error handlers.
export function createApp() {
  const app = express();
  app.use(requestContext);
  app.use(
    pinoHttp({
      logger,
      // Attach the per-request id to every pino-http log record.
      customProps: (req) => ({ requestId: req.requestId }),
    })
  );
  app.use(express.json({ limit: '2mb' }));
  app.use(healthRouter);
  app.use('/commands', createCommandsRouter());
  // Fallbacks must be registered last to catch unmatched routes and errors.
  app.use(notFoundHandler);
  app.use(errorHandler);
  return app;
}

View File

@@ -0,0 +1,6 @@
import { z } from 'zod';
// Minimal envelope accepted by every command route: both fields optional,
// free-form objects. Route-specific payload validation is deferred.
export const commandBaseSchema = z.object({
  payload: z.record(z.any()).optional(),
  metadata: z.record(z.any()).optional(),
});

View File

@@ -0,0 +1,26 @@
/**
 * Application error carrying a machine-readable code, an HTTP status, and
 * optional structured details for the JSON error envelope.
 */
export class AppError extends Error {
  constructor(code, message, status = 400, details = {}) {
    super(message);
    this.name = 'AppError';
    Object.assign(this, { code, status, details });
  }
}
/**
 * Normalizes any thrown value into a { status, body } pair for the JSON
 * error envelope. Anything without a usable (non-zero integer) status maps
 * to 500, and missing fields fall back to generic INTERNAL_ERROR values.
 */
export function toErrorEnvelope(error, requestId) {
  let status = 500;
  if (error && Number.isInteger(error.status) && error.status) {
    status = error.status;
  }
  return {
    status,
    body: {
      code: error?.code || 'INTERNAL_ERROR',
      message: error?.message || 'Unexpected error',
      details: error?.details || {},
      requestId,
    },
  };
}

View File

@@ -0,0 +1,45 @@
import { AppError } from '../lib/errors.js';
import { can } from '../services/policy.js';
import { verifyFirebaseToken } from '../services/firebase-auth.js';
/**
 * Extracts the token from an "Authorization: Bearer <token>" header.
 * Scheme matching is case-insensitive and repeated whitespace between the
 * scheme and token is tolerated (RFC 9110 allows flexible whitespace);
 * headers with embedded spaces in the token, a missing token, or a
 * non-Bearer scheme return null. Previously "Bearer a b" leaked "a" as the
 * token and "Bearer  x" (double space) was rejected.
 */
function getBearerToken(header) {
  if (!header) return null;
  const match = /^bearer\s+(\S+)$/i.exec(header.trim());
  return match ? match[1] : null;
}
/**
 * Express middleware that authenticates the request and attaches
 * req.actor = { uid, email, role }. Fails with 401 (via next(AppError))
 * when the bearer token is missing or verification fails.
 *
 * NOTE(review): AUTH_BYPASS=true skips Firebase verification and injects a
 * fixed test actor; some bearer token must still be present. This flag must
 * never be set in production environments.
 */
export async function requireAuth(req, _res, next) {
  try {
    const token = getBearerToken(req.get('Authorization'));
    if (!token) {
      throw new AppError('UNAUTHENTICATED', 'Missing bearer token', 401);
    }
    // Bypass is checked only after the token-presence check so "missing
    // token" behaves identically with and without the bypass.
    if (process.env.AUTH_BYPASS === 'true') {
      req.actor = { uid: 'test-user', email: 'test@krow.local', role: 'TEST' };
      return next();
    }
    const decoded = await verifyFirebaseToken(token);
    req.actor = {
      uid: decoded.uid,
      email: decoded.email || null,
      role: decoded.role || null,
    };
    return next();
  } catch (error) {
    // Preserve deliberate AppErrors; collapse verifier failures to a
    // generic 401 so internal error details are not leaked to clients.
    if (error instanceof AppError) return next(error);
    return next(new AppError('UNAUTHENTICATED', 'Token verification failed', 401));
  }
}
/**
 * Builds middleware that rejects the request with 403 unless the policy
 * layer allows `action` on `resource` for the authenticated actor.
 */
export function requirePolicy(action, resource) {
  return (req, _res, next) => {
    if (can(action, resource, req.actor)) {
      return next();
    }
    return next(new AppError('FORBIDDEN', 'Not allowed to perform this action', 403));
  };
}

View File

@@ -0,0 +1,25 @@
import { toErrorEnvelope } from '../lib/errors.js';
/**
 * Terminal 404 handler: emits the standard error envelope for any route no
 * earlier middleware matched.
 */
export function notFoundHandler(req, res) {
  const body = {
    code: 'NOT_FOUND',
    message: `Route not found: ${req.method} ${req.path}`,
    details: {},
    requestId: req.requestId,
  };
  res.status(404).json(body);
}
/**
 * Final Express error handler (the 4-argument signature is what makes
 * Express treat it as one). Converts the error to the shared envelope, logs
 * it on the per-request logger when present, and sends the JSON response.
 */
export function errorHandler(error, req, res, _next) {
  const envelope = toErrorEnvelope(error, req.requestId);
  // req.log is attached by pino-http; guard for contexts that skip it.
  if (req.log) {
    req.log.error(
      {
        errCode: envelope.body.code,
        status: envelope.status,
        details: envelope.body.details,
      },
      envelope.body.message
    );
  }
  res.status(envelope.status).json(envelope.body);
}

View File

@@ -0,0 +1,10 @@
import { AppError } from '../lib/errors.js';
/**
 * Guard for command routes: requires an Idempotency-Key header, stashing
 * its value on req.idempotencyKey; otherwise fails with 400.
 */
export function requireIdempotencyKey(req, _res, next) {
  const key = req.get('Idempotency-Key');
  if (key) {
    req.idempotencyKey = key;
    return next();
  }
  return next(new AppError('MISSING_IDEMPOTENCY_KEY', 'Missing Idempotency-Key header', 400));
}

View File

@@ -0,0 +1,9 @@
import { randomUUID } from 'node:crypto';
/**
 * Assigns every request a correlation id: reuses the caller-supplied
 * X-Request-Id when present, otherwise generates a UUID. The id is echoed
 * back on the response and a start timestamp is recorded for latency use.
 */
export function requestContext(req, res, next) {
  const requestId = req.get('X-Request-Id') || randomUUID();
  req.requestId = requestId;
  res.setHeader('X-Request-Id', requestId);
  res.locals.startedAt = Date.now();
  next();
}

View File

@@ -0,0 +1,113 @@
import { Router } from 'express';
import { AppError } from '../lib/errors.js';
import { requireAuth, requirePolicy } from '../middleware/auth.js';
import { requireIdempotencyKey } from '../middleware/idempotency.js';
import { buildIdempotencyKey, readIdempotentResult, writeIdempotentResult } from '../services/idempotency-store.js';
import { commandBaseSchema } from '../contracts/commands/command-base.js';
// Validates the request body against the shared command envelope schema;
// throws a 400 AppError carrying the zod issues on failure. A missing body
// is treated as an empty object (both schema fields are optional).
function parseBody(body) {
  const parsed = commandBaseSchema.safeParse(body || {});
  if (!parsed.success) {
    throw new AppError('VALIDATION_ERROR', 'Invalid command payload', 400, {
      issues: parsed.error.issues,
    });
  }
  return parsed.data;
}
// Builds the stub acknowledgement returned (and cached) for a command.
// commandId is route-scoped and timestamped; real handlers will replace it.
function createCommandResponse(route, requestId, idempotencyKey) {
  const commandId = [route, Date.now()].join(':');
  return { accepted: true, route, commandId, idempotencyKey, requestId };
}
/**
 * Builds the shared handler used by all command routes: validate body, look
 * up a cached result under (actor uid, route template, Idempotency-Key),
 * and either replay it or persist and return a fresh stub response.
 *
 * NOTE(review): policyAction/policyResource are currently unused here —
 * authorization is enforced by the requirePolicy middleware on each route.
 * Wire them in (e.g. for auditing) or drop them when the routes stabilize.
 */
function buildCommandHandler(policyAction, policyResource) {
  return async (req, res, next) => {
    try {
      parseBody(req.body);
      // req.route.path keeps the :param template, so all requests to the
      // same route share one cache namespace regardless of concrete params.
      const route = `${req.baseUrl}${req.route.path}`;
      const compositeKey = buildIdempotencyKey({
        userId: req.actor.uid,
        route,
        idempotencyKey: req.idempotencyKey,
      });
      const existing = await readIdempotentResult(compositeKey);
      if (existing) {
        // Replay: a duplicate request returns the originally stored response.
        return res.status(existing.statusCode).json(existing.payload);
      }
      const payload = createCommandResponse(route, req.requestId, req.idempotencyKey);
      // write() returns the winning record, which may be a concurrent
      // duplicate writer's response rather than ours.
      const persisted = await writeIdempotentResult({
        compositeKey,
        userId: req.actor.uid,
        route,
        idempotencyKey: req.idempotencyKey,
        payload,
        statusCode: 200,
      });
      return res.status(persisted.statusCode).json(persisted.payload);
    } catch (error) {
      return next(error);
    }
  };
}
/**
 * Wires every command endpoint with the same middleware chain:
 * auth -> idempotency key -> policy -> shared command handler.
 */
export function createCommandsRouter() {
  const router = Router();
  // [path, policy action, policy resource] — registration order preserved.
  const routes = [
    ['/orders/create', 'orders.create', 'order'],
    ['/orders/:orderId/update', 'orders.update', 'order'],
    ['/orders/:orderId/cancel', 'orders.cancel', 'order'],
    ['/shifts/:shiftId/change-status', 'shifts.change-status', 'shift'],
    ['/shifts/:shiftId/assign-staff', 'shifts.assign-staff', 'shift'],
    ['/shifts/:shiftId/accept', 'shifts.accept', 'shift'],
  ];
  for (const [path, action, resource] of routes) {
    router.post(
      path,
      requireAuth,
      requireIdempotencyKey,
      requirePolicy(action, resource),
      buildCommandHandler(action, resource)
    );
  }
  return router;
}

View File

@@ -0,0 +1,12 @@
import { Router } from 'express';
// Unauthenticated liveness endpoint used by health checks and the
// backend-smoke-* make targets.
export const healthRouter = Router();
healthRouter.get('/healthz', (req, res) => {
  res.status(200).json({
    ok: true,
    service: 'krow-command-api',
    // SERVICE_VERSION is expected to be injected at deploy time.
    version: process.env.SERVICE_VERSION || 'dev',
    requestId: req.requestId,
  });
});

View File

@@ -0,0 +1,9 @@
// Process entrypoint: binds the Express app to PORT (set by the runtime
// environment; defaults to 8080 locally).
import { createApp } from './app.js';

const port = Number(process.env.PORT || 8080);
const app = createApp();
app.listen(port, () => {
  // eslint-disable-next-line no-console
  console.log(`krow-command-api listening on port ${port}`);
});

View File

@@ -0,0 +1,13 @@
import { applicationDefault, getApps, initializeApp } from 'firebase-admin/app';
import { getAuth } from 'firebase-admin/auth';

// Lazily initializes the default firebase-admin app using Application
// Default Credentials (the runtime service account).
function ensureAdminApp() {
  if (getApps().length === 0) {
    initializeApp({ credential: applicationDefault() });
  }
}

// Verifies a Firebase ID token and resolves to its decoded claims.
// Rejects on invalid/expired tokens; callers map that to a 401.
export async function verifyFirebaseToken(token) {
  ensureAdminApp();
  return getAuth().verifyIdToken(token);
}

View File

@@ -0,0 +1,208 @@
import { Pool } from 'pg';
const DEFAULT_TTL_SECONDS = Number.parseInt(process.env.IDEMPOTENCY_TTL_SECONDS || '86400', 10);
const CLEANUP_EVERY_OPS = Number.parseInt(process.env.IDEMPOTENCY_CLEANUP_EVERY_OPS || '100', 10);
const memoryStore = new Map();
let adapterPromise = null;
/**
 * Selects the storage backend. An explicit IDEMPOTENCY_STORE=memory|sql
 * wins; otherwise the SQL store is used exactly when a database URL is set.
 */
function shouldUseSqlStore() {
  switch ((process.env.IDEMPOTENCY_STORE || '').toLowerCase()) {
    case 'memory':
      return false;
    case 'sql':
      return true;
    default:
      return Boolean(process.env.IDEMPOTENCY_DATABASE_URL);
  }
}
// Drops expired entries from the in-memory store; called on every read so
// stale records are never replayed.
function gcExpiredMemoryRecords(now = Date.now()) {
  for (const [key, record] of memoryStore) {
    if (record.expiresAt <= now) {
      memoryStore.delete(key);
    }
  }
}
// In-memory adapter (single-process only): used for tests and local dev.
// Matches the SQL adapter's contract: write() is first-writer-wins and
// returns whichever record ends up stored.
function createMemoryAdapter() {
  return {
    async read(compositeKey) {
      gcExpiredMemoryRecords();
      return memoryStore.get(compositeKey) || null;
    },
    async write({
      compositeKey,
      payload,
      statusCode = 200,
    }) {
      const now = Date.now();
      const existing = memoryStore.get(compositeKey);
      // A live record wins: return it instead of overwriting, preserving
      // idempotency for duplicate submissions; expired records are replaced.
      if (existing && existing.expiresAt > now) {
        return existing;
      }
      const record = {
        payload,
        statusCode,
        createdAt: now,
        expiresAt: now + (DEFAULT_TTL_SECONDS * 1000),
      };
      memoryStore.set(compositeKey, record);
      return record;
    },
  };
}
/**
 * Postgres-backed adapter. Bootstraps the schema on startup (idempotent;
 * keep in sync with sql/001_command_idempotency.sql) and performs amortized
 * cleanup of expired rows every CLEANUP_EVERY_OPS read/write operations.
 *
 * Fix: write() previously used ON CONFLICT DO NOTHING, so an
 * expired-but-not-yet-cleaned row blocked the insert and the follow-up
 * SELECT (filtered on expires_at > NOW()) found nothing, making write()
 * throw even though it should have stored a fresh record. The conditional
 * DO UPDATE below overwrites only expired rows; live rows still win.
 */
async function createSqlAdapter() {
  const connectionString = process.env.IDEMPOTENCY_DATABASE_URL;
  if (!connectionString) {
    throw new Error('IDEMPOTENCY_DATABASE_URL is required for sql idempotency store');
  }
  const pool = new Pool({
    connectionString,
    max: Number.parseInt(process.env.IDEMPOTENCY_DB_POOL_MAX || '5', 10),
  });
  await pool.query(`
    CREATE TABLE IF NOT EXISTS command_idempotency (
      composite_key TEXT PRIMARY KEY,
      user_id TEXT NOT NULL,
      route TEXT NOT NULL,
      idempotency_key TEXT NOT NULL,
      status_code INTEGER NOT NULL,
      response_payload JSONB NOT NULL,
      created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
      expires_at TIMESTAMPTZ NOT NULL
    );
  `);
  await pool.query(`
    CREATE INDEX IF NOT EXISTS idx_command_idempotency_expires_at
    ON command_idempotency (expires_at);
  `);
  let opCount = 0;
  // Amortized TTL cleanup: delete expired rows once every N operations.
  async function maybeCleanupExpiredRows() {
    opCount += 1;
    if (CLEANUP_EVERY_OPS <= 0 || opCount % CLEANUP_EVERY_OPS !== 0) {
      return;
    }
    await pool.query('DELETE FROM command_idempotency WHERE expires_at <= NOW()');
  }
  // Normalizes a DB row to the adapter's { statusCode, payload } shape.
  function mapRow(row) {
    return {
      statusCode: row.status_code,
      payload: row.response_payload,
    };
  }
  return {
    // Returns the live cached record, or null when absent/expired.
    async read(compositeKey) {
      await maybeCleanupExpiredRows();
      const result = await pool.query(
        `
        SELECT status_code, response_payload
        FROM command_idempotency
        WHERE composite_key = $1
          AND expires_at > NOW()
        `,
        [compositeKey]
      );
      if (result.rowCount === 0) {
        return null;
      }
      return mapRow(result.rows[0]);
    },
    // First-writer-wins upsert: live rows are untouched (duplicate requests
    // replay the stored response); expired rows are overwritten in place.
    async write({
      compositeKey,
      userId,
      route,
      idempotencyKey,
      payload,
      statusCode = 200,
    }) {
      await maybeCleanupExpiredRows();
      const expiresAt = new Date(Date.now() + (DEFAULT_TTL_SECONDS * 1000));
      const payloadJson = JSON.stringify(payload);
      await pool.query(
        `
        INSERT INTO command_idempotency (
          composite_key,
          user_id,
          route,
          idempotency_key,
          status_code,
          response_payload,
          expires_at
        )
        VALUES ($1, $2, $3, $4, $5, $6::jsonb, $7)
        ON CONFLICT (composite_key) DO UPDATE SET
          user_id = EXCLUDED.user_id,
          route = EXCLUDED.route,
          idempotency_key = EXCLUDED.idempotency_key,
          status_code = EXCLUDED.status_code,
          response_payload = EXCLUDED.response_payload,
          created_at = NOW(),
          expires_at = EXCLUDED.expires_at
        WHERE command_idempotency.expires_at <= NOW()
        `,
        [compositeKey, userId, route, idempotencyKey, statusCode, payloadJson, expiresAt]
      );
      // Read back the winning record: either the row we just wrote or a
      // still-live record persisted by a concurrent duplicate request.
      const existingResult = await pool.query(
        `
        SELECT status_code, response_payload
        FROM command_idempotency
        WHERE composite_key = $1
          AND expires_at > NOW()
        `,
        [compositeKey]
      );
      if (existingResult.rowCount === 0) {
        throw new Error('Idempotency write failed to persist or recover existing record');
      }
      return mapRow(existingResult.rows[0]);
    },
  };
}
// Lazily selects and caches the storage adapter (promise-memoized so the
// SQL bootstrap runs at most once per process). Cleared by the test hook.
async function getAdapter() {
  if (!adapterPromise) {
    adapterPromise = shouldUseSqlStore()
      ? createSqlAdapter()
      : Promise.resolve(createMemoryAdapter());
  }
  return adapterPromise;
}
// Composite cache key scoped by actor uid and route template, so the same
// client key on different routes or users never collides.
export function buildIdempotencyKey({ userId, route, idempotencyKey }) {
  return [userId, route, idempotencyKey].join(':');
}
// Returns the cached { statusCode, payload } for a composite key, or null.
export async function readIdempotentResult(compositeKey) {
  const adapter = await getAdapter();
  return adapter.read(compositeKey);
}

// Persists a command result; returns the stored record, which may belong to
// a concurrent duplicate request (first writer wins).
export async function writeIdempotentResult({
  compositeKey,
  userId,
  route,
  idempotencyKey,
  payload,
  statusCode = 200,
}) {
  const adapter = await getAdapter();
  return adapter.write({
    compositeKey,
    userId,
    route,
    idempotencyKey,
    payload,
    statusCode,
  });
}

// Test hook: clears the in-memory store and forces adapter re-selection so
// tests can flip IDEMPOTENCY_STORE / IDEMPOTENCY_DATABASE_URL between runs.
export function __resetIdempotencyStoreForTests() {
  memoryStore.clear();
  adapterPromise = null;
}

View File

@@ -0,0 +1,5 @@
/**
 * Auth-first policy stub: any authenticated actor (non-empty uid) may do
 * anything. Action/resource are accepted now so call sites stay stable
 * when the real role map lands.
 */
export function can(action, resource, actor) {
  void action;
  void resource;
  return Boolean(actor && actor.uid);
}

View File

@@ -0,0 +1,54 @@
// Integration tests for the command API, running against the in-memory
// idempotency store with Firebase verification bypassed.
import test, { beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import request from 'supertest';
import { createApp } from '../src/app.js';
import { __resetIdempotencyStoreForTests } from '../src/services/idempotency-store.js';

// requireAuth reads this per-request, so setting it at module load suffices.
process.env.AUTH_BYPASS = 'true';

beforeEach(() => {
  // Force the memory adapter and clear any cached records between tests.
  process.env.IDEMPOTENCY_STORE = 'memory';
  delete process.env.IDEMPOTENCY_DATABASE_URL;
  __resetIdempotencyStoreForTests();
});

test('GET /healthz returns healthy response', async () => {
  const app = createApp();
  const res = await request(app).get('/healthz');
  assert.equal(res.status, 200);
  assert.equal(res.body.ok, true);
  assert.equal(typeof res.body.requestId, 'string');
});

test('command route requires idempotency key', async () => {
  const app = createApp();
  const res = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .send({ payload: {} });
  assert.equal(res.status, 400);
  assert.equal(res.body.code, 'MISSING_IDEMPOTENCY_KEY');
});

test('command route is idempotent by key', async () => {
  const app = createApp();
  // Two identical submissions with the same Idempotency-Key must replay
  // the first stored response (same commandId) rather than re-execute.
  const first = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .set('Idempotency-Key', 'abc-123')
    .send({ payload: { order: 'x' } });
  const second = await request(app)
    .post('/commands/orders/create')
    .set('Authorization', 'Bearer test-token')
    .set('Idempotency-Key', 'abc-123')
    .send({ payload: { order: 'x' } });
  assert.equal(first.status, 200);
  assert.equal(second.status, 200);
  assert.equal(first.body.commandId, second.body.commandId);
  assert.equal(first.body.idempotencyKey, 'abc-123');
});

View File

@@ -0,0 +1,56 @@
// Unit tests for the idempotency store, pinned to the in-memory adapter.
import test, { beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import {
  __resetIdempotencyStoreForTests,
  buildIdempotencyKey,
  readIdempotentResult,
  writeIdempotentResult,
} from '../src/services/idempotency-store.js';

beforeEach(() => {
  // Force the memory adapter and drop cached records/adapter selection.
  process.env.IDEMPOTENCY_STORE = 'memory';
  delete process.env.IDEMPOTENCY_DATABASE_URL;
  __resetIdempotencyStoreForTests();
});

test('buildIdempotencyKey composes user route and client key', () => {
  const key = buildIdempotencyKey({
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
  });
  assert.equal(key, 'user-1:/commands/orders/create:req-abc');
});

test('memory idempotency store returns existing payload for duplicate key', async () => {
  const compositeKey = buildIdempotencyKey({
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
  });
  const first = await writeIdempotentResult({
    compositeKey,
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
    payload: { accepted: true, commandId: 'c-1' },
    statusCode: 200,
  });
  // A second write under the same key must be ignored: first writer wins.
  const second = await writeIdempotentResult({
    compositeKey,
    userId: 'user-1',
    route: '/commands/orders/create',
    idempotencyKey: 'req-abc',
    payload: { accepted: true, commandId: 'c-2' },
    statusCode: 200,
  });
  const read = await readIdempotentResult(compositeKey);
  assert.equal(first.payload.commandId, 'c-1');
  assert.equal(second.payload.commandId, 'c-1');
  assert.equal(read.payload.commandId, 'c-1');
});

View File

@@ -0,0 +1,13 @@
# Core API runtime image: production dependencies only; app listens on 8080.
FROM node:20-alpine
WORKDIR /app
# Copy manifests first so the npm ci layer stays cached until deps change.
COPY package*.json ./
RUN npm ci --omit=dev
COPY src ./src
ENV PORT=8080
EXPOSE 8080
CMD ["node", "src/server.js"]

3004
backend/core-api/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,24 @@
{
"name": "@krow/core-api",
"version": "0.1.0",
"private": true,
"type": "module",
"engines": {
"node": ">=20"
},
"scripts": {
"start": "node src/server.js",
"test": "node --test"
},
"dependencies": {
"express": "^4.21.2",
"firebase-admin": "^13.0.2",
"multer": "^2.0.2",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"
},
"devDependencies": {
"supertest": "^7.0.0"
}
}

View File

@@ -0,0 +1,31 @@
import express from 'express';
import pino from 'pino';
import pinoHttp from 'pino-http';
import { requestContext } from './middleware/request-context.js';
import { errorHandler, notFoundHandler } from './middleware/error-handler.js';
import { healthRouter } from './routes/health.js';
import { createCoreRouter, createLegacyCoreRouter } from './routes/core.js';
// Shared structured logger; level tunable via LOG_LEVEL (default "info").
const logger = pino({ level: process.env.LOG_LEVEL || 'info' });

// Builds the core-api Express app. Middleware order matters: request
// context first (so every log line carries requestId), then HTTP logging,
// JSON body parsing, routes, and finally the 404 + error handlers.
export function createApp() {
  const app = express();
  app.use(requestContext);
  app.use(
    pinoHttp({
      logger,
      // Attach the per-request id to every pino-http log record.
      customProps: (req) => ({ requestId: req.requestId }),
    })
  );
  app.use(express.json({ limit: '2mb' }));
  app.use(healthRouter);
  app.use('/core', createCoreRouter());
  // Legacy camelCase aliases (/uploadFile, /invokeLLM, ...) mounted at the
  // root during the transition period.
  app.use('/', createLegacyCoreRouter());
  app.use(notFoundHandler);
  app.use(errorHandler);
  return app;
}

View File

@@ -0,0 +1,6 @@
import { z } from 'zod';
// Request contract for create-signed-url: a GCS URI plus an optional expiry
// between 60 and 3600 seconds (the route defaults it when omitted).
export const createSignedUrlSchema = z.object({
  fileUri: z.string().startsWith('gs://', 'fileUri must start with gs://'),
  expiresInSeconds: z.number().int().min(60).max(3600).optional(),
});

View File

@@ -0,0 +1,7 @@
import { z } from 'zod';
// Request contract for invoke-llm: bounded prompt, a required free-form
// JSON schema for the response shape, and optional file URLs.
export const invokeLlmSchema = z.object({
  prompt: z.string().min(1).max(12000),
  responseJsonSchema: z.record(z.any()),
  fileUrls: z.array(z.string().url()).optional(),
});

View File

@@ -0,0 +1,26 @@
/**
 * Application error carrying a machine-readable code, an HTTP status, and
 * optional structured details for the JSON error envelope.
 */
export class AppError extends Error {
  constructor(code, message, status = 400, details = {}) {
    super(message);
    this.name = 'AppError';
    Object.assign(this, { code, status, details });
  }
}
/**
 * Normalizes any thrown value into a { status, body } pair for the JSON
 * error envelope. Anything without a usable (non-zero integer) status maps
 * to 500, and missing fields fall back to generic INTERNAL_ERROR values.
 */
export function toErrorEnvelope(error, requestId) {
  let status = 500;
  if (error && Number.isInteger(error.status) && error.status) {
    status = error.status;
  }
  return {
    status,
    body: {
      code: error?.code || 'INTERNAL_ERROR',
      message: error?.message || 'Unexpected error',
      details: error?.details || {},
      requestId,
    },
  };
}

View File

@@ -0,0 +1,45 @@
import { AppError } from '../lib/errors.js';
import { can } from '../services/policy.js';
import { verifyFirebaseToken } from '../services/firebase-auth.js';
/**
 * Extracts the token from an "Authorization: Bearer <token>" header.
 * Scheme matching is case-insensitive and repeated whitespace between the
 * scheme and token is tolerated (RFC 9110 allows flexible whitespace);
 * headers with embedded spaces in the token, a missing token, or a
 * non-Bearer scheme return null. Previously "Bearer a b" leaked "a" as the
 * token and "Bearer  x" (double space) was rejected.
 */
function getBearerToken(header) {
  if (!header) return null;
  const match = /^bearer\s+(\S+)$/i.exec(header.trim());
  return match ? match[1] : null;
}
/**
 * Express middleware that authenticates the request and attaches
 * req.actor = { uid, email, role }. Fails with 401 (via next(AppError))
 * when the bearer token is missing or verification fails.
 *
 * NOTE(review): AUTH_BYPASS=true skips Firebase verification and injects a
 * fixed test actor; some bearer token must still be present. This flag must
 * never be set in production environments.
 */
export async function requireAuth(req, _res, next) {
  try {
    const token = getBearerToken(req.get('Authorization'));
    if (!token) {
      throw new AppError('UNAUTHENTICATED', 'Missing bearer token', 401);
    }
    // Bypass is checked only after the token-presence check so "missing
    // token" behaves identically with and without the bypass.
    if (process.env.AUTH_BYPASS === 'true') {
      req.actor = { uid: 'test-user', email: 'test@krow.local', role: 'TEST' };
      return next();
    }
    const decoded = await verifyFirebaseToken(token);
    req.actor = {
      uid: decoded.uid,
      email: decoded.email || null,
      role: decoded.role || null,
    };
    return next();
  } catch (error) {
    // Preserve deliberate AppErrors; collapse verifier failures to a
    // generic 401 so internal error details are not leaked to clients.
    if (error instanceof AppError) return next(error);
    return next(new AppError('UNAUTHENTICATED', 'Token verification failed', 401));
  }
}
/**
 * Builds middleware that rejects the request with 403 unless the policy
 * layer allows `action` on `resource` for the authenticated actor.
 */
export function requirePolicy(action, resource) {
  return (req, _res, next) => {
    if (can(action, resource, req.actor)) {
      return next();
    }
    return next(new AppError('FORBIDDEN', 'Not allowed to perform this action', 403));
  };
}

View File

@@ -0,0 +1,25 @@
import { toErrorEnvelope } from '../lib/errors.js';
/**
 * Terminal 404 handler: emits the standard error envelope for any route no
 * earlier middleware matched.
 */
export function notFoundHandler(req, res) {
  const body = {
    code: 'NOT_FOUND',
    message: `Route not found: ${req.method} ${req.path}`,
    details: {},
    requestId: req.requestId,
  };
  res.status(404).json(body);
}
/**
 * Final Express error handler (the 4-argument signature is what makes
 * Express treat it as one). Converts the error to the shared envelope, logs
 * it on the per-request logger when present, and sends the JSON response.
 */
export function errorHandler(error, req, res, _next) {
  const envelope = toErrorEnvelope(error, req.requestId);
  // req.log is attached by pino-http; guard for contexts that skip it.
  if (req.log) {
    req.log.error(
      {
        errCode: envelope.body.code,
        status: envelope.status,
        details: envelope.body.details,
      },
      envelope.body.message
    );
  }
  res.status(envelope.status).json(envelope.body);
}

View File

@@ -0,0 +1,9 @@
import { randomUUID } from 'node:crypto';
/**
 * Assigns every request a correlation id: reuses the caller-supplied
 * X-Request-Id when present, otherwise generates a UUID. The id is echoed
 * back on the response and a start timestamp is recorded for latency use.
 */
export function requestContext(req, res, next) {
  const requestId = req.get('X-Request-Id') || randomUUID();
  req.requestId = requestId;
  res.setHeader('X-Request-Id', requestId);
  res.locals.startedAt = Date.now();
  next();
}

View File

@@ -0,0 +1,141 @@
import { Router } from 'express';
import multer from 'multer';
import { z } from 'zod';
import { AppError } from '../lib/errors.js';
import { requireAuth, requirePolicy } from '../middleware/auth.js';
import { createSignedUrlSchema } from '../contracts/core/create-signed-url.js';
import { invokeLlmSchema } from '../contracts/core/invoke-llm.js';
// Upload constraints: 10 MiB default cap (overridable via MAX_UPLOAD_BYTES)
// and an allow-list of document/image MIME types.
const DEFAULT_MAX_FILE_BYTES = 10 * 1024 * 1024;
const ALLOWED_FILE_TYPES = new Set(['application/pdf', 'image/jpeg', 'image/jpg', 'image/png']);
// Memory storage: files are buffered in RAM, so the size limit above also
// bounds per-request memory use.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: {
    fileSize: Number(process.env.MAX_UPLOAD_BYTES || DEFAULT_MAX_FILE_BYTES),
  },
});
// Optional metadata fields accepted alongside the multipart file.
const uploadMetaSchema = z.object({
  category: z.string().max(80).optional(),
  visibility: z.enum(['public', 'private']).optional(),
});
// Placeholder signer: returns a deterministic mock URL and an ISO expiry
// timestamp until real GCS V4 signing is wired in.
function mockSignedUrl(fileUri, expiresInSeconds) {
  const expiresAt = new Date(Date.now() + expiresInSeconds * 1000).toISOString();
  const signedUrl = `https://storage.googleapis.com/mock-signed-url/${encodeURIComponent(fileUri)}?expires=${expiresInSeconds}`;
  return { signedUrl, expiresAt };
}
// Validates `body` against the given zod schema; throws a 400 AppError
// carrying the zod issues on failure, otherwise returns the parsed data.
function parseBody(schema, body) {
  const parsed = schema.safeParse(body);
  if (!parsed.success) {
    throw new AppError('VALIDATION_ERROR', 'Invalid request payload', 400, {
      issues: parsed.error.issues,
    });
  }
  return parsed.data;
}
/**
 * Accepts a single multipart file (field "file"), validates type and size,
 * and responds with the destination bucket/path plus a gs:// URI.
 *
 * NOTE(review): nothing is written to Cloud Storage here — the buffered
 * file is discarded and only the computed destination is returned. Confirm
 * the actual upload gets wired in before relying on the returned fileUri.
 */
async function handleUploadFile(req, res, next) {
  try {
    const file = req.file;
    if (!file) {
      throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
    }
    if (!ALLOWED_FILE_TYPES.has(file.mimetype)) {
      throw new AppError('INVALID_FILE_TYPE', `Unsupported file type: ${file.mimetype}`, 400);
    }
    // Defense in depth: multer enforces the same cap, but re-check here so
    // a config drift still yields a structured error.
    const maxFileSize = Number(process.env.MAX_UPLOAD_BYTES || DEFAULT_MAX_FILE_BYTES);
    if (file.size > maxFileSize) {
      throw new AppError('FILE_TOO_LARGE', `File exceeds ${maxFileSize} bytes`, 400);
    }
    const meta = parseBody(uploadMetaSchema, req.body || {});
    const visibility = meta.visibility || 'private';
    // Public vs private assets live in separate buckets.
    const bucket = visibility === 'public'
      ? process.env.PUBLIC_BUCKET || 'krow-workforce-dev-public'
      : process.env.PRIVATE_BUCKET || 'krow-workforce-dev-private';
    // Sanitize the client-supplied filename and namespace by uploader uid.
    const safeName = file.originalname.replace(/[^a-zA-Z0-9._-]/g, '_');
    const objectPath = `uploads/${req.actor.uid}/${Date.now()}_${safeName}`;
    res.status(200).json({
      fileUri: `gs://${bucket}/${objectPath}`,
      contentType: file.mimetype,
      size: file.size,
      bucket,
      path: objectPath,
      requestId: req.requestId,
    });
  } catch (error) {
    // Multer aborts oversized uploads with LIMIT_FILE_SIZE; map it onto
    // the same envelope as the manual size check above.
    if (error?.code === 'LIMIT_FILE_SIZE') {
      return next(new AppError('FILE_TOO_LARGE', 'File exceeds upload limit', 400));
    }
    return next(error);
  }
}
// Validates { fileUri, expiresInSeconds? } and responds with a (mock)
// signed URL. Default expiry: 300 s (schema bounds: 60-3600 when provided).
async function handleCreateSignedUrl(req, res, next) {
  try {
    const payload = parseBody(createSignedUrlSchema, req.body || {});
    const expiresInSeconds = payload.expiresInSeconds || 300;
    const signed = mockSignedUrl(payload.fileUri, expiresInSeconds);
    res.status(200).json({
      ...signed,
      requestId: req.requestId,
    });
  } catch (error) {
    return next(error);
  }
}
/**
 * Validates an LLM invocation request and returns a mocked model response.
 * Mocking is the default; only LLM_MOCK=false opts into the (not yet
 * implemented) real provider path, which fails with 501 MODEL_FAILED.
 */
async function handleInvokeLlm(req, res, next) {
  // Start the clock at handler entry so latencyMs covers the whole request;
  // previously it was read just before responding, making latencyMs always
  // ~0 and useless even as a placeholder metric.
  const startedAt = Date.now();
  try {
    const payload = parseBody(invokeLlmSchema, req.body || {});
    if (process.env.LLM_MOCK === 'false') {
      throw new AppError('MODEL_FAILED', 'Real model integration not wired yet', 501);
    }
    res.status(200).json({
      result: {
        summary: 'Mock model response. Replace with Vertex AI integration.',
        inputPromptSize: payload.prompt.length,
      },
      model: process.env.LLM_MODEL || 'vertexai/gemini-mock',
      latencyMs: Date.now() - startedAt,
      requestId: req.requestId,
    });
  } catch (error) {
    return next(error);
  }
}
// Canonical kebab-case routes under /core. The multer parser runs after
// auth/policy, so unauthenticated requests are rejected before any file
// bytes are buffered.
export function createCoreRouter() {
  const router = Router();
  router.post('/upload-file', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleUploadFile);
  router.post('/create-signed-url', requireAuth, requirePolicy('core.sign-url', 'file'), handleCreateSignedUrl);
  router.post('/invoke-llm', requireAuth, requirePolicy('core.invoke-llm', 'model'), handleInvokeLlm);
  return router;
}
// Legacy camelCase aliases mounted at the root for transition-period
// clients; same middleware chain and handlers as the /core routes.
export function createLegacyCoreRouter() {
  const router = Router();
  router.post('/uploadFile', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleUploadFile);
  router.post('/createSignedUrl', requireAuth, requirePolicy('core.sign-url', 'file'), handleCreateSignedUrl);
  router.post('/invokeLLM', requireAuth, requirePolicy('core.invoke-llm', 'model'), handleInvokeLlm);
  return router;
}

View File

@@ -0,0 +1,12 @@
import { Router } from 'express';
// Unauthenticated liveness endpoint used by health checks and the
// backend-smoke-* make targets.
export const healthRouter = Router();
healthRouter.get('/healthz', (req, res) => {
  res.status(200).json({
    ok: true,
    service: 'krow-core-api',
    // SERVICE_VERSION is expected to be injected at deploy time.
    version: process.env.SERVICE_VERSION || 'dev',
    requestId: req.requestId,
  });
});

View File

@@ -0,0 +1,9 @@
// Process entrypoint: binds the Express app to PORT (set by the runtime
// environment; defaults to 8080 locally).
import { createApp } from './app.js';

const port = Number(process.env.PORT || 8080);
const app = createApp();
app.listen(port, () => {
  // eslint-disable-next-line no-console
  console.log(`krow-core-api listening on port ${port}`);
});

View File

@@ -0,0 +1,13 @@
import { applicationDefault, getApps, initializeApp } from 'firebase-admin/app';
import { getAuth } from 'firebase-admin/auth';

// Lazily initializes the default firebase-admin app using Application
// Default Credentials (the runtime service account).
function ensureAdminApp() {
  if (getApps().length === 0) {
    initializeApp({ credential: applicationDefault() });
  }
}

// Verifies a Firebase ID token and resolves to its decoded claims.
// Rejects on invalid/expired tokens; callers map that to a 401.
export async function verifyFirebaseToken(token) {
  ensureAdminApp();
  return getAuth().verifyIdToken(token);
}

View File

@@ -0,0 +1,5 @@
/**
 * Auth-first policy stub: any authenticated actor (non-empty uid) may do
 * anything. Action/resource are accepted now so call sites stay stable
 * when the real role map lands.
 */
export function can(action, resource, actor) {
  void action;
  void resource;
  return Boolean(actor && actor.uid);
}

View File

@@ -0,0 +1,61 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import request from 'supertest';
import { createApp } from '../src/app.js';
process.env.AUTH_BYPASS = 'true';
process.env.LLM_MOCK = 'true';
test('GET /healthz returns healthy response', async () => {
  const app = createApp();
  const res = await request(app).get('/healthz');
  assert.equal(res.status, 200);
  assert.equal(res.body.ok, true);
  assert.equal(typeof res.body.requestId, 'string');
  // requestContext must also echo the correlation id as a response header.
  assert.equal(typeof res.headers['x-request-id'], 'string');
});
// With the auth bypass disabled, the route must reject an anonymous
// caller with 401 UNAUTHENTICATED.
test('POST /core/create-signed-url requires auth', async () => {
  process.env.AUTH_BYPASS = 'false';
  // try/finally guarantees AUTH_BYPASS is restored even when an
  // assertion throws; previously a failure here left it 'false' and
  // broke every subsequent authenticated test.
  try {
    const app = createApp();
    const res = await request(app).post('/core/create-signed-url').send({
      fileUri: 'gs://krow-workforce-dev-private/foo.pdf',
    });
    assert.equal(res.status, 401);
    assert.equal(res.body.code, 'UNAUTHENTICATED');
  } finally {
    process.env.AUTH_BYPASS = 'true';
  }
});
// With auth bypassed, signing a private-bucket object should yield a
// signed URL plus its expiry and the correlating request id.
test('POST /core/create-signed-url returns signed URL', async () => {
  const payload = {
    fileUri: 'gs://krow-workforce-dev-private/foo.pdf',
    expiresInSeconds: 300,
  };
  const res = await request(createApp())
    .post('/core/create-signed-url')
    .set('Authorization', 'Bearer test-token')
    .send(payload);
  assert.equal(res.status, 200);
  for (const field of ['signedUrl', 'expiresAt', 'requestId']) {
    assert.equal(typeof res.body[field], 'string');
  }
});
// The camelCase /invokeLLM alias must behave like its kebab-case
// counterpart, returning the (mocked) structured result and the model
// name that produced it.
test('POST /invokeLLM legacy alias works', async () => {
  const body = {
    prompt: 'hello',
    responseJsonSchema: { type: 'object' },
    fileUrls: [],
  };
  const res = await request(createApp())
    .post('/invokeLLM')
    .set('Authorization', 'Bearer test-token')
    .send(body);
  assert.equal(res.status, 200);
  assert.equal(typeof res.body.result, 'object');
  assert.equal(typeof res.body.model, 'string');
});

169
makefiles/backend.mk Normal file
View File

@@ -0,0 +1,169 @@
# --- Backend Foundation (Cloud Run + Workers) ---
# Region and Artifact Registry repo shared by all backend images.
BACKEND_REGION ?= us-central1
BACKEND_ARTIFACT_REPO ?= krow-backend
# Cloud Run service names for the two HTTP APIs.
BACKEND_CORE_SERVICE_NAME ?= krow-core-api
BACKEND_COMMAND_SERVICE_NAME ?= krow-command-api
# Runtime service account; the email is derived from GCP_PROJECT_ID,
# which is expected to be defined by the including Makefile.
BACKEND_RUNTIME_SA_NAME ?= krow-backend-runtime
BACKEND_RUNTIME_SA_EMAIL := $(BACKEND_RUNTIME_SA_NAME)@$(GCP_PROJECT_ID).iam.gserviceaccount.com
# Source directories for each deployable unit.
BACKEND_CORE_DIR ?= backend/core-api
BACKEND_COMMAND_DIR ?= backend/command-api
BACKEND_WORKERS_DIR ?= backend/cloud-functions
# Per-environment GCS bucket names.
BACKEND_DEV_PUBLIC_BUCKET ?= krow-workforce-dev-public
BACKEND_DEV_PRIVATE_BUCKET ?= krow-workforce-dev-private
BACKEND_STAGING_PUBLIC_BUCKET ?= krow-workforce-staging-public
BACKEND_STAGING_PRIVATE_BUCKET ?= krow-workforce-staging-private
# Bucket selection by ENV. NOTE(review): any ENV other than "staging"
# (including unset or a future "prod") silently falls through to the
# dev buckets — confirm this is intended before adding environments.
ifeq ($(ENV),staging)
BACKEND_PUBLIC_BUCKET := $(BACKEND_STAGING_PUBLIC_BUCKET)
BACKEND_PRIVATE_BUCKET := $(BACKEND_STAGING_PRIVATE_BUCKET)
else
BACKEND_PUBLIC_BUCKET := $(BACKEND_DEV_PUBLIC_BUCKET)
BACKEND_PRIVATE_BUCKET := $(BACKEND_DEV_PRIVATE_BUCKET)
endif
# Fully-qualified image references pushed by Cloud Build (tag: latest).
BACKEND_CORE_IMAGE ?= $(BACKEND_REGION)-docker.pkg.dev/$(GCP_PROJECT_ID)/$(BACKEND_ARTIFACT_REPO)/core-api:latest
BACKEND_COMMAND_IMAGE ?= $(BACKEND_REGION)-docker.pkg.dev/$(GCP_PROJECT_ID)/$(BACKEND_ARTIFACT_REPO)/command-api:latest
# Number of log entries fetched by backend-logs-core.
BACKEND_LOG_LIMIT ?= 100
.PHONY: backend-help backend-enable-apis backend-bootstrap-dev backend-migrate-idempotency backend-deploy-core backend-deploy-commands backend-deploy-workers backend-smoke-core backend-smoke-commands backend-logs-core
# Print the cheat-sheet of backend make targets.
backend-help:
@echo "--> Backend Foundation Commands"
@echo " make backend-enable-apis [ENV=dev] Enable Cloud Run/Functions/Build/Secret APIs"
@echo " make backend-bootstrap-dev Bootstrap artifact repo, runtime SA, and buckets"
@echo " make backend-migrate-idempotency Create/upgrade idempotency table in Cloud SQL"
@echo " make backend-deploy-core [ENV=dev] Build + deploy core API service"
@echo " make backend-deploy-commands [ENV=dev] Build + deploy command API service"
@echo " make backend-deploy-workers [ENV=dev] Deploy worker scaffold"
@echo " make backend-smoke-core [ENV=dev] Smoke test core /healthz"
@echo " make backend-smoke-commands [ENV=dev] Smoke test commands /healthz"
@echo " make backend-logs-core [ENV=dev] Read core service logs"
# Enable every Google Cloud API the backend stack depends on, one
# `gcloud services enable` call per API. Idempotent; safe to re-run.
backend-enable-apis:
@echo "--> Enabling backend APIs on project [$(GCP_PROJECT_ID)]..."
@for api in \
run.googleapis.com \
cloudbuild.googleapis.com \
artifactregistry.googleapis.com \
secretmanager.googleapis.com \
cloudfunctions.googleapis.com \
eventarc.googleapis.com \
storage.googleapis.com \
iam.googleapis.com \
serviceusage.googleapis.com \
firebase.googleapis.com; do \
echo " - $$api"; \
gcloud services enable $$api --project=$(GCP_PROJECT_ID); \
done
@echo "✅ Backend APIs enabled."
# Idempotently provision the backend foundation: Artifact Registry
# repo, runtime service account, and the public/private GCS buckets
# selected by ENV. Depends on backend-enable-apis.
backend-bootstrap-dev: backend-enable-apis
@echo "--> Bootstrapping backend foundation for [$(ENV)] on project [$(GCP_PROJECT_ID)]..."
@echo "--> Ensuring Artifact Registry repo [$(BACKEND_ARTIFACT_REPO)] exists..."
@if ! gcloud artifacts repositories describe $(BACKEND_ARTIFACT_REPO) --location=$(BACKEND_REGION) --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
gcloud artifacts repositories create $(BACKEND_ARTIFACT_REPO) \
--repository-format=docker \
--location=$(BACKEND_REGION) \
--description="KROW backend services" \
--project=$(GCP_PROJECT_ID); \
else \
echo " - Artifact Registry repo already exists."; \
fi
@echo "--> Ensuring runtime service account [$(BACKEND_RUNTIME_SA_NAME)] exists..."
@if ! gcloud iam service-accounts describe $(BACKEND_RUNTIME_SA_EMAIL) --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
gcloud iam service-accounts create $(BACKEND_RUNTIME_SA_NAME) \
--display-name="KROW Backend Runtime" \
--project=$(GCP_PROJECT_ID); \
else \
echo " - Runtime service account already exists."; \
fi
@echo "--> Ensuring storage buckets exist..."
@if ! gcloud storage buckets describe gs://$(BACKEND_PUBLIC_BUCKET) --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
gcloud storage buckets create gs://$(BACKEND_PUBLIC_BUCKET) --location=$(BACKEND_REGION) --project=$(GCP_PROJECT_ID); \
else \
echo " - Public bucket already exists: $(BACKEND_PUBLIC_BUCKET)"; \
fi
@if ! gcloud storage buckets describe gs://$(BACKEND_PRIVATE_BUCKET) --project=$(GCP_PROJECT_ID) >/dev/null 2>&1; then \
gcloud storage buckets create gs://$(BACKEND_PRIVATE_BUCKET) --location=$(BACKEND_REGION) --project=$(GCP_PROJECT_ID); \
else \
echo " - Private bucket already exists: $(BACKEND_PRIVATE_BUCKET)"; \
fi
@echo "✅ Backend foundation bootstrap complete for [$(ENV)]."
# Apply the idempotency-table migration via the command-api's npm
# script. Fails fast when IDEMPOTENCY_DATABASE_URL is not provided.
backend-migrate-idempotency:
@echo "--> Applying idempotency table migration..."
@test -n "$(IDEMPOTENCY_DATABASE_URL)" || (echo "❌ IDEMPOTENCY_DATABASE_URL is required" && exit 1)
@cd $(BACKEND_COMMAND_DIR) && IDEMPOTENCY_DATABASE_URL="$(IDEMPOTENCY_DATABASE_URL)" npm run migrate:idempotency
@echo "✅ Idempotency migration applied."
# Build the core API image with Cloud Build and deploy it to Cloud Run
# under the backend runtime service account, with environment/bucket
# config injected and unauthenticated access disabled.
backend-deploy-core:
@echo "--> Deploying core backend service [$(BACKEND_CORE_SERVICE_NAME)] to [$(ENV)]..."
@test -d $(BACKEND_CORE_DIR) || (echo "❌ Missing directory: $(BACKEND_CORE_DIR)" && exit 1)
@test -f $(BACKEND_CORE_DIR)/Dockerfile || (echo "❌ Missing Dockerfile: $(BACKEND_CORE_DIR)/Dockerfile" && exit 1)
@gcloud builds submit $(BACKEND_CORE_DIR) --tag $(BACKEND_CORE_IMAGE) --project=$(GCP_PROJECT_ID)
@gcloud run deploy $(BACKEND_CORE_SERVICE_NAME) \
--image=$(BACKEND_CORE_IMAGE) \
--region=$(BACKEND_REGION) \
--project=$(GCP_PROJECT_ID) \
--service-account=$(BACKEND_RUNTIME_SA_EMAIL) \
--set-env-vars=APP_ENV=$(ENV),GCP_PROJECT_ID=$(GCP_PROJECT_ID),PUBLIC_BUCKET=$(BACKEND_PUBLIC_BUCKET),PRIVATE_BUCKET=$(BACKEND_PRIVATE_BUCKET) \
--no-allow-unauthenticated
@echo "✅ Core backend service deployed."
# Build the command API image with Cloud Build and deploy it to Cloud
# Run; mirrors backend-deploy-core but for the command service.
backend-deploy-commands:
@echo "--> Deploying command backend service [$(BACKEND_COMMAND_SERVICE_NAME)] to [$(ENV)]..."
@test -d $(BACKEND_COMMAND_DIR) || (echo "❌ Missing directory: $(BACKEND_COMMAND_DIR)" && exit 1)
@test -f $(BACKEND_COMMAND_DIR)/Dockerfile || (echo "❌ Missing Dockerfile: $(BACKEND_COMMAND_DIR)/Dockerfile" && exit 1)
@gcloud builds submit $(BACKEND_COMMAND_DIR) --tag $(BACKEND_COMMAND_IMAGE) --project=$(GCP_PROJECT_ID)
@gcloud run deploy $(BACKEND_COMMAND_SERVICE_NAME) \
--image=$(BACKEND_COMMAND_IMAGE) \
--region=$(BACKEND_REGION) \
--project=$(GCP_PROJECT_ID) \
--service-account=$(BACKEND_RUNTIME_SA_EMAIL) \
--set-env-vars=APP_ENV=$(ENV),GCP_PROJECT_ID=$(GCP_PROJECT_ID),PUBLIC_BUCKET=$(BACKEND_PUBLIC_BUCKET),PRIVATE_BUCKET=$(BACKEND_PRIVATE_BUCKET) \
--no-allow-unauthenticated
@echo "✅ Command backend service deployed."
# Scaffold-only worker deployment: errors when the workers directory is
# missing, and short-circuits when it contains nothing but a .keep
# placeholder. NOTE(review): each recipe line runs in its own shell, so
# the `exit 0` below only ends that line — the scaffold warnings that
# follow still print even on the "skip" path; confirm that is intended.
backend-deploy-workers:
@echo "--> Deploying worker scaffold for [$(ENV)]..."
@if [ ! -d "$(BACKEND_WORKERS_DIR)" ]; then \
echo "❌ Missing directory: $(BACKEND_WORKERS_DIR)"; \
exit 1; \
fi
@if [ -z "$$(find $(BACKEND_WORKERS_DIR) -mindepth 1 ! -name '.keep' -print -quit)" ]; then \
echo "⚠️ No worker code found in $(BACKEND_WORKERS_DIR). Skipping deployment."; \
exit 0; \
fi
@echo "⚠️ Worker deployment is scaffold-only for now."
@echo " Add concrete worker deployment commands once worker code is introduced."
# Smoke-test the deployed core service: resolve its Cloud Run URL and
# curl /healthz with a caller identity token (service requires auth).
backend-smoke-core:
@echo "--> Running core smoke check..."
@URL=$$(gcloud run services describe $(BACKEND_CORE_SERVICE_NAME) --region=$(BACKEND_REGION) --project=$(GCP_PROJECT_ID) --format='value(status.url)'); \
if [ -z "$$URL" ]; then \
echo "❌ Could not resolve URL for service $(BACKEND_CORE_SERVICE_NAME)"; \
exit 1; \
fi; \
TOKEN=$$(gcloud auth print-identity-token); \
curl -fsS -H "Authorization: Bearer $$TOKEN" "$$URL/healthz" >/dev/null && echo "✅ Core smoke check passed: $$URL/healthz"
# Smoke-test the deployed command service; mirrors backend-smoke-core.
backend-smoke-commands:
@echo "--> Running commands smoke check..."
@URL=$$(gcloud run services describe $(BACKEND_COMMAND_SERVICE_NAME) --region=$(BACKEND_REGION) --project=$(GCP_PROJECT_ID) --format='value(status.url)'); \
if [ -z "$$URL" ]; then \
echo "❌ Could not resolve URL for service $(BACKEND_COMMAND_SERVICE_NAME)"; \
exit 1; \
fi; \
TOKEN=$$(gcloud auth print-identity-token); \
curl -fsS -H "Authorization: Bearer $$TOKEN" "$$URL/healthz" >/dev/null && echo "✅ Commands smoke check passed: $$URL/healthz"
# Read the most recent BACKEND_LOG_LIMIT log entries for the core
# Cloud Run service.
backend-logs-core:
@echo "--> Reading logs for core backend service [$(BACKEND_CORE_SERVICE_NAME)]..."
@gcloud run services logs read $(BACKEND_CORE_SERVICE_NAME) \
--region=$(BACKEND_REGION) \
--project=$(GCP_PROJECT_ID) \
--limit=$(BACKEND_LOG_LIMIT)