feat(api): complete unified v2 mobile surface

This commit is contained in:
zouantchaw
2026-03-13 17:02:24 +01:00
parent 817a39e305
commit b455455a49
39 changed files with 7726 additions and 506 deletions

View File

@@ -13,6 +13,7 @@
"firebase-admin": "^13.0.2",
"google-auth-library": "^9.15.1",
"multer": "^2.0.2",
"pg": "^8.20.0",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"
@@ -2037,6 +2038,95 @@
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"license": "MIT"
},
"node_modules/pg": {
"version": "8.20.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz",
"integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==",
"license": "MIT",
"dependencies": {
"pg-connection-string": "^2.12.0",
"pg-pool": "^3.13.0",
"pg-protocol": "^1.13.0",
"pg-types": "2.2.0",
"pgpass": "1.0.5"
},
"engines": {
"node": ">= 16.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.3.0"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
"integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==",
"license": "MIT",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.12.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz",
"integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==",
"license": "MIT"
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"license": "ISC",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.13.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz",
"integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==",
"license": "MIT",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.13.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz",
"integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==",
"license": "MIT"
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"license": "MIT",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"license": "MIT",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/pino": {
"version": "9.14.0",
"resolved": "https://registry.npmjs.org/pino/-/pino-9.14.0.tgz",
@@ -2086,6 +2176,45 @@
"integrity": "sha512-BndPH67/JxGExRgiX1dX0w1FvZck5Wa4aal9198SrRhZjH3GxKQUKIBnYJTdj2HDN3UQAS06HlfcSbQj2OHmaw==",
"license": "MIT"
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz",
"integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"license": "MIT",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/process-warning": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz",

View File

@@ -16,6 +16,7 @@
"firebase-admin": "^13.0.2",
"google-auth-library": "^9.15.1",
"multer": "^2.0.2",
"pg": "^8.20.0",
"pino": "^9.6.0",
"pino-http": "^10.3.0",
"zod": "^3.24.2"

View File

@@ -24,6 +24,12 @@ import {
retryVerificationJob,
reviewVerificationJob,
} from '../services/verification-jobs.js';
import {
deleteCertificate,
uploadCertificate,
uploadProfilePhoto,
uploadStaffDocument,
} from '../services/mobile-upload.js';
// Default cap on uploaded file size: 10 MiB.
const DEFAULT_MAX_FILE_BYTES = 10 * 1024 * 1024;
// Default signed-URL lifetime: 900 seconds (15 minutes).
const DEFAULT_MAX_SIGNED_URL_SECONDS = 900;
@@ -56,6 +62,14 @@ const uploadMetaSchema = z.object({
visibility: z.enum(['public', 'private']).optional(),
});
// Metadata fields accepted alongside a certificate upload (multipart form
// body); expiresAt, when present, must be an ISO-8601 datetime string.
const certificateUploadMetaSchema = z.object({
certificateType: z.string().min(1).max(120),
name: z.string().min(1).max(160),
issuer: z.string().max(160).optional(),
certificateNumber: z.string().max(160).optional(),
expiresAt: z.string().datetime().optional(),
});
function mockSignedUrl(fileUri, expiresInSeconds) {
const encoded = encodeURIComponent(fileUri);
const expiresAt = new Date(Date.now() + expiresInSeconds * 1000).toISOString();
@@ -292,7 +306,7 @@ async function handleCreateVerification(req, res, next) {
});
}
const created = createVerificationJob({
const created = await createVerificationJob({
actorUid: req.actor.uid,
payload,
});
@@ -305,10 +319,107 @@ async function handleCreateVerification(req, res, next) {
}
}
/**
 * Route handler: uploads the authenticated staff member's profile photo.
 * Expects a multipart "file" field; responds with the upload service result
 * plus the request id, or forwards the error to the Express error middleware.
 */
async function handleProfilePhotoUpload(req, res, next) {
  try {
    if (!req.file) {
      throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
    }
    const uploadResult = await uploadProfilePhoto({
      actorUid: req.actor.uid,
      file: req.file,
    });
    return res.status(200).json({ ...uploadResult, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
/**
 * Route handler: uploads a staff copy of a tenant document
 * (POST /staff/documents/:documentId/upload). Storage, DB upsert, and
 * verification-job creation are delegated to uploadStaffDocument with
 * routeType 'document'.
 */
async function handleDocumentUpload(req, res, next) {
try {
const file = req.file;
if (!file) {
throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
}
const result = await uploadStaffDocument({
actorUid: req.actor.uid,
documentId: req.params.documentId,
file,
routeType: 'document',
});
return res.status(200).json({
...result,
requestId: req.requestId,
});
} catch (error) {
return next(error);
}
}
/**
 * Route handler: uploads an attire photo for a staff member
 * (POST /staff/attire/:documentId/upload). Same flow as the document upload
 * handler but tagged with routeType 'attire'.
 */
async function handleAttireUpload(req, res, next) {
  try {
    if (!req.file) {
      throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
    }
    const uploadResult = await uploadStaffDocument({
      actorUid: req.actor.uid,
      documentId: req.params.documentId,
      file: req.file,
      routeType: 'attire',
    });
    return res.status(200).json({ ...uploadResult, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
/**
 * Route handler: uploads a staff certificate
 * (POST /staff/certificates/upload). Requires a multipart "file" plus form
 * metadata validated by certificateUploadMetaSchema before delegating to
 * uploadCertificate.
 */
async function handleCertificateUpload(req, res, next) {
try {
const file = req.file;
if (!file) {
throw new AppError('INVALID_FILE', 'Missing file in multipart form data', 400);
}
// Validate the accompanying form fields before touching storage.
const payload = parseBody(certificateUploadMetaSchema, req.body || {});
const result = await uploadCertificate({
actorUid: req.actor.uid,
file,
payload,
});
return res.status(200).json({
...result,
requestId: req.requestId,
});
} catch (error) {
return next(error);
}
}
/**
 * Route handler: deletes a staff certificate by type
 * (DELETE /staff/certificates/:certificateType).
 */
async function handleCertificateDelete(req, res, next) {
  const { certificateType } = req.params;
  try {
    const outcome = await deleteCertificate({
      actorUid: req.actor.uid,
      certificateType,
    });
    return res.status(200).json({ ...outcome, requestId: req.requestId });
  } catch (error) {
    return next(error);
  }
}
async function handleGetVerification(req, res, next) {
try {
const verificationId = req.params.verificationId;
const job = getVerificationJob(verificationId, req.actor.uid);
const job = await getVerificationJob(verificationId, req.actor.uid);
return res.status(200).json({
...job,
requestId: req.requestId,
@@ -322,7 +433,7 @@ async function handleReviewVerification(req, res, next) {
try {
const verificationId = req.params.verificationId;
const payload = parseBody(reviewVerificationSchema, req.body || {});
const updated = reviewVerificationJob(verificationId, req.actor.uid, payload);
const updated = await reviewVerificationJob(verificationId, req.actor.uid, payload);
return res.status(200).json({
...updated,
requestId: req.requestId,
@@ -335,7 +446,7 @@ async function handleReviewVerification(req, res, next) {
async function handleRetryVerification(req, res, next) {
try {
const verificationId = req.params.verificationId;
const updated = retryVerificationJob(verificationId, req.actor.uid);
const updated = await retryVerificationJob(verificationId, req.actor.uid);
return res.status(202).json({
...updated,
requestId: req.requestId,
@@ -353,6 +464,11 @@ export function createCoreRouter() {
router.post('/invoke-llm', requireAuth, requirePolicy('core.invoke-llm', 'model'), handleInvokeLlm);
router.post('/rapid-orders/transcribe', requireAuth, requirePolicy('core.rapid-order.transcribe', 'model'), handleRapidOrderTranscribe);
router.post('/rapid-orders/parse', requireAuth, requirePolicy('core.rapid-order.parse', 'model'), handleRapidOrderParse);
router.post('/staff/profile/photo', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleProfilePhotoUpload);
router.post('/staff/documents/:documentId/upload', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleDocumentUpload);
router.post('/staff/attire/:documentId/upload', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleAttireUpload);
router.post('/staff/certificates/upload', requireAuth, requirePolicy('core.upload', 'file'), upload.single('file'), handleCertificateUpload);
router.delete('/staff/certificates/:certificateType', requireAuth, requirePolicy('core.upload', 'file'), handleCertificateDelete);
router.post('/verifications', requireAuth, requirePolicy('core.verification.create', 'verification'), handleCreateVerification);
router.get('/verifications/:verificationId', requireAuth, requirePolicy('core.verification.read', 'verification'), handleGetVerification);
router.post('/verifications/:verificationId/review', requireAuth, requirePolicy('core.verification.review', 'verification'), handleReviewVerification);

View File

@@ -1,4 +1,5 @@
import { Router } from 'express';
import { checkDatabaseHealth, isDatabaseConfigured } from '../services/db.js';
export const healthRouter = Router();
@@ -13,3 +14,31 @@ function healthHandler(req, res) {
healthRouter.get('/health', healthHandler);
healthRouter.get('/healthz', healthHandler);
// Readiness probe: unlike /health and /healthz, this also verifies database
// connectivity, so orchestrators can gate traffic on it.
healthRouter.get('/readyz', async (req, res) => {
if (!isDatabaseConfigured()) {
return res.status(503).json({
ok: false,
service: 'krow-core-api',
status: 'DATABASE_NOT_CONFIGURED',
requestId: req.requestId,
});
}
// A thrown health-check error is treated the same as an explicit failure.
const healthy = await checkDatabaseHealth().catch(() => false);
if (!healthy) {
return res.status(503).json({
ok: false,
service: 'krow-core-api',
status: 'DATABASE_UNAVAILABLE',
requestId: req.requestId,
});
}
return res.status(200).json({
ok: true,
service: 'krow-core-api',
status: 'READY',
requestId: req.requestId,
});
});

View File

@@ -0,0 +1,67 @@
import { AppError } from '../lib/errors.js';
import { query } from './db.js';
/**
 * Loads actor context for an authenticated uid with three parallel queries:
 * the user row, the earliest ACTIVE tenant membership (joined to its tenant),
 * and the earliest staff record. Each slot is null when no row matches.
 *
 * NOTE(review): assumes users.id / tm.user_id / s.user_id all equal the auth
 * uid — confirm against the schema and auth integration.
 *
 * @param {string} uid
 * @returns {Promise<{user: object|null, tenant: object|null, staff: object|null}>}
 */
export async function loadActorContext(uid) {
const [userResult, tenantResult, staffResult] = await Promise.all([
query(
`
SELECT id AS "userId", email, display_name AS "displayName", phone, status
FROM users
WHERE id = $1
`,
[uid]
),
query(
`
SELECT tm.id AS "membershipId",
tm.tenant_id AS "tenantId",
tm.base_role AS role,
t.name AS "tenantName",
t.slug AS "tenantSlug"
FROM tenant_memberships tm
JOIN tenants t ON t.id = tm.tenant_id
WHERE tm.user_id = $1
AND tm.membership_status = 'ACTIVE'
ORDER BY tm.created_at ASC
LIMIT 1
`,
[uid]
),
query(
`
SELECT s.id AS "staffId",
s.tenant_id AS "tenantId",
s.full_name AS "fullName",
s.status,
s.metadata
FROM staffs s
WHERE s.user_id = $1
ORDER BY s.created_at ASC
LIMIT 1
`,
[uid]
),
]);
// Normalize "no row" to null so callers can use simple truthiness checks.
return {
user: userResult.rows[0] || null,
tenant: tenantResult.rows[0] || null,
staff: staffResult.rows[0] || null,
};
}
/**
 * Loads the actor context and insists on both a user record and an active
 * tenant membership; throws FORBIDDEN (403) when either is missing.
 */
export async function requireTenantContext(uid) {
  const actorContext = await loadActorContext(uid);
  const hasTenantScope = Boolean(actorContext.user && actorContext.tenant);
  if (!hasTenantScope) {
    throw new AppError('FORBIDDEN', 'Tenant context is required for this route', 403, { uid });
  }
  return actorContext;
}
/**
 * Loads the actor context and insists on a user record, an active tenant
 * membership, AND a staff record; throws FORBIDDEN (403) otherwise.
 */
export async function requireStaffContext(uid) {
  const actorContext = await loadActorContext(uid);
  const { user, tenant, staff } = actorContext;
  if (!(user && tenant && staff)) {
    throw new AppError('FORBIDDEN', 'Staff context is required for this route', 403, { uid });
  }
  return actorContext;
}

View File

@@ -0,0 +1,98 @@
import pg from 'pg';
const { Pool, types } = pg;
/**
 * pg type parser: converts a textual INT8/NUMERIC column value to a JS
 * number when the result is finite; null/undefined and non-numeric text pass
 * through unchanged.
 */
function parseNumericDatabaseValue(value) {
  if (value === null || value === undefined) {
    return value;
  }
  const asNumber = Number(value);
  if (Number.isFinite(asNumber)) {
    return asNumber;
  }
  return value;
}
types.setTypeParser(types.builtins.INT8, parseNumericDatabaseValue);
types.setTypeParser(types.builtins.NUMERIC, parseNumericDatabaseValue);
let pool;
/**
 * Parses an integer from an environment-style value, returning `fallback`
 * when the value is missing or not numeric.
 *
 * Fix: the original used `value || fallback`, which treats any falsy value
 * (e.g. the number 0) as missing; nullish coalescing only falls back on
 * null/undefined, while the Number.isFinite guard still catches '' and
 * non-numeric text.
 *
 * @param {string|number|undefined} value - raw env value
 * @param {number} fallback - default when value is absent or invalid
 * @returns {number}
 */
function parseIntOrDefault(value, fallback) {
  const parsed = Number.parseInt(String(value ?? fallback), 10);
  return Number.isFinite(parsed) ? parsed : fallback;
}
/**
 * Builds the pg Pool configuration from environment variables.
 * Preference order:
 *   1. DATABASE_URL connection string, when set.
 *   2. Discrete DB_USER/DB_PASSWORD/DB_NAME plus DB_HOST (or a Cloud SQL
 *      unix-socket path derived from INSTANCE_CONNECTION_NAME).
 * Returns null when neither form is fully configured.
 */
function resolveDatabasePoolConfig() {
if (process.env.DATABASE_URL) {
return {
connectionString: process.env.DATABASE_URL,
max: parseIntOrDefault(process.env.DB_POOL_MAX, 10),
idleTimeoutMillis: parseIntOrDefault(process.env.DB_IDLE_TIMEOUT_MS, 30000),
};
}
const user = process.env.DB_USER;
const password = process.env.DB_PASSWORD;
const database = process.env.DB_NAME;
// Cloud SQL connects over a unix socket mounted under /cloudsql/<instance>.
const host = process.env.DB_HOST || (
process.env.INSTANCE_CONNECTION_NAME
? `/cloudsql/${process.env.INSTANCE_CONNECTION_NAME}`
: ''
);
// password uses == null deliberately so an empty-string password is allowed.
if (!user || password == null || !database || !host) {
return null;
}
return {
host,
port: parseIntOrDefault(process.env.DB_PORT, 5432),
user,
password,
database,
max: parseIntOrDefault(process.env.DB_POOL_MAX, 10),
idleTimeoutMillis: parseIntOrDefault(process.env.DB_IDLE_TIMEOUT_MS, 30000),
};
}
/**
 * True when enough environment configuration exists to build a pg Pool
 * (either DATABASE_URL or the full set of discrete DB_* settings).
 */
export function isDatabaseConfigured() {
  const config = resolveDatabasePoolConfig();
  return config !== null;
}
/**
 * Returns the shared pg Pool, creating it lazily on first use.
 * Throws when no database configuration can be resolved from the
 * environment (see resolveDatabasePoolConfig).
 */
function getPool() {
if (!pool) {
const resolved = resolveDatabasePoolConfig();
if (!resolved) {
throw new Error('Database connection settings are required');
}
pool = new Pool(resolved);
}
return pool;
}
/**
 * Executes a parameterized SQL statement on the shared pool.
 *
 * @param {string} text - SQL with $1.. placeholders
 * @param {Array} [params] - positional parameter values
 * @returns {Promise<object>} the pg query result
 */
export async function query(text, params = []) {
  const activePool = getPool();
  return activePool.query(text, params);
}
/**
 * Runs `work(client)` inside a single BEGIN/COMMIT transaction on a dedicated
 * pooled client, rolling back when `work` (or COMMIT) throws, and always
 * releasing the client back to the pool.
 *
 * Fix: the ROLLBACK in the catch path was unguarded — on a dead connection
 * ROLLBACK itself rejects, replacing the original error with a less useful
 * one. The rollback is now best-effort so the root cause is what propagates.
 *
 * @param {(client: object) => Promise<any>} work - receives the transaction client
 * @returns {Promise<any>} the value `work` resolves with
 * @throws the original error from `work` or COMMIT
 */
export async function withTransaction(work) {
  const client = await getPool().connect();
  try {
    await client.query('BEGIN');
    const result = await work(client);
    await client.query('COMMIT');
    return result;
  } catch (error) {
    // Best-effort rollback: swallow its failure so the original error is
    // the one thrown to the caller.
    await client.query('ROLLBACK').catch(() => {});
    throw error;
  } finally {
    client.release();
  }
}
/**
 * Connectivity probe: runs `SELECT 1` on the pool and reports whether it
 * returned 1. Rejects (rather than returning false) when the query itself
 * fails; callers such as the /readyz route catch that case.
 */
export async function checkDatabaseHealth() {
const result = await query('SELECT 1 AS ok');
return result.rows[0]?.ok === 1;
}
/**
 * Shuts down the cached pool (if one was created) and clears the reference
 * so a later query() lazily creates a fresh pool.
 */
export async function closePool() {
  if (!pool) {
    return;
  }
  await pool.end();
  pool = null;
}

View File

@@ -0,0 +1,260 @@
import { AppError } from '../lib/errors.js';
import { requireStaffContext } from './actor-context.js';
import { generateReadSignedUrl, uploadToGcs } from './storage.js';
import { query, withTransaction } from './db.js';
import { createVerificationJob } from './verification-jobs.js';
/**
 * Sanitizes a filename for use in a GCS object path: every character outside
 * [a-zA-Z0-9._-] is replaced with an underscore.
 */
function safeName(value) {
  const text = String(value);
  return text.replace(/[^a-zA-Z0-9._-]/g, '_');
}
/**
 * Resolves the private GCS bucket for actor uploads; the PRIVATE_BUCKET env
 * var overrides the dev default.
 */
function uploadBucket() {
  const configured = process.env.PRIVATE_BUCKET;
  return configured || 'krow-workforce-dev-private';
}
/**
 * Uploads a multipart file buffer to the private bucket under
 * uploads/<actorUid>/<category>/<timestamp>_<sanitized-original-name>.
 * Returns the bucket name, object path, and gs:// URI.
 */
async function uploadActorFile({ actorUid, file, category }) {
const bucket = uploadBucket();
// Timestamp prefix keeps repeated uploads of the same filename distinct.
const objectPath = `uploads/${actorUid}/${category}/${Date.now()}_${safeName(file.originalname)}`;
const fileUri = `gs://${bucket}/${objectPath}`;
await uploadToGcs({
bucket,
objectPath,
contentType: file.mimetype,
buffer: file.buffer,
});
return { bucket, objectPath, fileUri };
}
/**
 * Best-effort short-lived (15 min) signed read URL for a just-uploaded
 * object. Failures are swallowed deliberately: the upload should still
 * succeed even when a preview URL cannot be generated, in which case both
 * fields come back null.
 */
async function createPreviewUrl(actorUid, fileUri) {
try {
return await generateReadSignedUrl({
fileUri,
actorUid,
expiresInSeconds: 900,
});
} catch {
return {
signedUrl: null,
expiresAt: null,
};
}
}
/**
 * Stores a new profile photo for the acting staff member: uploads the file,
 * then records its gs:// URI under staffs.metadata.profilePhotoUri.
 * Returns the staff id, file URI, and a best-effort preview signed URL.
 *
 * @throws AppError FORBIDDEN when the actor has no staff context.
 */
export async function uploadProfilePhoto({ actorUid, file }) {
const context = await requireStaffContext(actorUid);
const uploaded = await uploadActorFile({
actorUid,
file,
category: 'profile-photo',
});
// jsonb || merge keeps other metadata keys on the staff row intact.
await withTransaction(async (client) => {
await client.query(
`
UPDATE staffs
SET metadata = COALESCE(metadata, '{}'::jsonb) || $2::jsonb,
updated_at = NOW()
WHERE id = $1
`,
[context.staff.staffId, JSON.stringify({ profilePhotoUri: uploaded.fileUri })]
);
});
const preview = await createPreviewUrl(actorUid, uploaded.fileUri);
return {
staffId: context.staff.staffId,
fileUri: uploaded.fileUri,
signedUrl: preview.signedUrl,
expiresAt: preview.expiresAt,
};
}
/**
 * Loads a tenant document by id, constrained to an allowed document_type
 * list; throws NOT_FOUND (404) when no matching row exists.
 *
 * @param {string} tenantId
 * @param {string} documentId
 * @param {string[]} allowedTypes - acceptable document_type values
 * @returns {Promise<{id, document_type, name}>}
 */
async function requireDocument(tenantId, documentId, allowedTypes) {
const result = await query(
`
SELECT id, document_type, name
FROM documents
WHERE tenant_id = $1
AND id = $2
AND document_type = ANY($3::text[])
`,
[tenantId, documentId, allowedTypes]
);
if (result.rowCount === 0) {
throw new AppError('NOT_FOUND', 'Document not found for requested upload type', 404, {
documentId,
allowedTypes,
});
}
return result.rows[0];
}
/**
 * Uploads a staff member's copy of a required document or attire photo:
 *   1. resolve staff context and validate the target document's type
 *      (ATTIRE for attire routes; DOCUMENT/GOVERNMENT_ID/TAX_FORM otherwise),
 *   2. upload the file to GCS,
 *   3. create a verification job for it,
 *   4. upsert the staff_documents row (conflict key: staff_id + document_id),
 *   5. return stored URIs, a best-effort preview URL, and the verification
 *      job summary.
 *
 * @throws AppError FORBIDDEN without staff context; NOT_FOUND for a missing
 *   or wrongly-typed document.
 */
export async function uploadStaffDocument({ actorUid, documentId, file, routeType }) {
const context = await requireStaffContext(actorUid);
const document = await requireDocument(
context.tenant.tenantId,
documentId,
routeType === 'attire' ? ['ATTIRE'] : ['DOCUMENT', 'GOVERNMENT_ID', 'TAX_FORM']
);
const uploaded = await uploadActorFile({
actorUid,
file,
category: routeType,
});
// NOTE(review): non-attire uploads always get verification type
// 'government_id', even for DOCUMENT/TAX_FORM types — confirm intended.
const verification = await createVerificationJob({
actorUid,
payload: {
type: routeType === 'attire' ? 'attire' : 'government_id',
subjectType: routeType === 'attire' ? 'attire_item' : 'staff_document',
subjectId: documentId,
fileUri: uploaded.fileUri,
metadata: {
routeType,
documentType: document.document_type,
},
rules: {
expectedDocumentName: document.name,
},
},
});
// Upsert: re-uploading replaces the file, resets status to PENDING, and
// merges metadata rather than overwriting it.
await withTransaction(async (client) => {
await client.query(
`
INSERT INTO staff_documents (
tenant_id,
staff_id,
document_id,
file_uri,
status,
verification_job_id,
metadata
)
VALUES ($1, $2, $3, $4, 'PENDING', $5, $6::jsonb)
ON CONFLICT (staff_id, document_id) DO UPDATE
SET file_uri = EXCLUDED.file_uri,
status = 'PENDING',
verification_job_id = EXCLUDED.verification_job_id,
metadata = COALESCE(staff_documents.metadata, '{}'::jsonb) || EXCLUDED.metadata,
updated_at = NOW()
`,
[
context.tenant.tenantId,
context.staff.staffId,
document.id,
uploaded.fileUri,
verification.verificationId,
JSON.stringify({
verificationStatus: verification.status,
routeType,
}),
]
);
});
const preview = await createPreviewUrl(actorUid, uploaded.fileUri);
return {
documentId: document.id,
documentType: document.document_type,
fileUri: uploaded.fileUri,
signedUrl: preview.signedUrl,
expiresAt: preview.expiresAt,
verification,
};
}
/**
 * Uploads a certificate for the acting staff member: stores the file,
 * creates a 'certification' verification job, then inserts a certificates
 * row (status PENDING, issued_at NOW) linked to that job. Returns ids, URIs,
 * a best-effort preview URL, and the verification summary.
 *
 * NOTE(review): plain INSERT — re-uploading the same certificateType creates
 * a second row unless a DB constraint prevents it; confirm intended.
 */
export async function uploadCertificate({ actorUid, file, payload }) {
const context = await requireStaffContext(actorUid);
const uploaded = await uploadActorFile({
actorUid,
file,
category: 'certificate',
});
const verification = await createVerificationJob({
actorUid,
payload: {
type: 'certification',
subjectType: 'certificate',
subjectId: payload.certificateType,
fileUri: uploaded.fileUri,
metadata: {
certificateType: payload.certificateType,
name: payload.name,
issuer: payload.issuer || null,
certificateNumber: payload.certificateNumber || null,
},
rules: {
certificateType: payload.certificateType,
name: payload.name,
},
},
});
const certificateResult = await withTransaction(async (client) => {
return client.query(
`
INSERT INTO certificates (
tenant_id,
staff_id,
certificate_type,
certificate_number,
issued_at,
expires_at,
status,
file_uri,
verification_job_id,
metadata
)
VALUES ($1, $2, $3, $4, NOW(), $5, 'PENDING', $6, $7, $8::jsonb)
RETURNING id
`,
[
context.tenant.tenantId,
context.staff.staffId,
payload.certificateType,
payload.certificateNumber || null,
payload.expiresAt || null,
uploaded.fileUri,
verification.verificationId,
JSON.stringify({
name: payload.name,
issuer: payload.issuer || null,
verificationStatus: verification.status,
}),
]
);
});
const preview = await createPreviewUrl(actorUid, uploaded.fileUri);
return {
certificateId: certificateResult.rows[0].id,
certificateType: payload.certificateType,
fileUri: uploaded.fileUri,
signedUrl: preview.signedUrl,
expiresAt: preview.expiresAt,
verification,
};
}
/**
 * Deletes every certificate row matching (tenant, staff, certificateType)
 * for the acting staff member; throws NOT_FOUND (404) when nothing matched.
 * Only the first deleted row's id is reported. The underlying GCS object is
 * not removed here.
 */
export async function deleteCertificate({ actorUid, certificateType }) {
const context = await requireStaffContext(actorUid);
const result = await query(
`
DELETE FROM certificates
WHERE tenant_id = $1
AND staff_id = $2
AND certificate_type = $3
RETURNING id
`,
[context.tenant.tenantId, context.staff.staffId, certificateType]
);
if (result.rowCount === 0) {
throw new AppError('NOT_FOUND', 'Certificate not found for current staff user', 404, {
certificateType,
});
}
return {
certificateId: result.rows[0].id,
deleted: true,
};
}

View File

@@ -1,9 +1,8 @@
import crypto from 'node:crypto';
import { AppError } from '../lib/errors.js';
import { isDatabaseConfigured, query, withTransaction } from './db.js';
import { requireTenantContext } from './actor-context.js';
import { invokeVertexMultimodalModel } from './llm.js';
const jobs = new Map();
export const VerificationStatus = Object.freeze({
PENDING: 'PENDING',
PROCESSING: 'PROCESSING',
@@ -15,82 +14,96 @@ export const VerificationStatus = Object.freeze({
ERROR: 'ERROR',
});
const MACHINE_TERMINAL_STATUSES = new Set([
VerificationStatus.AUTO_PASS,
VerificationStatus.AUTO_FAIL,
VerificationStatus.NEEDS_REVIEW,
VerificationStatus.ERROR,
]);
const HUMAN_TERMINAL_STATUSES = new Set([
VerificationStatus.APPROVED,
VerificationStatus.REJECTED,
]);
function nowIso() {
return new Date().toISOString();
const memoryVerificationJobs = new Map();
// Chooses the in-memory verification store instead of Postgres when
// explicitly requested via VERIFICATION_STORE=memory, or when no database is
// configured in test / auth-bypass environments.
function useMemoryStore() {
if (process.env.VERIFICATION_STORE === 'memory') {
return true;
}
return !isDatabaseConfigured() && (process.env.NODE_ENV === 'test' || process.env.AUTH_BYPASS === 'true');
}
/**
 * Generates an identifier for a new verification job: a UUID when the
 * runtime provides crypto.randomUUID, otherwise a timestamp plus a random
 * base-36 suffix.
 */
function nextVerificationId() {
  const canUseUuid = typeof crypto?.randomUUID === 'function';
  if (canUseUuid) {
    return crypto.randomUUID();
  }
  const randomSuffix = Math.random().toString(36).slice(2, 10);
  return `verification_${Date.now()}_${randomSuffix}`;
}
// Fetches a job from the in-memory store, throwing NOT_FOUND (404) when the
// id is unknown.
function loadMemoryJob(verificationId) {
const job = memoryVerificationJobs.get(verificationId);
if (!job) {
throw new AppError('NOT_FOUND', 'Verification not found', 404, {
verificationId,
});
}
return job;
}
/**
 * In-memory analogue of the DB-backed verification worker: transitions a
 * PENDING job to PROCESSING, runs the machine checks (attire model or
 * third-party provider), and writes the outcome — or ERROR details on
 * failure — back into the memory map. Stored records use the snake_case
 * field names of the DB rows.
 */
async function processVerificationJobInMemory(verificationId) {
const job = memoryVerificationJobs.get(verificationId);
// Only PENDING jobs are eligible; anything else is already done or in-flight.
if (!job || job.status !== VerificationStatus.PENDING) {
return;
}
job.status = VerificationStatus.PROCESSING;
job.updated_at = new Date().toISOString();
memoryVerificationJobs.set(verificationId, job);
// camelCase view of the row, matching what the check runners expect.
const workItem = {
id: job.id,
type: job.type,
fileUri: job.file_uri,
subjectType: job.subject_type,
subjectId: job.subject_id,
rules: job.metadata?.rules || {},
metadata: job.metadata || {},
};
try {
const result = workItem.type === 'attire'
? await runAttireChecks(workItem)
: await runThirdPartyChecks(workItem, workItem.type);
const updated = {
...job,
status: result.status,
confidence: result.confidence,
reasons: result.reasons || [],
extracted: result.extracted || {},
provider_name: result.provider?.name || null,
provider_reference: result.provider?.reference || null,
updated_at: new Date().toISOString(),
};
memoryVerificationJobs.set(verificationId, updated);
} catch (error) {
// Record the failure on the job itself so clients can inspect it.
const updated = {
...job,
status: VerificationStatus.ERROR,
reasons: [error?.message || 'Verification processing failed'],
provider_name: 'verification-worker',
provider_reference: `error:${error?.code || 'unknown'}`,
updated_at: new Date().toISOString(),
};
memoryVerificationJobs.set(verificationId, updated);
}
}
/**
 * Access mode for verification reads; defaults to 'authenticated'. Other
 * values cause assertAccess to restrict reads to the job owner.
 */
function accessMode() {
  const mode = process.env.VERIFICATION_ACCESS_MODE;
  return mode || 'authenticated';
}
function eventRecord({ fromStatus, toStatus, actorType, actorId, details = {} }) {
return {
id: crypto.randomUUID(),
fromStatus,
toStatus,
actorType,
actorId,
details,
createdAt: nowIso(),
};
/**
 * Timeout (ms) for third-party verification provider calls; configurable via
 * VERIFICATION_PROVIDER_TIMEOUT_MS, defaulting to 8000.
 *
 * Fix: a non-numeric env value previously produced NaN, which would make the
 * downstream abort timer meaningless; invalid values now fall back to 8000.
 */
function providerTimeoutMs() {
  const parsed = Number.parseInt(process.env.VERIFICATION_PROVIDER_TIMEOUT_MS || '8000', 10);
  return Number.isFinite(parsed) ? parsed : 8000;
}
function toPublicJob(job) {
return {
verificationId: job.id,
type: job.type,
subjectType: job.subjectType,
subjectId: job.subjectId,
fileUri: job.fileUri,
status: job.status,
confidence: job.confidence,
reasons: job.reasons,
extracted: job.extracted,
provider: job.provider,
review: job.review,
createdAt: job.createdAt,
updatedAt: job.updatedAt,
};
}
function assertAccess(job, actorUid) {
if (accessMode() === 'authenticated') {
return;
}
if (job.ownerUid !== actorUid) {
throw new AppError('FORBIDDEN', 'Not allowed to access this verification', 403);
}
}
function requireJob(id) {
const job = jobs.get(id);
if (!job) {
throw new AppError('NOT_FOUND', 'Verification not found', 404, { verificationId: id });
}
return job;
}
function normalizeMachineStatus(status) {
if (
status === VerificationStatus.AUTO_PASS
|| status === VerificationStatus.AUTO_FAIL
|| status === VerificationStatus.NEEDS_REVIEW
) {
return status;
}
return VerificationStatus.NEEDS_REVIEW;
function attireModel() {
return process.env.VERIFICATION_ATTIRE_MODEL || 'gemini-2.0-flash-lite-001';
}
function clampConfidence(value, fallback = 0.5) {
@@ -108,12 +121,89 @@ function asReasonList(reasons, fallback) {
return [fallback];
}
function providerTimeoutMs() {
return Number.parseInt(process.env.VERIFICATION_PROVIDER_TIMEOUT_MS || '8000', 10);
function normalizeMachineStatus(status) {
if (
status === VerificationStatus.AUTO_PASS
|| status === VerificationStatus.AUTO_FAIL
|| status === VerificationStatus.NEEDS_REVIEW
) {
return status;
}
return VerificationStatus.NEEDS_REVIEW;
}
function attireModel() {
return process.env.VERIFICATION_ATTIRE_MODEL || 'gemini-2.0-flash-lite-001';
function toPublicJob(row) {
if (!row) return null;
return {
verificationId: row.id,
type: row.type,
subjectType: row.subject_type,
subjectId: row.subject_id,
fileUri: row.file_uri,
status: row.status,
confidence: row.confidence == null ? null : Number(row.confidence),
reasons: Array.isArray(row.reasons) ? row.reasons : [],
extracted: row.extracted || {},
provider: row.provider_name
? {
name: row.provider_name,
reference: row.provider_reference || null,
}
: null,
review: row.review || {},
createdAt: row.created_at,
updatedAt: row.updated_at,
};
}
function assertAccess(row, actorUid) {
if (accessMode() === 'authenticated') {
return;
}
if (row.owner_user_id !== actorUid) {
throw new AppError('FORBIDDEN', 'Not allowed to access this verification', 403);
}
}
async function loadJob(verificationId) {
const result = await query(
`
SELECT *
FROM verification_jobs
WHERE id = $1
`,
[verificationId]
);
if (result.rowCount === 0) {
throw new AppError('NOT_FOUND', 'Verification not found', 404, {
verificationId,
});
}
return result.rows[0];
}
async function appendVerificationEvent(client, {
verificationJobId,
fromStatus,
toStatus,
actorType,
actorId,
details = {},
}) {
await client.query(
`
INSERT INTO verification_events (
verification_job_id,
from_status,
to_status,
actor_type,
actor_id,
details
)
VALUES ($1, $2, $3, $4, $5, $6::jsonb)
`,
[verificationJobId, fromStatus, toStatus, actorType, actorId, JSON.stringify(details)]
);
}
async function runAttireChecks(job) {
@@ -258,47 +348,26 @@ async function runThirdPartyChecks(job, type) {
signal: controller.signal,
});
const bodyText = await response.text();
let body = {};
try {
body = bodyText ? JSON.parse(bodyText) : {};
} catch {
body = {};
}
const payload = await response.json().catch(() => ({}));
if (!response.ok) {
return {
status: VerificationStatus.NEEDS_REVIEW,
confidence: 0.35,
reasons: [`${provider.name} returned ${response.status}`],
extracted: {},
provider: {
name: provider.name,
reference: body?.reference || null,
},
};
throw new Error(payload?.error || payload?.message || `${provider.name} failed`);
}
return {
status: normalizeMachineStatus(body.status),
confidence: clampConfidence(body.confidence, 0.6),
reasons: asReasonList(body.reasons, `${provider.name} completed check`),
extracted: body.extracted || {},
status: normalizeMachineStatus(payload.status),
confidence: clampConfidence(payload.confidence, 0.6),
reasons: asReasonList(payload.reasons, `${provider.name} completed`),
extracted: payload.extracted || {},
provider: {
name: provider.name,
reference: body.reference || null,
reference: payload.reference || null,
},
};
} catch (error) {
const isAbort = error?.name === 'AbortError';
return {
status: VerificationStatus.NEEDS_REVIEW,
confidence: 0.3,
reasons: [
isAbort
? `${provider.name} timeout, manual review required`
: `${provider.name} unavailable, manual review required`,
],
confidence: 0.35,
reasons: [error?.message || `${provider.name} unavailable`],
extracted: {},
provider: {
name: provider.name,
@@ -310,201 +379,462 @@ async function runThirdPartyChecks(job, type) {
}
}
async function runMachineChecks(job) {
if (job.type === 'attire') {
return runAttireChecks(job);
}
async function processVerificationJob(verificationId) {
const startedJob = await withTransaction(async (client) => {
const result = await client.query(
`
SELECT *
FROM verification_jobs
WHERE id = $1
FOR UPDATE
`,
[verificationId]
);
if (job.type === 'government_id') {
return runThirdPartyChecks(job, 'government_id');
}
if (result.rowCount === 0) {
return null;
}
return runThirdPartyChecks(job, 'certification');
}
const job = result.rows[0];
if (job.status !== VerificationStatus.PENDING) {
return null;
}
async function processVerificationJob(id) {
const job = requireJob(id);
if (job.status !== VerificationStatus.PENDING) {
await client.query(
`
UPDATE verification_jobs
SET status = $2,
updated_at = NOW()
WHERE id = $1
`,
[verificationId, VerificationStatus.PROCESSING]
);
await appendVerificationEvent(client, {
verificationJobId: verificationId,
fromStatus: job.status,
toStatus: VerificationStatus.PROCESSING,
actorType: 'worker',
actorId: 'verification-worker',
});
return {
id: verificationId,
type: job.type,
fileUri: job.file_uri,
subjectType: job.subject_type,
subjectId: job.subject_id,
rules: job.metadata?.rules || {},
metadata: job.metadata || {},
};
});
if (!startedJob) {
return;
}
const beforeProcessing = job.status;
job.status = VerificationStatus.PROCESSING;
job.updatedAt = nowIso();
job.events.push(
eventRecord({
fromStatus: beforeProcessing,
toStatus: VerificationStatus.PROCESSING,
actorType: 'system',
actorId: 'verification-worker',
})
);
try {
const outcome = await runMachineChecks(job);
if (!MACHINE_TERMINAL_STATUSES.has(outcome.status)) {
throw new Error(`Invalid machine outcome status: ${outcome.status}`);
}
const fromStatus = job.status;
job.status = outcome.status;
job.confidence = outcome.confidence;
job.reasons = outcome.reasons;
job.extracted = outcome.extracted;
job.provider = outcome.provider;
job.updatedAt = nowIso();
job.events.push(
eventRecord({
fromStatus,
toStatus: job.status,
actorType: 'system',
const result = startedJob.type === 'attire'
? await runAttireChecks(startedJob)
: await runThirdPartyChecks(startedJob, startedJob.type);
await withTransaction(async (client) => {
await client.query(
`
UPDATE verification_jobs
SET status = $2,
confidence = $3,
reasons = $4::jsonb,
extracted = $5::jsonb,
provider_name = $6,
provider_reference = $7,
updated_at = NOW()
WHERE id = $1
`,
[
verificationId,
result.status,
result.confidence,
JSON.stringify(result.reasons || []),
JSON.stringify(result.extracted || {}),
result.provider?.name || null,
result.provider?.reference || null,
]
);
await appendVerificationEvent(client, {
verificationJobId: verificationId,
fromStatus: VerificationStatus.PROCESSING,
toStatus: result.status,
actorType: 'worker',
actorId: 'verification-worker',
details: {
confidence: job.confidence,
reasons: job.reasons,
provider: job.provider,
confidence: result.confidence,
},
})
);
});
});
} catch (error) {
const fromStatus = job.status;
job.status = VerificationStatus.ERROR;
job.confidence = null;
job.reasons = [error?.message || 'Verification processing failed'];
job.extracted = {};
job.provider = {
name: 'verification-worker',
reference: null,
};
job.updatedAt = nowIso();
job.events.push(
eventRecord({
fromStatus,
await withTransaction(async (client) => {
await client.query(
`
UPDATE verification_jobs
SET status = $2,
reasons = $3::jsonb,
provider_name = 'verification-worker',
provider_reference = $4,
updated_at = NOW()
WHERE id = $1
`,
[
verificationId,
VerificationStatus.ERROR,
JSON.stringify([error?.message || 'Verification processing failed']),
`error:${error?.code || 'unknown'}`,
]
);
await appendVerificationEvent(client, {
verificationJobId: verificationId,
fromStatus: VerificationStatus.PROCESSING,
toStatus: VerificationStatus.ERROR,
actorType: 'system',
actorType: 'worker',
actorId: 'verification-worker',
details: {
error: error?.message || 'Verification processing failed',
},
})
);
}
}
// Defer processing to the next event-loop turn so job creation can return
// before the worker picks the job up. Worker failures are intentionally
// swallowed here; the job record itself captures error state.
// NOTE(review): a second, store-aware queueVerificationProcessing is also
// declared later in this file (diff-merge residue) — resolve the duplicate.
function queueVerificationProcessing(id) {
  const run = () => processVerificationJob(id).catch(() => {});
  setTimeout(run, 0);
}
/**
 * Create an in-memory verification job for the acting user and schedule
 * asynchronous processing.
 * NOTE(review): an async, DB-backed createVerificationJob is also declared
 * later in this file (diff-merge residue) — resolve the duplicate.
 *
 * @param {{ actorUid: string, payload: object }} args - payload supplies
 *   type, fileUri, and optional subjectType/subjectId/rules/metadata.
 * @returns {object} public projection of the newly created job
 */
export function createVerificationJob({ actorUid, payload }) {
  const createdAt = nowIso();
  const jobId = `ver_${crypto.randomUUID()}`;
  // Every job's history starts with a system-recorded null -> PENDING event.
  const creationEvent = eventRecord({
    fromStatus: null,
    toStatus: VerificationStatus.PENDING,
    actorType: 'system',
    actorId: actorUid,
  });
  const job = {
    id: jobId,
    type: payload.type,
    subjectType: payload.subjectType || null,
    subjectId: payload.subjectId || null,
    ownerUid: actorUid,
    fileUri: payload.fileUri,
    rules: payload.rules || {},
    metadata: payload.metadata || {},
    status: VerificationStatus.PENDING,
    confidence: null,
    reasons: [],
    extracted: {},
    provider: null,
    review: null,
    createdAt,
    updatedAt: createdAt,
    events: [creationEvent],
  };
  jobs.set(jobId, job);
  queueVerificationProcessing(jobId);
  return toPublicJob(job);
}
/**
 * Fetch a verification job by id, enforce caller access, and return its
 * public projection.
 * NOTE(review): an async variant is also declared later in this file
 * (diff-merge residue) — resolve the duplicate.
 */
export function getVerificationJob(verificationId, actorUid) {
  const record = requireJob(verificationId);
  assertAccess(record, actorUid);
  return toPublicJob(record);
}
/**
 * Apply a human review decision to an in-memory verification job.
 *
 * Fixes diff-merge corruption in the original: a stray `});` after the
 * CONFLICT throw unbalanced the braces, and the tail of the function
 * (event append and return) was truncated when the following function was
 * spliced in. Reconstructed from the surrounding conventions in this file.
 *
 * @param {string} verificationId - id of the job under review
 * @param {string} actorUid - reviewing user; must pass assertAccess
 * @param {{ decision: string, note?: string, reasonCode?: string }} review
 * @returns {object} public projection of the updated job
 * @throws {AppError} CONFLICT (409) when the job is already finalized
 */
export function reviewVerificationJob(verificationId, actorUid, review) {
  const job = requireJob(verificationId);
  assertAccess(job, actorUid);
  if (HUMAN_TERMINAL_STATUSES.has(job.status)) {
    throw new AppError('CONFLICT', 'Verification already finalized', 409, {
      verificationId,
      status: job.status,
    });
  }
  const fromStatus = job.status;
  job.status = review.decision;
  job.review = {
    decision: review.decision,
    reviewedBy: actorUid,
    reviewedAt: nowIso(),
    note: review.note || '',
    reasonCode: review.reasonCode || 'MANUAL_REVIEW',
  };
  job.updatedAt = nowIso();
  job.events.push(
    eventRecord({
      fromStatus,
      toStatus: job.status,
      actorType: 'reviewer',
      actorId: actorUid,
    })
  );
  return toPublicJob(job);
}
// Schedule background processing on the next event-loop iteration. The
// store check is deferred to run time so it reflects the environment at the
// moment the worker actually starts, not when the job was queued.
function queueVerificationProcessing(verificationId) {
  setImmediate(() => {
    if (useMemoryStore()) {
      processVerificationJobInMemory(verificationId).catch(() => {});
    } else {
      processVerificationJob(verificationId).catch(() => {});
    }
  });
}
/**
 * Create a verification job for the acting user and queue background
 * processing.
 *
 * Two storage modes:
 *  - memory store (useMemoryStore()): the row is kept in the
 *    memoryVerificationJobs map with DB-style snake_case columns.
 *  - database: the row is inserted into verification_jobs inside a
 *    transaction, together with the initial PENDING audit event.
 *
 * @param {{ actorUid: string, payload: object }} args - payload supplies
 *   type, fileUri, and optional subjectType/subjectId/rules/metadata.
 * @returns {Promise<object>} public projection of the created job
 */
export async function createVerificationJob({ actorUid, payload }) {
  if (useMemoryStore()) {
    const timestamp = new Date().toISOString();
    const created = {
      id: nextVerificationId(),
      tenant_id: null,
      staff_id: null,
      owner_user_id: actorUid,
      type: payload.type,
      subject_type: payload.subjectType || null,
      subject_id: payload.subjectId || null,
      file_uri: payload.fileUri,
      status: VerificationStatus.PENDING,
      confidence: null,
      reasons: [],
      extracted: {},
      provider_name: null,
      provider_reference: null,
      review: {},
      // rules ride along inside metadata so the memory row mirrors the DB
      // layout, which has no dedicated rules column.
      metadata: {
        ...(payload.metadata || {}),
        rules: payload.rules || {},
      },
      created_at: timestamp,
      updated_at: timestamp,
    };
    memoryVerificationJobs.set(created.id, created);
    queueVerificationProcessing(created.id);
    return toPublicJob(created);
  }
  // DB mode: resolve tenant/staff scoping for the actor before inserting.
  const context = await requireTenantContext(actorUid);
  const created = await withTransaction(async (client) => {
    const result = await client.query(
      `
      INSERT INTO verification_jobs (
        tenant_id,
        staff_id,
        document_id,
        owner_user_id,
        type,
        subject_type,
        subject_id,
        file_uri,
        status,
        reasons,
        extracted,
        review,
        metadata
      )
      VALUES (
        $1,
        $2,
        NULL,
        $3,
        $4,
        $5,
        $6,
        $7,
        'PENDING',
        '[]'::jsonb,
        '{}'::jsonb,
        '{}'::jsonb,
        $8::jsonb
      )
      RETURNING *
      `,
      [
        context.tenant.tenantId,
        context.staff?.staffId || null,
        actorUid,
        payload.type,
        payload.subjectType || null,
        payload.subjectId || null,
        payload.fileUri,
        JSON.stringify({
          ...(payload.metadata || {}),
          rules: payload.rules || {},
        }),
      ]
    );
    // Audit trail: null -> PENDING system event committed atomically with
    // the insert, so a job row never exists without its creation event.
    await appendVerificationEvent(client, {
      verificationJobId: result.rows[0].id,
      fromStatus: null,
      toStatus: VerificationStatus.PENDING,
      actorType: 'system',
      actorId: actorUid,
    });
    return result.rows[0];
  });
  queueVerificationProcessing(created.id);
  return toPublicJob(created);
}
/**
 * Load a verification job from whichever store is active, enforce caller
 * access, and return its public projection.
 */
export async function getVerificationJob(verificationId, actorUid) {
  const record = useMemoryStore()
    ? loadMemoryJob(verificationId)
    : await loadJob(verificationId);
  assertAccess(record, actorUid);
  return toPublicJob(record);
}
/**
 * Apply a human review decision to a verification job (memory or DB store).
 *
 * Fixes diff-merge corruption in the original DB branch: a duplicate
 * `reasonCode` key in the event details (the first, stale entry also read
 * `job.review.reasonCode` off a raw DB row where `review` may be `{}`),
 * old-style `})` / `);` closers left over from the in-memory
 * `events.push(eventRecord(...))` pattern, and a stale
 * `return toPublicJob(job);` preceding the real return object.
 *
 * DB path: locks the row (FOR UPDATE), rejects already-finalized jobs,
 * persists the review on the job row, inserts a verification_reviews row,
 * and appends a reviewer audit event — all in one transaction.
 *
 * @param {string} verificationId
 * @param {string} actorUid - reviewer; must pass assertAccess
 * @param {{ decision: string, note?: string, reasonCode?: string }} review
 * @returns {Promise<object>} public projection of the updated job
 * @throws {AppError} NOT_FOUND (404) when the job does not exist (DB path)
 * @throws {AppError} CONFLICT (409) when the job is already finalized
 */
export async function reviewVerificationJob(verificationId, actorUid, review) {
  if (useMemoryStore()) {
    const job = loadMemoryJob(verificationId);
    assertAccess(job, actorUid);
    if (HUMAN_TERMINAL_STATUSES.has(job.status)) {
      throw new AppError('CONFLICT', 'Verification already finalized', 409, {
        verificationId,
        status: job.status,
      });
    }
    const reviewPayload = {
      decision: review.decision,
      reviewedBy: actorUid,
      reviewedAt: new Date().toISOString(),
      note: review.note || '',
      reasonCode: review.reasonCode || 'MANUAL_REVIEW',
    };
    const updated = {
      ...job,
      status: review.decision,
      review: reviewPayload,
      updated_at: new Date().toISOString(),
    };
    memoryVerificationJobs.set(verificationId, updated);
    return toPublicJob(updated);
  }
  // Resolve tenant context for the actor; currently only used as an access
  // precondition (see `void context` below).
  const context = await requireTenantContext(actorUid);
  const updated = await withTransaction(async (client) => {
    // Lock the row so concurrent reviews serialize on the same job.
    const result = await client.query(
      `
      SELECT *
      FROM verification_jobs
      WHERE id = $1
      FOR UPDATE
      `,
      [verificationId]
    );
    if (result.rowCount === 0) {
      throw new AppError('NOT_FOUND', 'Verification not found', 404, { verificationId });
    }
    const job = result.rows[0];
    assertAccess(job, actorUid);
    if (HUMAN_TERMINAL_STATUSES.has(job.status)) {
      throw new AppError('CONFLICT', 'Verification already finalized', 409, {
        verificationId,
        status: job.status,
      });
    }
    const reviewPayload = {
      decision: review.decision,
      reviewedBy: actorUid,
      reviewedAt: new Date().toISOString(),
      note: review.note || '',
      reasonCode: review.reasonCode || 'MANUAL_REVIEW',
    };
    await client.query(
      `
      UPDATE verification_jobs
      SET status = $2,
          review = $3::jsonb,
          updated_at = NOW()
      WHERE id = $1
      `,
      [verificationId, review.decision, JSON.stringify(reviewPayload)]
    );
    await client.query(
      `
      INSERT INTO verification_reviews (
        verification_job_id,
        reviewer_user_id,
        decision,
        note,
        reason_code
      )
      VALUES ($1, $2, $3, $4, $5)
      `,
      [verificationId, actorUid, review.decision, review.note || null, review.reasonCode || 'MANUAL_REVIEW']
    );
    await appendVerificationEvent(client, {
      verificationJobId: verificationId,
      fromStatus: job.status,
      toStatus: review.decision,
      actorType: 'reviewer',
      actorId: actorUid,
      details: {
        reasonCode: review.reasonCode || 'MANUAL_REVIEW',
      },
    });
    return {
      ...job,
      status: review.decision,
      review: reviewPayload,
      updated_at: new Date().toISOString(),
    };
  });
  void context;
  return toPublicJob(updated);
}
/**
 * Reset a verification job to PENDING and re-queue processing.
 *
 * Fixes diff-merge corruption in the original span: a stale sync
 * `retryVerificationJob` header fragment preceded the async declaration, a
 * second (brace-broken) copy of the PROCESSING guard was left in the memory
 * branch, old in-memory mutation lines were interleaved before the DB
 * transaction, the appendVerificationEvent call kept old-style `})` / `);`
 * closers, and a stale `return toPublicJob(job);` shadowed the real return.
 *
 * @param {string} verificationId
 * @param {string} actorUid - must pass assertAccess
 * @returns {Promise<object>} public projection of the reset job
 * @throws {AppError} NOT_FOUND (404) when the job does not exist (DB path)
 * @throws {AppError} CONFLICT (409) when the job is currently PROCESSING
 */
export async function retryVerificationJob(verificationId, actorUid) {
  if (useMemoryStore()) {
    const job = loadMemoryJob(verificationId);
    assertAccess(job, actorUid);
    if (job.status === VerificationStatus.PROCESSING) {
      throw new AppError('CONFLICT', 'Cannot retry while verification is processing', 409, {
        verificationId,
      });
    }
    // Clear all machine-produced state so the worker starts from scratch.
    const updated = {
      ...job,
      status: VerificationStatus.PENDING,
      confidence: null,
      reasons: [],
      extracted: {},
      provider_name: null,
      provider_reference: null,
      review: {},
      updated_at: new Date().toISOString(),
    };
    memoryVerificationJobs.set(verificationId, updated);
    queueVerificationProcessing(verificationId);
    return toPublicJob(updated);
  }
  const updated = await withTransaction(async (client) => {
    // Lock the row so a concurrent worker or reviewer cannot race the reset.
    const result = await client.query(
      `
      SELECT *
      FROM verification_jobs
      WHERE id = $1
      FOR UPDATE
      `,
      [verificationId]
    );
    if (result.rowCount === 0) {
      throw new AppError('NOT_FOUND', 'Verification not found', 404, { verificationId });
    }
    const job = result.rows[0];
    assertAccess(job, actorUid);
    if (job.status === VerificationStatus.PROCESSING) {
      throw new AppError('CONFLICT', 'Cannot retry while verification is processing', 409, {
        verificationId,
      });
    }
    await client.query(
      `
      UPDATE verification_jobs
      SET status = $2,
          confidence = NULL,
          reasons = '[]'::jsonb,
          extracted = '{}'::jsonb,
          provider_name = NULL,
          provider_reference = NULL,
          review = '{}'::jsonb,
          updated_at = NOW()
      WHERE id = $1
      `,
      [verificationId, VerificationStatus.PENDING]
    );
    await appendVerificationEvent(client, {
      verificationJobId: verificationId,
      fromStatus: job.status,
      toStatus: VerificationStatus.PENDING,
      actorType: 'reviewer',
      actorId: actorUid,
      details: {
        retried: true,
      },
    });
    // Return the row as it now stands, mirroring the UPDATE above.
    return {
      ...job,
      status: VerificationStatus.PENDING,
      confidence: null,
      reasons: [],
      extracted: {},
      provider_name: null,
      provider_reference: null,
      review: {},
      updated_at: new Date().toISOString(),
    };
  });
  queueVerificationProcessing(verificationId);
  return toPublicJob(updated);
}
/**
 * Test-only helper: wipe verification state from both stores.
 *
 * Fixes diff-merge corruption in the original span: a stale sync
 * `export function __resetVerificationJobsForTests() { jobs.clear();`
 * fragment preceded the async declaration, producing a duplicate export and
 * unbalanced braces.
 *
 * Guarded so it is a no-op outside test / auth-bypass environments.
 */
export async function __resetVerificationJobsForTests() {
  if (process.env.NODE_ENV !== 'test' && process.env.AUTH_BYPASS !== 'true') {
    return;
  }
  memoryVerificationJobs.clear();
  try {
    // Delete children before parents to satisfy foreign-key constraints.
    await query('DELETE FROM verification_reviews');
    await query('DELETE FROM verification_events');
    await query('DELETE FROM verification_jobs');
  } catch {
    // Intentionally ignore when tests run without a configured database.
  }
}

View File

@@ -5,7 +5,7 @@ import { createApp } from '../src/app.js';
import { __resetLlmRateLimitForTests } from '../src/services/llm-rate-limit.js';
import { __resetVerificationJobsForTests } from '../src/services/verification-jobs.js';
beforeEach(() => {
beforeEach(async () => {
process.env.AUTH_BYPASS = 'true';
process.env.LLM_MOCK = 'true';
process.env.SIGNED_URL_MOCK = 'true';
@@ -15,8 +15,9 @@ beforeEach(() => {
process.env.VERIFICATION_REQUIRE_FILE_EXISTS = 'false';
process.env.VERIFICATION_ACCESS_MODE = 'authenticated';
process.env.VERIFICATION_ATTIRE_PROVIDER = 'mock';
process.env.VERIFICATION_STORE = 'memory';
__resetLlmRateLimitForTests();
__resetVerificationJobsForTests();
await __resetVerificationJobsForTests();
});
async function waitForMachineStatus(app, verificationId, maxAttempts = 30) {
@@ -49,6 +50,22 @@ test('GET /healthz returns healthy response', async () => {
assert.equal(typeof res.headers['x-request-id'], 'string');
});
test('GET /readyz reports database not configured when env is absent', async () => {
delete process.env.DATABASE_URL;
delete process.env.DB_HOST;
delete process.env.DB_NAME;
delete process.env.DB_USER;
delete process.env.DB_PASSWORD;
delete process.env.INSTANCE_CONNECTION_NAME;
delete process.env.VERIFICATION_STORE;
const app = createApp();
const res = await request(app).get('/readyz');
assert.equal(res.status, 503);
assert.equal(res.body.status, 'DATABASE_NOT_CONFIGURED');
});
test('POST /core/create-signed-url requires auth', async () => {
process.env.AUTH_BYPASS = 'false';
const app = createApp();