// Migration runner: applies pending .sql files from ../sql/v2 to the
// database at DATABASE_URL, recording applied versions in schema_migrations.
import { readdirSync, readFileSync } from 'node:fs';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';

import { Pool } from 'pg';
// Fail fast: without a connection string there is nothing useful to do.
const databaseUrl = process.env.DATABASE_URL;

if (databaseUrl === undefined || databaseUrl === '') {
  // eslint-disable-next-line no-console
  console.error('DATABASE_URL is required');
  process.exit(1);
}
// Locate migrations relative to this script (not the CWD), so the runner
// works no matter where it is invoked from. dirname(fileURLToPath(...)) is
// the idiomatic replacement for the redundant resolve(new URL('.')) dance.
const scriptDir = dirname(fileURLToPath(import.meta.url));

const migrationsDir = resolve(scriptDir, '../sql/v2');

// Apply in lexicographic filename order; migration files are expected to
// carry a sortable version prefix (e.g. 001_init.sql).
const migrationFiles = readdirSync(migrationsDir)
  .filter((file) => file.endsWith('.sql'))
  .sort();
// Pool size is tunable via DB_POOL_MAX. Guard against a non-numeric value:
// Number.parseInt would otherwise produce NaN and hand pg a broken `max`.
const parsedPoolMax = Number.parseInt(process.env.DB_POOL_MAX || '5', 10);

const pool = new Pool({
  connectionString: databaseUrl,
  max: Number.isFinite(parsedPoolMax) ? parsedPoolMax : 5,
});
/**
 * Create the schema_migrations bookkeeping table if it does not exist.
 *
 * @param {import('pg').PoolClient} client - A checked-out pg client.
 * @returns {Promise<void>}
 */
async function ensureMigrationTable(client) {
  const createTableSql = `
    CREATE TABLE IF NOT EXISTS schema_migrations (
      version TEXT PRIMARY KEY,
      applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    );
  `;
  await client.query(createTableSql);
}
// Run all pending migrations inside a single transaction: either every
// pending file applies, or none do. The pool is always drained at the end.
try {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    await ensureMigrationTable(client);

    for (const file of migrationFiles) {
      // Skip files whose version is already recorded.
      const { rowCount } = await client.query(
        'SELECT 1 FROM schema_migrations WHERE version = $1',
        [file]
      );
      if (rowCount > 0) {
        continue;
      }

      // Execute the migration, then record it so reruns are idempotent.
      await client.query(readFileSync(resolve(migrationsDir, file), 'utf8'));
      await client.query(
        'INSERT INTO schema_migrations (version) VALUES ($1)',
        [file]
      );
      // eslint-disable-next-line no-console
      console.log(`Applied migration ${file}`);
    }

    await client.query('COMMIT');
  } catch (error) {
    // Undo any partially-applied work, then surface the failure.
    await client.query('ROLLBACK');
    throw error;
  } finally {
    client.release();
  }
} finally {
  await pool.end();
}