#!/usr/bin/env node
// Migration runner - executes all SQL migrations in order

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import pg from 'pg';

// ES modules do not provide __filename/__dirname; derive them from the
// module URL so the migrations directory can be resolved relative to
// this script's location rather than the process working directory.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// The Postgres connection string is mandatory; fail fast with a clear
// message instead of letting pg surface a confusing connection error later.
const { DATABASE_URL } = process.env;

if (!DATABASE_URL) {
  console.error('❌ DATABASE_URL environment variable not set');
  process.exit(1);
}
/**
 * Connects to Postgres, then applies every .sql file found in ../migrations
 * in lexicographic filename order. Migrations whose objects already exist
 * are skipped; any other failure aborts the process with exit code 1.
 * The pool is always closed before returning.
 */
async function runMigrations() {
  // A single connection is enough — migrations are applied one at a time.
  const db = new pg.Pool({
    connectionString: DATABASE_URL,
    max: 1
  });

  // Postgres raises 42P07 (duplicate_table) when a relation already exists;
  // the message check covers other "already exists" error variants.
  const isAlreadyApplied = (err) =>
    err.code === '42P07' || err.message.includes('already exists');

  try {
    console.log('🔌 Connecting to database...');
    await db.query('SELECT NOW()'); // cheap round-trip to verify connectivity
    console.log('✅ Database connected');

    const migrationsDir = path.join(__dirname, '../migrations');
    const migrationFiles = fs
      .readdirSync(migrationsDir)
      .filter((name) => name.endsWith('.sql'))
      .sort();

    console.log(`\n📦 Found ${migrationFiles.length} migration files:\n`);

    for (const file of migrationFiles) {
      const sql = fs.readFileSync(path.join(migrationsDir, file), 'utf-8');

      console.log(`⏳ Running: ${file}...`);

      try {
        await db.query(sql);
        console.log(`✅ Completed: ${file}`);
      } catch (error) {
        if (!isAlreadyApplied(error)) {
          console.error(`❌ Failed: ${file}`);
          console.error(error.message);
          throw error; // bubbles to the outer catch, which exits non-zero
        }
        console.log(`⚠️ Skipped: ${file} (already exists)`);
      }
    }

    console.log('\n✅ All migrations completed successfully!');
  } catch (error) {
    console.error('\n❌ Migration failed:');
    console.error(error);
    process.exit(1);
  } finally {
    await db.end();
  }
}
// Entry point. runMigrations() reports and exits on failures inside its own
// try block, but a throw before that try (e.g. in pg.Pool construction) would
// otherwise surface as an unhandled promise rejection — catch it here.
runMigrations().catch((error) => {
  console.error(error);
  process.exit(1);
});