#!/usr/bin/env node
// Migration runner - executes all SQL migrations in order
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import pg from 'pg';

const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

const DATABASE_URL = process.env.DATABASE_URL;

if (!DATABASE_URL) {
  console.error('āŒ DATABASE_URL environment variable not set');
  process.exit(1);
}

async function runMigrations() {
  const pool = new pg.Pool({ connectionString: DATABASE_URL, max: 1 });

  try {
    console.log('šŸ”Œ Connecting to database...');
    await pool.query('SELECT NOW()');
    console.log('āœ… Database connected');

    const migrationsDir = path.join(__dirname, '../migrations');
    const files = fs.readdirSync(migrationsDir)
      .filter(f => f.endsWith('.sql'))
      .sort();

    console.log(`\nšŸ“¦ Found ${files.length} migration files:\n`);

    for (const file of files) {
      const filePath = path.join(migrationsDir, file);
      const sql = fs.readFileSync(filePath, 'utf-8');

      console.log(`ā³ Running: ${file}...`);

      try {
        await pool.query(sql);
        console.log(`āœ… Completed: ${file}`);
      } catch (error) {
        // Ignore "already exists" errors
        if (error.code === '42P07' || error.message.includes('already exists')) {
          console.log(`āš ļø Skipped: ${file} (already exists)`);
        } else {
          console.error(`āŒ Failed: ${file}`);
          console.error(error.message);
          throw error;
        }
      }
    }

    console.log('\nāœ… All migrations completed successfully!');
  } catch (error) {
    console.error('\nāŒ Migration failed:');
    console.error(error);
    process.exit(1);
  } finally {
    await pool.end();
  }
}

runMigrations();