diff --git a/Dockerfile b/Dockerfile
index 72b3eb1..1434a1a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -70,5 +70,8 @@ USER astro
 
 # Expose Port
 EXPOSE 4321
-# Launch with optimized settings
-CMD ["node", "./dist/server/entry.mjs"]
+# Create startup script that runs migrations then starts server.
+# printf is used instead of echo: POSIX leaves echo's backslash-escape
+# handling implementation-defined, so '\n' in echo breaks under bash /bin/sh.
+RUN printf '#!/bin/sh\necho "šŸ”§ Running database migrations..."\nnode scripts/run-migrations.js || echo "āš ļø Migrations failed (continuing anyway)"\necho "šŸš€ Starting God Mode..."\nexec node ./dist/server/entry.mjs\n' > /app/start.sh && chmod +x /app/start.sh
+
+# Launch with migrations + server
+CMD ["/app/start.sh"]
diff --git a/package.json b/package.json
index 124ea5c..2f5c025 100644
--- a/package.json
+++ b/package.json
@@ -10,6 +10,7 @@
     "astro": "astro",
     "test": "vitest",
     "worker": "node scripts/start-worker.js",
+    "migrate": "node scripts/run-migrations.js",
     "test:campaign": "node scripts/test-campaign.js"
   },
   "dependencies": {
diff --git a/scripts/run-migrations.js b/scripts/run-migrations.js
new file mode 100644
index 0000000..9bfa811
--- /dev/null
+++ b/scripts/run-migrations.js
@@ -0,0 +1,73 @@
+#!/usr/bin/env node
+// Migration runner - executes all SQL migrations in order
+import fs from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+import pg from 'pg';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const DATABASE_URL = process.env.DATABASE_URL;
+
+if (!DATABASE_URL) {
+  console.error('āŒ DATABASE_URL environment variable not set');
+  process.exit(1);
+}
+
+async function runMigrations() {
+  const pool = new pg.Pool({
+    connectionString: DATABASE_URL,
+    max: 1
+  });
+
+  try {
+    console.log('šŸ”Œ Connecting to database...');
+    await pool.query('SELECT NOW()');
+    console.log('āœ… Database connected');
+
+    const migrationsDir = path.join(__dirname, '../migrations');
+    const files = fs.readdirSync(migrationsDir)
+      .filter(f => f.endsWith('.sql'))
+      .sort();
+
+    console.log(`\nšŸ“¦ Found ${files.length} migration files:\n`);
+
+    for (const file of files) {
+      const filePath = path.join(migrationsDir, file);
+      const sql = fs.readFileSync(filePath, 'utf-8');
+
+      console.log(`ā³ Running: ${file}...`);
+
+      try {
+        await pool.query(sql);
+        console.log(`āœ… Completed: ${file}`);
+      } catch (error) {
+        // Ignore "already exists" errors
+        if (error.code === '42P07' || error.message.includes('already exists')) {
+          console.log(`āš ļø Skipped: ${file} (already exists)`);
+        } else {
+          console.error(`āŒ Failed: ${file}`);
+          console.error(error.message);
+          throw error;
+        }
+      }
+    }
+
+    console.log('\nāœ… All migrations completed successfully!');
+
+  } catch (error) {
+    console.error('\nāŒ Migration failed:');
+    console.error(error);
+    // Set exitCode instead of calling process.exit(): an immediate exit
+    // would skip the finally block, leaving the pool unclosed.
+    process.exitCode = 1;
+  } finally {
+    await pool.end();
+  }
+}
+
+// Guard the top-level promise so an unexpected rejection (e.g. pool.end
+// failing) cannot become an unhandled rejection.
+runMigrations().catch((err) => {
+  console.error(err);
+  process.exit(1);
+});