Migration Runner
```typescript
// migration-runner.ts
import { Pool } from 'pg';
import * as fs from 'fs';
import * as path from 'path';
// One migration parsed from an `<id>.sql` file on disk.
interface Migration {
// Filename without the `.sql` extension; primary key in the `migrations` table.
id: string;
// Display name used in log output (currently identical to `id` — see loadMigration).
name: string;
// SQL to apply: text before the `-- DOWN` marker, with the `-- UP` marker stripped.
up: string;
// SQL to revert: text after the `-- DOWN` marker; empty string if the file has none.
down: string;
}
class MigrationRunner {
constructor(private pool: Pool, private migrationsDir: string) {}
async run(): Promise {
await this.ensureMigrationsTable();
const applied = await this.getAppliedMigrations();
const pending = await this.getPendingMigrations(applied);
for (const migration of pending) {
console.log(Running migration: ${migration.name});
const client = await this.pool.connect();
try {
await client.query('BEGIN');
// Run migration
await client.query(migration.up);
// Record migration
await client.query(
'INSERT INTO migrations (id, name, applied_at) VALUES ($1, $2, NOW())',
[migration.id, migration.name]
);
await client.query('COMMIT');
console.log(β ${migration.name});
} catch (error) {
await client.query('ROLLBACK');
console.error(β ${migration.name}:, error);
throw error;
} finally {
client.release();
}
}
}
async rollback(steps = 1): Promise {
const applied = await this.getAppliedMigrations();
const toRollback = applied.slice(-steps).reverse();
for (const migrationId of toRollback) {
const migration = await this.loadMigration(migrationId);
const client = await this.pool.connect();
try {
await client.query('BEGIN');
await client.query(migration.down);
await client.query('DELETE FROM migrations WHERE id = $1', [migration.id]);
await client.query('COMMIT');
console.log(Rolled back: ${migration.name});
} catch (error) {
await client.query('ROLLBACK');
throw error;
} finally {
client.release();
}
}
}
private async ensureMigrationsTable(): Promise {
await this.pool.query(`
CREATE TABLE IF NOT EXISTS migrations (
id VARCHAR(255) PRIMARY KEY,
name VARCHAR(255) NOT NULL,
applied_at TIMESTAMP DEFAULT NOW()
)
`);
}
private async getAppliedMigrations(): Promise {
const result = await this.pool.query(
'SELECT id FROM migrations ORDER BY applied_at'
);
return result.rows.map(r => r.id);
}
private async getPendingMigrations(applied: string[]): Promise {
const files = fs.readdirSync(this.migrationsDir)
.filter(f => f.endsWith('.sql'))
.sort();
const pending: Migration[] = [];
for (const file of files) {
const id = file.replace('.sql', '');
if (!applied.includes(id)) {
pending.push(await this.loadMigration(id));
}
}
return pending;
}
private async loadMigration(id: string): Promise {
const filePath = path.join(this.migrationsDir, ${id}.sql);
const content = fs.readFileSync(filePath, 'utf-8');
const [up, down] = content.split('-- DOWN');
return {
id,
name: id,
up: up.replace('-- UP', '').trim(),
down: down?.trim() || '',
};
}
}
export { MigrationRunner };
```
Migration File Format
```sql
-- migrations/20240115_001_add_phone_to_users.sql
-- UP
ALTER TABLE users ADD COLUMN phone VARCHAR(20);
CREATE INDEX idx_users_phone ON users(phone);
-- DOWN
DROP INDEX idx_users_phone;
ALTER TABLE users DROP COLUMN phone;
```
Zero-Downtime Column Rename
```typescript
// Step 1: Add new column (migration)
// 20240115_001_add_display_name.sql
`
-- UP
ALTER TABLE users ADD COLUMN display_name VARCHAR(255);
-- DOWN
ALTER TABLE users DROP COLUMN display_name;
`
// Step 2: Write to both columns (code change)
// Dual-write phase of the rename: keep the legacy `name` column and the
// new `display_name` column in sync so both readers see the same value.
async function updateUser(id: string, name: string) {
  const sql = 'UPDATE users SET name = $1, display_name = $1 WHERE id = $2';
  await db.query(sql, [name, id]);
}
// Step 3: Backfill existing data (migration)
// 20240116_001_backfill_display_name.sql
`
-- UP
UPDATE users SET display_name = name WHERE display_name IS NULL;
-- DOWN
-- No rollback needed for data backfill
`
// Step 4: Read from new column (code change)
// Read phase of the rename: source the value from `display_name`, aliased
// back to `name` so existing callers are unaffected by the column rename.
async function getUser(id: string) {
  const sql = 'SELECT id, display_name as name FROM users WHERE id = $1';
  const result = await db.query(sql, [id]);
  return result.rows[0];
}
// Step 5: Remove old column (migration)
// 20240117_001_remove_name_column.sql
`
-- UP
ALTER TABLE users DROP COLUMN name;
-- DOWN
ALTER TABLE users ADD COLUMN name VARCHAR(255);
UPDATE users SET name = display_name;
`
```
Safe Index Creation
```sql
-- ✗ UNSAFE: Locks table during creation
CREATE INDEX idx_orders_user ON orders(user_id);
-- ✓ SAFE: Non-blocking index creation (note: CONCURRENTLY cannot run inside a transaction block)
CREATE INDEX CONCURRENTLY idx_orders_user ON orders(user_id);
```
Data Migration with Batching
```typescript
// data-migration.ts
async function migrateUserEmails(): Promise {
const BATCH_SIZE = 1000;
let processed = 0;
let lastId = '';
while (true) {
const users = await db.query(`
SELECT id, email
FROM users
WHERE id > $1
ORDER BY id
LIMIT $2
`, [lastId, BATCH_SIZE]);
if (users.rows.length === 0) break;
for (const user of users.rows) {
await db.query(
'UPDATE users SET email_normalized = LOWER($1) WHERE id = $2',
[user.email, user.id]
);
}
lastId = users.rows[users.rows.length - 1].id;
processed += users.rows.length;
console.log(Processed ${processed} users);
// Avoid overwhelming the database
await new Promise(resolve => setTimeout(resolve, 100));
}
}
```