diff --git a/backend/src/db/migrate.ts b/backend/src/db/migrate.ts index 9643902..2837397 100644 --- a/backend/src/db/migrate.ts +++ b/backend/src/db/migrate.ts @@ -1,10 +1,13 @@ import "dotenv/config"; +import { config, usePostgres } from "../config.js"; import { getDb } from "./index.js"; async function main() { + const dbType = usePostgres() && config.databaseUrl ? "Postgres" : "SQLite"; + console.log(`[migrate] Using ${dbType} database`); const db = getDb(); await db.runMigrations(); - console.log("Migrations complete."); + console.log("[migrate] Migrations complete."); process.exit(0); } diff --git a/backend/src/db/pg.ts b/backend/src/db/pg.ts index 35f035e..1508d82 100644 --- a/backend/src/db/pg.ts +++ b/backend/src/db/pg.ts @@ -3,6 +3,7 @@ import { readFileSync } from "fs"; import { dirname, join } from "path"; import { fileURLToPath } from "url"; import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js"; +import { parseSchema } from "./schemaSync.js"; const __dirname = dirname(fileURLToPath(import.meta.url)); @@ -53,71 +54,39 @@ export function createPgDb(connectionString: string): Db { return { async runMigrations() { - const schema = readFileSync(join(__dirname, "schema.pg.sql"), "utf-8"); + const schemaPath = join(__dirname, "schema.pg.sql"); + const schema = readFileSync(schemaPath, "utf-8"); await pool.query(schema); - try { - await pool.query("ALTER TABLE users ADD COLUMN lightning_address TEXT"); - } catch (_) {} - try { - await pool.query("ALTER TABLE users ADD COLUMN name TEXT"); - } catch (_) {} - try { - await pool.query( - `CREATE TABLE IF NOT EXISTS deposits ( - id SERIAL PRIMARY KEY, - created_at BIGINT NOT NULL, - amount_sats INTEGER NOT NULL, - source TEXT NOT NULL CHECK(source IN ('lightning','cashu')), - lnbits_payment_hash TEXT - )` - ); - await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)"); - } catch (_) {} - try { - await pool.query("ALTER TABLE 
deposits ADD COLUMN lnbits_payment_hash TEXT"); - } catch (_) {} - try { - await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)"); - } catch (_) {} + + const expected = parseSchema(schema); + for (const [table, columns] of expected) { + if (!/^\w+$/.test(table)) continue; + try { + const res = await pool.query( + "SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = $1", + [table] + ); + const existing = res.rows.map((r) => r.column_name); + for (const col of columns) { + if (!existing.includes(col.name)) { + try { + await pool.query(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`); + console.log(`[migrate] Postgres: added column ${table}.${col.name}`); + } catch (e) { + if (!String(e).includes("already exists")) throw e; + } + } + } + } catch (e) { + console.warn(`[migrate] Postgres: could not sync table ${table}:`, e); + } + } + try { await pool.query( "UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000" ); } catch (_) {} - try { - await pool.query( - `CREATE TABLE IF NOT EXISTS sponsors ( - id SERIAL PRIMARY KEY, - npub TEXT NOT NULL, - title TEXT NOT NULL, - description TEXT NOT NULL, - image_url TEXT, - link_url TEXT NOT NULL, - category TEXT, - lightning_address TEXT, - invoice_id TEXT, - payment_hash TEXT, - price_sats INTEGER NOT NULL, - duration_days INTEGER NOT NULL, - status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')), - created_at BIGINT NOT NULL, - activated_at BIGINT, - expires_at BIGINT, - views INTEGER DEFAULT 0, - clicks INTEGER DEFAULT 0 - )` - ); - await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)"); - await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)"); - await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON 
sponsors(expires_at)"); - await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)"); - } catch (_) {} - try { - await pool.query("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER"); - } catch (_) {} - try { - await pool.query("ALTER TABLE sponsors ADD COLUMN payment_request TEXT"); - } catch (_) {} }, async getUser(pubkey: string): Promise { diff --git a/backend/src/db/schema.pg.sql b/backend/src/db/schema.pg.sql index d4f2d31..8d7b625 100644 --- a/backend/src/db/schema.pg.sql +++ b/backend/src/db/schema.pg.sql @@ -80,6 +80,7 @@ CREATE TABLE IF NOT EXISTS sponsors ( lightning_address TEXT, invoice_id TEXT, payment_hash TEXT, + payment_request TEXT, price_sats INTEGER NOT NULL, duration_days INTEGER NOT NULL, status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')), diff --git a/backend/src/db/schema.sql b/backend/src/db/schema.sql index 6ec02bd..10875f6 100644 --- a/backend/src/db/schema.sql +++ b/backend/src/db/schema.sql @@ -81,6 +81,7 @@ CREATE TABLE IF NOT EXISTS sponsors ( lightning_address TEXT, invoice_id TEXT, payment_hash TEXT, + payment_request TEXT, price_sats INTEGER NOT NULL, duration_days INTEGER NOT NULL, status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')), diff --git a/backend/src/db/schemaSync.ts b/backend/src/db/schemaSync.ts new file mode 100644 index 0000000..5506123 --- /dev/null +++ b/backend/src/db/schemaSync.ts @@ -0,0 +1,37 @@ +/** + * Parse schema SQL to extract table names and their columns. + * Used by migration to add missing columns when schema is updated. 
+ */ +export interface SchemaColumn { + name: string; + type: string; +} + +export function parseSchema(schema: string): Map<string, SchemaColumn[]> { + const result = new Map<string, SchemaColumn[]>(); + const tableRegex = /CREATE TABLE IF NOT EXISTS\s+(\w+)\s*\(([\s\S]*?)\)\s*;/gi; + const typeWords = ["INTEGER", "TEXT", "BIGINT", "SERIAL", "REAL", "BOOLEAN", "NUMERIC", "BIGSERIAL"]; + + let m; + while ((m = tableRegex.exec(schema)) !== null) { + const tableName = m[1]; + const body = m[2]; + const columns: SchemaColumn[] = []; + + const lines = body.split("\n"); + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed || trimmed.startsWith(")")) continue; + const colMatch = trimmed.match(/^(\w+)\s+(\w+)/); + if (colMatch) { + const name = colMatch[1]; + const type = colMatch[2].toUpperCase(); + if (typeWords.includes(type)) { + columns.push({ name, type }); + } + } + } + result.set(tableName, columns); + } + return result; +} diff --git a/backend/src/db/sqlite.ts b/backend/src/db/sqlite.ts index 6ab2e68..568f89e 100644 --- a/backend/src/db/sqlite.ts +++ b/backend/src/db/sqlite.ts @@ -3,73 +3,49 @@ import { readFileSync } from "fs"; import { dirname, join } from "path"; import { fileURLToPath } from "url"; import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js"; +import { parseSchema } from "./schemaSync.js"; const __dirname = dirname(fileURLToPath(import.meta.url)); +function getTableColumns(db: Database.Database, table: string): string[] { + if (!/^\w+$/.test(table)) return []; + const rows = db.prepare(`PRAGMA table_info(${table})`).all() as { name: string }[]; + return rows.map((r) => r.name); +} + export function createSqliteDb(path: string): Db { const db = new Database(path); return { async runMigrations() { - const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8"); + const schemaPath = join(__dirname, "schema.sql"); + const schema = readFileSync(schemaPath, "utf-8"); db.exec(schema); - try { - db.exec("ALTER TABLE users 
ADD COLUMN lightning_address TEXT"); - } catch (_) {} - try { - db.exec("ALTER TABLE users ADD COLUMN name TEXT"); - } catch (_) {} - try { - db.exec( - "CREATE TABLE IF NOT EXISTS deposits (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at INTEGER NOT NULL, amount_sats INTEGER NOT NULL, source TEXT NOT NULL CHECK(source IN ('lightning','cashu')), lnbits_payment_hash TEXT)" - ); - db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)"); - } catch (_) {} - try { - db.exec("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT"); - } catch (_) {} - try { - db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)"); - } catch (_) {} + + const expected = parseSchema(schema); + for (const [table, columns] of expected) { + try { + const existing = getTableColumns(db, table); + for (const col of columns) { + if (!existing.includes(col.name)) { + try { + db.exec(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`); + console.log(`[migrate] SQLite: added column ${table}.${col.name}`); + } catch (e) { + if (!String(e).includes("duplicate column name")) throw e; + } + } + } + } catch (e) { + console.warn(`[migrate] SQLite: could not sync table ${table}:`, e); + } + } + try { db.exec( "UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000" ); } catch (_) {} - try { - db.exec( - `CREATE TABLE IF NOT EXISTS sponsors ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - npub TEXT NOT NULL, - title TEXT NOT NULL, - description TEXT NOT NULL, - image_url TEXT, - link_url TEXT NOT NULL, - category TEXT, - lightning_address TEXT, - invoice_id TEXT, - payment_hash TEXT, - price_sats INTEGER NOT NULL, - duration_days INTEGER NOT NULL, - status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')), - created_at INTEGER NOT NULL, - activated_at INTEGER, - expires_at INTEGER, - views 
INTEGER DEFAULT 0, - clicks INTEGER DEFAULT 0 - )` - ); - db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)"); - db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)"); - db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON sponsors(expires_at)"); - db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)"); - } catch (_) {} - try { - db.exec("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER"); - } catch (_) {} - try { - db.exec("ALTER TABLE sponsors ADD COLUMN payment_request TEXT"); - } catch (_) {} }, async getUser(pubkey: string): Promise { diff --git a/package.json b/package.json index 40dcbbb..a50efb8 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "build:frontend": "cd frontend && npm run build", "start": "cd backend && npm start", "dev:backend": "cd backend && npm run dev", - "dev:frontend": "cd frontend && npm run dev" + "dev:frontend": "cd frontend && npm run dev", + "db:migrate": "cd backend && npm run migrate" } }