Add schema-driven db:migrate for SQLite and Postgres

- Parse schema files to detect expected columns per table
- Add missing columns automatically when schema is updated
- Add payment_request to sponsors schema
- Add db:migrate script to root package.json

Made-with: Cursor
This commit is contained in:
Michilis
2026-03-16 00:13:06 +00:00
parent f61f41c9b0
commit a1509f21fc
7 changed files with 102 additions and 114 deletions

View File

@@ -1,10 +1,13 @@
import "dotenv/config";
import { config, usePostgres } from "../config.js";
import { getDb } from "./index.js";
// Entry point for the db:migrate script: pick the configured database
// backend, run its migrations, then exit successfully.
async function main() {
// Postgres is used only when the flag is set AND a connection URL exists;
// otherwise fall back to SQLite.
const dbType = usePostgres() && config.databaseUrl ? "Postgres" : "SQLite";
console.log(`[migrate] Using ${dbType} database`);
const db = getDb();
await db.runMigrations();
// NOTE(review): the two log lines below look like the old/new sides of a
// diff render (the untagged line was replaced by the "[migrate]"-tagged
// one) — confirm only one exists in the actual source file.
console.log("Migrations complete.");
console.log("[migrate] Migrations complete.");
// Exit explicitly so open DB pools/handles do not keep the process alive.
process.exit(0);
}

View File

@@ -3,6 +3,7 @@ import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
import { parseSchema } from "./schemaSync.js";
const __dirname = dirname(fileURLToPath(import.meta.url));
@@ -53,71 +54,39 @@ export function createPgDb(connectionString: string): Db {
return {
// Create/upgrade the Postgres schema: apply schema.pg.sql (idempotent
// CREATE ... IF NOT EXISTS statements), replay legacy hand-written ALTERs,
// then add any column the schema file declares that a live table lacks.
// NOTE(review): this block appears to be a rendered diff — several
// statements exist in both pre- and post-change form (e.g. the two
// `const schema` declarations below); confirm against the real file.
async runMigrations() {
const schema = readFileSync(join(__dirname, "schema.pg.sql"), "utf-8");
const schemaPath = join(__dirname, "schema.pg.sql");
const schema = readFileSync(schemaPath, "utf-8");
await pool.query(schema);
// Legacy per-column/per-table migrations; errors are deliberately
// swallowed because the column/table may already exist.
try {
await pool.query("ALTER TABLE users ADD COLUMN lightning_address TEXT");
} catch (_) {}
try {
await pool.query("ALTER TABLE users ADD COLUMN name TEXT");
} catch (_) {}
try {
await pool.query(
`CREATE TABLE IF NOT EXISTS deposits (
id SERIAL PRIMARY KEY,
created_at BIGINT NOT NULL,
amount_sats INTEGER NOT NULL,
source TEXT NOT NULL CHECK(source IN ('lightning','cashu')),
lnbits_payment_hash TEXT
)`
);
await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
} catch (_) {}
try {
await pool.query("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
} catch (_) {}
try {
await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
} catch (_) {}
// Schema-driven sync: for every table parsed out of schema.pg.sql, add
// any column present in the file but missing from the live database.
const expected = parseSchema(schema);
for (const [table, columns] of expected) {
// Guard: the table name is interpolated into SQL below, so only bare
// identifiers are allowed.
if (!/^\w+$/.test(table)) continue;
try {
const res = await pool.query(
"SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = $1",
[table]
);
const existing = res.rows.map((r) => r.column_name);
for (const col of columns) {
if (!existing.includes(col.name)) {
try {
await pool.query(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`);
console.log(`[migrate] Postgres: added column ${table}.${col.name}`);
} catch (e) {
// Tolerate a concurrent migration having added the column already.
if (!String(e).includes("already exists")) throw e;
}
}
}
} catch (e) {
// Non-fatal: log and continue syncing the remaining tables.
console.warn(`[migrate] Postgres: could not sync table ${table}:`, e);
}
}
// One-off data fix: divides lightning deposit amounts by 1000
// (presumably msats -> sats). The `>= 1000` guard looks intended to skip
// already-converted rows — TODO confirm this cannot re-divide a row on a
// second run.
try {
await pool.query(
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
);
} catch (_) {}
try {
await pool.query(
`CREATE TABLE IF NOT EXISTS sponsors (
id SERIAL PRIMARY KEY,
npub TEXT NOT NULL,
title TEXT NOT NULL,
description TEXT NOT NULL,
image_url TEXT,
link_url TEXT NOT NULL,
category TEXT,
lightning_address TEXT,
invoice_id TEXT,
payment_hash TEXT,
price_sats INTEGER NOT NULL,
duration_days INTEGER NOT NULL,
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
created_at BIGINT NOT NULL,
activated_at BIGINT,
expires_at BIGINT,
views INTEGER DEFAULT 0,
clicks INTEGER DEFAULT 0
)`
);
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)");
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)");
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON sponsors(expires_at)");
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)");
} catch (_) {}
// Columns added after the sponsors table first shipped; ignored if present.
try {
await pool.query("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER");
} catch (_) {}
try {
await pool.query("ALTER TABLE sponsors ADD COLUMN payment_request TEXT");
} catch (_) {}
},
async getUser(pubkey: string): Promise<UserRow | null> {

View File

@@ -80,6 +80,7 @@ CREATE TABLE IF NOT EXISTS sponsors (
lightning_address TEXT,
invoice_id TEXT,
payment_hash TEXT,
payment_request TEXT,
price_sats INTEGER NOT NULL,
duration_days INTEGER NOT NULL,
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),

View File

@@ -81,6 +81,7 @@ CREATE TABLE IF NOT EXISTS sponsors (
lightning_address TEXT,
invoice_id TEXT,
payment_hash TEXT,
payment_request TEXT,
price_sats INTEGER NOT NULL,
duration_days INTEGER NOT NULL,
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),

View File

@@ -0,0 +1,37 @@
/**
 * Parse schema SQL to extract table names and their columns.
 * Used by migration to add missing columns when schema is updated.
 *
 * Only columns whose type is one of the recognised type words below are
 * returned; constraint clauses (PRIMARY KEY, FOREIGN KEY, CHECK, ...) and
 * unknown types are skipped, matching what ALTER TABLE ... ADD COLUMN
 * can safely add.
 */
export interface SchemaColumn {
  name: string;
  type: string;
}

/** Column types the migration knows how to ADD COLUMN. */
const TYPE_WORDS = new Set([
  "INTEGER", "TEXT", "BIGINT", "SERIAL", "REAL", "BOOLEAN", "NUMERIC", "BIGSERIAL",
]);

/**
 * @param schema - full contents of a schema .sql file
 * @returns map of table name -> columns declared for that table
 */
export function parseSchema(schema: string): Map<string, SchemaColumn[]> {
  const result = new Map<string, SchemaColumn[]>();
  const header = /CREATE TABLE IF NOT EXISTS\s+(\w+)\s*\(/gi;
  let m: RegExpExecArray | null;
  while ((m = header.exec(schema)) !== null) {
    const tableName = m[1];
    // Scan for the matching close paren instead of using a non-greedy
    // regex: a regex stops at the FIRST ')', which truncates any table
    // body containing CHECK(...) and silently drops every column
    // declared after the constraint.
    const body = extractParenBody(schema, header.lastIndex);
    if (body === null) break; // unbalanced parens — stop parsing
    header.lastIndex += body.length + 1; // resume after the closing ')'
    const columns: SchemaColumn[] = [];
    // Split on top-level commas (not newlines) so several columns on one
    // line are all seen, and commas inside CHECK(...) are not split on.
    for (const def of splitTopLevel(body)) {
      const colMatch = def.trim().match(/^(\w+)\s+(\w+)/);
      if (!colMatch) continue;
      const type = colMatch[2].toUpperCase();
      if (TYPE_WORDS.has(type)) {
        columns.push({ name: colMatch[1], type });
      }
    }
    result.set(tableName, columns);
  }
  return result;
}

/**
 * Return the text between `start` (position just after an opening paren)
 * and its balanced closing paren, honouring single-quoted SQL strings,
 * or null if the parens never balance.
 */
function extractParenBody(sql: string, start: number): string | null {
  let depth = 1;
  let inString = false;
  for (let i = start; i < sql.length; i++) {
    const ch = sql[i];
    if (inString) {
      if (ch === "'") inString = false;
    } else if (ch === "'") {
      inString = true;
    } else if (ch === "(") {
      depth++;
    } else if (ch === ")") {
      depth--;
      if (depth === 0) return sql.slice(start, i);
    }
  }
  return null;
}

/** Split a CREATE TABLE body on commas at paren depth 0, outside quotes. */
function splitTopLevel(body: string): string[] {
  const parts: string[] = [];
  let depth = 0;
  let inString = false;
  let current = "";
  for (const ch of body) {
    if (inString) {
      current += ch;
      if (ch === "'") inString = false;
    } else if (ch === "'") {
      inString = true;
      current += ch;
    } else if (ch === "(") {
      depth++;
      current += ch;
    } else if (ch === ")") {
      depth--;
      current += ch;
    } else if (ch === "," && depth === 0) {
      parts.push(current);
      current = "";
    } else {
      current += ch;
    }
  }
  if (current.trim()) parts.push(current);
  return parts;
}

View File

@@ -3,73 +3,49 @@ import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
import { parseSchema } from "./schemaSync.js";
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * List the column names of `table` via SQLite's `PRAGMA table_info`.
 * Returns [] for anything that is not a bare identifier, because the
 * table name is interpolated directly into the PRAGMA statement.
 */
function getTableColumns(db: Database.Database, table: string): string[] {
  if (!/^\w+$/.test(table)) return [];
  const info = db.prepare(`PRAGMA table_info(${table})`).all() as { name: string }[];
  const names: string[] = [];
  for (const row of info) {
    names.push(row.name);
  }
  return names;
}
export function createSqliteDb(path: string): Db {
const db = new Database(path);
return {
// Create/upgrade the SQLite schema: apply schema.sql (idempotent
// CREATE ... IF NOT EXISTS statements), replay legacy hand-written ALTERs,
// then add any column the schema file declares that a live table lacks.
// NOTE(review): this block appears to be a rendered diff — several
// statements exist in both pre- and post-change form (e.g. the two
// `const schema` declarations below); confirm against the real file.
async runMigrations() {
const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8");
const schemaPath = join(__dirname, "schema.sql");
const schema = readFileSync(schemaPath, "utf-8");
db.exec(schema);
// Legacy per-column/per-table migrations; errors are deliberately
// swallowed because the column/table may already exist.
try {
db.exec("ALTER TABLE users ADD COLUMN lightning_address TEXT");
} catch (_) {}
try {
db.exec("ALTER TABLE users ADD COLUMN name TEXT");
} catch (_) {}
try {
db.exec(
"CREATE TABLE IF NOT EXISTS deposits (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at INTEGER NOT NULL, amount_sats INTEGER NOT NULL, source TEXT NOT NULL CHECK(source IN ('lightning','cashu')), lnbits_payment_hash TEXT)"
);
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
} catch (_) {}
try {
db.exec("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
} catch (_) {}
try {
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
} catch (_) {}
// Schema-driven sync: for every table parsed out of schema.sql, add any
// column present in the file but missing from the live database.
// (getTableColumns validates the table name before interpolating it.)
const expected = parseSchema(schema);
for (const [table, columns] of expected) {
try {
const existing = getTableColumns(db, table);
for (const col of columns) {
if (!existing.includes(col.name)) {
try {
db.exec(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`);
console.log(`[migrate] SQLite: added column ${table}.${col.name}`);
} catch (e) {
// Tolerate a concurrent migration having added the column already.
if (!String(e).includes("duplicate column name")) throw e;
}
}
}
} catch (e) {
// Non-fatal: log and continue syncing the remaining tables.
console.warn(`[migrate] SQLite: could not sync table ${table}:`, e);
}
}
// One-off data fix: divides lightning deposit amounts by 1000
// (presumably msats -> sats). The `>= 1000` guard looks intended to skip
// already-converted rows — TODO confirm this cannot re-divide a row on a
// second run.
try {
db.exec(
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
);
} catch (_) {}
try {
db.exec(
`CREATE TABLE IF NOT EXISTS sponsors (
id INTEGER PRIMARY KEY AUTOINCREMENT,
npub TEXT NOT NULL,
title TEXT NOT NULL,
description TEXT NOT NULL,
image_url TEXT,
link_url TEXT NOT NULL,
category TEXT,
lightning_address TEXT,
invoice_id TEXT,
payment_hash TEXT,
price_sats INTEGER NOT NULL,
duration_days INTEGER NOT NULL,
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
created_at INTEGER NOT NULL,
activated_at INTEGER,
expires_at INTEGER,
views INTEGER DEFAULT 0,
clicks INTEGER DEFAULT 0
)`
);
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)");
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)");
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON sponsors(expires_at)");
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)");
} catch (_) {}
// Columns added after the sponsors table first shipped; ignored if present.
try {
db.exec("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER");
} catch (_) {}
try {
db.exec("ALTER TABLE sponsors ADD COLUMN payment_request TEXT");
} catch (_) {}
},
async getUser(pubkey: string): Promise<UserRow | null> {

View File

@@ -7,6 +7,7 @@
"build:frontend": "cd frontend && npm run build",
"start": "cd backend && npm start",
"dev:backend": "cd backend && npm run dev",
"dev:frontend": "cd frontend && npm run dev"
"dev:frontend": "cd frontend && npm run dev",
"db:migrate": "cd backend && npm run migrate"
}
}