Add schema-driven db:migrate for SQLite and Postgres
- Parse schema files to detect the expected columns per table. - Add missing columns automatically when the schema is updated. - Add payment_request to the sponsors schema. - Add a db:migrate script to the root package.json. Made-with: Cursor
This commit is contained in:
@@ -1,10 +1,13 @@
|
|||||||
import "dotenv/config";
|
import "dotenv/config";
|
||||||
|
import { config, usePostgres } from "../config.js";
|
||||||
import { getDb } from "./index.js";
|
import { getDb } from "./index.js";
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
|
const dbType = usePostgres() && config.databaseUrl ? "Postgres" : "SQLite";
|
||||||
|
console.log(`[migrate] Using ${dbType} database`);
|
||||||
const db = getDb();
|
const db = getDb();
|
||||||
await db.runMigrations();
|
await db.runMigrations();
|
||||||
console.log("Migrations complete.");
|
console.log("[migrate] Migrations complete.");
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import { readFileSync } from "fs";
|
|||||||
import { dirname, join } from "path";
|
import { dirname, join } from "path";
|
||||||
import { fileURLToPath } from "url";
|
import { fileURLToPath } from "url";
|
||||||
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
|
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
|
||||||
|
import { parseSchema } from "./schemaSync.js";
|
||||||
|
|
||||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||||
|
|
||||||
@@ -53,71 +54,39 @@ export function createPgDb(connectionString: string): Db {
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
async runMigrations() {
|
async runMigrations() {
|
||||||
const schema = readFileSync(join(__dirname, "schema.pg.sql"), "utf-8");
|
const schemaPath = join(__dirname, "schema.pg.sql");
|
||||||
|
const schema = readFileSync(schemaPath, "utf-8");
|
||||||
await pool.query(schema);
|
await pool.query(schema);
|
||||||
try {
|
|
||||||
await pool.query("ALTER TABLE users ADD COLUMN lightning_address TEXT");
|
const expected = parseSchema(schema);
|
||||||
} catch (_) {}
|
for (const [table, columns] of expected) {
|
||||||
try {
|
if (!/^\w+$/.test(table)) continue;
|
||||||
await pool.query("ALTER TABLE users ADD COLUMN name TEXT");
|
try {
|
||||||
} catch (_) {}
|
const res = await pool.query(
|
||||||
try {
|
"SELECT column_name FROM information_schema.columns WHERE table_schema = 'public' AND table_name = $1",
|
||||||
await pool.query(
|
[table]
|
||||||
`CREATE TABLE IF NOT EXISTS deposits (
|
);
|
||||||
id SERIAL PRIMARY KEY,
|
const existing = res.rows.map((r) => r.column_name);
|
||||||
created_at BIGINT NOT NULL,
|
for (const col of columns) {
|
||||||
amount_sats INTEGER NOT NULL,
|
if (!existing.includes(col.name)) {
|
||||||
source TEXT NOT NULL CHECK(source IN ('lightning','cashu')),
|
try {
|
||||||
lnbits_payment_hash TEXT
|
await pool.query(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`);
|
||||||
)`
|
console.log(`[migrate] Postgres: added column ${table}.${col.name}`);
|
||||||
);
|
} catch (e) {
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
|
if (!String(e).includes("already exists")) throw e;
|
||||||
} catch (_) {}
|
}
|
||||||
try {
|
}
|
||||||
await pool.query("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
|
}
|
||||||
} catch (_) {}
|
} catch (e) {
|
||||||
try {
|
console.warn(`[migrate] Postgres: could not sync table ${table}:`, e);
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
|
}
|
||||||
} catch (_) {}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await pool.query(
|
await pool.query(
|
||||||
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
|
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
|
||||||
);
|
);
|
||||||
} catch (_) {}
|
} catch (_) {}
|
||||||
try {
|
|
||||||
await pool.query(
|
|
||||||
`CREATE TABLE IF NOT EXISTS sponsors (
|
|
||||||
id SERIAL PRIMARY KEY,
|
|
||||||
npub TEXT NOT NULL,
|
|
||||||
title TEXT NOT NULL,
|
|
||||||
description TEXT NOT NULL,
|
|
||||||
image_url TEXT,
|
|
||||||
link_url TEXT NOT NULL,
|
|
||||||
category TEXT,
|
|
||||||
lightning_address TEXT,
|
|
||||||
invoice_id TEXT,
|
|
||||||
payment_hash TEXT,
|
|
||||||
price_sats INTEGER NOT NULL,
|
|
||||||
duration_days INTEGER NOT NULL,
|
|
||||||
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
|
||||||
created_at BIGINT NOT NULL,
|
|
||||||
activated_at BIGINT,
|
|
||||||
expires_at BIGINT,
|
|
||||||
views INTEGER DEFAULT 0,
|
|
||||||
clicks INTEGER DEFAULT 0
|
|
||||||
)`
|
|
||||||
);
|
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)");
|
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)");
|
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON sponsors(expires_at)");
|
|
||||||
await pool.query("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)");
|
|
||||||
} catch (_) {}
|
|
||||||
try {
|
|
||||||
await pool.query("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER");
|
|
||||||
} catch (_) {}
|
|
||||||
try {
|
|
||||||
await pool.query("ALTER TABLE sponsors ADD COLUMN payment_request TEXT");
|
|
||||||
} catch (_) {}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
async getUser(pubkey: string): Promise<UserRow | null> {
|
async getUser(pubkey: string): Promise<UserRow | null> {
|
||||||
|
|||||||
@@ -80,6 +80,7 @@ CREATE TABLE IF NOT EXISTS sponsors (
|
|||||||
lightning_address TEXT,
|
lightning_address TEXT,
|
||||||
invoice_id TEXT,
|
invoice_id TEXT,
|
||||||
payment_hash TEXT,
|
payment_hash TEXT,
|
||||||
|
payment_request TEXT,
|
||||||
price_sats INTEGER NOT NULL,
|
price_sats INTEGER NOT NULL,
|
||||||
duration_days INTEGER NOT NULL,
|
duration_days INTEGER NOT NULL,
|
||||||
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
||||||
|
|||||||
@@ -81,6 +81,7 @@ CREATE TABLE IF NOT EXISTS sponsors (
|
|||||||
lightning_address TEXT,
|
lightning_address TEXT,
|
||||||
invoice_id TEXT,
|
invoice_id TEXT,
|
||||||
payment_hash TEXT,
|
payment_hash TEXT,
|
||||||
|
payment_request TEXT,
|
||||||
price_sats INTEGER NOT NULL,
|
price_sats INTEGER NOT NULL,
|
||||||
duration_days INTEGER NOT NULL,
|
duration_days INTEGER NOT NULL,
|
||||||
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
||||||
|
|||||||
37
backend/src/db/schemaSync.ts
Normal file
37
backend/src/db/schemaSync.ts
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
/**
|
||||||
|
* Parse schema SQL to extract table names and their columns.
|
||||||
|
* Used by migration to add missing columns when schema is updated.
|
||||||
|
*/
|
||||||
|
export interface SchemaColumn {
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseSchema(schema: string): Map<string, SchemaColumn[]> {
|
||||||
|
const result = new Map<string, SchemaColumn[]>();
|
||||||
|
const tableRegex = /CREATE TABLE IF NOT EXISTS\s+(\w+)\s*\(([\s\S]*?)\)\s*;?/gi;
|
||||||
|
const typeWords = ["INTEGER", "TEXT", "BIGINT", "SERIAL", "REAL", "BOOLEAN", "NUMERIC", "BIGSERIAL"];
|
||||||
|
|
||||||
|
let m;
|
||||||
|
while ((m = tableRegex.exec(schema)) !== null) {
|
||||||
|
const tableName = m[1];
|
||||||
|
const body = m[2];
|
||||||
|
const columns: SchemaColumn[] = [];
|
||||||
|
|
||||||
|
const lines = body.split("\n");
|
||||||
|
for (const line of lines) {
|
||||||
|
const trimmed = line.trim();
|
||||||
|
if (!trimmed || trimmed.startsWith(")")) continue;
|
||||||
|
const colMatch = trimmed.match(/^(\w+)\s+(\w+)/);
|
||||||
|
if (colMatch) {
|
||||||
|
const name = colMatch[1];
|
||||||
|
const type = colMatch[2].toUpperCase();
|
||||||
|
if (typeWords.includes(type)) {
|
||||||
|
columns.push({ name, type });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result.set(tableName, columns);
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
@@ -3,73 +3,49 @@ import { readFileSync } from "fs";
|
|||||||
import { dirname, join } from "path";
|
import { dirname, join } from "path";
|
||||||
import { fileURLToPath } from "url";
|
import { fileURLToPath } from "url";
|
||||||
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
|
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, SponsorRow, UserRow } from "./types.js";
|
||||||
|
import { parseSchema } from "./schemaSync.js";
|
||||||
|
|
||||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||||
|
|
||||||
|
function getTableColumns(db: Database.Database, table: string): string[] {
|
||||||
|
if (!/^\w+$/.test(table)) return [];
|
||||||
|
const rows = db.prepare(`PRAGMA table_info(${table})`).all() as { name: string }[];
|
||||||
|
return rows.map((r) => r.name);
|
||||||
|
}
|
||||||
|
|
||||||
export function createSqliteDb(path: string): Db {
|
export function createSqliteDb(path: string): Db {
|
||||||
const db = new Database(path);
|
const db = new Database(path);
|
||||||
|
|
||||||
return {
|
return {
|
||||||
async runMigrations() {
|
async runMigrations() {
|
||||||
const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8");
|
const schemaPath = join(__dirname, "schema.sql");
|
||||||
|
const schema = readFileSync(schemaPath, "utf-8");
|
||||||
db.exec(schema);
|
db.exec(schema);
|
||||||
try {
|
|
||||||
db.exec("ALTER TABLE users ADD COLUMN lightning_address TEXT");
|
const expected = parseSchema(schema);
|
||||||
} catch (_) {}
|
for (const [table, columns] of expected) {
|
||||||
try {
|
try {
|
||||||
db.exec("ALTER TABLE users ADD COLUMN name TEXT");
|
const existing = getTableColumns(db, table);
|
||||||
} catch (_) {}
|
for (const col of columns) {
|
||||||
try {
|
if (!existing.includes(col.name)) {
|
||||||
db.exec(
|
try {
|
||||||
"CREATE TABLE IF NOT EXISTS deposits (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at INTEGER NOT NULL, amount_sats INTEGER NOT NULL, source TEXT NOT NULL CHECK(source IN ('lightning','cashu')), lnbits_payment_hash TEXT)"
|
db.exec(`ALTER TABLE ${table} ADD COLUMN ${col.name} ${col.type}`);
|
||||||
);
|
console.log(`[migrate] SQLite: added column ${table}.${col.name}`);
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
|
} catch (e) {
|
||||||
} catch (_) {}
|
if (!String(e).includes("duplicate column name")) throw e;
|
||||||
try {
|
}
|
||||||
db.exec("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
|
}
|
||||||
} catch (_) {}
|
}
|
||||||
try {
|
} catch (e) {
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
|
console.warn(`[migrate] SQLite: could not sync table ${table}:`, e);
|
||||||
} catch (_) {}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
db.exec(
|
db.exec(
|
||||||
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
|
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
|
||||||
);
|
);
|
||||||
} catch (_) {}
|
} catch (_) {}
|
||||||
try {
|
|
||||||
db.exec(
|
|
||||||
`CREATE TABLE IF NOT EXISTS sponsors (
|
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
||||||
npub TEXT NOT NULL,
|
|
||||||
title TEXT NOT NULL,
|
|
||||||
description TEXT NOT NULL,
|
|
||||||
image_url TEXT,
|
|
||||||
link_url TEXT NOT NULL,
|
|
||||||
category TEXT,
|
|
||||||
lightning_address TEXT,
|
|
||||||
invoice_id TEXT,
|
|
||||||
payment_hash TEXT,
|
|
||||||
price_sats INTEGER NOT NULL,
|
|
||||||
duration_days INTEGER NOT NULL,
|
|
||||||
status TEXT NOT NULL CHECK(status IN ('pending_payment','pending_review','active','expired','removed')),
|
|
||||||
created_at INTEGER NOT NULL,
|
|
||||||
activated_at INTEGER,
|
|
||||||
expires_at INTEGER,
|
|
||||||
views INTEGER DEFAULT 0,
|
|
||||||
clicks INTEGER DEFAULT 0
|
|
||||||
)`
|
|
||||||
);
|
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_status ON sponsors(status)");
|
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_npub ON sponsors(npub)");
|
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_expires_at ON sponsors(expires_at)");
|
|
||||||
db.exec("CREATE INDEX IF NOT EXISTS idx_sponsors_payment_hash ON sponsors(payment_hash)");
|
|
||||||
} catch (_) {}
|
|
||||||
try {
|
|
||||||
db.exec("ALTER TABLE sponsors ADD COLUMN extends_sponsor_id INTEGER");
|
|
||||||
} catch (_) {}
|
|
||||||
try {
|
|
||||||
db.exec("ALTER TABLE sponsors ADD COLUMN payment_request TEXT");
|
|
||||||
} catch (_) {}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
async getUser(pubkey: string): Promise<UserRow | null> {
|
async getUser(pubkey: string): Promise<UserRow | null> {
|
||||||
|
|||||||
@@ -7,6 +7,7 @@
|
|||||||
"build:frontend": "cd frontend && npm run build",
|
"build:frontend": "cd frontend && npm run build",
|
||||||
"start": "cd backend && npm start",
|
"start": "cd backend && npm start",
|
||||||
"dev:backend": "cd backend && npm run dev",
|
"dev:backend": "cd backend && npm run dev",
|
||||||
"dev:frontend": "cd frontend && npm run dev"
|
"dev:frontend": "cd frontend && npm run dev",
|
||||||
|
"db:migrate": "cd backend && npm run migrate"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
Reference in New Issue
Block a user