Add db:export and db:import for database backups
Made-with: Cursor
This commit is contained in:
21
README.md
21
README.md
@@ -64,6 +64,8 @@ npm run start
|
|||||||
npm run db:generate
|
npm run db:generate
|
||||||
npm run db:migrate
|
npm run db:migrate
|
||||||
npm run db:studio
|
npm run db:studio
|
||||||
|
npm run db:export # Backup database
|
||||||
|
npm run db:import # Restore from backup
|
||||||
```
|
```
|
||||||
|
|
||||||
You can also run per workspace:
|
You can also run per workspace:
|
||||||
@@ -117,6 +119,25 @@ Then run:
|
|||||||
npm run db:migrate
|
npm run db:migrate
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Backups (export / import)
|
||||||
|
|
||||||
|
Create backups and restore if needed:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Export (creates timestamped file in backend/data/backups/)
|
||||||
|
npm run db:export
|
||||||
|
|
||||||
|
# Export to custom path
|
||||||
|
npm run db:export -- -o ./my-backup.db # SQLite
|
||||||
|
npm run db:export -- -o ./my-backup.sql # PostgreSQL
|
||||||
|
|
||||||
|
# Import (stop the backend server first)
|
||||||
|
npm run db:import -- ./data/backups/spanglish-2025-03-07-143022.db
|
||||||
|
npm run db:import -- --yes ./data/backups/spanglish-2025-03-07.sql # Skip confirmation
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note:** Stop the backend before importing so the database file is not locked.
|
||||||
|
|
||||||
## Production deployment (nginx + systemd)
|
## Production deployment (nginx + systemd)
|
||||||
|
|
||||||
This repo includes example configs in `deploy/`:
|
This repo includes example configs in `deploy/`:
|
||||||
|
|||||||
@@ -8,7 +8,9 @@
|
|||||||
"start": "NODE_ENV=production node dist/index.js",
|
"start": "NODE_ENV=production node dist/index.js",
|
||||||
"db:generate": "drizzle-kit generate",
|
"db:generate": "drizzle-kit generate",
|
||||||
"db:migrate": "tsx src/db/migrate.ts",
|
"db:migrate": "tsx src/db/migrate.ts",
|
||||||
"db:studio": "drizzle-kit studio"
|
"db:studio": "drizzle-kit studio",
|
||||||
|
"db:export": "tsx src/db/export.ts",
|
||||||
|
"db:import": "tsx src/db/import.ts"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@hono/node-server": "^1.11.4",
|
"@hono/node-server": "^1.11.4",
|
||||||
|
|||||||
96
backend/src/db/export.ts
Normal file
96
backend/src/db/export.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
import 'dotenv/config';
import Database from 'better-sqlite3';
import { spawnSync } from 'child_process';
import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs';
import { dirname, resolve } from 'path';
|
||||||
|
|
||||||
|
const dbType = process.env.DB_TYPE || 'sqlite';
|
||||||
|
const dbPath = process.env.DATABASE_URL || './data/spanglish.db';
|
||||||
|
const BACKUP_DIR = resolve(process.cwd(), 'data', 'backups');
|
||||||
|
|
||||||
|
function parseArgs(): { output?: string } {
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const result: { output?: string } = {};
|
||||||
|
for (let i = 0; i < args.length; i++) {
|
||||||
|
if (args[i] === '-o' || args[i] === '--output') {
|
||||||
|
result.output = args[i + 1];
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getTimestamp(): string {
|
||||||
|
const now = new Date();
|
||||||
|
const y = now.getFullYear();
|
||||||
|
const m = String(now.getMonth() + 1).padStart(2, '0');
|
||||||
|
const d = String(now.getDate()).padStart(2, '0');
|
||||||
|
const h = String(now.getHours()).padStart(2, '0');
|
||||||
|
const min = String(now.getMinutes()).padStart(2, '0');
|
||||||
|
const s = String(now.getSeconds()).padStart(2, '0');
|
||||||
|
return `${y}-${m}-${d}-${h}${min}${s}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function exportSqlite(outputPath: string): void {
|
||||||
|
const db = new Database(resolve(process.cwd(), dbPath), { readonly: true });
|
||||||
|
try {
|
||||||
|
db.backup(outputPath);
|
||||||
|
console.log(`Exported to ${outputPath}`);
|
||||||
|
} finally {
|
||||||
|
db.close();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function exportPostgres(outputPath: string): void {
|
||||||
|
const connString = process.env.DATABASE_URL || 'postgresql://localhost:5432/spanglish';
|
||||||
|
const result = spawnSync(
|
||||||
|
'pg_dump',
|
||||||
|
['--clean', '--if-exists', connString],
|
||||||
|
{
|
||||||
|
stdio: ['ignore', 'pipe', 'pipe'],
|
||||||
|
encoding: 'utf-8',
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
console.error('pg_dump failed. Ensure pg_dump is installed and in PATH.');
|
||||||
|
console.error(result.error.message);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.status !== 0) {
|
||||||
|
console.error('pg_dump failed:', result.stderr);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
writeFileSync(outputPath, result.stdout);
|
||||||
|
console.log(`Exported to ${outputPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const { output } = parseArgs();
|
||||||
|
const ext = dbType === 'postgres' ? '.sql' : '.db';
|
||||||
|
const defaultName = `spanglish-${getTimestamp()}${ext}`;
|
||||||
|
|
||||||
|
const outputPath = output
|
||||||
|
? resolve(process.cwd(), output)
|
||||||
|
: resolve(BACKUP_DIR, defaultName);
|
||||||
|
|
||||||
|
const dir = dirname(outputPath);
|
||||||
|
if (!existsSync(dir)) {
|
||||||
|
mkdirSync(dir, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Database type: ${dbType}`);
|
||||||
|
if (dbType === 'sqlite') {
|
||||||
|
exportSqlite(outputPath);
|
||||||
|
} else {
|
||||||
|
exportPostgres(outputPath);
|
||||||
|
}
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('Export failed:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
91
backend/src/db/import.ts
Normal file
91
backend/src/db/import.ts
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
import 'dotenv/config';
import { spawnSync } from 'child_process';
import { copyFileSync, existsSync, readFileSync, rmSync } from 'fs';
import { resolve } from 'path';
|
||||||
|
|
||||||
|
// Database engine selector: 'sqlite' (default) or 'postgres'.
const dbType = process.env.DB_TYPE || 'sqlite';
// For sqlite this is a file path; for postgres it is the connection string
// (importPostgres reads DATABASE_URL directly as a psql conninfo).
const dbPath = process.env.DATABASE_URL || './data/spanglish.db';
|
||||||
|
|
||||||
|
function parseArgs(): { file?: string; yes?: boolean } {
|
||||||
|
const args = process.argv.slice(2);
|
||||||
|
const result: { file?: string; yes?: boolean } = {};
|
||||||
|
for (let i = 0; i < args.length; i++) {
|
||||||
|
if (args[i] === '-y' || args[i] === '--yes') {
|
||||||
|
result.yes = true;
|
||||||
|
} else if (!args[i].startsWith('-')) {
|
||||||
|
result.file = args[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function importSqlite(backupPath: string): void {
|
||||||
|
const targetPath = resolve(process.cwd(), dbPath);
|
||||||
|
copyFileSync(backupPath, targetPath);
|
||||||
|
console.log(`Restored from ${backupPath} to ${targetPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
function importPostgres(backupPath: string): void {
|
||||||
|
const connString = process.env.DATABASE_URL || 'postgresql://localhost:5432/spanglish';
|
||||||
|
const sql = readFileSync(backupPath, 'utf-8');
|
||||||
|
|
||||||
|
const result = spawnSync(
|
||||||
|
'psql',
|
||||||
|
[connString],
|
||||||
|
{
|
||||||
|
stdio: ['pipe', 'inherit', 'inherit'],
|
||||||
|
input: sql,
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
console.error('psql failed. Ensure psql is installed and in PATH.');
|
||||||
|
console.error(result.error.message);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.status !== 0) {
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Restored from ${backupPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
const { file, yes } = parseArgs();
|
||||||
|
|
||||||
|
if (!file) {
|
||||||
|
console.error('Usage: npm run db:import -- <backup-file> [--yes]');
|
||||||
|
console.error('Example: npm run db:import -- ./data/backups/spanglish-2025-03-07.db');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const backupPath = resolve(process.cwd(), file);
|
||||||
|
if (!existsSync(backupPath)) {
|
||||||
|
console.error(`Backup file not found: ${backupPath}`);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!yes) {
|
||||||
|
console.log('WARNING: Import will overwrite the current database.');
|
||||||
|
console.log('Stop the backend server before importing.');
|
||||||
|
console.log('Press Ctrl+C to cancel, or run with --yes to skip this warning.');
|
||||||
|
await new Promise((r) => setTimeout(r, 3000));
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Database type: ${dbType}`);
|
||||||
|
if (dbType === 'sqlite') {
|
||||||
|
importSqlite(backupPath);
|
||||||
|
} else if (dbType === 'postgres') {
|
||||||
|
importPostgres(backupPath);
|
||||||
|
} else {
|
||||||
|
console.error('Unknown DB_TYPE. Use sqlite or postgres.');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error('Import failed:', err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
@@ -15,7 +15,9 @@
|
|||||||
"start:frontend": "npm run start --workspace=frontend",
|
"start:frontend": "npm run start --workspace=frontend",
|
||||||
"db:generate": "npm run db:generate --workspace=backend",
|
"db:generate": "npm run db:generate --workspace=backend",
|
||||||
"db:migrate": "npm run db:migrate --workspace=backend",
|
"db:migrate": "npm run db:migrate --workspace=backend",
|
||||||
"db:studio": "npm run db:studio --workspace=backend"
|
"db:studio": "npm run db:studio --workspace=backend",
|
||||||
|
"db:export": "npm run db:export --workspace=backend --",
|
||||||
|
"db:import": "npm run db:import --workspace=backend --"
|
||||||
},
|
},
|
||||||
"workspaces": [
|
"workspaces": [
|
||||||
"backend",
|
"backend",
|
||||||
|
|||||||
Reference in New Issue
Block a user