diff --git a/README.md b/README.md index 2e81d24..fb7b874 100644 --- a/README.md +++ b/README.md @@ -64,6 +64,8 @@ npm run start npm run db:generate npm run db:migrate npm run db:studio +npm run db:export # Backup database +npm run db:import # Restore from backup ``` You can also run per workspace: @@ -117,6 +119,25 @@ Then run: npm run db:migrate ``` +### Backups (export / import) + +Create backups and restore if needed: + +```bash +# Export (creates timestamped file in backend/data/backups/) +npm run db:export + +# Export to custom path +npm run db:export -- -o ./my-backup.db # SQLite +npm run db:export -- -o ./my-backup.sql # PostgreSQL + +# Import (stop the backend server first) +npm run db:import -- ./data/backups/spanglish-2025-03-07-143022.db +npm run db:import -- --yes ./data/backups/spanglish-2025-03-07.sql # Skip confirmation +``` + +**Note:** Stop the backend before importing so the database file is not locked. + ## Production deployment (nginx + systemd) This repo includes example configs in `deploy/`: diff --git a/backend/package.json b/backend/package.json index d71a030..f90b769 100644 --- a/backend/package.json +++ b/backend/package.json @@ -8,7 +8,9 @@ "start": "NODE_ENV=production node dist/index.js", "db:generate": "drizzle-kit generate", "db:migrate": "tsx src/db/migrate.ts", - "db:studio": "drizzle-kit studio" + "db:studio": "drizzle-kit studio", + "db:export": "tsx src/db/export.ts", + "db:import": "tsx src/db/import.ts" }, "dependencies": { "@hono/node-server": "^1.11.4", diff --git a/backend/src/db/export.ts b/backend/src/db/export.ts new file mode 100644 index 0000000..b6d347c --- /dev/null +++ b/backend/src/db/export.ts @@ -0,0 +1,96 @@ +import 'dotenv/config'; +import { existsSync, mkdirSync, writeFileSync } from 'fs'; +import { dirname, resolve } from 'path'; +import { spawnSync } from 'child_process'; +import Database from 'better-sqlite3'; + +const dbType = process.env.DB_TYPE || 'sqlite'; +const dbPath = process.env.DATABASE_URL || 
'./data/spanglish.db'; +const BACKUP_DIR = resolve(process.cwd(), 'data', 'backups'); + +function parseArgs(): { output?: string } { + const args = process.argv.slice(2); + const result: { output?: string } = {}; + for (let i = 0; i < args.length; i++) { + if (args[i] === '-o' || args[i] === '--output') { + result.output = args[i + 1]; + i++; + } + } + return result; +} + +function getTimestamp(): string { + const now = new Date(); + const y = now.getFullYear(); + const m = String(now.getMonth() + 1).padStart(2, '0'); + const d = String(now.getDate()).padStart(2, '0'); + const h = String(now.getHours()).padStart(2, '0'); + const min = String(now.getMinutes()).padStart(2, '0'); + const s = String(now.getSeconds()).padStart(2, '0'); + return `${y}-${m}-${d}-${h}${min}${s}`; +} + +function exportSqlite(outputPath: string): void { + const db = new Database(resolve(process.cwd(), dbPath), { readonly: true }); + try { + db.backup(outputPath); + console.log(`Exported to ${outputPath}`); + } finally { + db.close(); + } +} + +function exportPostgres(outputPath: string): void { + const connString = process.env.DATABASE_URL || 'postgresql://localhost:5432/spanglish'; + const result = spawnSync( + 'pg_dump', + ['--clean', '--if-exists', connString], + { + stdio: ['ignore', 'pipe', 'pipe'], + encoding: 'utf-8', + } + ); + + if (result.error) { + console.error('pg_dump failed. Ensure pg_dump is installed and in PATH.'); + console.error(result.error.message); + process.exit(1); + } + + if (result.status !== 0) { + console.error('pg_dump failed:', result.stderr); + process.exit(1); + } + + writeFileSync(outputPath, result.stdout); + console.log(`Exported to ${outputPath}`); +} + +async function main() { + const { output } = parseArgs(); + const ext = dbType === 'postgres' ? '.sql' : '.db'; + const defaultName = `spanglish-${getTimestamp()}${ext}`; + + const outputPath = output + ? 
resolve(process.cwd(), output) + : resolve(BACKUP_DIR, defaultName); + + const dir = dirname(outputPath); + if (!existsSync(dir)) { + mkdirSync(dir, { recursive: true }); + } + + console.log(`Database type: ${dbType}`); + if (dbType === 'sqlite') { + exportSqlite(outputPath); + } else { + exportPostgres(outputPath); + } + process.exit(0); +} + +main().catch((err) => { + console.error('Export failed:', err); + process.exit(1); +}); diff --git a/backend/src/db/import.ts b/backend/src/db/import.ts new file mode 100644 index 0000000..f80dc05 --- /dev/null +++ b/backend/src/db/import.ts @@ -0,0 +1,91 @@ +import 'dotenv/config'; +import { copyFileSync, existsSync, readFileSync } from 'fs'; +import { resolve } from 'path'; +import { spawnSync } from 'child_process'; + +const dbType = process.env.DB_TYPE || 'sqlite'; +const dbPath = process.env.DATABASE_URL || './data/spanglish.db'; + +function parseArgs(): { file?: string; yes?: boolean } { + const args = process.argv.slice(2); + const result: { file?: string; yes?: boolean } = {}; + for (let i = 0; i < args.length; i++) { + if (args[i] === '-y' || args[i] === '--yes') { + result.yes = true; + } else if (!args[i].startsWith('-')) { + result.file = args[i]; + } + } + return result; +} + +function importSqlite(backupPath: string): void { + const targetPath = resolve(process.cwd(), dbPath); + copyFileSync(backupPath, targetPath); + console.log(`Restored from ${backupPath} to ${targetPath}`); +} + +function importPostgres(backupPath: string): void { + const connString = process.env.DATABASE_URL || 'postgresql://localhost:5432/spanglish'; + const sql = readFileSync(backupPath, 'utf-8'); + + const result = spawnSync( + 'psql', + [connString], + { + stdio: ['pipe', 'inherit', 'inherit'], + input: sql, + } + ); + + if (result.error) { + console.error('psql failed. 
Ensure psql is installed and in PATH.'); + console.error(result.error.message); + process.exit(1); + } + + if (result.status !== 0) { + process.exit(1); + } + + console.log(`Restored from ${backupPath}`); +} + +async function main() { + const { file, yes } = parseArgs(); + + if (!file) { + console.error('Usage: npm run db:import -- <backup-file> [--yes]'); + console.error('Example: npm run db:import -- ./data/backups/spanglish-2025-03-07.db'); + process.exit(1); + } + + const backupPath = resolve(process.cwd(), file); + if (!existsSync(backupPath)) { + console.error(`Backup file not found: ${backupPath}`); + process.exit(1); + } + + if (!yes) { + console.log('WARNING: Import will overwrite the current database.'); + console.log('Stop the backend server before importing.'); + console.log('Press Ctrl+C to cancel, or run with --yes to skip this warning.'); + await new Promise((r) => setTimeout(r, 3000)); + } + + console.log(`Database type: ${dbType}`); + if (dbType === 'sqlite') { + importSqlite(backupPath); + } else if (dbType === 'postgres') { + importPostgres(backupPath); + } else { + console.error('Unknown DB_TYPE. 
Use sqlite or postgres.'); + process.exit(1); + } + process.exit(0); +} + +main().catch((err) => { + console.error('Import failed:', err); + process.exit(1); +}); diff --git a/backend/src/routes/payments.ts b/backend/src/routes/payments.ts index ca8dbd4..cd60ebe 100644 --- a/backend/src/routes/payments.ts +++ b/backend/src/routes/payments.ts @@ -30,6 +30,8 @@ paymentsRouter.get('/', requireAuth(['admin']), async (c) => { const status = c.req.query('status'); const provider = c.req.query('provider'); const pendingApproval = c.req.query('pendingApproval'); + const eventId = c.req.query('eventId'); + const eventIds = c.req.query('eventIds'); // Get all payments with their associated tickets let allPayments = await dbAll( @@ -55,7 +57,7 @@ paymentsRouter.get('/', requireAuth(['admin']), async (c) => { } // Enrich with ticket and event data - const enrichedPayments = await Promise.all( + let enrichedPayments = await Promise.all( allPayments.map(async (payment: any) => { const ticket = await dbGet( (db as any) @@ -94,6 +96,16 @@ paymentsRouter.get('/', requireAuth(['admin']), async (c) => { }) ); + // Filter by event(s) + if (eventId) { + enrichedPayments = enrichedPayments.filter((p: any) => p.event?.id === eventId); + } else if (eventIds) { + const ids = eventIds.split(',').map((s: string) => s.trim()).filter(Boolean); + if (ids.length > 0) { + enrichedPayments = enrichedPayments.filter((p: any) => p.event && ids.includes(p.event.id)); + } + } + return c.json({ payments: enrichedPayments }); }); diff --git a/backend/src/routes/site-settings.ts b/backend/src/routes/site-settings.ts index db35dc6..5ff0592 100644 --- a/backend/src/routes/site-settings.ts +++ b/backend/src/routes/site-settings.ts @@ -200,8 +200,15 @@ siteSettingsRouter.put('/featured-event', requireAuth(['admin']), zValidator('js if (event.status !== 'published') { return c.json({ error: 'Event must be published to be featured' }, 400); } + const eventEndTime = event.endDatetime || event.startDatetime; + 
if (new Date(eventEndTime).getTime() <= Date.now()) { + return c.json( + { error: 'Cannot feature an event that has already ended' }, + 400 + ); + } } - + // Get or create settings const existing = await dbGet( (db as any).select().from(siteSettings).limit(1) diff --git a/frontend/.env.example b/frontend/.env.example index 73dc506..c7d9259 100644 --- a/frontend/.env.example +++ b/frontend/.env.example @@ -25,6 +25,9 @@ NEXT_PUBLIC_TIKTOK=spanglishsocialpy # Must match the REVALIDATE_SECRET in backend/.env REVALIDATE_SECRET=change-me-to-a-random-secret +# Next event cache revalidation (seconds) - homepage metadata/social preview refresh interval. Default: 3600 +NEXT_EVENT_REVALIDATE_SECONDS=3600 + # Plausible Analytics (optional - leave empty to disable tracking) NEXT_PUBLIC_PLAUSIBLE_URL=https://analytics.azzamo.net NEXT_PUBLIC_PLAUSIBLE_DOMAIN=spanglishcommunity.com diff --git a/frontend/public/images/spanglish-icon.png b/frontend/public/images/spanglish-icon.png new file mode 100644 index 0000000..70f331f Binary files /dev/null and b/frontend/public/images/spanglish-icon.png differ diff --git a/frontend/src/app/(public)/page.tsx b/frontend/src/app/(public)/page.tsx index e6a7283..ed4e23e 100644 --- a/frontend/src/app/(public)/page.tsx +++ b/frontend/src/app/(public)/page.tsx @@ -38,8 +38,10 @@ interface NextEvent { async function getNextUpcomingEvent(): Promise<NextEvent | null> { try { + const revalidateSeconds = + parseInt(process.env.NEXT_EVENT_REVALIDATE_SECONDS || '3600', 10) || 3600; const response = await fetch(`${apiUrl}/api/events/next/upcoming`, { - next: { tags: ['next-event'] }, + next: { tags: ['next-event'], revalidate: revalidateSeconds }, }); if (!response.ok) return null; const data = await response.json(); diff --git a/frontend/src/app/admin/bookings/page.tsx b/frontend/src/app/admin/bookings/page.tsx index 161727c..cb5fdc7 100644 --- a/frontend/src/app/admin/bookings/page.tsx +++ b/frontend/src/app/admin/bookings/page.tsx @@ -154,12 +154,23 @@ export default 
function AdminBookingsPage() { }; const getPaymentMethodLabel = (provider: string) => { - switch (provider) { - case 'bancard': return 'TPago / Card'; - case 'lightning': return 'Bitcoin Lightning'; - case 'cash': return 'Cash at Event'; - default: return provider; + const labels: Record<string, string> = { + cash: locale === 'es' ? 'Efectivo en el Evento' : 'Cash at Event', + bank_transfer: locale === 'es' ? 'Transferencia Bancaria' : 'Bank Transfer', + lightning: 'Lightning', + tpago: 'TPago', + bancard: 'Bancard', + }; + return labels[provider] || provider; + }; + + const getDisplayProvider = (ticket: TicketWithDetails) => { + if (ticket.payment?.provider) return ticket.payment.provider; + if (ticket.bookingId) { + const sibling = tickets.find(t => t.bookingId === ticket.bookingId && t.payment?.provider); + return sibling?.payment?.provider ?? 'cash'; } + return 'cash'; + }; const filteredTickets = tickets.filter((ticket) => { @@ -394,7 +405,7 @@ export default function AdminBookingsPage() { {ticket.payment?.status || 'pending'} -

{getPaymentMethodLabel(ticket.payment?.provider || 'cash')}

+

{getPaymentMethodLabel(getDisplayProvider(ticket))}

{ticket.payment && (

{bookingInfo.bookingTotal.toLocaleString()} {ticket.payment.currency}

)} diff --git a/frontend/src/app/admin/payments/page.tsx b/frontend/src/app/admin/payments/page.tsx index 007669d..c90b650 100644 --- a/frontend/src/app/admin/payments/page.tsx +++ b/frontend/src/app/admin/payments/page.tsx @@ -22,6 +22,7 @@ import { CreditCardIcon, EnvelopeIcon, FunnelIcon, + MagnifyingGlassIcon, XMarkIcon, } from '@heroicons/react/24/outline'; import toast from 'react-hot-toast'; @@ -38,6 +39,8 @@ export default function AdminPaymentsPage() { const [activeTab, setActiveTab] = useState('pending_approval'); const [statusFilter, setStatusFilter] = useState(''); const [providerFilter, setProviderFilter] = useState(''); + const [eventFilter, setEventFilter] = useState<string[]>([]); + const [searchQuery, setSearchQuery] = useState(''); const [mobileFilterOpen, setMobileFilterOpen] = useState(false); // Modal state @@ -59,7 +62,7 @@ export default function AdminPaymentsPage() { useEffect(() => { loadData(); - }, [statusFilter, providerFilter]); + }, [statusFilter, providerFilter, eventFilter]); const loadData = async () => { try { @@ -68,7 +71,8 @@ paymentsApi.getPendingApproval(), paymentsApi.getAll({ status: statusFilter || undefined, - provider: providerFilter || undefined + provider: providerFilter || undefined, + eventIds: eventFilter.length > 0 ? eventFilter : undefined, }), eventsApi.getAll(), ]); @@ -751,11 +755,40 @@ export default function AdminPaymentsPage() { )} {/* All Payments Tab */} - {activeTab === 'all' && ( + {activeTab === 'all' && (() => { + const q = searchQuery.trim().toLowerCase(); + const filteredPayments = q + ? 
payments.filter((p) => { + const name = `${p.ticket?.attendeeFirstName || ''} ${p.ticket?.attendeeLastName || ''}`.trim().toLowerCase(); + const email = (p.ticket?.attendeeEmail || '').toLowerCase(); + const phone = (p.ticket?.attendeePhone || '').toLowerCase(); + const eventTitle = (p.event?.title || '').toLowerCase(); + const payerName = (p.payerName || '').toLowerCase(); + const reference = (p.reference || '').toLowerCase(); + const id = (p.id || '').toLowerCase(); + return name.includes(q) || email.includes(q) || phone.includes(q) || + eventTitle.includes(q) || payerName.includes(q) || reference.includes(q) || id.includes(q); + }) + : payments; + + return ( <> {/* Desktop Filters */}
+
+ +
+ + setSearchQuery(e.target.value)} + className="w-full pl-9 pr-3 py-2 rounded-btn border border-secondary-light-gray text-sm focus:outline-none focus:ring-2 focus:ring-primary-yellow min-w-[200px]" + /> +
+
+
+ + + {eventFilter.length > 0 && ( +
+ {eventFilter.map((id) => { + const ev = events.find(e => e.id === id); + return ( + + {ev?.title || id} + + + ); + })} + +
+ )} +
- {/* Mobile Filter Toolbar */} -
- - {(statusFilter || providerFilter) && ( - - )} + {/* Mobile Search & Filter Toolbar */} +
+
+ + setSearchQuery(e.target.value)} + className="w-full pl-9 pr-3 py-2.5 rounded-btn border border-secondary-light-gray text-sm focus:outline-none focus:ring-2 focus:ring-primary-yellow" + /> +
+
+ + {(statusFilter || providerFilter || eventFilter.length > 0 || searchQuery) && ( + + )} +
{/* Desktop: Table */} @@ -810,10 +888,10 @@ export default function AdminPaymentsPage() { - {payments.length === 0 ? ( + {filteredPayments.length === 0 ? ( {locale === 'es' ? 'No se encontraron pagos' : 'No payments found'} ) : ( - payments.map((payment) => { + filteredPayments.map((payment) => { const bookingInfo = getBookingInfo(payment); return ( @@ -858,13 +936,18 @@ export default function AdminPaymentsPage() {
+ {(searchQuery || filteredPayments.length !== payments.length) && ( +

+ {locale === 'es' ? 'Mostrando' : 'Showing'} {filteredPayments.length} {locale === 'es' ? 'de' : 'of'} {payments.length} +

+ )} {/* Mobile: Card List */}
- {payments.length === 0 ? ( + {filteredPayments.length === 0 ? (
{locale === 'es' ? 'No se encontraron pagos' : 'No payments found'}
) : ( - payments.map((payment) => { + filteredPayments.map((payment) => { const bookingInfo = getBookingInfo(payment); return ( @@ -911,6 +994,25 @@ export default function AdminPaymentsPage() { {/* Mobile Filter BottomSheet */} setMobileFilterOpen(false)} title={locale === 'es' ? 'Filtros' : 'Filters'}>
+
+ +
+ {events.map((event) => ( + + ))} +
+
- +
- )} + ); + })()}
diff --git a/frontend/src/app/linktree/page.tsx b/frontend/src/app/linktree/page.tsx index 85dddf8..47bd346 100644 --- a/frontend/src/app/linktree/page.tsx +++ b/frontend/src/app/linktree/page.tsx @@ -2,13 +2,13 @@ import { useState, useEffect } from 'react'; import Link from 'next/link'; +import Image from 'next/image'; import { useLanguage } from '@/context/LanguageContext'; import { eventsApi, Event } from '@/lib/api'; import { formatPrice, formatDateShort, formatTime } from '@/lib/utils'; import { CalendarIcon, MapPinIcon, - ChatBubbleLeftRightIcon, } from '@heroicons/react/24/outline'; export default function LinktreePage() { @@ -59,8 +59,8 @@ export default function LinktreePage() {
{/* Profile Header */}
-
- +
+ Spanglish

Spanglish

{t('linktree.tagline')}

diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 22614b5..c59f3c2 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -236,11 +236,13 @@ export const usersApi = { // Payments API export const paymentsApi = { - getAll: (params?: { status?: string; provider?: string; pendingApproval?: boolean }) => { + getAll: (params?: { status?: string; provider?: string; pendingApproval?: boolean; eventId?: string; eventIds?: string[] }) => { const query = new URLSearchParams(); if (params?.status) query.set('status', params.status); if (params?.provider) query.set('provider', params.provider); if (params?.pendingApproval) query.set('pendingApproval', 'true'); + if (params?.eventId) query.set('eventId', params.eventId); + if (params?.eventIds && params.eventIds.length > 0) query.set('eventIds', params.eventIds.join(',')); return fetchApi<{ payments: PaymentWithDetails[] }>(`/api/payments?${query}`); }, diff --git a/package.json b/package.json index 62f63d5..ab9987f 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,9 @@ "start:frontend": "npm run start --workspace=frontend", "db:generate": "npm run db:generate --workspace=backend", "db:migrate": "npm run db:migrate --workspace=backend", - "db:studio": "npm run db:studio --workspace=backend" + "db:studio": "npm run db:studio --workspace=backend", + "db:export": "npm run db:export --workspace=backend --", + "db:import": "npm run db:import --workspace=backend --" }, "workspaces": [ "backend",