first commit

Made-with: Cursor
This commit is contained in:
Michaël
2026-02-26 18:33:00 -03:00
commit 3734365463
76 changed files with 14133 additions and 0 deletions

62
backend/.env.example Normal file
View File

@@ -0,0 +1,62 @@
# Server
PORT=3001
TRUST_PROXY=false
# Comma-separated origins for CORS (default allows 5173 and 5174)
# ALLOWED_ORIGINS=http://localhost:5173,http://localhost:5174
# Database: omit for SQLite (default); set for Postgres
# DATABASE_URL=postgresql://user:pass@localhost:5432/faucet
# SQLITE_PATH=./data/faucet.db
# Security (required)
HMAC_IP_SECRET=your-secret-key-min-32-chars
JWT_SECRET=your-jwt-secret-min-32-chars
JWT_EXPIRES_IN_SECONDS=604800
NIP98_MAX_SKEW_SECONDS=300
NONCE_TTL_SECONDS=600
# Faucet economics
FAUCET_ENABLED=true
EMERGENCY_STOP=false
FAUCET_MIN_SATS=10
FAUCET_MAX_SATS=100
PAYOUT_WEIGHT_SMALL=50
PAYOUT_WEIGHT_MEDIUM=30
PAYOUT_WEIGHT_LARGE=15
PAYOUT_WEIGHT_JACKPOT=5
PAYOUT_SMALL_SATS=10
PAYOUT_MEDIUM_SATS=25
PAYOUT_LARGE_SATS=50
PAYOUT_JACKPOT_SATS=100
DAILY_BUDGET_SATS=10000
MAX_CLAIMS_PER_DAY=100
MIN_WALLET_BALANCE_SATS=1000
# Eligibility
MIN_ACCOUNT_AGE_DAYS=14
MIN_ACTIVITY_SCORE=30
MIN_NOTES_COUNT=5
MIN_FOLLOWING_COUNT=10
MIN_FOLLOWERS_COUNT=0
ACTIVITY_LOOKBACK_DAYS=90
# Cooldowns
COOLDOWN_DAYS=7
IP_COOLDOWN_DAYS=7
MAX_CLAIMS_PER_IP_PER_PERIOD=1
# Nostr
NOSTR_RELAYS=wss://relay.damus.io,wss://relay.nostr.band
RELAY_TIMEOUT_MS=5000
MAX_EVENTS_FETCH=500
METADATA_CACHE_HOURS=24
# LNbits
LNBITS_BASE_URL=https://azzamo.online
LNBITS_ADMIN_KEY=your-admin-key
LNBITS_WALLET_ID=your-wallet-id
DEPOSIT_LIGHTNING_ADDRESS=faucet@yourdomain.com
DEPOSIT_LNURLP=https://yourdomain.com/.well-known/lnurlp/faucet
# Cashu redeem (optional; default: https://cashu-redeem.azzamo.net)
# CASHU_REDEEM_API_URL=https://cashu-redeem.azzamo.net

2303
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

32
backend/package.json Normal file
View File

@@ -0,0 +1,32 @@
{
"name": "lnfaucet-backend",
"version": "1.0.0",
"private": true,
"type": "module",
"scripts": {
"build": "tsc",
"start": "node dist/index.js",
"dev": "tsx watch src/index.ts",
"migrate": "tsx src/db/migrate.ts"
},
"dependencies": {
"better-sqlite3": "^11.6.0",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.21.1",
"express-rate-limit": "^7.4.1",
"nostr-tools": "^2.4.4",
"pg": "^8.13.1",
"uuid": "^10.0.0"
},
"devDependencies": {
"@types/better-sqlite3": "^7.6.11",
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/node": "^22.9.0",
"@types/pg": "^8.11.10",
"@types/uuid": "^10.0.0",
"tsx": "^4.19.2",
"typescript": "^5.6.3"
}
}

32
backend/src/auth/jwt.ts Normal file
View File

@@ -0,0 +1,32 @@
import { createHmac, timingSafeEqual } from "crypto";
import { config } from "../config.js";
// Pre-encoded JWT header ({"alg":"HS256","typ":"JWT"}); identical for every token.
const HEADER = Buffer.from(JSON.stringify({ alg: "HS256", typ: "JWT" })).toString("base64url");
// Compact-JWT segment separator: header.payload.signature
const SEP = ".";
/**
 * Issue a compact HS256 JWT for the given Nostr pubkey.
 * Claims: { pubkey, exp }, where exp = now + config.jwtExpiresInSeconds.
 * Signed with config.jwtSecret; all segments are base64url.
 */
export function signJwt(pubkey: string): string {
  const nowSeconds = Math.floor(Date.now() / 1000);
  const claims = { pubkey, exp: nowSeconds + config.jwtExpiresInSeconds };
  const encodedClaims = Buffer.from(JSON.stringify(claims)).toString("base64url");
  const signingInput = HEADER + SEP + encodedClaims;
  const signature = createHmac("sha256", config.jwtSecret)
    .update(signingInput)
    .digest("base64url");
  return signingInput + SEP + signature;
}
/**
 * Verify an HS256 JWT produced by signJwt.
 *
 * @param token Compact JWT string (header.payload.signature).
 * @returns The embedded pubkey on success, or null for any malformed,
 *          tampered, or expired token. Never throws.
 */
export function verifyJwt(token: string): { pubkey: string } | null {
  try {
    const parts = token.split(SEP);
    if (parts.length !== 3) return null;
    const [headerB64, payloadB64, sigB64] = parts;
    const message = `${headerB64}${SEP}${payloadB64}`;
    // Compare raw digest bytes in constant time; a plain string !== check
    // leaks how many leading signature bytes matched via timing.
    const expected = createHmac("sha256", config.jwtSecret).update(message).digest();
    const actual = Buffer.from(sigB64, "base64url");
    // timingSafeEqual throws on length mismatch, so guard explicitly.
    if (actual.length !== expected.length || !timingSafeEqual(actual, expected)) return null;
    const payload = JSON.parse(
      Buffer.from(payloadB64, "base64url").toString("utf-8")
    ) as { pubkey?: string; exp?: number };
    if (!payload.pubkey || typeof payload.pubkey !== "string") return null;
    // Reject missing or past expiry (exp is epoch seconds).
    if (!payload.exp || payload.exp < Math.floor(Date.now() / 1000)) return null;
    return { pubkey: payload.pubkey };
  } catch {
    // Bad base64, bad JSON, etc. — treat uniformly as "invalid token".
    return null;
  }
}

87
backend/src/config.ts Normal file
View File

@@ -0,0 +1,87 @@
import "dotenv/config";
/**
 * Read a required string environment variable.
 * @param key          Environment variable name.
 * @param defaultValue Fallback used when the variable is unset.
 * @returns The variable's value (possibly empty), or the fallback.
 * @throws Error when the variable is unset and no fallback was provided.
 */
function env(key: string, defaultValue?: string): string {
  const value = process.env[key];
  if (value === undefined) {
    if (defaultValue === undefined) throw new Error(`Missing required env: ${key}`);
    return defaultValue;
  }
  return value;
}
/**
 * Read an integer environment variable.
 * @param key          Environment variable name.
 * @param defaultValue Returned when the variable is unset.
 * @returns The parsed integer.
 * @throws Error when the value is set but is not a whole number.
 */
function envInt(key: string, defaultValue: number): number {
  const raw = process.env[key];
  if (raw === undefined) return defaultValue;
  const trimmed = raw.trim();
  // Number() (unlike parseInt) rejects trailing garbage such as "30s" or
  // fractional values like "3.5" instead of silently truncating them —
  // but it parses "" as 0, so guard empty input explicitly.
  const n = trimmed === "" ? NaN : Number(trimmed);
  if (!Number.isInteger(n)) throw new Error(`Invalid integer env: ${key}`);
  return n;
}
/**
 * Read a boolean environment variable.
 * Truthy spellings are "true" and "1", matched case-insensitively and
 * ignoring surrounding whitespace (so "TRUE", "True", " true " all work);
 * any other set value is false.
 * @param key          Environment variable name.
 * @param defaultValue Returned when the variable is unset.
 */
function envBool(key: string, defaultValue: boolean): boolean {
  const raw = process.env[key];
  if (raw === undefined) return defaultValue;
  const normalized = raw.trim().toLowerCase();
  return normalized === "true" || normalized === "1";
}
/**
 * Application configuration, resolved once at module load from process.env.
 * Property initializers run top-to-bottom, so required vars (e.g.
 * HMAC_IP_SECRET) throw before later entries that depend on them.
 */
export const config = {
  // HTTP server
  port: envInt("PORT", 3001),
  trustProxy: envBool("TRUST_PROXY", false),
  // CORS allow-list; ALLOWED_ORIGINS takes precedence over legacy FRONTEND_URL.
  allowedOrigins: (process.env.ALLOWED_ORIGINS ?? process.env.FRONTEND_URL ?? "http://localhost:5173,http://localhost:5174").split(",").map((s) => s.trim()),
  // Database: omit DATABASE_URL for SQLite; set for Postgres
  databaseUrl: process.env.DATABASE_URL as string | undefined,
  sqlitePath: process.env.SQLITE_PATH ?? "./data/faucet.db",
  // Security
  hmacIpSecret: env("HMAC_IP_SECRET"),
  // Falls back to HMAC_IP_SECRET. The hmacIpSecret line above already throws
  // when HMAC_IP_SECRET is unset, so the "" default is effectively unreachable.
  jwtSecret: env("JWT_SECRET", process.env.HMAC_IP_SECRET ?? ""),
  jwtExpiresInSeconds: envInt("JWT_EXPIRES_IN_SECONDS", 86400 * 7), // 7 days
  nip98MaxSkewSeconds: envInt("NIP98_MAX_SKEW_SECONDS", 300),
  nonceTtlSeconds: envInt("NONCE_TTL_SECONDS", 600),
  // Faucet economics
  faucetEnabled: envBool("FAUCET_ENABLED", true),
  emergencyStop: envBool("EMERGENCY_STOP", false),
  faucetMinSats: envInt("FAUCET_MIN_SATS", 1),
  faucetMaxSats: envInt("FAUCET_MAX_SATS", 5),
  // Payout tiers: weights are relative odds, not percentages.
  payoutWeightSmall: envInt("PAYOUT_WEIGHT_SMALL", 50),
  payoutWeightMedium: envInt("PAYOUT_WEIGHT_MEDIUM", 30),
  payoutWeightLarge: envInt("PAYOUT_WEIGHT_LARGE", 15),
  payoutWeightJackpot: envInt("PAYOUT_WEIGHT_JACKPOT", 5),
  payoutSmallSats: envInt("PAYOUT_SMALL_SATS", 10),
  payoutMediumSats: envInt("PAYOUT_MEDIUM_SATS", 25),
  payoutLargeSats: envInt("PAYOUT_LARGE_SATS", 50),
  payoutJackpotSats: envInt("PAYOUT_JACKPOT_SATS", 100),
  dailyBudgetSats: envInt("DAILY_BUDGET_SATS", 10000),
  maxClaimsPerDay: envInt("MAX_CLAIMS_PER_DAY", 100),
  minWalletBalanceSats: envInt("MIN_WALLET_BALANCE_SATS", 1000),
  // Eligibility thresholds applied to cached Nostr profile stats.
  minAccountAgeDays: envInt("MIN_ACCOUNT_AGE_DAYS", 14),
  minActivityScore: envInt("MIN_ACTIVITY_SCORE", 30),
  minNotesCount: envInt("MIN_NOTES_COUNT", 5),
  minFollowingCount: envInt("MIN_FOLLOWING_COUNT", 10),
  minFollowersCount: envInt("MIN_FOLLOWERS_COUNT", 0),
  activityLookbackDays: envInt("ACTIVITY_LOOKBACK_DAYS", 90),
  // Cooldowns
  cooldownDays: envInt("COOLDOWN_DAYS", 7),
  ipCooldownDays: envInt("IP_COOLDOWN_DAYS", 7),
  maxClaimsPerIpPerPeriod: envInt("MAX_CLAIMS_PER_IP_PER_PERIOD", 1),
  // Nostr (defaults include relays common for remote signers / NIP-05)
  nostrRelays: (process.env.NOSTR_RELAYS ?? "wss://relay.damus.io,wss://relay.nostr.band,wss://relay.getalby.com,wss://nos.lol").split(",").map((s) => s.trim()),
  relayTimeoutMs: envInt("RELAY_TIMEOUT_MS", 5000),
  maxEventsFetch: envInt("MAX_EVENTS_FETCH", 500),
  metadataCacheHours: envInt("METADATA_CACHE_HOURS", 24),
  // LNbits — trailing slash stripped so paths can be appended safely.
  lnbitsBaseUrl: env("LNBITS_BASE_URL").replace(/\/$/, ""),
  lnbitsAdminKey: env("LNBITS_ADMIN_KEY"),
  lnbitsWalletId: env("LNBITS_WALLET_ID"),
  depositLightningAddress: process.env.DEPOSIT_LIGHTNING_ADDRESS ?? "",
  depositLnurlp: process.env.DEPOSIT_LNURLP ?? "",
  cashuRedeemApiUrl: (process.env.CASHU_REDEEM_API_URL ?? "https://cashu-redeem.azzamo.net").replace(/\/$/, ""),
};
/** True when a Postgres DATABASE_URL is configured; otherwise SQLite is used. */
export function usePostgres(): boolean {
  const url = config.databaseUrl;
  return url !== undefined && url !== "";
}

24
backend/src/db/index.ts Normal file
View File

@@ -0,0 +1,24 @@
import { config, usePostgres } from "../config.js";
import { createPgDb } from "./pg.js";
import { createSqliteDb } from "./sqlite.js";
import type { Db } from "./types.js";
import { mkdirSync, existsSync } from "fs";
import { dirname } from "path";
// Lazily-created process-wide database handle.
let dbInstance: Db | null = null;
/**
 * Return the singleton Db, creating it on first call.
 * Chooses Postgres when DATABASE_URL is configured; otherwise falls back
 * to SQLite, creating the data directory if it does not yet exist.
 */
export function getDb(): Db {
  if (dbInstance) return dbInstance;
  if (usePostgres() && config.databaseUrl) {
    dbInstance = createPgDb(config.databaseUrl);
    return dbInstance;
  }
  const dataDir = dirname(config.sqlitePath);
  if (!existsSync(dataDir)) mkdirSync(dataDir, { recursive: true });
  dbInstance = createSqliteDb(config.sqlitePath);
  return dbInstance;
}
export type { Db } from "./types.js";
export type { UserRow, ClaimRow, QuoteRow, IpLimitRow } from "./types.js";

14
backend/src/db/migrate.ts Normal file
View File

@@ -0,0 +1,14 @@
import "dotenv/config";
import { getDb } from "./index.js";
/** CLI entry point: apply all pending database migrations, then exit. */
async function main(): Promise<void> {
  await getDb().runMigrations();
  console.log("Migrations complete.");
  process.exit(0);
}
// Surface any migration failure with a non-zero exit code.
main().catch((err: unknown) => {
  console.error(err);
  process.exit(1);
});

347
backend/src/db/pg.ts Normal file
View File

@@ -0,0 +1,347 @@
import pg from "pg";
import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, UserRow } from "./types.js";
const __dirname = dirname(fileURLToPath(import.meta.url));
/**
 * Create a Db implementation backed by a node-postgres connection pool.
 *
 * All timestamps are epoch seconds stored as BIGINT; pg returns BIGINT
 * columns as strings, so the row converters below coerce with Number().
 *
 * @param connectionString Postgres connection URL.
 */
export function createPgDb(connectionString: string): Db {
  const pool = new pg.Pool({ connectionString });
  // Normalize a raw users row (string BIGINTs, missing optionals) to UserRow.
  function toUserRow(r: pg.QueryResultRow): UserRow {
    return {
      pubkey: r.pubkey,
      nostr_first_seen_at: r.nostr_first_seen_at != null ? Number(r.nostr_first_seen_at) : null,
      notes_count: Number(r.notes_count),
      followers_count: Number(r.followers_count),
      following_count: Number(r.following_count),
      activity_score: Number(r.activity_score),
      last_metadata_fetch_at: r.last_metadata_fetch_at != null ? Number(r.last_metadata_fetch_at) : null,
      lightning_address: r.lightning_address ?? null,
      name: r.name ?? null,
      created_at: Number(r.created_at),
      updated_at: Number(r.updated_at),
    };
  }
  // Normalize a raw claims row to ClaimRow.
  function toClaimRow(r: pg.QueryResultRow): ClaimRow {
    return {
      id: r.id,
      pubkey: r.pubkey,
      claimed_at: Number(r.claimed_at),
      payout_sats: r.payout_sats,
      ip_hash: r.ip_hash,
      payout_destination_hash: r.payout_destination_hash,
      status: r.status,
      lnbits_payment_hash: r.lnbits_payment_hash,
      error_message: r.error_message,
    };
  }
  // Normalize a raw quotes row to QuoteRow.
  function toQuoteRow(r: pg.QueryResultRow): QuoteRow {
    return {
      quote_id: r.quote_id,
      pubkey: r.pubkey,
      payout_sats: r.payout_sats,
      lightning_address: r.lightning_address ?? null,
      created_at: Number(r.created_at),
      expires_at: Number(r.expires_at),
      status: r.status,
    };
  }
  return {
    // Apply the base schema, then idempotent incremental migrations.
    // Each step is wrapped in try/catch because an error here (e.g. the
    // column already exists) means "migration already applied" — failures
    // are deliberately swallowed so re-runs are safe.
    async runMigrations() {
      const schema = readFileSync(join(__dirname, "schema.pg.sql"), "utf-8");
      await pool.query(schema);
      try {
        await pool.query("ALTER TABLE users ADD COLUMN lightning_address TEXT");
      } catch (_) {}
      try {
        await pool.query("ALTER TABLE users ADD COLUMN name TEXT");
      } catch (_) {}
      try {
        await pool.query(
          `CREATE TABLE IF NOT EXISTS deposits (
            id SERIAL PRIMARY KEY,
            created_at BIGINT NOT NULL,
            amount_sats INTEGER NOT NULL,
            source TEXT NOT NULL CHECK(source IN ('lightning','cashu')),
            lnbits_payment_hash TEXT
          )`
        );
        await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
      } catch (_) {}
      try {
        await pool.query("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
      } catch (_) {}
      try {
        await pool.query("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
      } catch (_) {}
      // One-time backfill: presumably legacy lightning deposits were recorded
      // in millisats; rows >= 1000 are divided down to sats. NOTE(review):
      // heuristic — a legitimate >=1000-sat deposit recorded in sats would be
      // corrupted by a re-run; confirm against deposit history.
      try {
        await pool.query(
          "UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
        );
      } catch (_) {}
    },
    // Fetch a user by pubkey, or null when unknown.
    async getUser(pubkey: string): Promise<UserRow | null> {
      const res = await pool.query("SELECT * FROM users WHERE pubkey = $1", [pubkey]);
      return res.rows.length ? toUserRow(res.rows[0]) : null;
    },
    // Insert or fully replace a user's cached profile/activity fields.
    // created_at and updated_at are both bound to $10 (now) on insert.
    async upsertUser(row: Omit<UserRow, "created_at" | "updated_at">): Promise<void> {
      const now = Math.floor(Date.now() / 1000);
      await pool.query(
        `INSERT INTO users (pubkey, nostr_first_seen_at, notes_count, followers_count, following_count, activity_score, last_metadata_fetch_at, lightning_address, name, created_at, updated_at)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $10)
         ON CONFLICT(pubkey) DO UPDATE SET
           nostr_first_seen_at = EXCLUDED.nostr_first_seen_at,
           notes_count = EXCLUDED.notes_count,
           followers_count = EXCLUDED.followers_count,
           following_count = EXCLUDED.following_count,
           activity_score = EXCLUDED.activity_score,
           last_metadata_fetch_at = EXCLUDED.last_metadata_fetch_at,
           lightning_address = EXCLUDED.lightning_address,
           name = EXCLUDED.name,
           updated_at = EXCLUDED.updated_at`,
        [
          row.pubkey,
          row.nostr_first_seen_at ?? null,
          row.notes_count ?? 0,
          row.followers_count ?? 0,
          row.following_count ?? 0,
          row.activity_score ?? 0,
          row.last_metadata_fetch_at ?? null,
          row.lightning_address ?? null,
          row.name ?? null,
          now,
        ]
      );
    },
    // Refresh only the Nostr-derived cached stats for an existing user.
    async updateUserNostrCache(
      pubkey: string,
      data: {
        nostr_first_seen_at: number | null;
        notes_count: number;
        followers_count: number;
        following_count: number;
        activity_score: number;
        last_metadata_fetch_at: number;
      }
    ): Promise<void> {
      const now = Math.floor(Date.now() / 1000);
      await pool.query(
        `UPDATE users SET
           nostr_first_seen_at = $1, notes_count = $2, followers_count = $3, following_count = $4,
           activity_score = $5, last_metadata_fetch_at = $6, updated_at = $7
         WHERE pubkey = $8`,
        [
          data.nostr_first_seen_at,
          data.notes_count,
          data.followers_count,
          data.following_count,
          data.activity_score,
          data.last_metadata_fetch_at,
          now,
          pubkey,
        ]
      );
    },
    // Most recent *paid* claim for a pubkey (used for per-user cooldowns).
    async getLastSuccessfulClaimByPubkey(pubkey: string): Promise<ClaimRow | null> {
      const res = await pool.query(
        "SELECT * FROM claims WHERE pubkey = $1 AND status = 'paid' ORDER BY claimed_at DESC LIMIT 1",
        [pubkey]
      );
      return res.rows.length ? toClaimRow(res.rows[0]) : null;
    },
    // Most recent *paid* claim for a hashed IP (used for per-IP cooldowns).
    async getLastClaimByIpHash(ipHash: string): Promise<ClaimRow | null> {
      const res = await pool.query(
        "SELECT * FROM claims WHERE ip_hash = $1 AND status = 'paid' ORDER BY claimed_at DESC LIMIT 1",
        [ipHash]
      );
      return res.rows.length ? toClaimRow(res.rows[0]) : null;
    },
    // Count of paid claims from a hashed IP since the given epoch second.
    async getClaimCountForIpSince(ipHash: string, sinceTs: number): Promise<number> {
      const res = await pool.query(
        "SELECT COUNT(*) as c FROM claims WHERE ip_hash = $1 AND status = 'paid' AND claimed_at >= $2",
        [ipHash, sinceTs]
      );
      return parseInt(res.rows[0]?.c ?? "0", 10);
    },
    // Insert a claim row and return its generated id.
    async createClaim(row: Omit<ClaimRow, "id">): Promise<number> {
      const res = await pool.query(
        `INSERT INTO claims (pubkey, claimed_at, payout_sats, ip_hash, payout_destination_hash, status, lnbits_payment_hash, error_message)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id`,
        [
          row.pubkey,
          row.claimed_at,
          row.payout_sats,
          row.ip_hash,
          row.payout_destination_hash ?? null,
          row.status,
          row.lnbits_payment_hash ?? null,
          row.error_message ?? null,
        ]
      );
      return res.rows[0].id;
    },
    // Update a claim's status. NOTE(review): omitted lnbitsPaymentHash /
    // errorMessage are written as NULL, clobbering prior values — callers
    // must re-pass them if they should survive; confirm this is intended.
    async updateClaimStatus(
      id: number,
      status: ClaimRow["status"],
      lnbitsPaymentHash?: string,
      errorMessage?: string
    ): Promise<void> {
      await pool.query(
        "UPDATE claims SET status = $1, lnbits_payment_hash = $2, error_message = $3 WHERE id = $4",
        [status, lnbitsPaymentHash ?? null, errorMessage ?? null, id]
      );
    },
    // Per-IP rate-limit state, or null when the IP has never claimed.
    async getIpLimit(ipHash: string): Promise<IpLimitRow | null> {
      const res = await pool.query("SELECT * FROM ip_limits WHERE ip_hash = $1", [ipHash]);
      if (!res.rows.length) return null;
      const r = res.rows[0];
      return {
        ip_hash: r.ip_hash,
        last_claimed_at: Number(r.last_claimed_at),
        claim_count_period: Number(r.claim_count_period),
      };
    },
    // Create or overwrite the rate-limit state for a hashed IP.
    async upsertIpLimit(ipHash: string, lastClaimedAt: number, claimCountPeriod: number): Promise<void> {
      await pool.query(
        `INSERT INTO ip_limits (ip_hash, last_claimed_at, claim_count_period) VALUES ($1, $2, $3)
         ON CONFLICT(ip_hash) DO UPDATE SET last_claimed_at = $2, claim_count_period = $3`,
        [ipHash, lastClaimedAt, claimCountPeriod]
      );
    },
    // Record a new payout quote in 'active' status.
    async createQuote(quoteId: string, pubkey: string, payoutSats: number, lightningAddress: string, expiresAt: number): Promise<void> {
      const now = Math.floor(Date.now() / 1000);
      await pool.query(
        "INSERT INTO quotes (quote_id, pubkey, payout_sats, lightning_address, created_at, expires_at, status) VALUES ($1, $2, $3, $4, $5, $6, 'active')",
        [quoteId, pubkey, payoutSats, lightningAddress, now, expiresAt]
      );
    },
    // Fetch a quote by id regardless of status, or null when unknown.
    async getQuote(quoteId: string): Promise<QuoteRow | null> {
      const res = await pool.query("SELECT * FROM quotes WHERE quote_id = $1", [quoteId]);
      return res.rows.length ? toQuoteRow(res.rows[0]) : null;
    },
    // Mark a quote as used so it cannot be redeemed twice.
    async consumeQuote(quoteId: string): Promise<void> {
      await pool.query("UPDATE quotes SET status = 'consumed' WHERE quote_id = $1", [quoteId]);
    },
    // Store (or refresh the expiry of) a replay-protection nonce.
    async setNonce(nonce: string, expiresAt: number): Promise<void> {
      await pool.query(
        "INSERT INTO nonces (nonce, expires_at) VALUES ($1, $2) ON CONFLICT (nonce) DO UPDATE SET expires_at = $2",
        [nonce, expiresAt]
      );
    },
    // True when the nonce exists and has not yet expired.
    async hasNonce(nonce: string): Promise<boolean> {
      const now = Math.floor(Date.now() / 1000);
      const res = await pool.query("SELECT 1 FROM nonces WHERE nonce = $1 AND expires_at > $2", [nonce, now]);
      return res.rows.length > 0;
    },
    // Garbage-collect expired nonces.
    async deleteExpiredNonces(): Promise<void> {
      const now = Math.floor(Date.now() / 1000);
      await pool.query("DELETE FROM nonces WHERE expires_at <= $1", [now]);
    },
    // Lifetime sum of paid-out sats.
    async getTotalPaidSats(): Promise<number> {
      const res = await pool.query(
        "SELECT COALESCE(SUM(payout_sats), 0)::bigint as total FROM claims WHERE status = 'paid'"
      );
      return parseInt(res.rows[0]?.total ?? "0", 10);
    },
    // Lifetime count of paid claims.
    async getTotalClaimsCount(): Promise<number> {
      const res = await pool.query("SELECT COUNT(*) as c FROM claims WHERE status = 'paid'");
      return parseInt(res.rows[0]?.c ?? "0", 10);
    },
    // Paid-claim count since the given epoch second.
    async getClaimsCountSince(sinceTs: number): Promise<number> {
      const res = await pool.query(
        "SELECT COUNT(*) as c FROM claims WHERE status = 'paid' AND claimed_at >= $1",
        [sinceTs]
      );
      return parseInt(res.rows[0]?.c ?? "0", 10);
    },
    // Sum of paid-out sats since the given epoch second (daily budget checks).
    async getPaidSatsSince(sinceTs: number): Promise<number> {
      const res = await pool.query(
        "SELECT COALESCE(SUM(payout_sats), 0)::bigint as total FROM claims WHERE status = 'paid' AND claimed_at >= $1",
        [sinceTs]
      );
      return parseInt(res.rows[0]?.total ?? "0", 10);
    },
    // Latest paid claims with the pubkey truncated to an 8-char prefix,
    // suitable for public display.
    async getRecentPayouts(
      limit: number
    ): Promise<{ pubkey_prefix: string; payout_sats: number; claimed_at: number }[]> {
      const res = await pool.query(
        "SELECT pubkey, payout_sats, claimed_at FROM claims WHERE status = 'paid' ORDER BY claimed_at DESC LIMIT $1",
        [limit]
      );
      return res.rows.map((r) => ({
        pubkey_prefix: r.pubkey.slice(0, 8) + "…",
        payout_sats: r.payout_sats,
        claimed_at: Number(r.claimed_at),
      }));
    },
    // Record an incoming deposit; createdAt defaults to now.
    async insertDeposit(
      amountSats: number,
      source: DepositSource,
      lnbitsPaymentHash?: string | null,
      createdAt?: number
    ): Promise<void> {
      const now = createdAt ?? Math.floor(Date.now() / 1000);
      await pool.query(
        "INSERT INTO deposits (created_at, amount_sats, source, lnbits_payment_hash) VALUES ($1, $2, $3, $4)",
        [now, amountSats, source, lnbitsPaymentHash ?? null]
      );
    },
    // Deduplication check: has this LNbits payment already been recorded?
    async hasDepositWithPaymentHash(paymentHash: string): Promise<boolean> {
      const res = await pool.query(
        "SELECT 1 FROM deposits WHERE lnbits_payment_hash = $1 LIMIT 1",
        [paymentHash]
      );
      return res.rows.length > 0;
    },
    // Backfill a deposit's timestamp when the stored one is clearly bogus
    // (< 1000000000, i.e. before 2001). Returns true if a row was updated.
    async updateDepositCreatedAtIfMissing(paymentHash: string, createdAt: number): Promise<boolean> {
      const res = await pool.query(
        "UPDATE deposits SET created_at = $1 WHERE lnbits_payment_hash = $2 AND (created_at IS NULL OR created_at < 1000000000) RETURNING id",
        [createdAt, paymentHash]
      );
      return res.rowCount !== null && res.rowCount > 0;
    },
    // Latest deposits for display, normalized to plain numbers.
    async getRecentDeposits(
      limit: number
    ): Promise<{ amount_sats: number; source: DepositSource; created_at: number }[]> {
      const res = await pool.query(
        "SELECT amount_sats, source, created_at FROM deposits ORDER BY created_at DESC LIMIT $1",
        [limit]
      );
      return res.rows.map((r) => ({
        amount_sats: Number(r.amount_sats),
        source: r.source as DepositSource,
        created_at: Number(r.created_at),
      }));
    },
  };
}

View File

@@ -0,0 +1,50 @@
-- SQLite schema for LNFaucet
-- All timestamps are epoch seconds (INTEGER).
-- NOTE(review): this file differs from the other schema files in this commit
-- (no lightning_address/name on users, claims has quote_id, no deposits or
-- daily_stats tables); confirm which schema file the code actually loads.

-- Faucet users keyed by Nostr pubkey, with cached relay-derived stats.
CREATE TABLE IF NOT EXISTS users (
  pubkey TEXT PRIMARY KEY,
  nostr_first_seen_at INTEGER,
  notes_count INTEGER NOT NULL DEFAULT 0,
  followers_count INTEGER NOT NULL DEFAULT 0,
  following_count INTEGER NOT NULL DEFAULT 0,
  activity_score INTEGER NOT NULL DEFAULT 0,
  last_metadata_fetch_at INTEGER,
  created_at INTEGER NOT NULL,
  updated_at INTEGER NOT NULL
);
-- Payout attempts and their outcome (status/payment hash/error).
CREATE TABLE IF NOT EXISTS claims (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  pubkey TEXT NOT NULL,
  claimed_at INTEGER NOT NULL,
  payout_sats INTEGER NOT NULL,
  ip_hash TEXT NOT NULL,
  payout_destination_hash TEXT,
  status TEXT NOT NULL,
  lnbits_payment_hash TEXT,
  error_message TEXT,
  quote_id TEXT
);
-- Per-IP (HMAC-hashed) rate-limiting state.
CREATE TABLE IF NOT EXISTS ip_limits (
  ip_hash TEXT PRIMARY KEY,
  last_claimed_at INTEGER,
  claim_count_period INTEGER NOT NULL DEFAULT 0
);
-- Short-lived payout quotes issued before a claim is executed.
CREATE TABLE IF NOT EXISTS quotes (
  quote_id TEXT PRIMARY KEY,
  pubkey TEXT NOT NULL,
  payout_sats INTEGER NOT NULL,
  created_at INTEGER NOT NULL,
  expires_at INTEGER NOT NULL,
  status TEXT NOT NULL
);
-- Replay-protection nonces with expiry.
CREATE TABLE IF NOT EXISTS nonces (
  nonce TEXT PRIMARY KEY,
  expires_at INTEGER NOT NULL
);
-- Indexes for claim lookups and quote expiry scans.
CREATE INDEX IF NOT EXISTS idx_claims_pubkey ON claims(pubkey);
CREATE INDEX IF NOT EXISTS idx_claims_claimed_at ON claims(claimed_at);
CREATE INDEX IF NOT EXISTS idx_quotes_expires_at ON quotes(expires_at);
CREATE INDEX IF NOT EXISTS idx_quotes_status ON quotes(status);

View File

@@ -0,0 +1,69 @@
-- Postgres schema
-- All timestamps are epoch seconds stored as BIGINT.

-- Faucet users keyed by Nostr pubkey, with cached relay-derived stats.
CREATE TABLE IF NOT EXISTS users (
  pubkey TEXT PRIMARY KEY,
  nostr_first_seen_at BIGINT,
  notes_count INTEGER DEFAULT 0,
  followers_count INTEGER DEFAULT 0,
  following_count INTEGER DEFAULT 0,
  activity_score INTEGER DEFAULT 0,
  last_metadata_fetch_at BIGINT,
  lightning_address TEXT,
  name TEXT,
  created_at BIGINT NOT NULL DEFAULT (EXTRACT(EPOCH FROM NOW())::BIGINT),
  updated_at BIGINT NOT NULL DEFAULT (EXTRACT(EPOCH FROM NOW())::BIGINT)
);
-- Payout attempts; status is one of pending/paid/failed.
CREATE TABLE IF NOT EXISTS claims (
  id SERIAL PRIMARY KEY,
  pubkey TEXT NOT NULL REFERENCES users(pubkey),
  claimed_at BIGINT NOT NULL,
  payout_sats INTEGER NOT NULL,
  ip_hash TEXT NOT NULL,
  payout_destination_hash TEXT,
  status TEXT NOT NULL CHECK(status IN ('pending','paid','failed')),
  lnbits_payment_hash TEXT,
  error_message TEXT
);
-- Per-IP (HMAC-hashed) rate-limiting state.
CREATE TABLE IF NOT EXISTS ip_limits (
  ip_hash TEXT PRIMARY KEY,
  last_claimed_at BIGINT NOT NULL,
  claim_count_period INTEGER NOT NULL DEFAULT 0
);
-- Short-lived payout quotes; status is one of active/consumed/expired.
CREATE TABLE IF NOT EXISTS quotes (
  quote_id TEXT PRIMARY KEY,
  pubkey TEXT NOT NULL,
  payout_sats INTEGER NOT NULL,
  lightning_address TEXT,
  created_at BIGINT NOT NULL,
  expires_at BIGINT NOT NULL,
  status TEXT NOT NULL CHECK(status IN ('active','consumed','expired'))
);
-- Per-day aggregate stats keyed by date string.
CREATE TABLE IF NOT EXISTS daily_stats (
  date TEXT PRIMARY KEY,
  total_paid_sats INTEGER NOT NULL DEFAULT 0,
  total_claims INTEGER NOT NULL DEFAULT 0,
  unique_pubkeys INTEGER NOT NULL DEFAULT 0
);
-- Replay-protection nonces with expiry.
CREATE TABLE IF NOT EXISTS nonces (
  nonce TEXT PRIMARY KEY,
  expires_at BIGINT NOT NULL
);
-- Incoming funding, from lightning invoices or redeemed cashu tokens.
CREATE TABLE IF NOT EXISTS deposits (
  id SERIAL PRIMARY KEY,
  created_at BIGINT NOT NULL,
  amount_sats INTEGER NOT NULL,
  source TEXT NOT NULL CHECK(source IN ('lightning','cashu')),
  lnbits_payment_hash TEXT
);
-- Indexes for claim lookups, quote expiry scans, and deposit dedup.
CREATE INDEX IF NOT EXISTS idx_claims_pubkey ON claims(pubkey);
CREATE INDEX IF NOT EXISTS idx_claims_claimed_at ON claims(claimed_at);
CREATE INDEX IF NOT EXISTS idx_quotes_expires_at ON quotes(expires_at);
CREATE INDEX IF NOT EXISTS idx_quotes_status ON quotes(status);
CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at);
CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash);

70
backend/src/db/schema.sql Normal file
View File

@@ -0,0 +1,70 @@
-- SQLite schema
-- All timestamps are epoch seconds (INTEGER). Mirrors schema.pg.sql.

-- Faucet users keyed by Nostr pubkey, with cached relay-derived stats.
CREATE TABLE IF NOT EXISTS users (
  pubkey TEXT PRIMARY KEY,
  nostr_first_seen_at INTEGER,
  notes_count INTEGER DEFAULT 0,
  followers_count INTEGER DEFAULT 0,
  following_count INTEGER DEFAULT 0,
  activity_score INTEGER DEFAULT 0,
  last_metadata_fetch_at INTEGER,
  lightning_address TEXT,
  name TEXT,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  updated_at INTEGER NOT NULL DEFAULT (unixepoch())
);
-- Payout attempts; status is one of pending/paid/failed.
CREATE TABLE IF NOT EXISTS claims (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  pubkey TEXT NOT NULL,
  claimed_at INTEGER NOT NULL,
  payout_sats INTEGER NOT NULL,
  ip_hash TEXT NOT NULL,
  payout_destination_hash TEXT,
  status TEXT NOT NULL CHECK(status IN ('pending','paid','failed')),
  lnbits_payment_hash TEXT,
  error_message TEXT,
  FOREIGN KEY (pubkey) REFERENCES users(pubkey)
);
-- Per-IP (HMAC-hashed) rate-limiting state.
CREATE TABLE IF NOT EXISTS ip_limits (
  ip_hash TEXT PRIMARY KEY,
  last_claimed_at INTEGER NOT NULL,
  claim_count_period INTEGER NOT NULL DEFAULT 0
);
-- Short-lived payout quotes; status is one of active/consumed/expired.
CREATE TABLE IF NOT EXISTS quotes (
  quote_id TEXT PRIMARY KEY,
  pubkey TEXT NOT NULL,
  payout_sats INTEGER NOT NULL,
  lightning_address TEXT,
  created_at INTEGER NOT NULL,
  expires_at INTEGER NOT NULL,
  status TEXT NOT NULL CHECK(status IN ('active','consumed','expired'))
);
-- Per-day aggregate stats keyed by date string.
CREATE TABLE IF NOT EXISTS daily_stats (
  date TEXT PRIMARY KEY,
  total_paid_sats INTEGER NOT NULL DEFAULT 0,
  total_claims INTEGER NOT NULL DEFAULT 0,
  unique_pubkeys INTEGER NOT NULL DEFAULT 0
);
-- Replay-protection nonces with expiry.
CREATE TABLE IF NOT EXISTS nonces (
  nonce TEXT PRIMARY KEY,
  expires_at INTEGER NOT NULL
);
-- Incoming funding, from lightning invoices or redeemed cashu tokens.
CREATE TABLE IF NOT EXISTS deposits (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  created_at INTEGER NOT NULL,
  amount_sats INTEGER NOT NULL,
  source TEXT NOT NULL CHECK(source IN ('lightning','cashu')),
  lnbits_payment_hash TEXT
);
-- Indexes for claim lookups, quote expiry scans, and deposit dedup.
CREATE INDEX IF NOT EXISTS idx_claims_pubkey ON claims(pubkey);
CREATE INDEX IF NOT EXISTS idx_claims_claimed_at ON claims(claimed_at);
CREATE INDEX IF NOT EXISTS idx_quotes_expires_at ON quotes(expires_at);
CREATE INDEX IF NOT EXISTS idx_quotes_status ON quotes(status);
CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at);
CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash);

287
backend/src/db/sqlite.ts Normal file
View File

@@ -0,0 +1,287 @@
import Database from "better-sqlite3";
import { readFileSync } from "fs";
import { dirname, join } from "path";
import { fileURLToPath } from "url";
import type { ClaimRow, Db, DepositSource, IpLimitRow, QuoteRow, UserRow } from "./types.js";
const __dirname = dirname(fileURLToPath(import.meta.url));
export function createSqliteDb(path: string): Db {
const db = new Database(path);
return {
async runMigrations() {
const schema = readFileSync(join(__dirname, "schema.sql"), "utf-8");
db.exec(schema);
try {
db.exec("ALTER TABLE users ADD COLUMN lightning_address TEXT");
} catch (_) {}
try {
db.exec("ALTER TABLE users ADD COLUMN name TEXT");
} catch (_) {}
try {
db.exec(
"CREATE TABLE IF NOT EXISTS deposits (id INTEGER PRIMARY KEY AUTOINCREMENT, created_at INTEGER NOT NULL, amount_sats INTEGER NOT NULL, source TEXT NOT NULL CHECK(source IN ('lightning','cashu')), lnbits_payment_hash TEXT)"
);
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_created_at ON deposits(created_at)");
} catch (_) {}
try {
db.exec("ALTER TABLE deposits ADD COLUMN lnbits_payment_hash TEXT");
} catch (_) {}
try {
db.exec("CREATE INDEX IF NOT EXISTS idx_deposits_lnbits_payment_hash ON deposits(lnbits_payment_hash)");
} catch (_) {}
try {
db.exec(
"UPDATE deposits SET amount_sats = amount_sats / 1000 WHERE source = 'lightning' AND lnbits_payment_hash IS NOT NULL AND amount_sats >= 1000"
);
} catch (_) {}
},
async getUser(pubkey: string): Promise<UserRow | null> {
const row = db.prepare("SELECT * FROM users WHERE pubkey = ?").get(pubkey) as UserRow | undefined;
return row ?? null;
},
async upsertUser(row: Omit<UserRow, "created_at" | "updated_at">): Promise<void> {
const now = Math.floor(Date.now() / 1000);
db.prepare(
`INSERT INTO users (pubkey, nostr_first_seen_at, notes_count, followers_count, following_count, activity_score, last_metadata_fetch_at, lightning_address, name, created_at, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(pubkey) DO UPDATE SET
nostr_first_seen_at = excluded.nostr_first_seen_at,
notes_count = excluded.notes_count,
followers_count = excluded.followers_count,
following_count = excluded.following_count,
activity_score = excluded.activity_score,
last_metadata_fetch_at = excluded.last_metadata_fetch_at,
lightning_address = excluded.lightning_address,
name = excluded.name,
updated_at = excluded.updated_at`
).run(
row.pubkey,
row.nostr_first_seen_at ?? null,
row.notes_count ?? 0,
row.followers_count ?? 0,
row.following_count ?? 0,
row.activity_score ?? 0,
row.last_metadata_fetch_at ?? null,
row.lightning_address ?? null,
row.name ?? null,
now,
now
);
},
async updateUserNostrCache(
pubkey: string,
data: {
nostr_first_seen_at: number | null;
notes_count: number;
followers_count: number;
following_count: number;
activity_score: number;
last_metadata_fetch_at: number;
}
): Promise<void> {
const now = Math.floor(Date.now() / 1000);
db.prepare(
`UPDATE users SET
nostr_first_seen_at = ?, notes_count = ?, followers_count = ?, following_count = ?,
activity_score = ?, last_metadata_fetch_at = ?, updated_at = ?
WHERE pubkey = ?`
).run(
data.nostr_first_seen_at,
data.notes_count,
data.followers_count,
data.following_count,
data.activity_score,
data.last_metadata_fetch_at,
now,
pubkey
);
},
async getLastSuccessfulClaimByPubkey(pubkey: string): Promise<ClaimRow | null> {
const row = db
.prepare("SELECT * FROM claims WHERE pubkey = ? AND status = 'paid' ORDER BY claimed_at DESC LIMIT 1")
.get(pubkey) as ClaimRow | undefined;
return row ?? null;
},
async getLastClaimByIpHash(ipHash: string): Promise<ClaimRow | null> {
const row = db
.prepare("SELECT * FROM claims WHERE ip_hash = ? AND status = 'paid' ORDER BY claimed_at DESC LIMIT 1")
.get(ipHash) as ClaimRow | undefined;
return row ?? null;
},
async getClaimCountForIpSince(ipHash: string, sinceTs: number): Promise<number> {
const row = db
.prepare(
"SELECT COUNT(*) as c FROM claims WHERE ip_hash = ? AND status = 'paid' AND claimed_at >= ?"
)
.get(ipHash, sinceTs) as { c: number };
return row?.c ?? 0;
},
async createClaim(row: Omit<ClaimRow, "id">): Promise<number> {
const result = db
.prepare(
`INSERT INTO claims (pubkey, claimed_at, payout_sats, ip_hash, payout_destination_hash, status, lnbits_payment_hash, error_message)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`
)
.run(
row.pubkey,
row.claimed_at,
row.payout_sats,
row.ip_hash,
row.payout_destination_hash ?? null,
row.status,
row.lnbits_payment_hash ?? null,
row.error_message ?? null
);
return result.lastInsertRowid as number;
},
async updateClaimStatus(
id: number,
status: ClaimRow["status"],
lnbitsPaymentHash?: string,
errorMessage?: string
): Promise<void> {
db.prepare(
"UPDATE claims SET status = ?, lnbits_payment_hash = ?, error_message = ? WHERE id = ?"
).run(status, lnbitsPaymentHash ?? null, errorMessage ?? null, id);
},
async getIpLimit(ipHash: string): Promise<IpLimitRow | null> {
const row = db.prepare("SELECT * FROM ip_limits WHERE ip_hash = ?").get(ipHash) as IpLimitRow | undefined;
return row ?? null;
},
async upsertIpLimit(ipHash: string, lastClaimedAt: number, claimCountPeriod: number): Promise<void> {
db.prepare(
`INSERT INTO ip_limits (ip_hash, last_claimed_at, claim_count_period) VALUES (?, ?, ?)
ON CONFLICT(ip_hash) DO UPDATE SET last_claimed_at = ?, claim_count_period = ?`
).run(ipHash, lastClaimedAt, claimCountPeriod, lastClaimedAt, claimCountPeriod);
},
async createQuote(quoteId: string, pubkey: string, payoutSats: number, lightningAddress: string, expiresAt: number): Promise<void> {
const now = Math.floor(Date.now() / 1000);
db.prepare(
"INSERT INTO quotes (quote_id, pubkey, payout_sats, lightning_address, created_at, expires_at, status) VALUES (?, ?, ?, ?, ?, ?, 'active')"
).run(quoteId, pubkey, payoutSats, lightningAddress, now, expiresAt);
},
async getQuote(quoteId: string): Promise<QuoteRow | null> {
const row = db.prepare("SELECT * FROM quotes WHERE quote_id = ?").get(quoteId) as QuoteRow | undefined;
return row ?? null;
},
async consumeQuote(quoteId: string): Promise<void> {
db.prepare("UPDATE quotes SET status = 'consumed' WHERE quote_id = ?").run(quoteId);
},
async setNonce(nonce: string, expiresAt: number): Promise<void> {
db.prepare("INSERT OR REPLACE INTO nonces (nonce, expires_at) VALUES (?, ?)").run(nonce, expiresAt);
},
async hasNonce(nonce: string): Promise<boolean> {
const now = Math.floor(Date.now() / 1000);
const row = db.prepare("SELECT 1 FROM nonces WHERE nonce = ? AND expires_at > ?").get(nonce, now);
return !!row;
},
async deleteExpiredNonces(): Promise<void> {
const now = Math.floor(Date.now() / 1000);
db.prepare("DELETE FROM nonces WHERE expires_at <= ?").run(now);
},
async getTotalPaidSats(): Promise<number> {
const row = db
.prepare("SELECT COALESCE(SUM(payout_sats), 0) as total FROM claims WHERE status = 'paid'")
.get() as { total: number };
return row?.total ?? 0;
},
async getTotalClaimsCount(): Promise<number> {
const row = db
.prepare("SELECT COUNT(*) as c FROM claims WHERE status = 'paid'")
.get() as { c: number };
return row?.c ?? 0;
},
async getClaimsCountSince(sinceTs: number): Promise<number> {
const row = db
.prepare("SELECT COUNT(*) as c FROM claims WHERE status = 'paid' AND claimed_at >= ?")
.get(sinceTs) as { c: number };
return row?.c ?? 0;
},
async getPaidSatsSince(sinceTs: number): Promise<number> {
const row = db
.prepare("SELECT COALESCE(SUM(payout_sats), 0) as total FROM claims WHERE status = 'paid' AND claimed_at >= ?")
.get(sinceTs) as { total: number };
return row?.total ?? 0;
},
async getRecentPayouts(
limit: number
): Promise<{ pubkey_prefix: string; payout_sats: number; claimed_at: number }[]> {
const rows = db
.prepare(
"SELECT pubkey, payout_sats, claimed_at FROM claims WHERE status = 'paid' ORDER BY claimed_at DESC LIMIT ?"
)
.all(limit) as { pubkey: string; payout_sats: number; claimed_at: number }[];
return rows.map((r) => ({
pubkey_prefix: r.pubkey.slice(0, 8) + "…",
payout_sats: r.payout_sats,
claimed_at: r.claimed_at,
}));
},
async insertDeposit(
amountSats: number,
source: DepositSource,
lnbitsPaymentHash?: string | null,
createdAt?: number
): Promise<void> {
const now = createdAt ?? Math.floor(Date.now() / 1000);
db.prepare(
"INSERT INTO deposits (created_at, amount_sats, source, lnbits_payment_hash) VALUES (?, ?, ?, ?)"
).run(now, amountSats, source, lnbitsPaymentHash ?? null);
},
async hasDepositWithPaymentHash(paymentHash: string): Promise<boolean> {
const row = db
.prepare("SELECT 1 FROM deposits WHERE lnbits_payment_hash = ? LIMIT 1")
.get(paymentHash);
return !!row;
},
    /**
     * Backfill a deposit's created_at from LNbits when the stored value is
     * missing or clearly bogus. `1000000000` (~2001-09-09) acts as the
     * "obviously not a real deposit time" cutoff. Returns true when a row
     * was actually updated.
     */
    async updateDepositCreatedAtIfMissing(paymentHash: string, createdAt: number): Promise<boolean> {
      const result = db
        .prepare(
          "UPDATE deposits SET created_at = ? WHERE lnbits_payment_hash = ? AND (created_at IS NULL OR created_at < 1000000000)"
        )
        .run(createdAt, paymentHash);
      return result.changes > 0;
    },
async getRecentDeposits(
limit: number
): Promise<{ amount_sats: number; source: DepositSource; created_at: number }[]> {
const rows = db
.prepare(
"SELECT amount_sats, source, created_at FROM deposits ORDER BY created_at DESC LIMIT ?"
)
.all(limit) as { amount_sats: number; source: DepositSource; created_at: number }[];
return rows.map((r) => ({
amount_sats: r.amount_sats,
source: r.source as DepositSource,
created_at: r.created_at,
}));
},
};
}

101
backend/src/db/types.ts Normal file
View File

@@ -0,0 +1,101 @@
/** Cached per-pubkey user record, including relay-derived activity metrics. */
export interface UserRow {
  pubkey: string;                        // hex Nostr pubkey (primary key)
  nostr_first_seen_at: number | null;    // unix ts of earliest seen event; null if unknown
  notes_count: number;                   // kind-1 notes counted in the lookback window
  followers_count: number;
  following_count: number;
  activity_score: number;                // composite score used by eligibility checks
  last_metadata_fetch_at: number | null; // unix ts of last relay fetch; drives cache TTL
  lightning_address: string | null;      // parsed from kind-0 metadata, if present
  name: string | null;                   // display name from kind-0 metadata
  created_at: number;                    // row creation (unix seconds)
  updated_at: number;                    // last row update (unix seconds)
}
/** One faucet claim attempt; `status` tracks the payment lifecycle. */
export interface ClaimRow {
  id: number;                              // autoincrement primary key
  pubkey: string;                          // claimant's hex pubkey
  claimed_at: number;                      // unix ts when the claim was created
  payout_sats: number;
  ip_hash: string;                         // HMAC of client IP (pseudonymized)
  payout_destination_hash: string | null;  // HMAC of the Lightning address paid
  status: "pending" | "paid" | "failed";
  lnbits_payment_hash: string | null;      // set once LNbits accepts the payment
  error_message: string | null;            // set when status is "failed"
}
/** A short-lived payout offer; must be confirmed before `expires_at`. */
export interface QuoteRow {
  quote_id: string;                  // opaque id handed to the client
  pubkey: string;                    // pubkey the quote was issued to
  payout_sats: number;               // amount locked in at quote time
  lightning_address: string | null;  // destination captured when quoting
  created_at: number;                // unix seconds
  expires_at: number;                // unix seconds; stale quotes are rejected
  status: "active" | "consumed" | "expired";
}
/** Per-IP (hashed) rate-limit bookkeeping. */
export interface IpLimitRow {
  ip_hash: string;            // HMAC of the client IP (primary key)
  last_claimed_at: number;    // unix ts of most recent paid claim from this IP
  claim_count_period: number; // claims counted within the rolling IP cooldown period
}
/** How a deposit arrived: a direct Lightning payment or a redeemed Cashu token. */
export type DepositSource = "lightning" | "cashu";
/**
 * One recorded incoming deposit.
 * NOTE(review): the deposits table also stores lnbits_payment_hash (see the
 * insert/update queries) but this row type omits it — confirm whether callers
 * need that column surfaced here.
 */
export interface DepositRow {
  id: number;          // autoincrement primary key
  created_at: number;  // unix seconds the funds arrived (may be backfilled)
  amount_sats: number;
  source: DepositSource;
}
/** Storage interface implemented by the SQLite (and, per .env, Postgres) backends. */
export interface Db {
  /** Create/upgrade the schema; call once at startup before serving requests. */
  runMigrations(): Promise<void>;
  // --- users / Nostr metadata cache ---
  getUser(pubkey: string): Promise<UserRow | null>;
  upsertUser(row: Omit<UserRow, "created_at" | "updated_at">): Promise<void>;
  /** Refresh the relay-derived metrics cached for a pubkey. */
  updateUserNostrCache(
    pubkey: string,
    data: {
      nostr_first_seen_at: number | null;
      notes_count: number;
      followers_count: number;
      following_count: number;
      activity_score: number;
      last_metadata_fetch_at: number;
    }
  ): Promise<void>;
  // --- claims & rate limiting ---
  getLastSuccessfulClaimByPubkey(pubkey: string): Promise<ClaimRow | null>;
  getLastClaimByIpHash(ipHash: string): Promise<ClaimRow | null>;
  getClaimCountForIpSince(ipHash: string, sinceTs: number): Promise<number>;
  /** Returns the new claim's autoincrement id. */
  createClaim(row: Omit<ClaimRow, "id">): Promise<number>;
  updateClaimStatus(id: number, status: ClaimRow["status"], lnbitsPaymentHash?: string, errorMessage?: string): Promise<void>;
  getIpLimit(ipHash: string): Promise<IpLimitRow | null>;
  upsertIpLimit(ipHash: string, lastClaimedAt: number, claimCountPeriod: number): Promise<void>;
  // --- payout quotes ---
  createQuote(quoteId: string, pubkey: string, payoutSats: number, lightningAddress: string, expiresAt: number): Promise<void>;
  getQuote(quoteId: string): Promise<QuoteRow | null>;
  consumeQuote(quoteId: string): Promise<void>;
  // --- NIP-98 replay protection (nonce = event id) ---
  setNonce(nonce: string, expiresAt: number): Promise<void>;
  hasNonce(nonce: string): Promise<boolean>;
  deleteExpiredNonces(): Promise<void>;
  // --- public stats ---
  getTotalPaidSats(): Promise<number>;
  getTotalClaimsCount(): Promise<number>;
  getClaimsCountSince(sinceTs: number): Promise<number>;
  getPaidSatsSince(sinceTs: number): Promise<number>;
  getRecentPayouts(limit: number): Promise<{ pubkey_prefix: string; payout_sats: number; claimed_at: number }[]>;
  // --- deposits ---
  insertDeposit(
    amountSats: number,
    source: DepositSource,
    lnbitsPaymentHash?: string | null,
    createdAt?: number
  ): Promise<void>;
  hasDepositWithPaymentHash(paymentHash: string): Promise<boolean>;
  updateDepositCreatedAtIfMissing(paymentHash: string, createdAt: number): Promise<boolean>;
  getRecentDeposits(limit: number): Promise<{ amount_sats: number; source: DepositSource; created_at: number }[]>;
}

64
backend/src/index.ts Normal file
View File

@@ -0,0 +1,64 @@
import express from "express";
import cors from "cors";
import rateLimit from "express-rate-limit";
import { config } from "./config.js";
import { getDb } from "./db/index.js";
import { startLnbitsDepositSync } from "./services/syncLnbitsDeposits.js";
import publicRoutes from "./routes/public.js";
import authRoutes from "./routes/auth.js";
import claimRoutes from "./routes/claim.js";
import userRoutes from "./routes/user.js";
/**
 * Boot the faucet API: run migrations, configure Express (JSON body limit,
 * CORS, per-route rate limits), mount routes, and start listening.
 * Middleware registration order is significant — keep body parsing and CORS
 * before the routers.
 */
async function main() {
  const db = getDb();
  await db.runMigrations();
  // Best-effort startup cleanup of stale NIP-98 nonces; a failure here is non-fatal.
  db.deleteExpiredNonces().catch(() => {});
  const app = express();
  // Required when behind a reverse proxy so req.ip / X-Forwarded-* are honored.
  if (config.trustProxy) app.set("trust proxy", 1);
  // Small body cap: every endpoint accepts only tiny JSON payloads.
  app.use(express.json({ limit: "10kb" }));
  app.use(
    cors({
      origin: (origin, cb) => {
        if (!origin) return cb(null, true); // non-browser clients (no Origin header)
        // Any localhost port is allowed for development convenience.
        if (origin.startsWith("http://localhost:") || origin.startsWith("http://127.0.0.1:")) return cb(null, true);
        if (config.allowedOrigins.includes(origin)) return cb(null, true);
        return cb(null, false);
      },
      credentials: true,
    })
  );
  app.use("/", publicRoutes);
  app.use("/auth", authRoutes);
  // Tighter rate limit on claim endpoints (they trigger Lightning payouts).
  app.use(
    "/claim",
    rateLimit({
      windowMs: 60 * 1000,
      max: 20,
      message: { code: "rate_limited", message: "Too many requests." },
    }),
    claimRoutes
  );
  app.use(
    "/user",
    rateLimit({
      windowMs: 60 * 1000,
      max: 30,
      message: { code: "rate_limited", message: "Too many requests." },
    }),
    userRoutes
  );
  app.listen(config.port, () => {
    console.log(`Faucet API listening on port ${config.port}`);
    // Background deposit sync only makes sense when LNbits is configured.
    if (config.lnbitsBaseUrl && config.lnbitsAdminKey) {
      startLnbitsDepositSync();
    }
  });
}
// Fail fast on startup errors: log and exit non-zero.
main().catch((err: unknown) => {
  console.error(err);
  process.exit(1);
});

View File

@@ -0,0 +1,22 @@
import { Request, Response, NextFunction } from "express";
import { verifyJwt } from "../auth/jwt.js";
import { nip98Auth } from "./nip98.js";
const BEARER_PREFIX = "Bearer ";
/**
 * Accept either a Bearer JWT or a NIP-98 signed event.
 * A valid JWT short-circuits; anything else (missing header, non-Bearer
 * scheme, or an invalid/expired token) falls through to NIP-98 verification.
 * On success sets req.nostr = { pubkey }.
 */
export function authOrNip98(req: Request, res: Response, next: NextFunction): void {
  const header = req.headers.authorization;
  if (!header || !header.startsWith(BEARER_PREFIX)) {
    nip98Auth(req, res, next);
    return;
  }
  const token = header.slice(BEARER_PREFIX.length).trim();
  const payload = verifyJwt(token);
  if (!payload) {
    nip98Auth(req, res, next);
    return;
  }
  req.nostr = { pubkey: payload.pubkey, eventId: "" };
  next();
}

View File

@@ -0,0 +1,40 @@
import { Request, Response, NextFunction } from "express";
import { createHmac } from "crypto";
import { config } from "../config.js";
// Augment Express' Request with the per-request pseudonymous IP hash.
declare global {
  namespace Express {
    interface Request {
      // Hex HMAC-SHA256 of the client IP; populated by ipHashMiddleware.
      ipHash?: string;
    }
  }
}
/**
 * Resolve the client IP. When TRUST_PROXY is enabled, honor the first hop of
 * X-Forwarded-For; otherwise (or when the header is absent) fall back to the
 * socket's remote address, defaulting to "0.0.0.0".
 */
function getClientIp(req: Request): string {
  if (!config.trustProxy) {
    return req.socket?.remoteAddress ?? "0.0.0.0";
  }
  const header = req.headers["x-forwarded-for"];
  const firstHop = Array.isArray(header) ? header[0] : header?.split(",")[0];
  const candidate = firstHop?.trim();
  if (candidate) return candidate;
  return req.socket?.remoteAddress ?? "0.0.0.0";
}
/** Hex HMAC-SHA256 of the IP keyed with HMAC_IP_SECRET — pseudonymizes IPs at rest. */
function hmacIp(ip: string): string {
  const mac = createHmac("sha256", config.hmacIpSecret);
  mac.update(ip);
  return mac.digest("hex");
}
/** Express middleware: attach req.ipHash (HMAC of the client IP) for rate limiting. */
export function ipHashMiddleware(req: Request, _res: Response, next: NextFunction): void {
  req.ipHash = hmacIp(getClientIp(req));
  next();
}

View File

@@ -0,0 +1,153 @@
import { Request, Response, NextFunction } from "express";
import { verifyEvent, getEventHash } from "nostr-tools";
import { config } from "../config.js";
import { getDb } from "../db/index.js";
const AUTH_SCHEME = "Nostr ";
/** Identity extracted from a verified NIP-98 event (or a JWT, with eventId ""). */
export interface NostrAuthPayload {
  pubkey: string;  // hex pubkey of the authenticated user
  eventId: string; // id of the NIP-98 event used; empty when auth came via JWT
}
// Augment Express' Request with the verified Nostr identity.
declare global {
  namespace Express {
    interface Request {
      nostr?: NostrAuthPayload;
    }
  }
}
/**
 * Value of the first tag named `name`, or null when the tag is absent or its
 * value is empty. Only the first matching tag is considered.
 */
function getTag(event: { tags: string[][] }, name: string): string | null {
  for (const tag of event.tags) {
    if (tag[0] === name) {
      return tag[1] ? tag[1] : null;
    }
  }
  return null;
}
/**
 * NIP-98 HTTP authentication middleware.
 * Validates the base64 `Authorization: Nostr <event>` header: kind 27235,
 * fresh timestamp, `u`/`method` tags matching this request, correct event id,
 * valid signature, and an unused nonce (replay protection). On success sets
 * `req.nostr = { pubkey, eventId }` and calls next(); otherwise responds 401.
 */
export async function nip98Auth(req: Request, res: Response, next: NextFunction): Promise<void> {
  const auth = req.headers.authorization;
  if (!auth || !auth.startsWith(AUTH_SCHEME)) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "Missing or invalid Authorization header. Use NIP-98 Nostr scheme.",
    });
    return;
  }
  const base64 = auth.slice(AUTH_SCHEME.length).trim();
  let event: { id: string; pubkey: string; kind: number; created_at: number; tags: string[][]; content: string; sig: string };
  try {
    // Coerce each field defensively so malformed payloads cannot throw past here.
    const decoded = Buffer.from(base64, "base64").toString("utf-8");
    const parsed = JSON.parse(decoded) as Record<string, unknown>;
    event = {
      id: String(parsed.id ?? ""),
      pubkey: String(parsed.pubkey ?? ""),
      kind: Number(parsed.kind),
      created_at: Number(parsed.created_at),
      tags: Array.isArray(parsed.tags) ? (parsed.tags as string[][]) : [],
      content: typeof parsed.content === "string" ? parsed.content : "",
      sig: String(parsed.sig ?? ""),
    };
  } catch {
    res.status(401).json({
      code: "invalid_nip98",
      message: "Invalid NIP-98 payload: not valid base64 or JSON.",
    });
    return;
  }
  // NIP-98 mandates kind 27235 for HTTP auth events.
  if (event.kind !== 27235) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 event kind must be 27235.",
    });
    return;
  }
  const now = Math.floor(Date.now() / 1000);
  if (Math.abs(event.created_at - now) > config.nip98MaxSkewSeconds) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 event timestamp is outside allowed window.",
    });
    return;
  }
  const u = getTag(event, "u");
  const method = getTag(event, "method");
  if (!u || !method) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 event must include 'u' and 'method' tags.",
    });
    return;
  }
  // Reconstruct absolute URL (protocol + host + path + query) to compare with 'u'.
  // BUG FIX: the original `a ?? b.encrypted ? "https" : "http"` parsed as
  // `(a ?? b.encrypted) ? "https" : "http"` (?? binds tighter than ?:), so ANY
  // x-forwarded-proto value — even "http" — forced proto to "https" and broke
  // the URL comparison. Parenthesize the fallback ternary instead.
  const proto =
    req.headers["x-forwarded-proto"] ??
    ((req.socket as { encrypted?: boolean }).encrypted ? "https" : "http");
  const host = req.headers["x-forwarded-host"] ?? req.headers.host ?? "";
  const path = req.originalUrl ?? req.url;
  const absoluteUrl = `${proto}://${host}${path}`;
  if (u !== absoluteUrl) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 'u' tag does not match request URL.",
    });
    return;
  }
  if (method.toUpperCase() !== req.method) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 'method' tag does not match request method.",
    });
    return;
  }
  // Recompute the event id over the canonical fields; rejects tampered ids.
  const computedId = getEventHash({
    kind: event.kind,
    pubkey: event.pubkey,
    created_at: event.created_at,
    tags: event.tags,
    content: event.content,
    sig: event.sig,
  } as Parameters<typeof getEventHash>[0]);
  if (computedId !== event.id) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 event id does not match computed hash.",
    });
    return;
  }
  const valid = verifyEvent({
    id: event.id,
    kind: event.kind,
    pubkey: event.pubkey,
    created_at: event.created_at,
    tags: event.tags,
    content: event.content,
    sig: event.sig,
  } as Parameters<typeof verifyEvent>[0]);
  if (!valid) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 signature verification failed.",
    });
    return;
  }
  // Replay protection: each event id may be used exactly once within the TTL.
  const db = getDb();
  const hasNonce = await db.hasNonce(event.id);
  if (hasNonce) {
    res.status(401).json({
      code: "invalid_nip98",
      message: "NIP-98 nonce already used (replay).",
    });
    return;
  }
  const expiresAt = now + config.nonceTtlSeconds;
  await db.setNonce(event.id, expiresAt);
  req.nostr = { pubkey: event.pubkey, eventId: event.id };
  next();
}

View File

@@ -0,0 +1,29 @@
import { Router, Request, Response } from "express";
import { nip98Auth } from "../middleware/nip98.js";
import { signJwt, verifyJwt } from "../auth/jwt.js";
const router = Router();
/** Sign in with NIP-98 once; returns a JWT for subsequent requests. */
router.post("/login", nip98Auth, (req: Request, res: Response) => {
  const { pubkey } = req.nostr!;
  res.json({ token: signJwt(pubkey), pubkey });
});
/** Return current user from JWT (Bearer only). Used to restore a session. */
router.get("/me", (req: Request, res: Response) => {
  const header = req.headers.authorization;
  const token = header?.startsWith("Bearer ") ? header.slice(7).trim() : null;
  if (token === null) {
    res.status(401).json({ code: "unauthorized", message: "Bearer token required." });
    return;
  }
  const payload = verifyJwt(token);
  if (!payload) {
    res.status(401).json({ code: "invalid_token", message: "Invalid or expired token." });
    return;
  }
  res.json({ pubkey: payload.pubkey });
});
export default router;

159
backend/src/routes/claim.ts Normal file
View File

@@ -0,0 +1,159 @@
import { Router, Request, Response } from "express";
import { createHmac } from "crypto";
import { config } from "../config.js";
import { getDb } from "../db/index.js";
import { checkEligibility } from "../services/eligibility.js";
import { createQuote } from "../services/quote.js";
import { payToLightningAddress } from "../services/lnbits.js";
import { authOrNip98 } from "../middleware/auth.js";
import { ipHashMiddleware } from "../middleware/ip.js";
const router = Router();
router.use(ipHashMiddleware);
/** Pseudonymize the payout address with the server HMAC key before storing. */
function hashDestination(lightningAddress: string): string {
  const mac = createHmac("sha256", config.hmacIpSecret);
  return mac.update(lightningAddress).digest("hex");
}
/**
 * Extract and validate `lightning_address` (user@domain, exactly one "@")
 * from a request body. Returns the trimmed address, or null when missing
 * or malformed.
 */
function parseLightningAddress(body: unknown): string | null {
  if (!body || typeof body !== "object") return null;
  const raw = (body as { lightning_address?: unknown }).lightning_address;
  if (typeof raw !== "string") return null;
  const trimmed = raw.trim();
  return /^[^@]+@[^@]+$/.test(trimmed) ? trimmed : null;
}
/**
 * POST /claim/quote — issue a short-lived payout quote.
 * Validates the destination address, runs the full eligibility pipeline
 * (kill switches, balance, cooldowns, account age/activity, daily budget),
 * then creates a quote the client must confirm before it expires.
 */
router.post("/quote", authOrNip98, async (req: Request, res: Response) => {
  const pubkey = req.nostr!.pubkey;
  const ipHash = req.ipHash!;
  const lightningAddress = parseLightningAddress(req.body);
  if (!lightningAddress) {
    res.status(400).json({
      code: "invalid_lightning_address",
      message: "Valid lightning_address (user@domain) is required.",
    });
    return;
  }
  const eligibility = await checkEligibility(pubkey, ipHash);
  if (!eligibility.eligible) {
    res.status(403).json({
      code: eligibility.denialCode,
      message: eligibility.denialMessage,
      next_eligible_at: eligibility.nextEligibleAt,
    });
    return;
  }
  // createQuote returns null when issuing this quote would blow the daily budget.
  const quote = await createQuote(pubkey, lightningAddress);
  if (!quote) {
    res.status(403).json({
      code: "daily_budget_exceeded",
      message: "Daily budget reached. Try again tomorrow.",
    });
    return;
  }
  res.json({
    quote_id: quote.quoteId,
    payout_sats: quote.payoutSats,
    expires_at: quote.expiresAt,
  });
});
/**
 * POST /claim/confirm — redeem a quote and send the Lightning payout.
 * Validates ownership, status and expiry; records a pending claim, pays the
 * LNURL destination, then marks the claim paid and the quote consumed.
 * On payment failure the claim is marked failed and no cooldown applies.
 *
 * NOTE(review): the quote is consumed only AFTER the payment succeeds, so two
 * concurrent confirms for the same quote_id could both pass the
 * `status !== "active"` check and double-pay — consider consuming (or
 * row-locking) the quote before initiating payment.
 */
router.post("/confirm", authOrNip98, async (req: Request, res: Response) => {
  const pubkey = req.nostr!.pubkey;
  const ipHash = req.ipHash!;
  const quoteId = typeof req.body?.quote_id === "string" ? req.body.quote_id.trim() : null;
  if (!quoteId) {
    res.status(400).json({
      code: "invalid_request",
      message: "quote_id is required.",
    });
    return;
  }
  const db = getDb();
  const quote = await db.getQuote(quoteId);
  if (!quote) {
    res.status(404).json({
      code: "quote_expired",
      message: "Quote not found or expired.",
    });
    return;
  }
  // A quote is only redeemable by the pubkey it was issued to.
  if (quote.pubkey !== pubkey) {
    res.status(403).json({
      code: "invalid_nip98",
      message: "Quote does not belong to this pubkey.",
    });
    return;
  }
  // Idempotency: re-confirming a used quote reports success without paying again.
  // (Note: "expired" status also lands here — presumably acceptable; verify.)
  if (quote.status !== "active") {
    res.status(200).json({
      success: true,
      already_consumed: true,
      message: "This quote was already used.",
      payout_sats: quote.payout_sats,
      next_eligible_at: undefined,
    });
    return;
  }
  const now = Math.floor(Date.now() / 1000);
  if (quote.expires_at < now) {
    res.status(400).json({
      code: "quote_expired",
      message: "Quote has expired.",
    });
    return;
  }
  const lightningAddress = quote.lightning_address;
  if (!lightningAddress) {
    res.status(400).json({
      code: "invalid_lightning_address",
      message: "Quote has no payout address.",
    });
    return;
  }
  // Record the claim as pending before paying so a crash mid-payment is visible.
  const claimId = await db.createClaim({
    pubkey: quote.pubkey,
    claimed_at: now,
    payout_sats: quote.payout_sats,
    ip_hash: ipHash,
    payout_destination_hash: hashDestination(lightningAddress),
    status: "pending",
    lnbits_payment_hash: null,
    error_message: null,
  });
  try {
    const { paymentHash } = await payToLightningAddress(lightningAddress, quote.payout_sats);
    await db.updateClaimStatus(claimId, "paid", paymentHash);
    await db.consumeQuote(quoteId);
    // Apply cooldowns only after a successful payout.
    const cooldownEnd = now + config.cooldownDays * 86400;
    const ipSince = now - config.ipCooldownDays * 86400;
    const ipCount = await db.getClaimCountForIpSince(ipHash, ipSince);
    await db.upsertIpLimit(ipHash, now, ipCount);
    res.json({
      success: true,
      payout_sats: quote.payout_sats,
      next_eligible_at: cooldownEnd,
    });
  } catch (err) {
    const message = err instanceof Error ? err.message : "Payment failed";
    const stack = err instanceof Error ? err.stack : undefined;
    console.error("[claim/confirm] Lightning payment failed:", message);
    if (stack) console.error("[claim/confirm] Stack:", stack);
    await db.updateClaimStatus(claimId, "failed", undefined, message);
    res.status(502).json({
      code: "payout_failed",
      message: "Lightning payment failed. Your cooldown was not applied.",
      details: message,
    });
  }
});
export default router;

View File

@@ -0,0 +1,134 @@
import { Router, Request, Response } from "express";
import { config } from "../config.js";
import { getDb } from "../db/index.js";
import { getWalletBalanceSats } from "../services/lnbits.js";
const router = Router();
// Liveness probe for monitoring and load balancers.
router.get("/health", (_req: Request, res: Response) => {
  const body = { status: "ok" };
  res.json(body);
});
// Public, non-secret subset of faucet configuration for the frontend.
router.get("/config", (_req: Request, res: Response) => {
  const publicConfig = {
    faucetEnabled: config.faucetEnabled,
    emergencyStop: config.emergencyStop,
    cooldownDays: config.cooldownDays,
    minAccountAgeDays: config.minAccountAgeDays,
    minActivityScore: config.minActivityScore,
    faucetMinSats: config.faucetMinSats,
    faucetMaxSats: config.faucetMaxSats,
  };
  res.json(publicConfig);
});
/**
 * GET /stats — aggregate public faucet statistics.
 * Wallet-balance failures degrade to 0 instead of failing the whole page.
 * Fix: the catch previously swallowed the error silently; it is now logged
 * so stats failures are diagnosable.
 */
router.get("/stats", async (_req: Request, res: Response) => {
  try {
    const db = getDb();
    const [balance, totalPaid, totalClaims, claims24h, recent, recentDeposits] = await Promise.all([
      getWalletBalanceSats().catch(() => 0),
      db.getTotalPaidSats(),
      db.getTotalClaimsCount(),
      db.getClaimsCountSince(Math.floor(Date.now() / 1000) - 86400),
      db.getRecentPayouts(20),
      db.getRecentDeposits(20),
    ]);
    res.json({
      balanceSats: balance,
      totalPaidSats: totalPaid,
      totalClaims,
      claimsLast24h: claims24h,
      dailyBudgetSats: config.dailyBudgetSats,
      recentPayouts: recent,
      recentDeposits,
    });
  } catch (e) {
    console.error("[public/stats] failed to load stats:", e);
    res.status(500).json({
      code: "internal_error",
      message: "Failed to load stats",
    });
  }
});
// Static deposit details (Lightning address + LNURLp) for donations.
router.get("/deposit", (_req: Request, res: Response) => {
  const { depositLightningAddress: lightningAddress, depositLnurlp: lnurlp } = config;
  res.json({ lightningAddress, lnurlp });
});
/**
 * POST /deposit/redeem-cashu — redeem a donated Cashu token into the faucet
 * wallet by forwarding it to the external cashu-redeem service, which pays
 * the faucet's deposit Lightning address. On success the deposit is recorded
 * best-effort (a DB failure does not fail the response).
 */
router.post("/deposit/redeem-cashu", async (req: Request, res: Response) => {
  const token = typeof req.body?.token === "string" ? req.body.token.trim() : null;
  if (!token || !token.toLowerCase().startsWith("cashu")) {
    res.status(400).json({
      success: false,
      error: "Valid Cashu token (cashuA... or cashuB...) is required.",
    });
    return;
  }
  const lightningAddress = config.depositLightningAddress;
  if (!lightningAddress || !/^[^@]+@[^@]+$/.test(lightningAddress)) {
    res.status(503).json({
      success: false,
      error: "Faucet deposit Lightning address is not configured.",
    });
    return;
  }
  const redeemUrl = `${config.cashuRedeemApiUrl}/api/redeem`;
  try {
    const redeemRes = await fetch(redeemUrl, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ token, lightningAddress }),
    });
    // Tolerate non-JSON error bodies from the redeem service.
    const data = (await redeemRes.json().catch(() => ({}))) as {
      success?: boolean;
      error?: string;
      errorType?: string;
      paid?: boolean;
      amount?: number;
      invoiceAmount?: number;
      to?: string;
      netAmount?: number;
      message?: string;
    };
    if (!redeemRes.ok) {
      // Upstream 5xx becomes 502 (bad gateway); 4xx is passed through.
      const status = redeemRes.status >= 500 ? 502 : redeemRes.status;
      res.status(status).json({
        success: false,
        error: data.error ?? `Redeem failed: ${redeemRes.status}`,
        ...(data.errorType && { errorType: data.errorType }),
      });
      return;
    }
    if (!data.success) {
      res.status(400).json({
        success: false,
        error: data.error ?? "Redeem failed",
        ...(data.errorType && { errorType: data.errorType }),
      });
      return;
    }
    // Prefer gross amount; fall back to net (after fees) when that's all we get.
    const amountSats = typeof data.amount === "number" && data.amount > 0 ? data.amount : data.netAmount;
    if (typeof amountSats === "number" && amountSats > 0) {
      getDb().insertDeposit(amountSats, "cashu").catch((err) => console.error("[deposit] record deposit", err));
    }
    res.json({
      success: true,
      paid: data.paid,
      amount: data.amount,
      invoiceAmount: data.invoiceAmount,
      netAmount: data.netAmount,
      to: data.to ?? lightningAddress,
      message: data.message,
    });
  } catch (e) {
    const message = e instanceof Error ? e.message : "Redeem request failed";
    console.error("[deposit/redeem-cashu]", message);
    res.status(502).json({
      success: false,
      error: message,
    });
  }
});
export default router;

View File

@@ -0,0 +1,30 @@
import { Router, Request, Response } from "express";
import { getDb } from "../db/index.js";
import { fetchAndScorePubkey } from "../services/nostr.js";
import { authOrNip98 } from "../middleware/auth.js";
const router = Router();
/**
* Refresh Nostr profile (kind 0) and return cached lightning_address and name.
* Pre-fills the frontend and stores in DB.
*/
// POST /user/refresh-profile — force a relay refresh of the caller's kind-0
// metadata, then return the cached lightning_address and name from the DB.
router.post("/refresh-profile", authOrNip98, async (req: Request, res: Response) => {
  const pubkey = req.nostr!.pubkey;
  try {
    // forceRefreshProfile=true bypasses the metadata cache.
    await fetchAndScorePubkey(pubkey, true);
    const db = getDb();
    const user = await db.getUser(pubkey);
    res.json({
      lightning_address: user?.lightning_address ?? null,
      name: user?.name ?? null,
    });
  } catch (e) {
    res.status(500).json({
      code: "profile_fetch_failed",
      message: e instanceof Error ? e.message : "Failed to fetch profile",
    });
  }
});
export default router;

View File

@@ -0,0 +1,126 @@
import { config } from "../config.js";
import { getDb } from "../db/index.js";
import { getWalletBalanceSats } from "./lnbits.js";
import { fetchAndScorePubkey } from "./nostr.js";
/** Machine-readable reasons an eligibility check or claim can be refused. */
export type DenialCode =
  | "faucet_disabled"
  | "emergency_stop"
  | "insufficient_balance"
  | "daily_budget_exceeded"
  | "cooldown_pubkey"
  | "cooldown_ip"
  | "account_too_new"
  | "low_activity"
  | "invalid_nip98"
  | "invalid_lightning_address"
  | "quote_expired"
  | "payout_failed";
/** Outcome of an eligibility check; denial fields are set only when ineligible. */
export interface EligibilityResult {
  eligible: boolean;
  denialCode?: DenialCode;
  denialMessage?: string;       // human-readable reason, shown to the user
  nextEligibleAt?: number;      // unix ts when a cooldown denial lifts
}
const SECONDS_PER_DAY = 86400;
/**
 * Run the full eligibility pipeline for a claim, cheapest checks first:
 * kill switches → wallet balance → pubkey cooldown → IP cooldown →
 * account age/activity (relay fetch) → global daily claim limit.
 * Returns the first failing check; order is deliberate (the relay fetch is
 * the most expensive step and runs last before the daily-limit check).
 */
export async function checkEligibility(pubkey: string, ipHash: string): Promise<EligibilityResult> {
  if (config.emergencyStop) {
    return {
      eligible: false,
      denialCode: "emergency_stop",
      denialMessage: "The faucet is temporarily in maintenance. Please try again later.",
    };
  }
  if (!config.faucetEnabled) {
    return {
      eligible: false,
      denialCode: "faucet_disabled",
      denialMessage: "The faucet is currently disabled.",
    };
  }
  // A balance-lookup failure is reported as insufficient balance rather than a 500.
  let balanceSats: number;
  try {
    balanceSats = await getWalletBalanceSats();
  } catch {
    return {
      eligible: false,
      denialCode: "insufficient_balance",
      denialMessage: "Unable to check faucet balance. Please try again later.",
    };
  }
  // Must at least cover the smallest possible payout.
  if (balanceSats < config.faucetMinSats) {
    return {
      eligible: false,
      denialCode: "insufficient_balance",
      denialMessage: balanceSats === 0
        ? "The faucet pool is empty. Donations welcome!"
        : `The faucet pool is too low to pay out (${balanceSats} sats). Donations welcome!`,
    };
  }
  const db = getDb();
  // Per-pubkey cooldown, measured from the last *successful* claim.
  const lastPubkeyClaim = await db.getLastSuccessfulClaimByPubkey(pubkey);
  const cooldownEnd = lastPubkeyClaim
    ? lastPubkeyClaim.claimed_at + config.cooldownDays * SECONDS_PER_DAY
    : 0;
  const now = Math.floor(Date.now() / 1000);
  if (cooldownEnd > now) {
    return {
      eligible: false,
      denialCode: "cooldown_pubkey",
      denialMessage: "You have already claimed recently.",
      nextEligibleAt: cooldownEnd,
    };
  }
  // Per-IP cap over a rolling window (limits multi-account abuse per network).
  const ipSince = now - config.ipCooldownDays * SECONDS_PER_DAY;
  const ipClaimCount = await db.getClaimCountForIpSince(ipHash, ipSince);
  if (ipClaimCount >= config.maxClaimsPerIpPerPeriod) {
    const lastIpClaim = await db.getLastClaimByIpHash(ipHash);
    const ipNextAt = lastIpClaim ? lastIpClaim.claimed_at + config.ipCooldownDays * SECONDS_PER_DAY : 0;
    return {
      eligible: false,
      denialCode: "cooldown_ip",
      denialMessage: "This IP has reached the claim limit for this period.",
      nextEligibleAt: ipNextAt,
    };
  }
  // Relay-derived profile metrics (cached); guards against fresh throwaway keys.
  const profile = await fetchAndScorePubkey(pubkey);
  const minAgeSec = config.minAccountAgeDays * SECONDS_PER_DAY;
  const cutoff = now - minAgeSec;
  // Unknown first-seen time is treated the same as "too new".
  if (profile.nostrFirstSeenAt === null || profile.nostrFirstSeenAt > cutoff) {
    return {
      eligible: false,
      denialCode: "account_too_new",
      denialMessage: `Your Nostr account must be at least ${config.minAccountAgeDays} days old.`,
    };
  }
  if (profile.activityScore < config.minActivityScore) {
    return {
      eligible: false,
      denialCode: "low_activity",
      denialMessage: `Your account does not meet the minimum activity score (${config.minActivityScore}). Be more active on Nostr and try again.`,
    };
  }
  // Global cap on paid claims in the trailing 24 hours.
  const since24h = now - 86400;
  const claims24h = await db.getClaimsCountSince(since24h);
  if (claims24h >= config.maxClaimsPerDay) {
    return {
      eligible: false,
      denialCode: "daily_budget_exceeded",
      denialMessage: "Daily claim limit reached. Try again tomorrow.",
    };
  }
  return { eligible: true };
}

View File

@@ -0,0 +1,190 @@
import { config } from "../config.js";
// LNbits connection settings, resolved once at module load.
const base = config.lnbitsBaseUrl;
const adminKey = config.lnbitsAdminKey;
const walletId = config.lnbitsWalletId; // NOTE(review): unused in this module's visible code — verify it is needed
/** Current LNbits wallet balance in whole sats (the API reports millisats). */
export async function getWalletBalanceSats(): Promise<number> {
  const response = await fetch(`${base}/api/v1/wallet`, {
    headers: { "X-Api-Key": adminKey },
  });
  if (!response.ok) {
    const body = await response.text();
    throw new Error(`LNbits wallet fetch failed: ${response.status} ${body}`);
  }
  const payload = (await response.json()) as { balance?: number };
  const msats = payload.balance ?? 0;
  return Math.floor(msats / 1000);
}
/**
* Pay to a Lightning address via LNURL.
* 1. Resolve Lightning address to LNURL (GET https://domain/.well-known/lnurlp/user)
* 2. Call callback with amount in millisats
*/
/**
 * Pay `sats` to a Lightning address (LUD-16 flow):
 *   1. Resolve user@domain → https://domain/.well-known/lnurlp/user
 *   2. Request an invoice from the LNURL callback (amount in millisats)
 *   3. Pay the returned bolt11 via LNbits.
 * Throws with detailed context on any step failure; all failures are also
 * logged with enough detail to debug third-party LNURL servers.
 *
 * NOTE(review): minSendable/maxSendable from the LNURLp response are not
 * checked locally, and the returned invoice amount is not verified against
 * `sats` — the callback server is trusted here; confirm this is acceptable.
 */
export async function payToLightningAddress(
  lightningAddress: string,
  sats: number
): Promise<{ paymentHash: string }> {
  const [user, domain] = lightningAddress.split("@");
  if (!user || !domain) {
    console.error("[lnbits] Invalid Lightning address format:", lightningAddress);
    throw new Error("Invalid Lightning address format");
  }
  // Step 1: LUD-16 well-known lookup.
  const lnurlpUrl = `https://${domain}/.well-known/lnurlp/${user}`;
  const lnurlRes = await fetch(lnurlpUrl);
  if (!lnurlRes.ok) {
    const text = await lnurlRes.text();
    console.error("[lnbits] LNURLp resolution failed:", {
      lightningAddress,
      lnurlpUrl,
      status: lnurlRes.status,
      statusText: lnurlRes.statusText,
      body: text.slice(0, 500),
    });
    throw new Error(`Could not resolve Lightning address: ${lnurlRes.status} ${text.slice(0, 200)}`);
  }
  const lnurlData = (await lnurlRes.json()) as { callback?: string; minSendable?: number; maxSendable?: number };
  const callback = lnurlData.callback;
  if (!callback) {
    console.error("[lnbits] No callback in LNURLp response:", { lightningAddress, lnurlpUrl, lnurlData });
    throw new Error("No callback in LNURLp");
  }
  // Step 2: request an invoice for the amount (millisats per LNURL-pay spec).
  const millisats = sats * 1000;
  const separator = callback.includes("?") ? "&" : "?";
  const payReqUrl = `${callback}${separator}amount=${millisats}`;
  const payRes = await fetch(payReqUrl);
  const payBody = await payRes.text();
  if (!payRes.ok) {
    // Body may be JSON ({reason: ...}) or plain text; log whichever we got.
    let parsed: unknown;
    try {
      parsed = JSON.parse(payBody);
    } catch {
      parsed = payBody;
    }
    console.error("[lnbits] LNURL pay request failed:", {
      lightningAddress,
      sats,
      millisats,
      callbackHost: new URL(callback).host,
      status: payRes.status,
      statusText: payRes.statusText,
      body: parsed,
    });
    const detail = typeof parsed === "object" && parsed !== null && "reason" in parsed
      ? (parsed as { reason?: string }).reason
      : payBody.slice(0, 300);
    throw new Error(`LNURL pay request failed: ${payRes.status} ${detail}`);
  }
  const payData = JSON.parse(payBody) as { pr?: string; reason?: string };
  const pr = payData.pr;
  if (!pr) {
    console.error("[lnbits] No invoice (pr) in pay response:", { lightningAddress, payData });
    throw new Error(`No invoice in pay response: ${payData.reason ?? JSON.stringify(payData).slice(0, 200)}`);
  }
  // Step 3: pay the bolt11 invoice from the faucet's LNbits wallet.
  const payResult = await fetch(`${base}/api/v1/payments`, {
    method: "POST",
    headers: { "Content-Type": "application/json", "X-Api-Key": adminKey },
    body: JSON.stringify({ out: true, bolt11: pr }),
  });
  if (!payResult.ok) {
    const errText = await payResult.text();
    console.error("[lnbits] LNbits bolt11 payment failed:", {
      lightningAddress,
      sats,
      status: payResult.status,
      body: errText.slice(0, 500),
    });
    throw new Error(`LNbits pay failed: ${payResult.status} ${errText}`);
  }
  const result = (await payResult.json()) as { payment_hash?: string };
  return { paymentHash: result.payment_hash ?? "" };
}
/** LNbits payment list item (GET /api/v1/payments). Amount in millisatoshis; positive = incoming, negative = outgoing. */
/** Per LNbits OpenAPI: time, created_at, updated_at are "string" format "date-time" (ISO 8601). */
export interface LnbitsPaymentItem {
  payment_hash?: string;
  amount?: number;              // millisats; positive = incoming, negative = outgoing
  pending?: boolean;            // true while the payment is not yet settled
  time?: number | string;       // unix seconds/ms, or ISO 8601 depending on version
  created_at?: number | string;
  updated_at?: number | string;
  timestamp?: number;           // legacy field observed on some LNbits versions
  date?: number;                // legacy field observed on some LNbits versions
  [key: string]: unknown;       // tolerate any extra fields the API returns
}
// Unix timestamps below ~2001-09-09 are treated as invalid/placeholder values.
const MIN_VALID_UNIX = 1e9;
/**
 * Normalize one raw LNbits time value to unix seconds.
 * Numbers may be seconds or milliseconds (>1e12 ⇒ ms); strings are parsed as
 * ISO 8601. Returns 0 when the value is missing, unparseable, or (for
 * numbers) implausibly small.
 */
function parsePaymentTime(raw: unknown): number {
  if (raw === null || raw === undefined) return 0;
  if (typeof raw === "number") {
    const seconds = raw > 1e12 ? Math.floor(raw / 1000) : raw;
    if (seconds >= MIN_VALID_UNIX) return seconds;
    return 0;
  }
  if (typeof raw === "string") {
    const parsedMs = Date.parse(raw);
    return Number.isNaN(parsedMs) ? 0 : Math.floor(parsedMs / 1000);
  }
  return 0;
}
/**
 * Pick the first nonzero timestamp among LNbits' several time fields
 * (time, created_at, updated_at, timestamp, date); when none is plausible
 * (below MIN_VALID_UNIX), fall back to "now".
 */
function normalizePaymentTime(p: LnbitsPaymentItem): number {
  let ts = 0;
  for (const candidate of [p.time, p.created_at, p.updated_at, p.timestamp, p.date]) {
    ts = parsePaymentTime(candidate);
    if (ts !== 0) break;
  }
  if (ts >= MIN_VALID_UNIX) return ts;
  return Math.floor(Date.now() / 1000);
}
/**
* Fetch recent payments from LNbits and return paid incoming ones (amount > 0, not pending).
* LNbits returns amount in millisatoshis; we convert to sats for storage.
*/
export async function getIncomingPaymentsFromLnbits(limit = 100): Promise<
  { payment_hash: string; amount_sats: number; paid_at: number }[]
> {
  const res = await fetch(
    `${base}/api/v1/payments?limit=${limit}&sortby=time&direction=desc`,
    { headers: { "X-Api-Key": adminKey } }
  );
  if (!res.ok) {
    const text = await res.text();
    throw new Error(`LNbits payments list failed: ${res.status} ${text}`);
  }
  // Depending on LNbits version the list is either a bare array or wrapped
  // in { payments: [...] }; anything else is treated as an error response.
  const data = (await res.json()) as LnbitsPaymentItem[] | { detail?: string; payments?: LnbitsPaymentItem[] };
  let items: LnbitsPaymentItem[];
  if (Array.isArray(data)) {
    items = data;
  } else if (data && typeof data === "object" && Array.isArray((data as { payments?: LnbitsPaymentItem[] }).payments)) {
    items = (data as { payments: LnbitsPaymentItem[] }).payments;
  } else {
    const detail = (data as { detail?: string })?.detail;
    throw new Error(detail ?? "LNbits payments list invalid response");
  }
  // Keep only settled incoming payments; convert millisats → sats and drop
  // sub-sat dust that floors to zero.
  const incoming: { payment_hash: string; amount_sats: number; paid_at: number }[] = [];
  for (const p of items) {
    const hash = p.payment_hash;
    const amountMsats = Number(p.amount ?? 0);
    const pending = Boolean(p.pending);
    const paidAt = normalizePaymentTime(p);
    if (!hash || typeof hash !== "string") continue;
    if (pending) continue;
    if (amountMsats <= 0) continue; // negative = outgoing
    const amountSats = Math.floor(amountMsats / 1000);
    if (amountSats <= 0) continue;
    incoming.push({
      payment_hash: hash,
      amount_sats: amountSats,
      paid_at: paidAt,
    });
  }
  return incoming;
}

View File

@@ -0,0 +1,150 @@
import { SimplePool } from "nostr-tools";
import { config } from "../config.js";
import { getDb } from "../db/index.js";
// Module-level relay pool, reused across all profile fetches.
const pool = new SimplePool();
export interface NostrProfile {
  // Unix seconds of the earliest event observed for this pubkey, or null when unknown.
  nostrFirstSeenAt: number | null;
  // Kind-1 notes counted within the configured activity lookback window.
  notesCount: number;
  // Number of "p" tags on the pubkey's kind-3 contact list.
  followingCount: number;
  // Followers are not fetched in the MVP; always 0.
  followersCount: number;
  // Heuristic eligibility score derived from the fields above.
  activityScore: number;
}
/**
 * Race a promise against a timeout. Rejects with Error("timeout") after `ms`
 * if the promise has not settled first; otherwise resolves/rejects with the
 * promise's own outcome.
 *
 * The timer is cleared once the promise settles — the original leaked it,
 * keeping the event loop alive until the (now pointless) rejection fired.
 */
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  return new Promise<T>((resolve, reject) => {
    const timer = setTimeout(() => reject(new Error("timeout")), ms);
    promise.then(
      (value) => {
        clearTimeout(timer);
        resolve(value);
      },
      (err) => {
        clearTimeout(timer);
        reject(err);
      }
    );
  });
}
/**
 * Fetch events from relays in parallel (kinds 0, 1, 3), compute metrics, optionally cache.
 * When forceRefreshProfile is true, always fetch from relays (skip cache) so kind 0 is parsed and lightning_address/name updated.
 *
 * Scoring (max 50): +10 metadata present, +20 enough recent notes,
 * +10 enough follows, +10 followers threshold (followers are not fetched in
 * the MVP, so this only triggers when the threshold is 0).
 */
export async function fetchAndScorePubkey(pubkey: string, forceRefreshProfile = false): Promise<NostrProfile> {
  const db = getDb();
  const cached = await db.getUser(pubkey);
  const nowSec = Math.floor(Date.now() / 1000);
  const cacheHours = config.metadataCacheHours;
  const cacheValidUntil = (cached?.last_metadata_fetch_at ?? 0) + cacheHours * 3600;
  // Serve from cache unless it has expired or the caller forces a refresh.
  if (!forceRefreshProfile && cached && cacheValidUntil > nowSec) {
    return {
      nostrFirstSeenAt: cached.nostr_first_seen_at,
      notesCount: cached.notes_count,
      followingCount: cached.following_count,
      followersCount: cached.followers_count,
      activityScore: cached.activity_score,
    };
  }
  let events: { kind: number; created_at: number; content?: string; tags: string[][] }[] = [];
  try {
    const result = await withTimeout(
      pool.querySync(config.nostrRelays, { kinds: [0, 1, 3], authors: [pubkey], limit: config.maxEventsFetch }),
      config.relayTimeoutMs
    );
    events = Array.isArray(result) ? result : [];
  } catch (_) {
    // Timeout or relay error: use cache if any; otherwise upsert minimal user so /refresh-profile returns a row
    if (cached) {
      return {
        nostrFirstSeenAt: cached.nostr_first_seen_at,
        notesCount: cached.notes_count,
        followingCount: cached.following_count,
        followersCount: cached.followers_count,
        activityScore: cached.activity_score,
      };
    }
    const lastMetadataFetchAt = Math.floor(Date.now() / 1000);
    await db.upsertUser({
      pubkey,
      nostr_first_seen_at: null,
      notes_count: 0,
      followers_count: 0,
      following_count: 0,
      activity_score: 0,
      last_metadata_fetch_at: lastMetadataFetchAt,
      lightning_address: null,
      name: null,
    });
    return {
      nostrFirstSeenAt: null,
      notesCount: 0,
      followingCount: 0,
      followersCount: 0,
      activityScore: 0,
    };
  }
  const kind0 = events.filter((e) => e.kind === 0);
  const kind1 = events.filter((e) => e.kind === 1);
  const kind3 = events.filter((e) => e.kind === 3);
  // Relays return events in no guaranteed order. Kinds 0 and 3 are replaceable
  // (NIP-01): only the newest event is authoritative, so pick max created_at
  // instead of taking the arbitrary first element (which could be stale).
  const newest = <E extends { created_at: number }>(es: E[]): E | undefined =>
    es.reduce<E | undefined>((best, e) => (best === undefined || e.created_at > best.created_at ? e : best), undefined);
  const latestKind0 = newest(kind0);
  const latestKind3 = newest(kind3);
  const earliestCreatedAt = events.length
    ? Math.min(...events.map((e) => e.created_at))
    : null;
  const lookbackSince = nowSec - config.activityLookbackDays * 86400;
  const notesInLookback = kind1.filter((e) => e.created_at >= lookbackSince).length;
  let followingCount = 0;
  if (latestKind3) {
    // Each "p" tag on the contact list is one followed pubkey.
    followingCount = latestKind3.tags?.filter((t) => t[0] === "p").length ?? 0;
  }
  const hasMetadata = kind0.length > 0;
  let score = 0;
  if (hasMetadata) score += 10;
  if (notesInLookback >= config.minNotesCount) score += 20;
  if (followingCount >= config.minFollowingCount) score += 10;
  if (0 >= config.minFollowersCount) score += 10; // followers not fetched for MVP; treat as 0
  let lightning_address: string | null = null;
  let name: string | null = null;
  const lightningAddressRe = /^[^@]+@[^@]+$/;
  if (latestKind0?.content) {
    try {
      const meta = JSON.parse(latestKind0.content) as Record<string, unknown>;
      // NIP-19 / common: lud16 is the Lightning address (user@domain). Fallbacks for other clients.
      for (const key of ["lud16", "lightning", "ln_address", "nip05"] as const) {
        const v = meta[key];
        if (typeof v === "string") {
          const s = v.trim();
          if (lightningAddressRe.test(s)) {
            lightning_address = s;
            break;
          }
        }
      }
      if (typeof meta.name === "string" && meta.name.trim()) name = meta.name.trim();
      else if (typeof meta.display_name === "string" && meta.display_name.trim()) name = meta.display_name.trim();
    } catch (_) {
      // Malformed kind-0 JSON: leave lightning_address/name null.
    }
  }
  const nostrFirstSeenAt = earliestCreatedAt;
  const lastMetadataFetchAt = Math.floor(Date.now() / 1000);
  await db.upsertUser({
    pubkey,
    nostr_first_seen_at: nostrFirstSeenAt,
    notes_count: notesInLookback,
    followers_count: 0,
    following_count: followingCount,
    activity_score: score,
    last_metadata_fetch_at: lastMetadataFetchAt,
    lightning_address,
    name,
  });
  return {
    nostrFirstSeenAt,
    notesCount: notesInLookback,
    followingCount,
    followersCount: 0,
    activityScore: score,
  };
}

View File

@@ -0,0 +1,69 @@
import { randomInt } from "crypto";
import { v4 as uuidv4 } from "uuid";
import { config } from "../config.js";
import { getDb } from "../db/index.js";
import { getWalletBalanceSats } from "./lnbits.js";
const QUOTE_TTL_SECONDS = 60; // payout quotes expire one minute after creation
// One payout tier: a fixed sats amount with a relative selection weight.
interface PayoutBucket {
  sats: number;
  weight: number;
}
/** Build the four configured payout tiers (small/medium/large/jackpot). */
function getPayoutBuckets(): PayoutBucket[] {
  return [
    { sats: config.payoutSmallSats, weight: config.payoutWeightSmall },
    { sats: config.payoutMediumSats, weight: config.payoutWeightMedium },
    { sats: config.payoutLargeSats, weight: config.payoutWeightLarge },
    { sats: config.payoutJackpotSats, weight: config.payoutWeightJackpot },
  ];
}
/**
 * Weighted random selection over the payout buckets. Returns sats amount.
 *
 * Guards against a zero (or negative) total weight: crypto.randomInt requires
 * max > min, so randomInt(0, 0) would throw a RangeError if all configured
 * weights were 0. In that case the small payout is returned.
 */
export function selectWeightedPayout(): number {
  const buckets = getPayoutBuckets();
  const totalWeight = buckets.reduce((s, b) => s + b.weight, 0);
  if (totalWeight <= 0) return config.payoutSmallSats;
  // r falls into exactly one bucket's weight band.
  let r = randomInt(0, totalWeight);
  for (const b of buckets) {
    if (r < b.weight) return b.sats;
    r -= b.weight;
  }
  // Unreachable when weights are consistent; kept as a safe fallback.
  return config.payoutSmallSats;
}
/**
 * Compute the payout for a claim: weighted random selection, capped by both
 * the remaining daily budget and the configured per-claim maximum.
 *
 * @param todayPaidSats sats already paid out since the start of today (UTC)
 * @returns payout in sats, or 0 when the remaining budget cannot cover the minimum
 */
export function computePayoutForClaim(todayPaidSats: number): number {
  const budgetLeft = config.dailyBudgetSats - todayPaidSats;
  const remaining = budgetLeft > 0 ? budgetLeft : 0;
  if (remaining < config.faucetMinSats) return 0;
  return Math.min(selectWeightedPayout(), remaining, config.faucetMaxSats);
}
// Result of a successfully created payout quote.
export interface CreateQuoteResult {
  quoteId: string;
  payoutSats: number;
  expiresAt: number;
}
/**
 * Create a short-lived payout quote for a claim.
 *
 * Returns null when the daily budget is exhausted or the wallet balance
 * cannot cover at least the configured minimum payout.
 */
export async function createQuote(pubkey: string, lightningAddress: string): Promise<CreateQuoteResult | null> {
  const db = getDb();
  const nowSec = Math.floor(Date.now() / 1000);
  const utcDayStart = nowSec - (nowSec % 86400);
  const paidToday = await db.getPaidSatsSince(utcDayStart);
  const budgeted = computePayoutForClaim(paidToday);
  if (budgeted <= 0) return null;
  // Never promise more than the wallet can actually pay right now.
  const balance = await getWalletBalanceSats();
  const payoutSats = Math.min(budgeted, Math.max(0, balance));
  if (payoutSats < config.faucetMinSats) return null;
  const quoteId = uuidv4();
  const expiresAt = nowSec + QUOTE_TTL_SECONDS;
  await db.createQuote(quoteId, pubkey, payoutSats, lightningAddress, expiresAt);
  return { quoteId, payoutSats, expiresAt };
}

View File

@@ -0,0 +1,44 @@
import { getDb } from "../db/index.js";
import { getIncomingPaymentsFromLnbits } from "./lnbits.js";
const SYNC_INTERVAL_MS = 2 * 60 * 1000; // re-sync deposits every 2 minutes
const MIN_VALID_UNIX = 1e9; // timestamps below ~2001 are treated as missing; skip backfill
/**
 * Mirror recent incoming LNbits payments into the deposits table:
 * insert payments with unseen payment hashes, and backfill created_at on
 * existing rows when a plausible paid_at is available.
 * Errors are logged and swallowed so the periodic sync never crashes.
 */
export async function syncLnbitsDeposits(): Promise<void> {
  const db = getDb();
  try {
    const incoming = await getIncomingPaymentsFromLnbits(100);
    let inserted = 0;
    let backfilled = 0;
    for (const payment of incoming) {
      const alreadyKnown = await db.hasDepositWithPaymentHash(payment.payment_hash);
      if (!alreadyKnown) {
        await db.insertDeposit(
          payment.amount_sats,
          "lightning",
          payment.payment_hash,
          payment.paid_at
        );
        inserted++;
      } else if (payment.paid_at >= MIN_VALID_UNIX) {
        // Existing row: fill in a missing created_at with the real paid time.
        const didBackfill = await db.updateDepositCreatedAtIfMissing(payment.payment_hash, payment.paid_at);
        if (didBackfill) backfilled++;
      }
    }
    if (inserted > 0) {
      console.log(`[sync] LNbits deposits: ${inserted} new incoming payment(s) synced`);
    }
    if (backfilled > 0) {
      console.log(`[sync] LNbits deposits: ${backfilled} date(s) backfilled`);
    }
  } catch (e) {
    const msg = e instanceof Error ? e.message : String(e);
    console.error("[sync] LNbits deposits failed:", msg);
  }
}
/**
 * Run an immediate deposit sync, then repeat every SYNC_INTERVAL_MS.
 * syncLnbitsDeposits handles its own errors, so both the initial call and
 * each interval tick are deliberately fire-and-forget (marked with `void`
 * so the floating promises are explicit).
 */
export function startLnbitsDepositSync(): void {
  void syncLnbitsDeposits();
  setInterval(() => void syncLnbitsDeposits(), SYNC_INTERVAL_MS);
}

17
backend/tsconfig.json Normal file
View File

@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    // Emit modern JS; ES2022 is natively supported by current Node releases.
    "target": "ES2022",
    // Node-native ESM resolution (matches the .js extensions used in imports).
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "outDir": "dist",
    "rootDir": "src",
    // All strict type-checking options on.
    "strict": true,
    "esModuleInterop": true,
    // Skip checking dependency .d.ts files for faster builds.
    "skipLibCheck": true,
    // Emit .d.ts declarations plus maps for both declarations and JS.
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules", "dist"]
}