First cut

This commit is contained in:
kadil
2026-04-17 16:08:31 -05:00
parent d10105ac00
commit 4ee4cb8e7c
58 changed files with 3243 additions and 1 deletions

55
server/src/app.ts Normal file
View File

@@ -0,0 +1,55 @@
import express, { type Express } from 'express';
import cors from 'cors';
import rateLimit from 'express-rate-limit';
import { createHealthRouter } from './routes/health.js';
import { createApiEditRouter } from './routes/api-edit.js';
import { createWebhookSmsRouter } from './routes/webhook-sms.js';
import type { EditQueue } from './queue/edit-queue.js';
export interface CreateAppDeps {
  queue: EditQueue;
}

/**
 * Build the Express app: raw-body webhook mount, JSON parsing, CORS for the
 * editor, rate limiting on edit endpoints, and route wiring.
 */
export function createApp(deps: CreateAppDeps): Express {
  const app = express();
  // Telnyx webhook needs raw body for signature verification — mount BEFORE json parser
  app.use('/webhooks', express.raw({ type: '*/*' }), (req, _res, next) => {
    // Parse raw body to JSON for the webhook handler, but keep the untouched
    // bytes on req.rawBody — signature verification needs the exact payload,
    // and the original code threw it away by overwriting req.body.
    if (req.body && Buffer.isBuffer(req.body)) {
      Object.assign(req, { rawBody: req.body });
      try {
        req.body = JSON.parse(req.body.toString());
      } catch { /* leave as-is */ }
    }
    next();
  });
  // JSON parser for everything else
  app.use(express.json());
  // CORS for editor cross-origin requests
  const allowedOrigin = process.env.CORS_ALLOWED_ORIGIN || 'http://localhost:4321';
  app.use('/api', cors({
    origin: allowedOrigin,
    methods: ['GET', 'POST', 'OPTIONS'],
    allowedHeaders: ['Authorization', 'Content-Type'],
    credentials: true,
  }));
  // Rate limiting on API edit routes (60 requests per minute per client)
  const apiLimiter = rateLimit({
    windowMs: 60_000,
    max: 60,
    standardHeaders: true,
    legacyHeaders: false,
    message: { error: 'Too many requests' },
  });
  app.use('/api/edit', apiLimiter);
  // Mount routes
  app.use('/', createHealthRouter());
  app.use('/api', createApiEditRouter({ queue: deps.queue }));
  app.use('/webhooks', createWebhookSmsRouter({ queue: deps.queue }));
  return app;
}

161
server/src/db.ts Normal file
View File

@@ -0,0 +1,161 @@
import Database from 'better-sqlite3';
import path from 'node:path';
import fs from 'node:fs';
import crypto from 'node:crypto';
const DB_PATH = process.env.IDEMPOTENCY_DB_PATH || process.env.DATABASE_PATH || './data/dynamic-sites.db';
let db: Database.Database | null = null;
/** Lazily open the SQLite database (singleton), creating its directory and running migrations on first use. */
export function openDb(): Database.Database {
  if (db !== null) {
    return db;
  }
  const parentDir = path.dirname(DB_PATH);
  if (!fs.existsSync(parentDir)) {
    fs.mkdirSync(parentDir, { recursive: true });
  }
  const handle = new Database(DB_PATH);
  // WAL allows concurrent readers; busy_timeout retries instead of failing fast on lock contention.
  handle.pragma('journal_mode = WAL');
  handle.pragma('busy_timeout = 5000');
  runMigrations(handle);
  db = handle;
  return db;
}
// Applies the full schema in one exec(); idempotent via IF NOT EXISTS so it is
// safe to run on every boot.
// NOTE(review): there is no schema-version table, so ALTERs to existing tables
// would need a separate migration mechanism — confirm that is intentional.
function runMigrations(d: Database.Database) {
  d.exec(`
  CREATE TABLE IF NOT EXISTS idempotency_keys (
  key TEXT PRIMARY KEY,
  expires_at INTEGER NOT NULL
  );
  CREATE TABLE IF NOT EXISTS pending_proposals (
  proposal_id TEXT PRIMARY KEY,
  repo_relative_path TEXT NOT NULL,
  proposed_json TEXT NOT NULL,
  summary_text TEXT NOT NULL,
  status TEXT NOT NULL DEFAULT 'pending' CHECK(status IN ('pending','applied','rejected','expired')),
  source TEXT NOT NULL DEFAULT 'http',
  phone_hash TEXT,
  created_at INTEGER NOT NULL DEFAULT (unixepoch()),
  expires_at INTEGER NOT NULL
  );
  CREATE TABLE IF NOT EXISTS sms_rate_limits (
  phone_hash TEXT NOT NULL,
  window_start INTEGER NOT NULL,
  count INTEGER NOT NULL DEFAULT 1,
  PRIMARY KEY (phone_hash, window_start)
  );
  CREATE TABLE IF NOT EXISTS edit_audit_log (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  proposal_id TEXT,
  repo_relative_path TEXT NOT NULL,
  before_hash TEXT,
  after_hash TEXT,
  applied_at INTEGER NOT NULL DEFAULT (unixepoch()),
  source TEXT NOT NULL
  );
  `);
}
// ── Idempotency ──
/**
 * Claim an idempotency key exactly once within its TTL.
 * Returns true on first claim, false while the key is held and unexpired.
 * An expired row no longer blocks the key: the upsert atomically takes it
 * over, so claims work even if pruneIdempotencyKeys() has not run yet
 * (the original INSERT OR IGNORE left expired keys unclaimable until prune).
 */
export function claimOnce(key: string, ttlSeconds: number = 3600): boolean {
  const d = openDb();
  const now = Math.floor(Date.now() / 1000);
  const expiresAt = now + ttlSeconds;
  // Insert, or take over the row only when the previous claim has expired.
  const result = d.prepare(
    `INSERT INTO idempotency_keys (key, expires_at) VALUES (?, ?)
     ON CONFLICT(key) DO UPDATE SET expires_at = excluded.expires_at
     WHERE idempotency_keys.expires_at < ?`
  ).run(key, expiresAt, now);
  return result.changes > 0;
}

/** Delete idempotency keys whose TTL has elapsed (called periodically by the server). */
export function pruneIdempotencyKeys(): void {
  const d = openDb();
  d.prepare('DELETE FROM idempotency_keys WHERE expires_at < ?').run(Math.floor(Date.now() / 1000));
}
// ── Proposals ──
/** Row shape of the pending_proposals table. */
export interface ProposalRow {
  proposal_id: string;
  repo_relative_path: string;
  proposed_json: string;
  summary_text: string;
  status: string;
  source: string;
  phone_hash: string | null;
  created_at: number;
  expires_at: number;
}

/**
 * Insert a new pending proposal row.
 * When phoneHash is set, any still-pending proposal from the same phone is
 * expired first, so each SMS user has at most one live proposal.
 */
export function createProposal(params: {
  proposalId: string;
  repoRelativePath: string;
  proposedJson: string;
  summaryText: string;
  source: string;
  phoneHash?: string;
  ttlMs?: number;
}): void {
  const d = openDb();
  const ttl = params.ttlMs || parseInt(process.env.PROPOSAL_TTL_MS || '900000', 10);
  const expiresAt = Math.floor(Date.now() / 1000) + Math.floor(ttl / 1000);
  if (params.phoneHash) {
    const expireOld = d.prepare(`UPDATE pending_proposals SET status = 'expired' WHERE phone_hash = ? AND status = 'pending'`);
    expireOld.run(params.phoneHash);
  }
  const insert = d.prepare(`INSERT INTO pending_proposals (proposal_id, repo_relative_path, proposed_json, summary_text, source, phone_hash, expires_at) VALUES (?, ?, ?, ?, ?, ?, ?)`);
  insert.run(
    params.proposalId,
    params.repoRelativePath,
    params.proposedJson,
    params.summaryText,
    params.source,
    params.phoneHash || null,
    expiresAt
  );
}
/**
 * Fetch a proposal by id, or null when absent.
 * better-sqlite3's .get() returns undefined on a miss; the original cast
 * claimed `ProposalRow | null` while actually leaking undefined — normalize.
 */
export function getProposal(proposalId: string): ProposalRow | null {
  const row = openDb().prepare('SELECT * FROM pending_proposals WHERE proposal_id = ?').get(proposalId) as ProposalRow | undefined;
  return row ?? null;
}

/** Latest still-pending proposal for a phone hash, or null when there is none. */
export function getPendingProposalByPhone(phoneHash: string): ProposalRow | null {
  const row = openDb().prepare(`SELECT * FROM pending_proposals WHERE phone_hash = ? AND status = 'pending' ORDER BY created_at DESC LIMIT 1`).get(phoneHash) as ProposalRow | undefined;
  return row ?? null;
}

/** Transition a proposal out of 'pending' to a terminal status. */
export function updateProposalStatus(proposalId: string, status: 'applied' | 'rejected' | 'expired'): void {
  openDb().prepare('UPDATE pending_proposals SET status = ? WHERE proposal_id = ?').run(status, proposalId);
}

/** Mark pending proposals past their TTL as expired (called periodically by the server). */
export function pruneExpiredProposals(): void {
  openDb().prepare(`UPDATE pending_proposals SET status = 'expired' WHERE expires_at < ? AND status = 'pending'`).run(Math.floor(Date.now() / 1000));
}
// ── Rate Limiting ──
/**
 * Sliding-window SMS rate limit: allow at most `maxPerHour` messages per
 * phone hash over the trailing hour, bucketed into per-minute rows.
 * Returns true when the message is allowed (and counted), false when over limit.
 * NOTE(review): check-then-increment is not atomic; fine for a single-process
 * server, racy if multiple processes share the DB — confirm deployment model.
 */
export function checkSmsRateLimit(phoneHash: string, maxPerHour: number = 10): boolean {
  const d = openDb();
  const now = Math.floor(Date.now() / 1000);
  const windowStart = now - 3600;
  // Cheap global cleanup: drop minute-buckets older than one hour for ALL phones.
  d.prepare('DELETE FROM sms_rate_limits WHERE window_start < ?').run(windowStart);
  // Total messages from this phone within the trailing hour.
  const row = d.prepare('SELECT SUM(count) as total FROM sms_rate_limits WHERE phone_hash = ? AND window_start >= ?').get(phoneHash, windowStart) as { total: number | null } | undefined;
  if ((row?.total || 0) >= maxPerHour) return false;
  // Count this message in the current minute bucket (upsert).
  const currentWindow = Math.floor(now / 60) * 60;
  d.prepare('INSERT INTO sms_rate_limits (phone_hash, window_start, count) VALUES (?, ?, 1) ON CONFLICT(phone_hash, window_start) DO UPDATE SET count = count + 1').run(phoneHash, currentWindow);
  return true;
}
// ── Audit ──
/** Append one row to the edit audit log; hashes are optional content fingerprints. */
export function writeAuditLog(params: {
  proposalId?: string;
  repoRelativePath: string;
  beforeHash?: string;
  afterHash?: string;
  source: string;
}): void {
  const stmt = openDb().prepare('INSERT INTO edit_audit_log (proposal_id, repo_relative_path, before_hash, after_hash, source) VALUES (?, ?, ?, ?, ?)');
  stmt.run(
    params.proposalId || null,
    params.repoRelativePath,
    params.beforeHash || null,
    params.afterHash || null,
    params.source
  );
}
// ── Helpers ──
/** One-way truncated SHA-256 of a phone number (16 hex chars) used as a privacy-preserving storage key. */
export function hashPhone(phone: string): string {
  const digest = crypto.createHash('sha256').update(phone).digest('hex');
  return digest.substring(0, 16);
}
/** Close the singleton database handle if open; a later openDb() re-opens it. */
export function closeDb(): void {
  if (db === null) return;
  db.close();
  db = null;
}

61
server/src/index.ts Normal file
View File

@@ -0,0 +1,61 @@
import { createApp } from './app.js';
import { createEditQueue } from './queue/edit-queue.js';
import { processEditJob } from './queue/process-edit-job.js';
import { openDb, closeDb, pruneExpiredProposals, pruneIdempotencyKeys } from './db.js';
import { logger } from './logger.js';
const PORT = parseInt(process.env.ORCHESTRATOR_PORT || '3001', 10);

/**
 * Boot the orchestrator: open the DB, start the edit-queue consumer, schedule
 * periodic cleanup, and listen for HTTP traffic. Installs SIGTERM/SIGINT
 * handlers for an ordered graceful shutdown (HTTP → queue → DB).
 */
export async function startServer() {
  // Initialize database
  openDb();
  logger.info({ event: 'db.opened' }, 'SQLite database opened');
  // Create queue and wire consumer
  const queue = createEditQueue();
  queue.startConsumer(processEditJob);
  // Periodic cleanup of expired proposals and idempotency keys
  const cleanupInterval = setInterval(() => {
    pruneExpiredProposals();
    pruneIdempotencyKeys();
  }, 60_000);
  // Create and start HTTP server
  const app = createApp({ queue });
  const server = app.listen(PORT, () => {
    logger.info({ event: 'server.started', port: PORT }, `Orchestrator listening on port ${PORT}`);
  });
  // Graceful shutdown
  let shuttingDown = false;
  async function shutdown(signal: string) {
    if (shuttingDown) return;
    shuttingDown = true;
    logger.info({ event: 'server.shutdown', signal }, `Received ${signal}, shutting down...`);
    clearInterval(cleanupInterval);
    // Stop accepting new connections and WAIT for in-flight requests to finish
    // (the original called process.exit before the close callback could fire).
    const closed = new Promise<void>((resolve) => {
      server.close(() => {
        logger.info({ event: 'server.closed' }, 'HTTP server closed');
        resolve();
      });
    });
    // Idle keep-alive sockets can stall close() indefinitely — cap the wait.
    await Promise.race([closed, new Promise<void>((r) => setTimeout(r, 5000))]);
    // Drain queue (finish current job)
    await queue.shutdown();
    // Close database
    closeDb();
    logger.info({ event: 'db.closed' }, 'Database closed');
    process.exit(0);
  }
  process.on('SIGTERM', () => void shutdown('SIGTERM'));
  process.on('SIGINT', () => void shutdown('SIGINT'));
}

startServer().catch((err: unknown) => {
  logger.fatal({ error: err instanceof Error ? err.message : String(err) }, 'Failed to start server');
  process.exit(1);
});

View File

@@ -0,0 +1,68 @@
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import { stringifyCanonical } from '@dynamic-sites/shared';
import { writeAuditLog } from '../db.js';
import { logger } from '../logger.js';
const REPO_ROOT = process.env.REPO_ROOT || '.';
const MAX_BACKUPS = 20;

/** Create a directory (and any missing parents); no-op when it already exists. */
function ensureDir(dir: string) {
  fs.mkdirSync(dir, { recursive: true });
}

/** Short content fingerprint: first 12 hex chars of the SHA-256 digest. */
function fileHash(content: string): string {
  const hex = crypto.createHash('sha256').update(content).digest('hex');
  return hex.slice(0, 12);
}
/**
 * Atomic write of canonical JSON to a repo-relative path.
 * Creates a pre-write backup (capped at MAX_BACKUPS) and an audit log entry.
 * Throws if repoRelativePath resolves outside REPO_ROOT — the path may
 * originate from LLM routing output and must not reach arbitrary files.
 */
export function writeContentFile(
  repoRelativePath: string,
  data: unknown,
  opts?: { proposalId?: string; source?: string }
): void {
  // Path-traversal guard: reject "../" escapes and absolute paths.
  const rootAbs = path.resolve(REPO_ROOT);
  const absPath = path.resolve(rootAbs, repoRelativePath);
  if (absPath !== rootAbs && !absPath.startsWith(rootAbs + path.sep)) {
    throw new Error(`PATH_OUTSIDE_REPO: ${repoRelativePath}`);
  }
  const canonical = stringifyCanonical(data);
  // Pre-write backup of the existing file, pruned to the newest MAX_BACKUPS.
  let beforeHash: string | undefined;
  if (fs.existsSync(absPath)) {
    const existing = fs.readFileSync(absPath, 'utf-8');
    beforeHash = fileHash(existing);
    const backupDir = path.join(rootAbs, 'content', '.backups', repoRelativePath);
    ensureDir(backupDir);
    const ts = new Date().toISOString().replace(/[:.]/g, '-');
    fs.copyFileSync(absPath, path.join(backupDir, `${ts}.json`));
    // Timestamped names sort chronologically, so the head of the sorted list is oldest.
    const backups = fs.readdirSync(backupDir).sort();
    while (backups.length > MAX_BACKUPS) {
      const oldest = backups.shift()!;
      fs.unlinkSync(path.join(backupDir, oldest));
    }
  }
  // Atomic replace: write a temp file in the same directory, then rename over.
  ensureDir(path.dirname(absPath));
  const tmpPath = absPath + '.tmp.' + process.pid;
  fs.writeFileSync(tmpPath, canonical, 'utf-8');
  fs.renameSync(tmpPath, absPath);
  const afterHash = fileHash(canonical);
  // Audit log
  writeAuditLog({
    proposalId: opts?.proposalId,
    repoRelativePath,
    beforeHash,
    afterHash,
    source: opts?.source || 'http',
  });
  logger.info({ event: 'content.written', path: repoRelativePath, size: canonical.length }, 'Content file written');
}

172
server/src/llm/client.ts Normal file
View File

@@ -0,0 +1,172 @@
import { z } from 'zod';
import { routingOutputSchema, type RoutingOutput } from '@dynamic-sites/shared';
import { logger } from '../logger.js';
const OLLAMA_HOST = process.env.OLLAMA_HOST || 'https://ollama.com';
const OLLAMA_API_KEY = process.env.OLLAMA_API_KEY || '';
const PRIMARY_MODEL = process.env.OLLAMA_MODEL || 'qwen3.5:397b-cloud';
const FALLBACK_MODEL = process.env.OLLAMA_FALLBACK_MODEL || 'gpt-oss:120b';
const MAX_RETRIES = 3;

/** Pluggable chat transport: (messages, model) -> assistant text. Injectable for tests. */
export interface LlmChatCaller {
  (messages: Array<{ role: string; content: string }>, model: string): Promise<string>;
}

/** Default chat caller using Ollama HTTP API */
async function ollamaChat(messages: Array<{ role: string; content: string }>, model: string): Promise<string> {
  const headers: Record<string, string> = { 'Content-Type': 'application/json' };
  if (OLLAMA_API_KEY) {
    headers.Authorization = `Bearer ${OLLAMA_API_KEY}`;
  }
  const resp = await fetch(`${OLLAMA_HOST}/api/chat`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ model, messages, stream: false }),
  });
  if (!resp.ok) {
    const bodyText = await resp.text().catch(() => 'no body');
    throw new Error(`Ollama ${resp.status}: ${bodyText}`);
  }
  const data = (await resp.json()) as { message?: { content?: string } };
  return data.message?.content || '';
}
/**
 * Validate-then-retry loop: parse LLM output as JSON, validate against schema,
 * re-prompt with errors if invalid, up to MAX_RETRIES per model.
 * Tries PRIMARY_MODEL first, then FALLBACK_MODEL; throws Error('LLM_UNAVAILABLE')
 * only after every attempt on every model has failed.
 */
async function generateWithValidation<T>(params: {
  messages: Array<{ role: string; content: string }>;
  schema: z.ZodType<T>;
  chat?: LlmChatCaller;
}): Promise<T> {
  const chat = params.chat || ollamaChat;
  const models = [PRIMARY_MODEL, FALLBACK_MODEL];
  for (const model of models) {
    // Fresh copy per model so correction turns from a failed model don't leak into the next.
    const msgs = [...params.messages];
    for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
      logger.debug({ event: 'llm.request', model, attempt }, 'LLM call');
      try {
        const raw = await chat(msgs, model);
        // Extract JSON from response (handle markdown code blocks); fall back to the whole reply.
        const jsonMatch = raw.match(/```(?:json)?\s*([\s\S]*?)```/) || [null, raw];
        const jsonStr = (jsonMatch[1] || raw).trim();
        const parsed = JSON.parse(jsonStr);
        const result = params.schema.safeParse(parsed);
        if (result.success) return result.data;
        // Schema violation: feed the issue list back so the model can self-correct.
        const errors = result.error.issues.map(i => `${i.path.join('.')}: ${i.message}`).join('\n');
        logger.debug({ event: 'llm.retry', model, attempt, errors }, 'Validation failed, retrying');
        msgs.push(
          { role: 'assistant', content: raw },
          { role: 'user', content: `Your JSON output failed validation:\n${errors}\n\nPlease fix the issues and return valid JSON only.` }
        );
      } catch (err) {
        // Transport failure or unparsable JSON.
        logger.warn({ event: 'llm.retry', model, attempt, error: (err as Error).message }, 'LLM call or parse failed');
        if (attempt === MAX_RETRIES - 1) break;
        // NOTE(review): only a user turn is appended here (no assistant turn), so
        // strict role alternation is lost — confirm the chat API tolerates this.
        msgs.push(
          { role: 'user', content: `Your response was not valid JSON. Please respond with ONLY a JSON object, no markdown or extra text.` }
        );
      }
    }
    logger.warn({ event: 'llm.fallback', from: model }, 'Exhausted retries, trying fallback');
  }
  logger.error({ event: 'llm.exhausted' }, 'All LLM models exhausted');
  throw new Error('LLM_UNAVAILABLE');
}
// ── Public API ──
/** Input for a content edit: the file's current JSON, site context, the user's request, and the target schema. */
export interface GenerateEditedJsonParams {
  currentJson: unknown;
  siteContext: unknown;
  userMessage: string;
  repoRelativePath: string;
  schema: z.ZodTypeAny;
}

/** Ask the LLM to apply a natural-language edit to a JSON content file; output is validated against `schema`. */
export async function generateEditedJson(params: GenerateEditedJsonParams, chat?: LlmChatCaller): Promise<unknown> {
  const systemPrompt = `You are a website content editor. You edit JSON content files for a website.
SITE CONTEXT:
${JSON.stringify(params.siteContext, null, 2)}
You will receive the current JSON content of a section file and a natural language edit request.
Return ONLY the complete updated JSON object — no explanation, no markdown, just the JSON.
Preserve all existing fields and structure. Only change what the user requested.
The output must be valid JSON matching the exact same schema as the input.`;
  const userPrompt = `Current content of "${params.repoRelativePath}":\n\`\`\`json\n${JSON.stringify(params.currentJson, null, 2)}\n\`\`\`\n\nEdit request: "${params.userMessage}"\n\nReturn the complete updated JSON:`;
  return generateWithValidation({
    messages: [
      { role: 'system', content: systemPrompt },
      { role: 'user', content: userPrompt },
    ],
    schema: params.schema,
    chat,
  });
}
/** Input for routing: the user's request plus a manifest of candidate sections. */
export interface RouteEditIntentParams {
  userMessage: string;
  manifest: Array<{ id: string; type: string; title?: string; headline?: string; heading?: string; repo_relative_path: string; visible: boolean }>;
}

/** Ask the LLM which section file an edit request targets, or whether clarification is needed. */
export async function routeEditIntent(params: RouteEditIntentParams, chat?: LlmChatCaller): Promise<RoutingOutput> {
  const systemPrompt = `You are a routing assistant for a website CMS. Given a natural language edit request and a manifest of available content sections, determine which section file the edit applies to.
Return a JSON object with:
- "repo_relative_path": the path of the target section file
- "needs_clarification": true if the request is ambiguous
- "reason": short explanation
- "clarification_message": (only if needs_clarification) a question to ask the user
If the request is about showing/hiding/enabling/disabling a section, route to that section's file.
If the request mentions events, route to "content/events.json".`;
  const userPrompt = `MANIFEST:\n${JSON.stringify(params.manifest, null, 2)}\n\nEDIT REQUEST: "${params.userMessage}"\n\nReturn JSON:`;
  return generateWithValidation({
    messages: [
      { role: 'system', content: systemPrompt },
      { role: 'user', content: userPrompt },
    ],
    schema: routingOutputSchema,
    chat,
  });
}
/**
 * Simple summary generation (no schema validation needed).
 * Returns a short plain-text description of the before/after change, falling
 * back to a canned summary when the LLM call fails.
 */
export async function generateSummary(params: {
  before: unknown;
  after: unknown;
  repoRelativePath: string;
  userMessage: string;
  chat?: LlmChatCaller;
}): Promise<string> {
  const chat = params.chat || ollamaChat;
  const messages = [
    {
      role: 'system',
      content: `You summarize content changes for a website owner. Keep summaries under 140 characters, plain text, no markdown. Be specific about what changed. Format: "Change X from A to B" or "Add/remove X".`,
    },
    {
      role: 'user',
      content: `File: ${params.repoRelativePath}\nRequest: "${params.userMessage}"\n\nBefore:\n${JSON.stringify(params.before, null, 2)}\n\nAfter:\n${JSON.stringify(params.after, null, 2)}\n\nSummarize the change in under 140 chars:`,
    },
  ];
  try {
    const result = await chat(messages, PRIMARY_MODEL);
    // Strip quote characters so the summary embeds cleanly in SMS templates.
    // NOTE(review): the prompt asks for <=140 chars but the hard cap here is
    // 280 — confirm which limit is intended (SMS segments are 160 chars).
    return result.replace(/["'`]/g, '').trim().slice(0, 280);
  } catch {
    // Fallback: generate a basic diff summary
    return `Update ${params.repoRelativePath} as requested: "${params.userMessage.slice(0, 80)}"`;
  }
}

18
server/src/logger.ts Normal file
View File

@@ -0,0 +1,18 @@
import pino from 'pino';
const LOG_LEVEL = process.env.LOG_LEVEL || 'info';
const isDev = process.env.NODE_ENV !== 'production';

/** App-wide structured logger: pretty-printed in dev, raw JSON lines in production. */
export const logger = pino(
  isDev
    ? { level: LOG_LEVEL, transport: { target: 'pino-pretty', options: { colorize: true } } }
    : { level: LOG_LEVEL }
);

/** Derive a logger with extra bound fields (e.g. per-job context). */
export function createChildLogger(bindings: Record<string, unknown>) {
  return logger.child(bindings);
}
/** Redact a phone number to its last four characters; shorter inputs become "****". */
export function maskPhone(phone: string): string {
  if (phone.length <= 4) {
    return '****';
  }
  const masked = phone.slice(0, -4).replace(/./g, '*');
  return masked + phone.slice(-4);
}

View File

@@ -0,0 +1,83 @@
import type { EditJobPayload } from '@dynamic-sites/shared';
import { logger } from '../logger.js';
/**
 * Contract for the in-process edit job queue: a bounded FIFO with a single
 * async consumer and drain-on-shutdown semantics.
 */
export interface EditQueue {
  enqueue(payload: EditJobPayload): void;
  startConsumer(processor: (job: EditJobPayload) => Promise<void>): void;
  getQueueDepth(): number;
  shutdown(): Promise<void>;
}

const MAX_QUEUE_DEPTH = parseInt(process.env.MAX_QUEUE_DEPTH || '20', 10);

/**
 * In-memory FIFO queue that processes one job at a time.
 * enqueue() throws 'QUEUE_FULL' beyond MAX_QUEUE_DEPTH and silently drops
 * jobs during shutdown; shutdown() waits for the in-flight job, then
 * discards whatever remains queued.
 */
export function createEditQueue(): EditQueue {
  const jobs: EditJobPayload[] = [];
  let processing = false;
  let shuttingDown = false;
  let processor: ((job: EditJobPayload) => Promise<void>) | null = null;
  let resolveShutdown: (() => void) | null = null;

  // Sequentially process queued jobs; re-entrancy is guarded by `processing`.
  async function drain() {
    if (processing) return;
    processing = true;
    while (jobs.length > 0 && !shuttingDown) {
      const job = jobs.shift()!;
      logger.info({ event: 'job.started', kind: job.kind, id: job.id }, 'Processing job');
      try {
        await processor!(job);
        logger.info({ event: 'job.completed', kind: job.kind, id: job.id }, 'Job completed');
      } catch (err) {
        logger.error({ event: 'job.failed', kind: job.kind, id: job.id, error: (err as Error).message }, 'Job failed');
      }
    }
    processing = false;
    // Wake a pending shutdown() even when jobs remain. The original only
    // resolved when the queue was empty, but the loop above exits on
    // `shuttingDown` with jobs still queued — shutdown() would hang forever.
    if (shuttingDown && resolveShutdown) {
      resolveShutdown();
      resolveShutdown = null;
    }
  }

  return {
    enqueue(payload: EditJobPayload) {
      if (shuttingDown) {
        logger.warn({ event: 'job.rejected', reason: 'shutting_down' }, 'Rejecting job — shutting down');
        return;
      }
      if (jobs.length >= MAX_QUEUE_DEPTH) {
        logger.warn({ event: 'job.rejected', reason: 'queue_full', depth: jobs.length }, 'Queue depth exceeded');
        throw new Error('QUEUE_FULL');
      }
      jobs.push(payload);
      logger.info({ event: 'job.enqueued', kind: payload.kind, id: payload.id, depth: jobs.length }, 'Job enqueued');
      // Start draining on next tick
      if (processor) setImmediate(drain);
    },
    startConsumer(proc) {
      processor = proc;
      logger.info({ event: 'consumer.started' }, 'Edit queue consumer started');
      // Start draining in case jobs were enqueued before consumer started
      if (jobs.length > 0) setImmediate(drain);
    },
    getQueueDepth() {
      return jobs.length;
    },
    async shutdown() {
      shuttingDown = true;
      const remaining = jobs.length;
      if (remaining > 0) {
        logger.warn({ event: 'consumer.shutdown', dropped: remaining }, `Shutting down with ${remaining} queued jobs`);
      }
      if (processing) {
        // Wait for the current job; drain() resolves this once it stops.
        await new Promise<void>(resolve => { resolveShutdown = resolve; });
      }
      // Clear remaining jobs
      jobs.length = 0;
    },
  };
}

View File

@@ -0,0 +1,55 @@
import fs from 'node:fs';
import path from 'node:path';
import { sectionFileSchema } from '@dynamic-sites/shared';
const REPO_ROOT = process.env.REPO_ROOT || '.';
/** One routable entry in the section manifest handed to the LLM router. */
export interface ManifestEntry {
  id: string;
  type: string;
  title?: string;
  headline?: string;
  heading?: string;
  repo_relative_path: string;
  visible: boolean;
}

/**
 * Scan content/sections/*.json, validate each file against the shared schema,
 * and return summary entries (plus a fixed entry for content/events.json).
 * Unreadable or schema-invalid files are skipped silently.
 */
export function buildSectionManifest(): ManifestEntry[] {
  const entries: ManifestEntry[] = [];
  const sectionsDir = path.join(REPO_ROOT, 'content/sections');
  if (!fs.existsSync(sectionsDir)) return entries;
  const jsonFiles = fs.readdirSync(sectionsDir).filter((name) => name.endsWith('.json'));
  for (const file of jsonFiles) {
    try {
      const raw = JSON.parse(fs.readFileSync(path.join(sectionsDir, file), 'utf-8'));
      const result = sectionFileSchema.safeParse(raw);
      if (!result.success) continue;
      const s = result.data;
      const entry: ManifestEntry = {
        id: s.id,
        type: s.type,
        repo_relative_path: `content/sections/${file}`,
        visible: s.visible,
      };
      // Surface the most descriptive label each section type carries.
      if (s.type === 'hero') entry.headline = s.headline;
      if (s.type === 'about' || s.type === 'features' || s.type === 'testimonials') entry.title = s.title;
      if (s.type === 'text') entry.heading = s.heading;
      entries.push(entry);
    } catch { /* skip bad files */ }
  }
  // events.json lives outside content/sections but is always editable.
  entries.push({
    id: 'events',
    type: 'events',
    title: 'Events',
    repo_relative_path: 'content/events.json',
    visible: true,
  });
  return entries;
}

View File

@@ -0,0 +1,178 @@
import crypto from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';
import type { EditJobPayload } from '@dynamic-sites/shared';
import { schemaForRepoRelativePath } from '@dynamic-sites/shared';
import { createProposal, getProposal, hashPhone, updateProposalStatus } from '../db.js';
import { writeContentFile } from '../io/write-content.js';
import { generateEditedJson, routeEditIntent, generateSummary } from '../llm/client.js';
import { buildSectionManifest } from './manifest.js';
import { sendSms } from '../sms/reply.js';
import { SMS_TEMPLATES } from '../sms/templates.js';
import { logger } from '../logger.js';
const REPO_ROOT = process.env.REPO_ROOT || '.';

/** Dispatch a queued edit job to its handler based on the job kind. */
export async function processEditJob(job: EditJobPayload): Promise<void> {
  switch (job.kind) {
    case 'propose':
      await handlePropose(job);
      break;
    case 'apply':
      await handleApply(job);
      break;
  }
}
async function handlePropose(job: Extract<EditJobPayload, { kind: 'propose' }>) {
const log = logger.child({ jobId: job.id, kind: 'propose' });
try {
// Step 1: Route — determine which file the edit targets
let repoRelativePath = job.repo_relative_path;
if (!repoRelativePath) {
const manifest = buildSectionManifest();
log.debug({ event: 'routing.start', sections: manifest.length }, 'Routing edit intent');
const routing = await routeEditIntent({ userMessage: job.message, manifest });
if (routing.needs_clarification) {
log.info({ event: 'routing.ambiguous' }, 'Routing ambiguous');
if (job.smsReplyMeta) {
await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to,
routing.clarification_message || SMS_TEMPLATES.ROUTING_AMBIGUOUS(manifest.map(m => m.id).join(', '))
);
}
return;
}
repoRelativePath = routing.repo_relative_path;
}
log.info({ event: 'routing.resolved', path: repoRelativePath }, 'Route resolved');
// Step 2: Load current content + schema
const absPath = path.join(REPO_ROOT, repoRelativePath);
if (!fs.existsSync(absPath)) {
log.error({ event: 'propose.file_not_found', path: repoRelativePath }, 'Target file not found');
if (job.smsReplyMeta) {
await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.ROUTING_NO_MATCH(repoRelativePath));
}
return;
}
const currentJson = JSON.parse(fs.readFileSync(absPath, 'utf-8'));
const schema = schemaForRepoRelativePath(repoRelativePath);
if (!schema) {
log.error({ event: 'propose.no_schema', path: repoRelativePath }, 'No schema for path');
return;
}
const siteContext = JSON.parse(fs.readFileSync(path.join(REPO_ROOT, 'site-context.json'), 'utf-8'));
// Step 3: Generate edited JSON via LLM
const editedJson = await generateEditedJson({
currentJson,
siteContext,
userMessage: job.message,
repoRelativePath,
schema,
});
// Step 4: Generate summary
const summary = await generateSummary({
before: currentJson,
after: editedJson,
repoRelativePath,
userMessage: job.message,
});
// Step 5: Store proposal
const proposalId = crypto.randomUUID();
createProposal({
proposalId,
repoRelativePath,
proposedJson: JSON.stringify(editedJson),
summaryText: summary,
source: job.source,
phoneHash: job.smsReplyMeta?.from ? crypto.createHash('sha256').update(job.smsReplyMeta.from).digest('hex').slice(0, 16) : undefined,
});
log.info({ event: 'proposal.created', proposalId, path: repoRelativePath }, 'Proposal created');
// Step 6: Notify user
if (job.smsReplyMeta) {
await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_SUMMARY(summary, proposalId));
}
// For HTTP callers, the proposal_id is returned via the response (handled in route)
// Store on the job for the route handler to read
(job as Record<string, unknown>)._proposalId = proposalId;
(job as Record<string, unknown>)._summary = summary;
} catch (err) {
const msg = (err as Error).message;
log.error({ event: 'propose.failed', error: msg }, 'Propose failed');
if (job.smsReplyMeta) {
const template = msg === 'LLM_UNAVAILABLE' ? SMS_TEMPLATES.LLM_UNAVAILABLE() : SMS_TEMPLATES.LLM_UNAVAILABLE();
await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, template);
}
}
}
/**
 * Handle an 'apply' job: confirm a stored proposal, re-validate its JSON
 * against the current schema, write it to disk, and notify the user via SMS
 * when reply metadata is present. Already-applied and expired proposals get
 * their own notification paths; missing proposals are logged and dropped.
 */
async function handleApply(job: Extract<EditJobPayload, { kind: 'apply' }>) {
  const log = logger.child({ jobId: job.id, kind: 'apply', proposalId: job.proposal_id });
  const proposal = getProposal(job.proposal_id);
  if (!proposal) {
    log.warn({ event: 'apply.not_found' }, 'Proposal not found');
    return;
  }
  const now = Math.floor(Date.now() / 1000);
  // Idempotent confirm: a repeated "yes" on an applied proposal is a no-op notification.
  if (proposal.status === 'applied') {
    log.info({ event: 'apply.already_applied' }, 'Proposal already applied');
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_ALREADY_APPLIED());
    }
    return;
  }
  // Anything not pending (rejected/expired) or past its TTL is treated as expired.
  if (proposal.status !== 'pending' || proposal.expires_at < now) {
    log.info({ event: 'apply.expired' }, 'Proposal expired or invalid status');
    updateProposalStatus(job.proposal_id, 'expired');
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_EXPIRED());
    }
    return;
  }
  // Re-validate against schema — the stored JSON may predate a schema change.
  const schema = schemaForRepoRelativePath(proposal.repo_relative_path);
  if (!schema) {
    log.error({ event: 'apply.no_schema' }, 'No schema for proposal path');
    return;
  }
  const proposedData = JSON.parse(proposal.proposed_json);
  const validation = schema.safeParse(proposedData);
  if (!validation.success) {
    log.error({ event: 'apply.validation_failed', errors: validation.error.message }, 'Proposed JSON fails validation');
    updateProposalStatus(job.proposal_id, 'rejected');
    return;
  }
  // Write to disk (atomic write + backup + audit happen inside writeContentFile).
  writeContentFile(proposal.repo_relative_path, validation.data, {
    proposalId: job.proposal_id,
    source: job.source,
  });
  updateProposalStatus(job.proposal_id, 'applied');
  log.info({ event: 'proposal.confirmed', path: proposal.repo_relative_path }, 'Proposal applied');
  if (job.smsReplyMeta) {
    await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.APPLIED(proposal.summary_text));
  }
}

View File

@@ -0,0 +1,165 @@
import { Router, type Request, type Response } from 'express';
import crypto from 'node:crypto';
import fs from 'node:fs';
import path from 'node:path';
import { editRequestSchema, sectionFileSchema } from '@dynamic-sites/shared';
import type { EditQueue } from '../queue/edit-queue.js';
import { getProposal, updateProposalStatus } from '../db.js';
import { buildSectionManifest } from '../queue/manifest.js';
import { logger } from '../logger.js';
const REPO_ROOT = process.env.REPO_ROOT || '.';
const API_EDIT_SECRET = process.env.API_EDIT_SECRET || '';

/**
 * Check the Authorization header against the configured bearer secret.
 * Uses crypto.timingSafeEqual (after an unavoidable length check) instead of
 * the original `===`, which leaks secret prefix length via timing.
 * Always false when no secret is configured — fail closed.
 */
export function verifyEditAuth(req: Request): boolean {
  const auth = req.headers.authorization;
  if (!auth || !API_EDIT_SECRET) return false;
  const expected = Buffer.from(`Bearer ${API_EDIT_SECRET}`);
  const provided = Buffer.from(auth);
  return provided.length === expected.length && crypto.timingSafeEqual(provided, expected);
}
export interface ApiEditRouterDeps {
queue: EditQueue;
}
export function createApiEditRouter(deps: ApiEditRouterDeps): Router {
const router = Router();
// Auth middleware for all routes
router.use((req, res, next) => {
if (!verifyEditAuth(req)) {
res.status(401).json({ error: 'Unauthorized' });
return;
}
next();
});
// GET /api/manifest — list all sections
router.get('/manifest', (_req: Request, res: Response) => {
const manifest = buildSectionManifest();
res.json({ sections: manifest });
});
// GET /api/section?path=content/sections/hero.json — get current section JSON
router.get('/section', (req: Request, res: Response) => {
const relPath = req.query.path as string;
if (!relPath) {
res.status(400).json({ error: 'Missing path query param' });
return;
}
const absPath = path.join(REPO_ROOT, relPath);
if (!fs.existsSync(absPath)) {
res.status(404).json({ error: 'File not found' });
return;
}
try {
const data = JSON.parse(fs.readFileSync(absPath, 'utf-8'));
res.json(data);
} catch {
res.status(500).json({ error: 'Failed to read file' });
}
});
// GET /api/site-context
router.get('/site-context', (_req: Request, res: Response) => {
try {
const data = JSON.parse(fs.readFileSync(path.join(REPO_ROOT, 'site-context.json'), 'utf-8'));
res.json(data);
} catch {
res.status(500).json({ error: 'Failed to read site context' });
}
});
// POST /api/edit — propose an edit (NL message)
router.post('/edit', async (req: Request, res: Response) => {
const parsed = editRequestSchema.safeParse(req.body);
if (!parsed.success) {
res.status(400).json({ error: 'Invalid request', details: parsed.error.issues });
return;
}
const { message, repo_relative_path, confirm, proposal_id } = parsed.data;
// Handle confirmation flow
if (confirm && proposal_id) {
if (confirm === 'yes') {
try {
deps.queue.enqueue({
kind: 'apply',
id: crypto.randomUUID(),
proposal_id,
source: 'http',
});
res.status(202).json({ status: 'applying', proposal_id });
} catch (err) {
res.status(503).json({ error: (err as Error).message });
}
} else {
updateProposalStatus(proposal_id, 'rejected');
res.json({ status: 'rejected', proposal_id });
}
return;
}
// Propose flow
const jobId = crypto.randomUUID();
try {
deps.queue.enqueue({
kind: 'propose',
id: jobId,
message,
repo_relative_path,
source: 'http',
});
res.status(202).json({ status: 'processing', job_id: jobId });
} catch (err) {
res.status(503).json({ error: (err as Error).message });
}
});
// POST /api/edit/create-section — create a new section file
router.post('/edit/create-section', (req: Request, res: Response) => {
const { filename, data } = req.body;
if (!filename || !data) {
res.status(400).json({ error: 'Missing filename or data' });
return;
}
const parsed = sectionFileSchema.safeParse(data);
if (!parsed.success) {
res.status(400).json({ error: 'Invalid section data', details: parsed.error.issues });
return;
}
const relPath = `content/sections/${filename.replace(/[^a-z0-9-]/gi, '').toLowerCase()}.json`;
const absPath = path.join(REPO_ROOT, relPath);
if (fs.existsSync(absPath)) {
res.status(409).json({ error: 'File already exists' });
return;
}
// Use the write-content module (imported dynamically to avoid circular deps)
import('../io/write-content.js').then(({ writeContentFile }) => {
writeContentFile(relPath, parsed.data, { source: 'editor' });
res.status(201).json({ status: 'created', path: relPath });
});
});
// GET /api/proposal/:id — check proposal status
router.get('/proposal/:id', (req: Request, res: Response) => {
  const proposal = getProposal(req.params.id);
  if (!proposal) {
    res.status(404).json({ error: 'Proposal not found' });
    return;
  }
  // Expose only the client-facing fields of the stored proposal row.
  const { proposal_id, status, summary_text, repo_relative_path, created_at, expires_at } = proposal;
  res.json({
    proposal_id,
    status,
    summary: summary_text,
    repo_relative_path,
    created_at,
    expires_at,
  });
});
return router;
}

View File

@@ -0,0 +1,9 @@
import { Router } from 'express';
/** Build the liveness-probe router: GET /health returns { status, timestamp }. */
export function createHealthRouter(): Router {
  const router = Router();

  router.get('/health', (_req, res) => {
    const payload = { status: 'ok', timestamp: new Date().toISOString() };
    res.json(payload);
  });

  return router;
}

View File

@@ -0,0 +1,115 @@
import { Router, type Request, type Response } from 'express';
import crypto from 'node:crypto';
import type { EditQueue } from '../queue/edit-queue.js';
import { parseTelnyxInboundMessage } from '../sms/parse.js';
import { sendSms } from '../sms/reply.js';
import { SMS_TEMPLATES } from '../sms/templates.js';
import { claimOnce, checkSmsRateLimit, hashPhone, getPendingProposalByPhone, updateProposalStatus } from '../db.js';
import { logger, maskPhone } from '../logger.js';
/** Dependencies injected into the SMS webhook router (supplied by createApp). */
export interface WebhookSmsRouterDeps {
  // Shared job queue; inbound SMS edits are handed off to it as propose/apply jobs.
  queue: EditQueue;
}
/**
 * Build the Telnyx inbound-SMS webhook router.
 *
 * The handler acknowledges immediately with 200 (so Telnyx does not retry
 * while we work) and processes the message out-of-band; handler failures are
 * logged, never thrown back into Express.
 */
export function createWebhookSmsRouter(deps: WebhookSmsRouterDeps): Router {
  const router = Router();

  router.post('/telnyx', (req: Request, res: Response) => {
    // Respond quickly
    res.status(200).json({ status: 'received' });

    // Process async
    void handleInbound(req.body, deps).catch((err: unknown) => {
      logger.error({ event: 'sms.handler_error', error: (err as Error).message }, 'SMS handler error');
    });
  });

  return router;
}
/**
 * Process one inbound SMS end-to-end: parse, dedupe, rate-limit, then either
 * resolve the sender's pending proposal (YES/NO) or enqueue a new propose job.
 *
 * Runs after the webhook has already been acknowledged with 200, so failures
 * here are reported back to the sender via SMS (or just logged) — nothing in
 * this function can affect the HTTP response.
 */
async function handleInbound(body: unknown, deps: WebhookSmsRouterDeps) {
  const parsed = parseTelnyxInboundMessage(body);
  if (!parsed) {
    logger.warn({ event: 'sms.parse_failed' }, 'Failed to parse inbound SMS');
    return;
  }
  const { messageId, from, to, text, hasMedia } = parsed;
  // Phones are keyed only by hash; raw numbers are masked in all log output.
  const phoneHash = hashPhone(from);
  logger.info({ event: 'sms.received', from: maskPhone(from), hasMedia, messageId }, 'Inbound SMS');
  // Idempotency check — webhooks can be redelivered, so claim each message id
  // once (3600 s window). Messages without an id cannot be deduped.
  if (messageId && !claimOnce(`sms:${messageId}`, 3600)) {
    logger.info({ event: 'sms.idempotent_skip', messageId }, 'Duplicate SMS skipped');
    return;
  }
  // MMS check — media is unsupported; tell the sender instead of dropping it.
  if (hasMedia) {
    await sendSms(from, to, SMS_TEMPLATES.MMS_NOT_SUPPORTED());
    return;
  }
  // Rate limit per phone hash (default 10/hour). NOTE(review): this runs
  // before the YES/NO handling below, so a rate-limited sender also cannot
  // confirm a pending proposal — confirm that is intended.
  const maxPerHour = parseInt(process.env.SMS_RATE_LIMIT_PER_HOUR || '10', 10);
  if (!checkSmsRateLimit(phoneHash, maxPerHour)) {
    logger.info({ event: 'sms.rate_limited', phone: maskPhone(from) }, 'SMS rate limited');
    await sendSms(from, to, SMS_TEMPLATES.RATE_LIMITED());
    return;
  }
  const upperText = text.toUpperCase().trim();
  // Check for YES/NO confirmation of this sender's pending proposal (if any)
  if (upperText === 'YES' || upperText === 'Y') {
    const pending = getPendingProposalByPhone(phoneHash);
    if (!pending) {
      await sendSms(from, to, SMS_TEMPLATES.PROPOSAL_EXPIRED());
      return;
    }
    try {
      // smsReplyMeta lets the worker text the apply result back to this sender.
      deps.queue.enqueue({
        kind: 'apply',
        id: crypto.randomUUID(),
        proposal_id: pending.proposal_id,
        source: 'sms',
        smsReplyMeta: { from, to },
      });
    } catch {
      // Queue unavailable — apologize rather than fail silently.
      await sendSms(from, to, SMS_TEMPLATES.LLM_UNAVAILABLE());
    }
    return;
  }
  if (upperText === 'NO' || upperText === 'N') {
    const pending = getPendingProposalByPhone(phoneHash);
    if (pending) {
      updateProposalStatus(pending.proposal_id, 'rejected');
      logger.info({ event: 'proposal.rejected', proposalId: pending.proposal_id }, 'Proposal rejected via SMS');
    }
    // Reply "cancelled" even with nothing pending — idempotent for the sender.
    await sendSms(from, to, SMS_TEMPLATES.REJECTED());
    return;
  }
  // Check if there's a pending proposal — if user sends something other than YES/NO
  const pending = getPendingProposalByPhone(phoneHash);
  if (pending) {
    // New message while proposal pending — could be a new edit or invalid confirm
    // Expire old proposal and start fresh
    updateProposalStatus(pending.proposal_id, 'expired');
  }
  // New propose job
  try {
    deps.queue.enqueue({
      kind: 'propose',
      id: crypto.randomUUID(),
      message: text,
      source: 'sms',
      smsReplyMeta: { from, to },
    });
  } catch {
    await sendSms(from, to, SMS_TEMPLATES.LLM_UNAVAILABLE());
  }
}

30
server/src/sms/parse.ts Normal file
View File

@@ -0,0 +1,30 @@
/** Normalized view of one inbound Telnyx SMS/MMS webhook payload. */
export interface ParsedInboundSms {
  messageId: string;   // Telnyx message id ('' when absent)
  from: string;        // sender phone number as provided by the payload
  to: string;          // receiving number (first entry when `to` is an array)
  text: string;        // trimmed message body ('' for media-only MMS)
  hasMedia: boolean;   // true when at least one attachment has a URL
  mediaUrls: string[]; // attachment URLs
}

/**
 * Parse a Telnyx inbound-message webhook body into a normalized shape.
 *
 * Accepts both the enveloped form ({ data: { payload: {...} } }) and a bare
 * payload, and tolerates `from`/`to` as either objects with `phone_number`
 * or plain values.
 *
 * Returns null when the body is unusable: no sender, or neither text nor
 * media. Fix: media-only MMS (no text) previously returned null here, so the
 * caller's hasMedia branch (and the "images not supported" reply) could never
 * fire — such messages are now kept.
 */
export function parseTelnyxInboundMessage(body: unknown): ParsedInboundSms | null {
  try {
    const data = body as Record<string, unknown>;
    const eventData = (data.data as Record<string, unknown>) || data;
    const payload = (eventData.payload as Record<string, unknown>) || eventData;
    const from = ((payload.from as Record<string, unknown>)?.phone_number as string) || (payload.from as string) || '';
    const to = (Array.isArray(payload.to) ? (payload.to[0] as Record<string, unknown>)?.phone_number : (payload.to as Record<string, unknown>)?.phone_number) as string || '';
    const text = (payload.text as string) || (payload.body as string) || '';
    const messageId = (payload.id as string) || (eventData.id as string) || '';
    const media = (payload.media as Array<{ url: string }>) || [];
    const mediaUrls = media.map(m => m.url).filter(Boolean);
    if (!from || (!text && mediaUrls.length === 0)) return null;
    return { messageId, from, to, text: text.trim(), hasMedia: mediaUrls.length > 0, mediaUrls };
  } catch {
    return null;
  }
}

30
server/src/sms/reply.ts Normal file
View File

@@ -0,0 +1,30 @@
import { logger, maskPhone } from '../logger.js';
const TELNYX_API_KEY = process.env.TELNYX_API_KEY || '';
/**
 * Send an outbound SMS through the Telnyx v2 messages API.
 *
 * When TELNYX_API_KEY is unset (local dev), the send is skipped and the body
 * is logged instead. Network and API failures are logged and swallowed —
 * this function never throws.
 */
export async function sendSms(to: string, from: string, body: string): Promise<void> {
  if (!TELNYX_API_KEY) {
    logger.warn({ event: 'sms.send_skipped', to: maskPhone(to) }, 'No TELNYX_API_KEY, skipping SMS send');
    logger.info({ event: 'sms.would_send', body }, 'SMS body (dev mode)');
    return;
  }

  try {
    const response = await fetch('https://api.telnyx.com/v2/messages', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Bearer ${TELNYX_API_KEY}`,
      },
      body: JSON.stringify({ from, to, text: body }),
    });

    if (response.ok) {
      logger.info({ event: 'sms.sent', to: maskPhone(to) }, 'SMS sent');
    } else {
      logger.error({ event: 'sms.send_failed', status: response.status }, 'Failed to send SMS');
    }
  } catch (err) {
    logger.error({ event: 'sms.send_error', error: (err as Error).message }, 'SMS send error');
  }
}

View File

@@ -0,0 +1,34 @@
/**
 * Canned SMS reply bodies, keyed by situation. Each entry is a factory so
 * dynamic values can be interpolated; all user-visible SMS copy lives here.
 */
export const SMS_TEMPLATES = {
  // NOTE(review): proposalId is accepted but never used in the message body —
  // confirm whether it should be included (e.g. for support lookups) or dropped.
  PROPOSAL_SUMMARY: (summary: string, proposalId: string) =>
    `Proposed change: ${summary}\n\nReply YES to apply or NO to cancel.`,
  // Sent after a confirmed change has been applied.
  APPLIED: (summary: string) =>
    `Done! ${summary} Your site will update shortly.`,
  // Sent when the user replies NO/N to a proposal.
  REJECTED: () =>
    `Got it — change cancelled. Send a new message anytime.`,
  // Generic failure reply when the queue/backend cannot take the job.
  LLM_UNAVAILABLE: () =>
    `Sorry, I couldn't process that right now. Please try again in a few minutes.`,
  // The request matched several candidate sections.
  ROUTING_AMBIGUOUS: (options: string) =>
    `I'm not sure which section you mean. Did you mean: ${options}? Reply with the number or name.`,
  // The request matched no section; `list` names the ones that exist.
  ROUTING_NO_MATCH: (list: string) =>
    `I couldn't find a section matching that request. Your current sections are: ${list}. Try again?`,
  // User confirmed, but no pending proposal was found for their number.
  PROPOSAL_EXPIRED: () =>
    `That change request has expired. Please send your edit again to start over.`,
  PROPOSAL_ALREADY_APPLIED: () =>
    `That change was already applied.`,
  // Prompt repeated when a confirmation reply is not YES/NO.
  INVALID_CONFIRM: () =>
    `Reply YES to apply or NO to cancel.`,
  // Per-phone hourly rate limit was hit.
  RATE_LIMITED: () =>
    `You've sent several requests recently. Please wait a few minutes before trying again.`,
  // Inbound message carried media; only text edits are supported.
  MMS_NOT_SUPPORTED: () =>
    `Image uploads aren't supported yet. Please describe your change in text.`,
} as const;