First cut

This commit is contained in:
kadil
2026-04-17 16:08:31 -05:00
parent d10105ac00
commit 4ee4cb8e7c
58 changed files with 3243 additions and 1 deletion

View File

@@ -0,0 +1,83 @@
import type { EditJobPayload } from '@dynamic-sites/shared';
import { logger } from '../logger.js';
/**
 * In-memory FIFO job queue for edit jobs.
 *
 * Jobs are processed one at a time by a single consumer registered via
 * `startConsumer`; `enqueue` rejects new work while shutting down or when
 * the queue is full, and `shutdown` waits for the in-flight job to finish.
 */
export interface EditQueue {
  // Add a job; throws Error('QUEUE_FULL') when depth >= MAX_QUEUE_DEPTH,
  // and silently drops the job while shutting down.
  enqueue(payload: EditJobPayload): void;
  // Register the single processor function and begin draining queued jobs.
  startConsumer(processor: (job: EditJobPayload) => Promise<void>): void;
  // Number of jobs currently waiting (not counting one in flight).
  getQueueDepth(): number;
  // Stop accepting work, wait for the current job, then drop the rest.
  shutdown(): Promise<void>;
}
// Maximum number of queued (not yet started) jobs; configurable via env.
const MAX_QUEUE_DEPTH = parseInt(process.env.MAX_QUEUE_DEPTH || '20', 10);
/**
 * Create an in-memory, single-consumer FIFO edit queue.
 *
 * Jobs are drained one at a time on the macrotask queue (`setImmediate`),
 * so `enqueue` never blocks the caller. Failures in the processor are
 * logged and do not stop the drain loop.
 *
 * @returns an {@link EditQueue} backed by a plain array.
 */
export function createEditQueue(): EditQueue {
  const jobs: EditJobPayload[] = [];
  let processing = false;
  let shuttingDown = false;
  let processor: ((job: EditJobPayload) => Promise<void>) | null = null;
  let resolveShutdown: (() => void) | null = null;

  async function drain() {
    // Single-flight guard: only one drain loop may run at a time.
    if (processing || !processor) return;
    processing = true;
    while (jobs.length > 0 && !shuttingDown) {
      const job = jobs.shift()!;
      logger.info({ event: 'job.started', kind: job.kind, id: job.id }, 'Processing job');
      try {
        await processor(job);
        logger.info({ event: 'job.completed', kind: job.kind, id: job.id }, 'Job completed');
      } catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        logger.error({ event: 'job.failed', kind: job.kind, id: job.id, error: message }, 'Job failed');
      }
    }
    processing = false;
    // Resolve a pending shutdown() as soon as the in-flight job finishes.
    // FIX: the previous condition also required `jobs.length === 0`, which
    // deadlocked shutdown() forever whenever jobs were still queued at
    // shutdown time — shutdown() only clears the queue AFTER this resolves.
    if (shuttingDown && resolveShutdown) {
      resolveShutdown();
      resolveShutdown = null;
    }
  }

  return {
    enqueue(payload: EditJobPayload) {
      if (shuttingDown) {
        // Deliberately a silent drop (logged), unlike the QUEUE_FULL throw below.
        logger.warn({ event: 'job.rejected', reason: 'shutting_down' }, 'Rejecting job — shutting down');
        return;
      }
      if (jobs.length >= MAX_QUEUE_DEPTH) {
        logger.warn({ event: 'job.rejected', reason: 'queue_full', depth: jobs.length }, 'Queue depth exceeded');
        throw new Error('QUEUE_FULL');
      }
      jobs.push(payload);
      logger.info({ event: 'job.enqueued', kind: payload.kind, id: payload.id, depth: jobs.length }, 'Job enqueued');
      // Start draining on next tick so enqueue returns immediately.
      if (processor) setImmediate(drain);
    },
    startConsumer(proc) {
      processor = proc;
      logger.info({ event: 'consumer.started' }, 'Edit queue consumer started');
      // Drain in case jobs were enqueued before the consumer was registered.
      if (jobs.length > 0) setImmediate(drain);
    },
    getQueueDepth() {
      return jobs.length;
    },
    async shutdown() {
      shuttingDown = true;
      const remaining = jobs.length;
      if (remaining > 0) {
        logger.warn({ event: 'consumer.shutdown', dropped: remaining }, `Shutting down with ${remaining} queued jobs`);
      }
      if (processing) {
        // Wait for the current job to finish; drain() resolves this promise.
        await new Promise<void>(resolve => { resolveShutdown = resolve; });
      }
      // Drop whatever is still queued.
      jobs.length = 0;
    },
  };
}

View File

@@ -0,0 +1,55 @@
import fs from 'node:fs';
import path from 'node:path';
import { sectionFileSchema } from '@dynamic-sites/shared';
// Root of the content repository checkout; defaults to the working directory.
const REPO_ROOT = process.env.REPO_ROOT || '.';
/**
 * One routable content file in the site, used by the LLM router to pick
 * which file an edit request targets. Only the label field matching the
 * section `type` is populated (headline for hero, title for about/features/
 * testimonials, heading for text).
 */
export interface ManifestEntry {
  id: string;
  type: string;
  title?: string;
  headline?: string;
  heading?: string;
  // Path relative to REPO_ROOT, e.g. 'content/sections/hero.json'.
  repo_relative_path: string;
  visible: boolean;
}
/**
 * Scan `content/sections/*.json` and build the routing manifest.
 *
 * Files that are unreadable, invalid JSON, or fail schema validation are
 * skipped silently (best-effort). The synthetic `events` entry for
 * `content/events.json` is always appended.
 *
 * @returns section entries in deterministic (sorted-filename) order,
 *          followed by the `events` entry.
 */
export function buildSectionManifest(): ManifestEntry[] {
  const sectionsDir = path.join(REPO_ROOT, 'content/sections');
  const manifest: ManifestEntry[] = [];
  // FIX: previously a missing sections dir early-returned and skipped the
  // unconditional events.json entry below, even though events.json does not
  // live in the sections dir.
  if (fs.existsSync(sectionsDir)) {
    // Sort for a deterministic manifest: readdir order is platform-dependent.
    const files = fs.readdirSync(sectionsDir).filter(f => f.endsWith('.json')).sort();
    for (const file of files) {
      try {
        const raw = JSON.parse(fs.readFileSync(path.join(sectionsDir, file), 'utf-8'));
        const parsed = sectionFileSchema.safeParse(raw);
        if (!parsed.success) continue;
        const s = parsed.data;
        const entry: ManifestEntry = {
          id: s.id,
          type: s.type,
          repo_relative_path: `content/sections/${file}`,
          visible: s.visible,
        };
        // Populate the one human-readable label the section type carries.
        if (s.type === 'hero') entry.headline = s.headline;
        if (s.type === 'about' || s.type === 'features' || s.type === 'testimonials') entry.title = s.title;
        if (s.type === 'text') entry.heading = s.heading;
        manifest.push(entry);
      } catch { /* skip bad files */ }
    }
  }
  // events.json is not schema-validated here; it is always routable.
  manifest.push({
    id: 'events',
    type: 'events',
    title: 'Events',
    repo_relative_path: 'content/events.json',
    visible: true,
  });
  return manifest;
}

View File

@@ -0,0 +1,178 @@
import fs from 'node:fs';
import path from 'node:path';
import crypto from 'node:crypto';
import type { EditJobPayload } from '@dynamic-sites/shared';
import { schemaForRepoRelativePath } from '@dynamic-sites/shared';
import { createProposal, getProposal, updateProposalStatus } from '../db.js';
import { writeContentFile } from '../io/write-content.js';
import { generateEditedJson, routeEditIntent, generateSummary } from '../llm/client.js';
import { buildSectionManifest } from './manifest.js';
import { sendSms } from '../sms/reply.js';
import { SMS_TEMPLATES } from '../sms/templates.js';
import { logger } from '../logger.js';
// Root of the content repository checkout; defaults to the working directory.
const REPO_ROOT = process.env.REPO_ROOT || '.';
/**
 * Entry point for the edit queue consumer: dispatch a job to the handler
 * for its kind. Unknown kinds are ignored.
 */
export async function processEditJob(job: EditJobPayload): Promise<void> {
  switch (job.kind) {
    case 'propose':
      await handlePropose(job);
      break;
    case 'apply':
      await handleApply(job);
      break;
  }
}
/**
 * Handle a 'propose' job: route the user's edit request to a content file,
 * generate the edited JSON + summary via the LLM, persist a proposal, and
 * notify the user (SMS when `smsReplyMeta` is present).
 *
 * Never throws: all failures are logged and, for SMS callers, reported back
 * with a templated message.
 */
async function handlePropose(job: Extract<EditJobPayload, { kind: 'propose' }>) {
  const log = logger.child({ jobId: job.id, kind: 'propose' });
  try {
    // Step 1: Route — determine which file the edit targets (skipped when
    // the caller already pinned a path).
    let repoRelativePath = job.repo_relative_path;
    if (!repoRelativePath) {
      const manifest = buildSectionManifest();
      log.debug({ event: 'routing.start', sections: manifest.length }, 'Routing edit intent');
      const routing = await routeEditIntent({ userMessage: job.message, manifest });
      if (routing.needs_clarification) {
        log.info({ event: 'routing.ambiguous' }, 'Routing ambiguous');
        if (job.smsReplyMeta) {
          await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to,
            routing.clarification_message || SMS_TEMPLATES.ROUTING_AMBIGUOUS(manifest.map(m => m.id).join(', '))
          );
        }
        return;
      }
      repoRelativePath = routing.repo_relative_path;
    }
    log.info({ event: 'routing.resolved', path: repoRelativePath }, 'Route resolved');
    // Step 2: Load current content + schema.
    const absPath = path.join(REPO_ROOT, repoRelativePath);
    if (!fs.existsSync(absPath)) {
      log.error({ event: 'propose.file_not_found', path: repoRelativePath }, 'Target file not found');
      if (job.smsReplyMeta) {
        await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.ROUTING_NO_MATCH(repoRelativePath));
      }
      return;
    }
    const currentJson = JSON.parse(fs.readFileSync(absPath, 'utf-8'));
    const schema = schemaForRepoRelativePath(repoRelativePath);
    if (!schema) {
      log.error({ event: 'propose.no_schema', path: repoRelativePath }, 'No schema for path');
      return;
    }
    const siteContext = JSON.parse(fs.readFileSync(path.join(REPO_ROOT, 'site-context.json'), 'utf-8'));
    // Step 3: Generate edited JSON via LLM.
    const editedJson = await generateEditedJson({
      currentJson,
      siteContext,
      userMessage: job.message,
      repoRelativePath,
      schema,
    });
    // Step 4: Generate a human-readable before/after summary.
    const summary = await generateSummary({
      before: currentJson,
      after: editedJson,
      repoRelativePath,
      userMessage: job.message,
    });
    // Step 5: Store the proposal for later confirmation via an 'apply' job.
    const proposalId = crypto.randomUUID();
    createProposal({
      proposalId,
      repoRelativePath,
      proposedJson: JSON.stringify(editedJson),
      summaryText: summary,
      source: job.source,
      // Store a truncated hash, never the raw phone number.
      phoneHash: job.smsReplyMeta?.from ? crypto.createHash('sha256').update(job.smsReplyMeta.from).digest('hex').slice(0, 16) : undefined,
    });
    log.info({ event: 'proposal.created', proposalId, path: repoRelativePath }, 'Proposal created');
    // Step 6: Notify user.
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_SUMMARY(summary, proposalId));
    }
    // For HTTP callers, the proposal_id is returned via the response (handled
    // in the route): the route handler reads these back-channel fields off the
    // mutated job object. NOTE(review): fragile contract — consider returning
    // a result object from processEditJob instead.
    (job as Record<string, unknown>)._proposalId = proposalId;
    (job as Record<string, unknown>)._summary = summary;
  } catch (err) {
    // FIX: narrow `unknown` safely instead of casting `(err as Error)`.
    const msg = err instanceof Error ? err.message : String(err);
    log.error({ event: 'propose.failed', error: msg }, 'Propose failed');
    if (job.smsReplyMeta) {
      // FIX: the original ternary on `msg === 'LLM_UNAVAILABLE'` selected
      // SMS_TEMPLATES.LLM_UNAVAILABLE() in BOTH branches (dead conditional).
      // NOTE(review): a distinct generic-error template was likely intended
      // for non-LLM failures — substitute it here if one exists.
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.LLM_UNAVAILABLE());
    }
  }
}
/**
 * Handle an 'apply' job: confirm a pending proposal, re-validate it against
 * its schema, write the content file, and mark the proposal 'applied'.
 *
 * Terminal outcomes (not found / already applied / expired / invalid status /
 * failed validation) are logged and, for SMS callers, reported back.
 */
async function handleApply(job: Extract<EditJobPayload, { kind: 'apply' }>) {
  const log = logger.child({ jobId: job.id, kind: 'apply', proposalId: job.proposal_id });
  const proposal = getProposal(job.proposal_id);
  if (!proposal) {
    log.warn({ event: 'apply.not_found' }, 'Proposal not found');
    return;
  }
  const now = Math.floor(Date.now() / 1000);
  if (proposal.status === 'applied') {
    // Idempotent confirm: re-applying is a no-op, not an error.
    log.info({ event: 'apply.already_applied' }, 'Proposal already applied');
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_ALREADY_APPLIED());
    }
    return;
  }
  // FIX: these two cases were previously one combined check that overwrote
  // ANY non-pending status (e.g. 'rejected') with 'expired', destroying the
  // stored state. Only a genuinely expired pending proposal is marked now.
  if (proposal.status !== 'pending') {
    log.info({ event: 'apply.invalid_status', status: proposal.status }, 'Proposal not pending');
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_EXPIRED());
    }
    return;
  }
  if (proposal.expires_at < now) {
    log.info({ event: 'apply.expired' }, 'Proposal expired');
    updateProposalStatus(job.proposal_id, 'expired');
    if (job.smsReplyMeta) {
      await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.PROPOSAL_EXPIRED());
    }
    return;
  }
  // Re-validate against the schema before touching disk — the stored JSON
  // came from an LLM and the schema may have changed since the proposal.
  const schema = schemaForRepoRelativePath(proposal.repo_relative_path);
  if (!schema) {
    log.error({ event: 'apply.no_schema' }, 'No schema for proposal path');
    return;
  }
  const proposedData = JSON.parse(proposal.proposed_json);
  const validation = schema.safeParse(proposedData);
  if (!validation.success) {
    log.error({ event: 'apply.validation_failed', errors: validation.error.message }, 'Proposed JSON fails validation');
    updateProposalStatus(job.proposal_id, 'rejected');
    // NOTE(review): the user gets no notification on this path — consider
    // adding an SMS template for rejected-by-validation.
    return;
  }
  // Write to disk, then persist the status flip.
  writeContentFile(proposal.repo_relative_path, validation.data, {
    proposalId: job.proposal_id,
    source: job.source,
  });
  updateProposalStatus(job.proposal_id, 'applied');
  log.info({ event: 'proposal.confirmed', path: proposal.repo_relative_path }, 'Proposal applied');
  if (job.smsReplyMeta) {
    await sendSms(job.smsReplyMeta.from, job.smsReplyMeta.to, SMS_TEMPLATES.APPLIED(proposal.summary_text));
  }
}