Files
helix-engage-server/widget-src/src/chat-stream.ts
saridsa2 aa41a2abb7 feat: widget chat with generative UI, branch selection, captcha gate, lead dedup
- Streaming AI chat via Vercel AI SDK v6 UI message stream — tool-based
  generative UI (pick_branch, list_departments, show_clinic_timings,
  show_doctors, show_doctor_slots, suggest_booking). Typing indicator,
  markdown suppressed, text parts hidden when widgets are rendered.
- Centralized Preact store (store.tsx) for visitor, leadId, captchaToken,
  bookingPrefill, doctors roster, branches, selectedBranch — replaces prop
  drilling across chat/book/contact tabs.
- Cloudflare Turnstile captcha gate rendered via light-DOM portal so it
  renders correctly inside the shadow DOM (Turnstile CSS doesn't cross
  shadow boundaries).
- Lead dedup helper (findOrCreateLeadByPhone, 24h phone window) shared
  across chat-start / book / contact so one visitor == one lead. Booking
  upgrades existing lead status NEW → APPOINTMENT_SET via updateLeadStatus.
- Pre-chat name+phone form captures the visitor; chat transcript logged
  to leadActivity records after each stream.
- Booking wizard gains a branch step 0 (skipped for single-branch
  hospitals); departments + doctors filtered by selectedBranch. Chat slot
  picks prefill the booking details step and lock the branch.
- Window-level captcha gate, modal maximize mode, header badge showing
  selected branch, widget font inherits from host page (fix :host { all:
  initial } override).
- 23 FA Pro 7.1 duotone icons bundled — medical departments, nav, actions,
  hospital/location-dot for branch context.
- main.ts: resolve public/ from process.cwd() so widget.js serves in both
  dev and prod. tsconfig: exclude widget-src/public/data from server tsc.
- captcha.guard: switch from reCAPTCHA v3 to Cloudflare Turnstile verify.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-06 16:04:46 +05:30

62 lines
2.5 KiB
TypeScript

// Minimal SSE + UIMessageChunk parser. The backend writes
// data: ${JSON.stringify(chunk)}\n\n
// for each AI SDK UIMessageChunk, plus a final `data: [DONE]\n\n`.
// We reconstruct events by buffering stream text and splitting on blank lines.
/**
 * One parsed event from the AI SDK UI-message stream.
 *
 * The named variants mirror the chunk types this widget actually consumes
 * (stream lifecycle, text streaming, and tool-call streaming). The final
 * `{ type: string; ... }` member is a deliberate forward-compat catch-all so
 * unknown chunk types flow through without a parse failure — note that it
 * also widens the union, so consumers must check `chunk.type` at runtime
 * rather than rely on exhaustive discriminated-union narrowing.
 */
export type UIMessageChunk =
// Stream lifecycle markers emitted once per response / step.
| { type: 'start'; messageId?: string }
| { type: 'start-step' }
| { type: 'finish-step' }
| { type: 'finish' }
| { type: 'error'; errorText: string }
// Incremental assistant text: start / delta / end share the same part `id`.
| { type: 'text-start'; id: string }
| { type: 'text-delta'; id: string; delta: string }
| { type: 'text-end'; id: string }
// Tool-call streaming: input arrives as text deltas, then fully-parsed
// `input`; `output` (or an error) follows once the tool ran server-side.
| { type: 'tool-input-start'; toolCallId: string; toolName: string }
| { type: 'tool-input-delta'; toolCallId: string; inputTextDelta: string }
| { type: 'tool-input-available'; toolCallId: string; toolName: string; input: any }
| { type: 'tool-output-available'; toolCallId: string; output: any }
| { type: 'tool-output-error'; toolCallId: string; errorText: string }
// Catch-all for chunk types added by future SDK versions.
| { type: string; [key: string]: any };
// Reads the SSE body byte stream and yields UIMessageChunk objects.
/**
 * Reads the SSE response body and yields one `UIMessageChunk` per `data:`
 * line, skipping comments, `event:` lines, the `[DONE]` sentinel, and any
 * payload that fails to parse as JSON (a bad event must not kill the stream).
 *
 * Fixes over the naive version:
 * - On end-of-stream we flush the `TextDecoder` (it may hold a partial
 *   multi-byte sequence) and parse any final event that was not terminated
 *   by a blank line, instead of silently dropping it.
 * - CRLF line endings are normalized to LF so `\r\n\r\n`-terminated events
 *   (e.g. after a proxy rewrites the stream) still split correctly. Raw CRLF
 *   cannot occur inside a JSON payload (JSON strings escape newlines), so
 *   the normalization is safe.
 *
 * @param body  The `fetch` response body stream; its reader lock is always
 *              released, even if the consumer stops iterating early.
 */
export async function* readChatStream(
body: ReadableStream<Uint8Array>,
): AsyncGenerator<UIMessageChunk> {
const reader = body.getReader();
const decoder = new TextDecoder();
let buffer = '';
// Parses one raw SSE event (the text between blank-line separators) into
// zero or more chunks — one per valid "data:" line it contains.
const parseEvent = (rawEvent: string): UIMessageChunk[] => {
const chunks: UIMessageChunk[] = [];
for (const line of rawEvent.split('\n')) {
if (!line.startsWith('data:')) continue;
const payload = line.slice(5).trimStart();
if (!payload || payload === '[DONE]') continue;
try {
chunks.push(JSON.parse(payload) as UIMessageChunk);
} catch {
// Bad JSON — skip this event rather than crash the stream.
}
}
return chunks;
};
try {
while (true) {
const { done, value } = await reader.read();
if (done) break;
buffer += decoder.decode(value, { stream: true });
// Normalize CRLF; a lone trailing '\r' waiting for its '\n' stays
// buffered and is normalized once the next chunk arrives.
buffer = buffer.replace(/\r\n/g, '\n');
// Each SSE event is terminated by a blank line. Split off complete
// events and keep the trailing partial in buffer.
let sep: number;
while ((sep = buffer.indexOf('\n\n')) !== -1) {
const rawEvent = buffer.slice(0, sep);
buffer = buffer.slice(sep + 2);
yield* parseEvent(rawEvent);
}
}
// End of stream: flush decoder state and parse any unterminated tail.
buffer += decoder.decode();
buffer = buffer.replace(/\r\n/g, '\n');
if (buffer) yield* parseEvent(buffer);
} finally {
reader.releaseLock();
}
}