/* Screen components: Home (with inline recording), List, Detail */ const { useState, useEffect, useRef, useCallback, useMemo } = React; /* ---------- Home ---------- */ // Recording happens inline — no screen change. Button toggles idle → recording // (red, blinking) → transcribing (gray, spinner). When a transcript is ready, // onDone fires and the app navigates to Detail for the letter. function HomeScreen({ onDone, onList, recordingsCount }) { const [phase, setPhase] = useState("idle"); // idle | recording | transcribing | error const [elapsed, setElapsed] = useState(0); const [error, setError] = useState(""); const mediaRecorderRef = useRef(null); const chunksRef = useRef([]); const startTimeRef = useRef(0); const streamRef = useRef(null); const audioCtxRef = useRef(null); const durationRef = useRef(0); const timerRef = useRef(null); // Timer tick while recording useEffect(() => { if (phase !== "recording") return; timerRef.current = setInterval(() => { setElapsed((Date.now() - startTimeRef.current) / 1000); }, 250); return () => clearInterval(timerRef.current); }, [phase]); const teardownMic = useCallback(() => { if (streamRef.current) streamRef.current.getTracks().forEach(t => t.stop()); streamRef.current = null; if (audioCtxRef.current) { try { audioCtxRef.current.close(); } catch {} } audioCtxRef.current = null; }, []); const startRecording = useCallback(async () => { setError(""); try { const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); streamRef.current = stream; const candidates = [ "audio/webm;codecs=opus", "audio/webm", "audio/ogg;codecs=opus", "audio/mp4", ]; const mime = candidates.find(t => window.MediaRecorder && MediaRecorder.isTypeSupported(t)) || ""; const mr = new MediaRecorder(stream, mime ? 
{ mimeType: mime } : undefined); chunksRef.current = []; mr.ondataavailable = (e) => { if (e.data && e.data.size > 0) chunksRef.current.push(e.data); }; mr.start(1000); mediaRecorderRef.current = mr; startTimeRef.current = Date.now(); setElapsed(0); setPhase("recording"); } catch (err) { teardownMic(); setError(err.name === "NotAllowedError" ? "Mikrofonzugriff nicht erlaubt. Bitte Berechtigung erteilen." : "Mikrofon konnte nicht gestartet werden: " + (err.message || err)); setPhase("error"); } }, [teardownMic]); const stopAndTranscribe = useCallback(async () => { const mr = mediaRecorderRef.current; if (!mr) return; durationRef.current = (Date.now() - startTimeRef.current) / 1000; setPhase("transcribing"); const blob = await new Promise((resolve) => { if (mr.state === "inactive") { const type = (chunksRef.current[0] && chunksRef.current[0].type) || "audio/webm"; resolve(new Blob(chunksRef.current, { type })); return; } mr.onstop = () => { const type = mr.mimeType || "audio/webm"; resolve(new Blob(chunksRef.current, { type })); }; try { mr.stop(); } catch { resolve(new Blob(chunksRef.current, { type: "audio/webm" })); } }); teardownMic(); if (!blob || blob.size === 0) { setError("Keine Audiodaten aufgenommen."); setPhase("error"); return; } try { const ext = (blob.type.includes("ogg") && "ogg") || (blob.type.includes("mp4") && "m4a") || "webm"; const transcript = await window.claude.transcribe(blob, `dictation.${ext}`); if (!transcript.trim()) { setError("Leere Transkription. 
Bitte erneut versuchen."); setPhase("error"); return; } onDone({ transcript: transcript.trim(), duration: durationRef.current }); setPhase("idle"); setElapsed(0); } catch (err) { setError("Transkription fehlgeschlagen: " + (err.message || err)); setPhase("error"); } }, [onDone, teardownMic]); const handleButton = () => { if (phase === "idle" || phase === "error") startRecording(); else if (phase === "recording") stopAndTranscribe(); }; // Cleanup on unmount useEffect(() => () => { clearInterval(timerRef.current); try { mediaRecorderRef.current && mediaRecorderRef.current.state !== "inactive" && mediaRecorderRef.current.stop(); } catch {} teardownMic(); }, [teardownMic]); const recording = phase === "recording"; const transcribing = phase === "transcribing"; const errored = phase === "error"; let hint = "Drücken zum Starten"; if (recording) hint = fmtDuration(elapsed); else if (transcribing) hint = "Transkribiere…"; else if (errored) hint = "Erneut versuchen"; const btnClass = "record-btn" + (recording ? " recording" : "") + (transcribing ? " transcribing" : ""); const ariaLabel = recording ? "Aufnahme stoppen" : "Aufnahme starten"; return (
Kinderchirurgie · Diktatsystem

Bereit zur Aufnahme.

Starten Sie das Gespräch. Wir transkribieren und generieren den passenden Brief automatisch.
{hint}
{errored &&
{error}
}
); }

/* ---------- List ---------- */
// Read-only list of saved recordings. The rendered markup below iterates
// `recordings` and shows title, format name, duration, date and status;
// onOpen/onNew/onDelete are wired into that markup.
function ListScreen({ recordings, onOpen, onNew, onDelete }) { return (

Aufnahmen

{recordings.length} {recordings.length === 1 ? "Eintrag" : "Einträge"}
{recordings.length === 0 ? (

Noch keine Aufnahmen

Starten Sie eine neue Aufnahme, um zu beginnen.
) : (
    {recordings.map(r => (
  • onOpen(r.id)}>
    {r.title || "Ohne Titel"}
    {LETTER_FORMATS[r.format]?.name || "—"} · {fmtDuration(r.duration || 0)}
    {fmtDate(r.createdAt)}
    {r.status === "processing" ? "Generiert…" : r.status === "ready" ? "Fertig" : r.status}
  • ))}
)}
); } /* ---------- Detail ---------- */ // Render markdown → sanitized HTML. marked + DOMPurify are loaded via CDN in index.html. function renderMarkdown(src) { if (!src) return ""; const html = (window.marked && window.marked.parse) ? window.marked.parse(src, { breaks: true, gfm: true }) : src.replace(/\n/g, "
"); return window.DOMPurify ? window.DOMPurify.sanitize(html) : html; } function DetailScreen({ recording, onBack, onChange, onDelete, onRegenerate, showToast }) { const [letter, setLetter] = useState(recording.letter || ""); const [saved, setSaved] = useState(true); const [showFormatPicker, setShowFormatPicker] = useState(recording.needsFormat === true); const [regenerating, setRegenerating] = useState(recording.status === "processing"); const [mode, setMode] = useState("preview"); // "preview" | "edit" const saveTimer = useRef(null); // Sync when recording changes from outside (e.g. generation finished) useEffect(() => { setLetter(recording.letter || ""); setRegenerating(recording.status === "processing"); setShowFormatPicker(recording.needsFormat === true); }, [recording.id, recording.letter, recording.status, recording.needsFormat]); const handleLetterChange = (v) => { setLetter(v); setSaved(false); clearTimeout(saveTimer.current); saveTimer.current = setTimeout(() => { onChange(recording.id, { letter: v }); setSaved(true); }, 600); }; const handleFormatPick = async (fmtKey) => { setShowFormatPicker(false); setRegenerating(true); onChange(recording.id, { format: fmtKey, status: "processing", needsFormat: false }); try { const newLetter = await generateLetter(recording.transcript, fmtKey); onChange(recording.id, { letter: newLetter, format: fmtKey, status: "ready" }); setLetter(newLetter); } catch { onChange(recording.id, { status: "ready", letter: "Fehler bei der Generierung. Bitte erneut versuchen." }); } setRegenerating(false); }; const handleRegenerateFormat = (fmtKey) => { setShowFormatPicker(false); handleFormatPick(fmtKey); }; const handleCopy = () => { navigator.clipboard.writeText(letter); showToast("In Zwischenablage kopiert"); }; const handleDelete = () => { if (confirm("Aufnahme wirklich löschen?")) { onDelete(recording.id); } }; return (
{LETTER_FORMATS[recording.format]?.name || "Unbekannt"}

{recording.title || "Ohne Titel"}

{fmtDate(recording.createdAt)} · Dauer {fmtDuration(recording.duration || 0)}
{saved ? (
Gespeichert
) : (
Speichern…
)}
{regenerating ? (
Brief wird generiert…
{Array.from({length: 12}).map((_, i) => (
))}
) : mode === "preview" ? ( letter.trim() ? (
) : (
Brief wird generiert…
) ) : (