# StreamingTextDisplay Component

**Phase 3 - Voice Mode v4.1**

A text display component optimized for streaming AI responses, with RTL support and typewriter effects.

## Overview

The StreamingTextDisplay component renders text character-by-character as it streams from the AI, providing visual feedback that the assistant is actively responding. It handles bidirectional text, code blocks, and mixed-language content.

````
+------------------------------------------+
| The assistant is responding...           |
| Hello! I can help you with that.|        | ← Blinking cursor
|                                          |
| ```python                                |
| def hello():                             |
|     print("Hello, World!")               |
| ```                                  [●] | ← Streaming indicator
+------------------------------------------+
````

## Features

- **Typewriter Effect**: Smooth character-by-character rendering
- **RTL Auto-Detection**: Detects Arabic, Hebrew, Farsi, and Urdu content
- **Bidirectional Support**: Handles mixed LTR/RTL text per paragraph
- **Code Block Rendering**: Syntax-highlighted code sections
- **Streaming Indicator**: Visual badge showing streaming status
- **Cursor Animation**: Blinking cursor at the insertion point
- **Auto-Scroll**: Keeps the latest content in view

## Usage

### Basic Usage

```tsx
import { useState } from "react";
import { StreamingTextDisplay } from "@/components/voice/StreamingTextDisplay";

function ResponsePanel() {
  // setText / setIsStreaming would be wired to your streaming source
  const [text, setText] = useState("");
  const [isStreaming, setIsStreaming] = useState(false);

  return (
    <StreamingTextDisplay
      text={text}
      isStreaming={isStreaming}
      onStreamComplete={() => console.log("Stream finished")}
    />
  );
}
```

### With useStreamingText Hook

```tsx
import { useEffect } from "react";
import {
  StreamingTextDisplay,
  useStreamingText,
} from "@/components/voice/StreamingTextDisplay";

function AIResponse() {
  const { text, isStreaming, startStreaming, appendText, stopStreaming, reset } =
    useStreamingText();

  useEffect(() => {
    // Simulate streaming from API
    startStreaming();
    const chunks = ["Hello", " there!", " How", " can", " I", " help?"];
    let i = 0;
    const interval = setInterval(() => {
      if (i < chunks.length) {
        appendText(chunks[i]);
        i++;
      } else {
        stopStreaming();
        clearInterval(interval);
      }
    }, 100);

    return () => clearInterval(interval);
  }, []);

  return <StreamingTextDisplay text={text} isStreaming={isStreaming} />;
}
```

## Props

| Prop               | Type         | Default                    | Description                          |
| ------------------ | ------------ | -------------------------- | ------------------------------------ |
| `text`             | `string`     | required                   | Text content to display              |
| `isStreaming`      | `boolean`    | `false`                    | Whether text is still streaming      |
| `languageCode`     | `string`     | -                          | Override RTL detection               |
| `typewriterSpeed`  | `number`     | `60`                       | Characters per second (0 = instant)  |
| `showCursor`       | `boolean`    | `true`                     | Show blinking cursor while streaming |
| `onStreamComplete` | `() => void` | -                          | Callback when streaming finishes     |
| `className`        | `string`     | -                          | Additional CSS classes               |
| `testId`           | `string`     | `"streaming-text-display"` | Test ID attribute                    |
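In TypeScript terms, the table maps to roughly the following props shape. This is a sketch inferred from the table (the `StreamingTextDisplayProps` name is assumed); check the component source for the actual exported type.

```tsx
// Sketch of the props shape implied by the table above.
// The interface name is assumed, not the component's exported type.
interface StreamingTextDisplayProps {
  /** Text content to display (required) */
  text: string;
  /** Whether text is still streaming (default: false) */
  isStreaming?: boolean;
  /** Override RTL detection, e.g. "ar" or "he" */
  languageCode?: string;
  /** Characters per second; 0 renders instantly (default: 60) */
  typewriterSpeed?: number;
  /** Show blinking cursor while streaming (default: true) */
  showCursor?: boolean;
  /** Callback fired when streaming finishes */
  onStreamComplete?: () => void;
  /** Additional CSS classes */
  className?: string;
  /** Test ID attribute (default: "streaming-text-display") */
  testId?: string;
}
```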
## RTL Support

### Automatic Detection

The component detects RTL content using Unicode character ranges:

```tsx
// RTL character detection regex
const RTL_CHAR_REGEX = /[\u0590-\u05FF\u0600-\u06FF\u0750-\u077F...]/;

// Detects these language scripts:
// - Hebrew (0590-05FF)
// - Arabic (0600-06FF, 0750-077F, 08A0-08FF)
// - Arabic Presentation Forms (FB50-FDFF, FE70-FEFF)
```

### Per-Paragraph Direction

Each paragraph is analyzed independently for optimal bidirectional display:

```tsx
// Mixed content example
const text = `
English paragraph here.
مرحبا بك في المساعد الصوتي // Arabic - RTL
This paragraph follows.
`;

// Each line gets appropriate dir="ltr" or dir="rtl"
```
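To make the per-paragraph behavior concrete, a helper along these lines could assign a direction to each line. This is an illustrative sketch, not the component's internal code; the function names and the expanded character ranges are assumptions.

```tsx
import * as React from "react";

// Sketch only: per-line direction assignment. Names and the expanded
// character ranges are assumptions; the component's internals may differ.
const RTL_CHARS =
  /[\u0590-\u05FF\u0600-\u06FF\u0750-\u077F\u08A0-\u08FF\uFB50-\uFDFF\uFE70-\uFEFF]/;

function getLineDirection(line: string): "ltr" | "rtl" {
  // Treat a line as RTL if it contains any character from an RTL script.
  return RTL_CHARS.test(line) ? "rtl" : "ltr";
}

function renderParagraphs(text: string) {
  return text
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line, i) => (
      <p key={i} dir={getLineDirection(line)}>
        {line}
      </p>
    ));
}
```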
### Manual Override

```tsx
// Force RTL for the entire component by overriding detection
<StreamingTextDisplay text={text} languageCode="ar" />;

// Or via store settings
const { rtlEnabled, rtlAutoDetect } = useVoiceSettingsStore();
```

## Code Block Handling

Code blocks are always rendered LTR, regardless of the surrounding text direction:

```tsx
const text = `
Here's an example:

\`\`\`python
def greet(name):
    return f"Hello, {name}!"
\`\`\`

The function returns a greeting.
`;

// Code block rendered with:
// - dir="ltr" (always)
// - Monospace font
// - Background highlight
// - Horizontal scroll for long lines
```
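One way to picture this handling: split the streamed text on triple-backtick fences and render the code segments LTR. The sketch below is illustrative only and is not the component's actual parser.

```tsx
// Sketch: split streamed text into prose and code segments.
// A real implementation would also handle an unterminated fence mid-stream.
type Segment =
  | { kind: "prose"; content: string }
  | { kind: "code"; language: string; content: string };

function splitSegments(text: string): Segment[] {
  const segments: Segment[] = [];
  const fence = /```(\w*)\n([\s\S]*?)```/g; // ```lang ... ``` blocks, non-greedy
  let last = 0;
  let match: RegExpExecArray | null;

  while ((match = fence.exec(text)) !== null) {
    if (match.index > last) {
      segments.push({ kind: "prose", content: text.slice(last, match.index) });
    }
    segments.push({ kind: "code", language: match[1], content: match[2] });
    last = fence.lastIndex;
  }
  if (last < text.length) {
    segments.push({ kind: "prose", content: text.slice(last) });
  }
  return segments;
}

// Code segments are rendered LTR in a monospace <pre>; prose segments go
// through the per-paragraph direction logic described above.
```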
## Typewriter Effect

### Speed Control

```tsx
// Fast typing (120 chars/sec)
<StreamingTextDisplay text={text} typewriterSpeed={120} />

// Slow typing (30 chars/sec)
<StreamingTextDisplay text={text} typewriterSpeed={30} />

// Instant (no animation)
<StreamingTextDisplay text={text} typewriterSpeed={0} />
```

### Effect Behavior

- New characters animate in at the specified speed
- Deleted or replaced text updates instantly
- Animation pauses when streaming stops
- The cursor disappears when not streaming
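A minimal sketch of that behavior, assuming a hypothetical `useTypewriter` hook (not the component's real internals):

```tsx
import { useEffect, useRef, useState } from "react";

// Sketch only: reveals `target` at roughly `charsPerSecond`.
// The hook name and details are assumptions, not the component's implementation.
function useTypewriter(target: string, charsPerSecond: number, isStreaming: boolean) {
  const [visibleCount, setVisibleCount] = useState(0);
  const countRef = useRef(0);

  useEffect(() => {
    // Instant mode, or deleted/replaced text: update immediately.
    if (charsPerSecond <= 0 || target.length < countRef.current) {
      countRef.current = target.length;
      setVisibleCount(target.length);
      return;
    }
    // Pause the animation while not streaming.
    if (!isStreaming) return;

    const interval = setInterval(() => {
      if (countRef.current < target.length) {
        countRef.current += 1;
        setVisibleCount(countRef.current);
      }
    }, 1000 / charsPerSecond);
    return () => clearInterval(interval);
  }, [target, charsPerSecond, isStreaming]);

  return target.slice(0, visibleCount);
}
```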
## useStreamingText Hook

The component exports a convenience hook for managing streaming state:

```tsx
const {
  text,           // Current text content
  isStreaming,    // Streaming status
  startStreaming, // Begin streaming
  appendText,     // Add text chunk
  stopStreaming,  // End streaming
  reset,          // Clear all text
  setText,        // Direct text setter
} = useStreamingText(initialText);
```

### Hook Usage Example

```tsx
function useAIStream(sessionId: string) {
  const streaming = useStreamingText();

  useEffect(() => {
    const ws = new WebSocket(`/voice/${sessionId}/stream`);

    ws.onopen = () => streaming.startStreaming();
    ws.onmessage = (e) => streaming.appendText(e.data);
    ws.onclose = () => streaming.stopStreaming();

    return () => ws.close();
  }, [sessionId]);

  return streaming;
}
```
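For intuition, the hook's surface can be approximated with plain React state. The sketch below (deliberately named `useStreamingTextSketch`) is not the library implementation and omits details such as batching and scroll handling.

```tsx
import { useCallback, useState } from "react";

// Sketch of the hook's shape; not the library's actual implementation.
function useStreamingTextSketch(initialText = "") {
  const [text, setText] = useState(initialText);
  const [isStreaming, setIsStreaming] = useState(false);

  const startStreaming = useCallback(() => setIsStreaming(true), []);
  const stopStreaming = useCallback(() => setIsStreaming(false), []);
  const appendText = useCallback(
    (chunk: string) => setText((prev) => prev + chunk),
    []
  );
  const reset = useCallback(() => {
    setText("");
    setIsStreaming(false);
  }, []);

  return { text, isStreaming, startStreaming, appendText, stopStreaming, reset, setText };
}
```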
## StreamingTextLine Component

A compact single-line variant for inline displays:

```tsx
import { StreamingTextLine } from "@/components/voice/StreamingTextDisplay";

function StatusLine() {
  return (
    <div>
      Status: <StreamingTextLine text={text} isStreaming={isStreaming} />
    </div>
  );
}
```

## Accessibility

- `role="region"` for screen reader context
- `aria-live="polite"` for streaming updates
- `aria-atomic="false"` for incremental reading
- `aria-busy` indicates streaming status

```tsx
<div
  role="region"
  aria-live="polite"
  aria-atomic="false"
  aria-busy={isStreaming}
>
  {/* Streaming content */}
</div>
```

## Styling

### Default Styles

```css
.streaming-text-display {
  /* Container */
  position: relative;
  overflow-y: auto;

  /* Text */
  color: neutral-900 / neutral-100 (dark);

  /* Code blocks */
  .pre {
    background: neutral-100 / neutral-800 (dark);
    border-radius: 0.5rem;
    padding: 0.75rem;
    font-family: monospace;
  }
}
```

### Custom Styling

```tsx
// Additional classes are merged in via the className prop
<StreamingTextDisplay text={text} className="my-custom-streaming-text" />
```

## Performance Considerations

- Text is segmented for efficient re-rendering
- Cursor animation uses CSS, not JS
- Auto-scroll is debounced for a smooth experience
- Large texts are truncated with a "..." indicator
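As a rough illustration of the debounced auto-scroll point above, assuming a hypothetical `useAutoScroll` helper and a 50 ms delay (both assumptions, not the component's actual code):

```tsx
import { useEffect, useRef } from "react";

// Sketch: keep the newest content in view, debounced so rapid chunks
// don't trigger a scroll on every render. The delay value is an assumption.
function useAutoScroll(dep: string, delayMs = 50) {
  const containerRef = useRef<HTMLDivElement | null>(null);

  useEffect(() => {
    const timer = setTimeout(() => {
      const el = containerRef.current;
      if (el) {
        el.scrollTop = el.scrollHeight; // scroll to the bottom
      }
    }, delayMs);
    return () => clearTimeout(timer);
  }, [dep, delayMs]);

  return containerRef;
}
```

The returned ref would be attached to the scrollable container, with the streamed text passed as the dependency.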
## Integration Example

```tsx
import { StreamingTextDisplay } from "@/components/voice/StreamingTextDisplay";
import { useVoiceResponse } from "@/hooks/useVoiceResponse";

function VoiceResponsePanel() {
  const { responseText, isGenerating, detectedLanguage } = useVoiceResponse();

  return (
    <div>
      <StreamingTextDisplay
        text={responseText}
        isStreaming={isGenerating}
        languageCode={detectedLanguage}
        onStreamComplete={() => {
          // Play completion sound, etc.
        }}
      />
    </div>
  );
}
```
## Related Documentation

- [VoiceFirstInputBar](./voice-first-input-bar.md)
- [RTL Support Guide](./rtl-support-guide.md)
- [Voice Mode v4 Overview](./voice-mode-v4-overview.md)