import { useEffect, useRef, useState } from "react";
import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
import { Countdown } from "../comps/timer";
import { Status, useChat } from "../util/useChat";
import { getSummary } from "../util/chat";
import { saveHistory } from "../util/output";
import NumPad from "../comps/numpad";
import { Light } from "../comps/light";
import { useData } from "../util/useData";
import VoiceAnalysis from "../comps/voiceanalysis";
import { sendOsc, OSC_ADDRESS } from "../util/osc";

const EmojiType = {
  phone: '📞',
  headphone: '🎧',
  speaker: '🔊',
  chat: '🤖',
  user_input: '💬',
};

export function Flow() {
  const { data } = useData();
  const [cuelist, setCuelist] = useState([]);
  const [currentCue, setCurrentCue] = useState(null);
  const [chatWelcome, setChatWelcome] = useState(null);
  const [audioInput, setAudioInput] = useState(true);
  const [autoSend, setAutoSend] = useState(true);
  const [userId, setUserId] = useState();

  const refTimer = useRef();
  const refAudio = useRef();
  const refInput = useRef();
  const refLight = useRef();
  const refCurrentCue = useRef(null);

  const {
    history,
    status,
    reset,
    sendMessage,
    setStatus,
    audioOutput,
    setAudioOutput,
    stop: stopChat,
    audioUrl,
  } = useChat();

  const {
    transcript,
    finalTranscript,
    listening,
    resetTranscript,
    browserSupportsSpeechRecognition,
    isMicrophoneAvailable,
  } = useSpeechRecognition();

  function playAudio(url) {
    if (!url) return;
    console.log('Playing audio:', url);
    if (refAudio.current) {
      refAudio.current.pause(); // Stop any currently playing audio
    }
    const audio = new Audio(url);
    audio.loop = refCurrentCue.current?.loop || false; // Loop if the current cue requests it
    audio.play().catch(error => {
      console.error("Audio playback error:", error);
    });
    audio.onended = () => {
      onCueEnd();
    };
    refAudio.current = audio; // Store the new audio reference

    audio.addEventListener("loadedmetadata", () => {
      refTimer.current?.restart(audio.duration * 1000 || 0);
    });
  }

  function playCue(cue) {
    if (!cue) return;
    console.log('Playing cue:', cue);
    setCurrentCue(cue);
    refCurrentCue.current = cue; // Store the current cue in a ref

    if (parseFloat(cue.id) <= 4.2) { // Cues up to 4.2 start from a fresh conversation
      console.log('clear conversation...');
      reset();
    }

    if (cue.type == 'chat') { // Chat cues either start the conversation or forward the user's message
      resetTranscript();
      if (cue.callback == "start_conversation") {
        console.log('Starting conversation...');
        sendMessage();
        setChatWelcome(true);
      } else {
        const message = refInput.current?.value?.trim();
        if (message && message.length > 0) {
          sendMessage(message);
          setChatWelcome(false);
        } else {
          onCueEnd(cue); // If there is no message, just continue to the next cue
        }
      }
    }

    if (cue.status) {
      sendOsc(OSC_ADDRESS.STATUS, cue.status); // Send OSC status message
    }
    if (cue.audioFile) {
      playAudio(cue.audioFile);
    }
    if (cue.duration) {
      refTimer.current.restart(cue.duration * 1000, () => {
        onCueEnd(cue);
      });
    }
  }

  function onCueEnd() {
    refTimer.current?.stop(); // Stop the timer when the cue ends
    if (!refCurrentCue.current) return;
    const cue = refCurrentCue.current; // Get the current cue from the ref
    console.log('onCueEnd:', cue.id);

    if (cue.callback == 'start_conversation') refLight.current.fadeOut(); // Fade the light out when the conversation starts
    if (cue.callback == 'summary') refLight.current.fadeIn(); // Fade the light back in when the conversation ends

    resetTranscript(); // Reset the transcript after the cue ends

    if (cue.auto) {
      playCue(cuelist.find(c => c.id === cue.nextcue));
    }
  }

  function onStop() {
    console.log('Stopping current cue');
    if (refAudio.current) {
      refAudio.current.pause();
      refAudio.current = null;
    }
    setCurrentCue(null);
    refCurrentCue.current = null; // Clear the current cue reference
    refTimer.current.restart(0);
    stopChat(); // Stop chat processing
  }

  function onNumpad(mess) {
    if (refCurrentCue.current?.callback != 'numpad') return;
    console.log('Numpad input:', mess);
    setUserId(() => mess);
  }

  function saveImage() {
    sendOsc('/export', 'output/test.png'); // Send OSC message to save the rendered image
  }

  useEffect(() => {
    if (userId >= 1 && userId <= 24) {
      console.log('User ID set:', userId);
      playCue(cuelist.find(c => c.id === currentCue?.nextcue)); // Play the next cue (cue 5) once a valid userId is set
    }
  }, [userId]);

  function onSpeechEnd() {
    console.log('onSpeechEnd:', finalTranscript);
    if (currentCue?.type != 'user_input') return; // Only process if the current cue expects user input
    if (autoSend && transcript.trim().length > 0) {
      console.log('Auto sending transcript:', transcript);
      onCueEnd();
    }
  }

  useEffect(() => {
    onSpeechEnd(); // Called whenever finalTranscript changes
  }, [finalTranscript]);

  useEffect(() => {
    if (audioInput && isMicrophoneAvailable) {
      SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
        console.log("Speech recognition started.");
      }).catch(error => {
        console.error("Error starting speech recognition:", error);
      });

      const recognition = SpeechRecognition.getRecognition();
      recognition.onspeechstart = (e) => {
        console.log('Sound start:', e);
      };
      // recognition.onspeechend = (e) => {
      //   console.log('Speech end:', e);
      //   if (autoSend && transcript.trim().length > 0) {
      //     onCueEnd();
      //   }
      // };
      // recognition.onaudioend = (e) => {
      //   console.log('Audio end:', e);
      //   if (autoSend && transcript.trim().length > 0) {
      //     onCueEnd();
      //   }
      // };
      // recognition.onsoundend = (e) => {
      //   console.log('Sound end:', e);
      //   if (autoSend && transcript.trim().length > 0) {
      //     onCueEnd();
      //   }
      // };
    } else {
      console.log('Stopping speech recognition...');
      SpeechRecognition.stopListening();
    }
  }, [audioInput]);

  useEffect(() => {
    // if (listening) {
    if (currentCue?.type == 'user_input') refInput.current.value = transcript;
    // }
  }, [transcript]);

  useEffect(() => {
    if (audioUrl) playAudio(audioUrl);
  }, [audioUrl]);

  useEffect(() => {
    switch (status) {
      case Status.SUCCESS:
        console.log('Success!');
        setStatus(Status.IDLE);
        refInput.current.value = '';
        resetTranscript();
        if (chatWelcome) {
          return;
        }
        // play next cue
        // if (currentCue.nextcue != 5 && currentCue.nextcue != 6) { // Q5 & Q6 wait for audio end
        //   if (currentCue.nextcue) {
        //     playCue(cuelist.find(c => c.id === currentCue.nextcue));
        //   } else {
        //     setCurrentCue(null);
        //   }
        // }
        if (refCurrentCue.current?.callback == 'summary') {
          // Get the conversation summary
          console.log('Getting summary...');
          getSummary(history.map(el => `${el.role}:${el.content}`).join('\n'), data).then(summary => {
            console.log('Summary:', summary);
          }).catch(error => {
            console.error('Error getting summary:', error);
          });
        }
        break;
    }
  }, [status]);

  useEffect(() => {
    fetch('/cuelist.json')
      .then(response => response.json())
      .then(data => {
        console.log('Cuelist data:', data);
        setCuelist(data.cuelist);
      })
      .catch(error => {
        console.error('Error fetching cuelist:', error);
      });
  }, []);

  return (
    <div>
      {/* Markup reconstructed: the original JSX tags, class names and layout were lost, so plain
          wrappers are used here. Elements marked "assumed" are inferred from how the refs and
          handlers are used above, not recovered from the source; controls that trigger playCue,
          onStop, saveImage and saveHistory were also rendered here but could not be recovered. */}
      <Countdown ref={refTimer} />   {/* assumed: countdown timer driven through refTimer */}
      <Light ref={refLight} />       {/* assumed: light overlay driven through refLight */}
      <NumPad onInput={onNumpad} />  {/* assumed prop name for the numpad callback */}
      <VoiceAnalysis />
      <textarea ref={refInput} />    {/* assumed: user-input field bound to refInput */}

      <div>{refCurrentCue.current?.name}</div>
      <table>
        <thead>
          <tr>
            <th>ID</th>
            <th>Name</th>
            <th>Description</th>
            <th>Type</th>
            <th>Auto</th>
            <th>Audio / Duration</th>
          </tr>
        </thead>
        <tbody>
          {cuelist?.map(({ id, name, description, type, auto, audioFile, ...props }, index) => (
            <tr key={id}>
              <td>{id}</td>
              <td>{name}</td>
              <td>{description}</td>
              <td>{EmojiType[type]}</td>
              <td>{auto ? '⤵️' : ''}</td>
              <td>{audioFile || props.duration} {props.callback && `<${props.callback}>`}</td>
            </tr>
          ))}
        </tbody>
      </table>
      {history?.map((msg, index) => (
        <div key={index}>
          <div>{msg.content}</div>
          {msg.prompt &&
            <div>{msg.prompt}</div>
          }
        </div>
      ))}
      {/* Checkbox markup reconstructed; the "checked" bindings and label text are assumed */}
      <label>
        <input type="checkbox" checked={audioOutput} onChange={e => setAudioOutput(e.target.checked)} /> audioOutput
      </label>
      <label>
        <input type="checkbox" checked={audioInput} onChange={e => setAudioInput(e.target.checked)} /> audioInput
      </label>
      <label>
        <input type="checkbox" checked={autoSend} onChange={e => setAutoSend(e.target.checked)} /> autoSend
      </label>
      <div>chat_status = {status}</div>
    </div>
  );
}
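
/*
  For reference, a minimal sketch of the shape this component expects from /cuelist.json,
  inferred only from how cue fields are read above (id, name, description, type, auto,
  audioFile, duration, loop, status, callback, nextcue). The concrete values below are
  illustrative assumptions, not taken from the real file:

  {
    "cuelist": [
      { "id": "1",   "name": "Intro",      "description": "Opening audio", "type": "speaker",
        "audioFile": "audio/intro.mp3", "auto": true, "nextcue": "2", "status": "intro" },
      { "id": "4.2", "name": "Start chat", "description": "Begin the conversation", "type": "chat",
        "callback": "start_conversation", "auto": true, "nextcue": "5" },
      { "id": "5",   "name": "Listen",     "description": "Wait for the visitor to speak",
        "type": "user_input", "duration": 30, "auto": true, "nextcue": "6" }
    ]
  }
*/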