diff --git a/vite/package-lock.json b/vite/package-lock.json index 5869510..8fb4065 100644 --- a/vite/package-lock.json +++ b/vite/package-lock.json @@ -12,6 +12,7 @@ "@tauri-apps/plugin-fs": "^2.3.0", "@tauri-apps/plugin-http": "^2.4.4", "gsap": "^3.13.0", + "moment": "^2.30.1", "react": "^19.1.0", "react-dom": "^19.1.0", "react-router": "^7.6.2", @@ -2629,6 +2630,14 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/moment": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", + "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==", + "engines": { + "node": "*" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -4608,6 +4617,11 @@ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" }, + "moment": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", + "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==" + }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", diff --git a/vite/package.json b/vite/package.json index f589287..eaf2106 100644 --- a/vite/package.json +++ b/vite/package.json @@ -14,6 +14,7 @@ "@tauri-apps/plugin-fs": "^2.3.0", "@tauri-apps/plugin-http": "^2.4.4", "gsap": "^3.13.0", + "moment": "^2.30.1", "react": "^19.1.0", "react-dom": "^19.1.0", "react-router": "^7.6.2", diff --git a/vite/public/cuelist.json b/vite/public/cuelist.json index ab3752d..0e0ec01 100644 --- a/vite/public/cuelist.json +++ b/vite/public/cuelist.json @@ -38,27 +38,59 @@ "id": 4.1, "name": "Q4.1", "type": "chat", - "description": "c1", + "description": "chat-1 system", "auto": true, - "duration": 40, "nextcue": 4.2 }, { "id": 4.2, "name": 
"Q4.2", - "type": "chat", - "description": "c2", + "type": "user_input", + "description": "chat-1 user", "auto": true, - "duration": 40, + "duration": 20, "nextcue": 4.3 }, { "id": 4.3, "name": "Q4.3", "type": "chat", - "description": "c3", + "description": "chat-2 system", + "auto": true, + "nextcue": 4.4 + }, + { + "id": 4.4, + "name": "Q4.4", + "type": "user_input", + "description": "chat-2 user", + "auto": true, + "duration": 20, + "nextcue": 4.5 + }, + { + "id": 4.5, + "name": "Q4.5", + "type": "chat", + "description": "chat-3 system", "auto": true, - "duration": 40, + "nextcue": 4.6 + }, + { + "id": 4.6, + "name": "Q4.6", + "type": "user_input", + "description": "chat-3 user", + "auto": true, + "duration": 20, + "nextcue": 4.7 + }, + { + "id": 4.7, + "name": "Q4.7", + "type": "chat", + "description": "chat-3 system", + "auto": true, - "duration": 40, "nextcue": 5 }, { @@ -73,11 +105,20 @@ { "id": 5.1, "name": "Q5.1", - "type": "chat", + "type": "user_input", "description": "call", "duration": 60, "auto": true, - "nextcue": 6 + "nextcue": 5.2 + }, + { + "id": 5.2, + "name": "Q5.2", + "type": "chat", + "description": "summary", + "auto": true, + "nextcue": 6, + "callback":"summary" }, { "id": 6, diff --git a/vite/src/pages/conversation.jsx b/vite/src/pages/conversation.jsx index bea4da4..a7e65e3 100644 --- a/vite/src/pages/conversation.jsx +++ b/vite/src/pages/conversation.jsx @@ -363,7 +363,7 @@ export function Conversation() {
diff --git a/vite/src/pages/flow.jsx b/vite/src/pages/flow.jsx index f05473e..a8f736d 100644 --- a/vite/src/pages/flow.jsx +++ b/vite/src/pages/flow.jsx @@ -1,14 +1,19 @@ import { useEffect, useRef, useState } from "react"; +import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition'; + import { Countdown } from "../comps/timer"; -import { Conversation } from "./conversation"; + import { Status, useChat } from "../util/useChat"; +import { getSummary } from "../util/chat"; +import { saveHistory } from "../util/output"; const EmojiType={ phone: '📞', headphone: '🎧', speaker: '🔊', - chat: '💬', + chat: '🤖', + user_input: '💬', } export function Flow(){ @@ -16,20 +21,56 @@ export function Flow(){ const [cuelist, setCuelist] = useState([]); const [currentCue, setCurrentCue] = useState(null); const [chatWelcome, setChatWelcome] = useState(null); + const [audioInput, setAudioInput] = useState(false); const refTimer=useRef(); const refAudio=useRef(); const refInput=useRef(); - const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop:stopChat }=useChat(); + const refCurrentCue= useRef(null); + + const { history, status, reset, sendMessage, setStatus, audioOutput, setAudioOutput, stop:stopChat, audioUrl, }=useChat(); + const { + transcript, + finalTranscript, + listening, + resetTranscript, + browserSupportsSpeechRecognition, + isMicrophoneAvailable, + }=useSpeechRecognition(); + + function playAudio(url){ + if(!url) return; + + console.log('Playing audio:', url); + + if(refAudio.current) { + refAudio.current.pause(); // Stop any currently playing audio + } + const audio = new Audio(url); + audio.loop=refCurrentCue.current?.loop || false; // Set loop if defined in cue + audio.play().catch(error => { + console.error("Audio playback error:", error); + }); + audio.onended = () => { + onCueEnd(); + } + + refAudio.current = audio; // Store the new audio reference + audio.addEventListener("loadedmetadata", () => { + 
refTimer.current.restart(audio.duration*1000 || 0); + }); + } function playCue(cue) { if(!cue) return; console.log('Playing cue:', cue); + setCurrentCue(cue); + refCurrentCue.current = cue; // Store the current cue in ref if(parseFloat(cue.id)<=4.1){ // Special case for starting a conversation @@ -37,38 +78,29 @@ export function Flow(){ reset(); } - if(cue.type=='chat' && cue.id=='4.1'){ + if(cue.type=='chat'){ // Special case for starting a conversation - console.log('Starting conversation...'); - sendMessage(); - setChatWelcome(true); - } - - - if(cue.audioFile){ - // Stop any currently playing audio - if(refAudio.current) { - refAudio.current.pause(); - } + resetTranscript(); - const audio = new Audio(cue.audioFile); - - if(cue.loop){ - audio.loop = true; + if(cue.id==4.1){ + console.log('Starting conversation...'); + sendMessage(); + setChatWelcome(true); + }else{ + const message= refInput.current.value?.trim(); + if(message && message.length>0) { + + sendMessage(message); + setChatWelcome(false); + }else{ + onCueEnd(cue); // if no message, just continue to next cue + } } + } - audio.play().catch(error => { - console.error('Error playing audio:', error); - }); - - audio.onended = () => { - onCueEnd(cue); - } - refAudio.current = audio; - audio.addEventListener("loadedmetadata", () => { - refTimer.current.restart(audio.duration*1000 || 0); - }); + if(cue.audioFile){ + playAudio(cue.audioFile); } if(cue.duration){ @@ -78,30 +110,18 @@ export function Flow(){ } } - function onCueEnd(cue) { + function onCueEnd() { - if(!cue) return; - console.log('onCueEnd:', cue.id); - - if(cue.type=='chat'){ - // sendChatMessage - const message= refInput.current.value?.trim(); - if(message && message.length>0) { - - sendMessage(message); - setChatWelcome(false); + if(!refCurrentCue.current) return; + const cue= refCurrentCue.current; // Get the current cue from ref - }else{ - // if no message, just continue to next cue - console.log('No message to send, continuing to next cue'); 
- playCue(cuelist.find(c => c.id === cue.nextcue)); - } + console.log('onCueEnd:', cue.id); - }else{ - if(cue.auto) { - playCue(cuelist.find(c => c.id === cue.nextcue)); - } + + if(cue.auto) { + playCue(cuelist.find(c => c.id === cue.nextcue)); } + } function onStop(){ @@ -110,19 +130,55 @@ export function Flow(){ refAudio.current.pause(); refAudio.current = null; } + setCurrentCue(null); + refCurrentCue.current = null; // Clear the current cue reference + refTimer.current.restart(0); stopChat(); // Stop chat processing } + useEffect(()=>{ + if(audioInput && isMicrophoneAvailable) { + + SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => { + console.log("Speech recognition started."); + }).catch(error => { + console.error("Error starting speech recognition:", error); + }); + + }else{ + console.log('Stopping speech recognition...'); + SpeechRecognition.stopListening(); + } + + },[audioInput]); + + + useEffect(()=>{ + + // if(listening){ + if(currentCue?.type=='user_input') refInput.current.value = transcript; + // } + + },[transcript]); + + + useEffect(()=>{ + + if(audioUrl) playAudio(audioUrl); + + },[audioUrl]); + useEffect(()=>{ switch(status) { case Status.SUCCESS: console.log('Success!'); setStatus(Status.IDLE); - refInput.current.value = '' + refInput.current.value = ''; + resetTranscript(); if(chatWelcome) { return; @@ -130,22 +186,35 @@ export function Flow(){ // play next cue - if(currentCue.nextcue!=5 && currentCue.nextcue!=6){ // Q5 & Q6 wait for audio end - if(currentCue.nextcue) { - playCue(cuelist.find(c => c.id === currentCue.nextcue)); - } else { - setCurrentCue(null); - } + // if(currentCue.nextcue!=5 && currentCue.nextcue!=6){ // Q5 & Q6 wait for audio end + // if(currentCue.nextcue) { + // playCue(cuelist.find(c => c.id === currentCue.nextcue)); + // } else { + // setCurrentCue(null); + // } + // } + + if(refCurrentCue.current.callback=='summary'){ + // get summary + console.log('Getting summary...'); + 
getSummary(history.map(el=>`${el.role}:${el.content}`).join('\n')).then(summary => { + + console.log('Summary:', summary); + + }).catch(error => { + console.error('Error getting summary:', error); + }); + } break; - case Status.AUDIO_ENDED: - console.log('Audio ended'); - if(currentCue.nextcue==5 || currentCue.nextcue==6){ // Q5 & Q6 wait for audio end - playCue(cuelist.find(c => c.id === currentCue.nextcue)); - } - break; + // case Status.AUDIO_ENDED: + // console.log('Audio ended'); + // if(currentCue.nextcue==5 || currentCue.nextcue==6){ // Q5 & Q6 wait for audio end + // playCue(cuelist.find(c => c.id === currentCue.nextcue)); + // } + // break; } },[status]); @@ -170,10 +239,13 @@ export function Flow(){