diff --git a/vite/package-lock.json b/vite/package-lock.json
index b6ac23d..5869510 100644
--- a/vite/package-lock.json
+++ b/vite/package-lock.json
@@ -14,6 +14,7 @@
"gsap": "^3.13.0",
"react": "^19.1.0",
"react-dom": "^19.1.0",
+ "react-router": "^7.6.2",
"react-speech-recognition": "^4.0.1",
"tailwindcss": "^4.1.8"
},
@@ -1787,6 +1788,14 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true
},
+ "node_modules/cookie": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
+ "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA==",
+ "engines": {
+ "node": ">=18"
+ }
+ },
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -2806,6 +2815,27 @@
"react": "^19.1.0"
}
},
+ "node_modules/react-router": {
+ "version": "7.6.2",
+ "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.2.tgz",
+ "integrity": "sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w==",
+ "dependencies": {
+ "cookie": "^1.0.1",
+ "set-cookie-parser": "^2.6.0"
+ },
+ "engines": {
+ "node": ">=20.0.0"
+ },
+ "peerDependencies": {
+ "react": ">=18",
+ "react-dom": ">=18"
+ },
+ "peerDependenciesMeta": {
+ "react-dom": {
+ "optional": true
+ }
+ }
+ },
"node_modules/react-speech-recognition": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/react-speech-recognition/-/react-speech-recognition-4.0.1.tgz",
@@ -2869,6 +2899,11 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz",
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA=="
},
+ "node_modules/set-cookie-parser": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
+ "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="
+ },
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@@ -4047,6 +4082,11 @@
"integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
"dev": true
},
+ "cookie": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.0.2.tgz",
+ "integrity": "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="
+ },
"cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -4683,6 +4723,15 @@
"scheduler": "^0.26.0"
}
},
+ "react-router": {
+ "version": "7.6.2",
+ "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.6.2.tgz",
+ "integrity": "sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w==",
+ "requires": {
+ "cookie": "^1.0.1",
+ "set-cookie-parser": "^2.6.0"
+ }
+ },
"react-speech-recognition": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/react-speech-recognition/-/react-speech-recognition-4.0.1.tgz",
@@ -4731,6 +4780,11 @@
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz",
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA=="
},
+ "set-cookie-parser": {
+ "version": "2.7.1",
+ "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz",
+ "integrity": "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="
+ },
"shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
diff --git a/vite/package.json b/vite/package.json
index 0ae3cd4..f589287 100644
--- a/vite/package.json
+++ b/vite/package.json
@@ -16,6 +16,7 @@
"gsap": "^3.13.0",
"react": "^19.1.0",
"react-dom": "^19.1.0",
+ "react-router": "^7.6.2",
"react-speech-recognition": "^4.0.1",
"tailwindcss": "^4.1.8"
},
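This change adds react-router 7 as a direct dependency of the Vite app (pulling in `cookie` and `set-cookie-parser` transitively). A minimal sketch of how the router might be wired up in declarative mode — the entry file and route path below are assumptions for illustration, not taken from this diff:

```jsx
// Hypothetical main.jsx wiring for react-router v7 (declarative mode).
// The "/" path and the App component mapping are assumptions.
import { createRoot } from 'react-dom/client';
import { BrowserRouter, Routes, Route } from 'react-router';
import App from './App';

createRoot(document.getElementById('root')).render(
  <BrowserRouter>
    <Routes>
      <Route path="/" element={<App />} />
    </Routes>
  </BrowserRouter>
);
```

In v7 the former `react-router-dom` entry point is merged into the `react-router` package, which is why no separate DOM package appears in the dependency list; `react-dom` shows up only as an optional peer dependency in the lock file.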
diff --git a/vite/public/assets/q1.mp3 b/vite/public/assets/q1.mp3
new file mode 100644
index 0000000..d1f1b7c
Binary files /dev/null and b/vite/public/assets/q1.mp3 differ
diff --git a/vite/public/assets/q2.mp3 b/vite/public/assets/q2.mp3
new file mode 100644
index 0000000..2cfc82a
Binary files /dev/null and b/vite/public/assets/q2.mp3 differ
diff --git a/vite/public/assets/q3.mp3 b/vite/public/assets/q3.mp3
new file mode 100644
index 0000000..8393490
Binary files /dev/null and b/vite/public/assets/q3.mp3 differ
diff --git a/vite/public/assets/q6.mp3 b/vite/public/assets/q6.mp3
new file mode 100644
index 0000000..8c5e1de
Binary files /dev/null and b/vite/public/assets/q6.mp3 differ
diff --git a/vite/public/cuelist.json b/vite/public/cuelist.json
new file mode 100644
index 0000000..208f412
--- /dev/null
+++ b/vite/public/cuelist.json
@@ -0,0 +1,52 @@
+{
+ "cuelist": [
+ {
+ "id": 1,
+ "name": "Q1",
+ "type": "space",
+ "description": "Annonce",
+ "audioFile": "assets/q1.mp3",
+ "loop": true
+ },
+ {
+ "id": 2,
+ "name": "Q2",
+ "type": "headphone",
+ "description": "Guide for drink",
+ "auto": true,
+ "audioFile": "assets/q2.mp3"
+ },
+ {
+ "id": 3,
+ "name": "Q3",
+ "description": "Guide for phone",
+ "type": "headphone",
+ "auto": false,
+ "audioFile": "assets/q3.mp3"
+ },
+ {
+ "id": 4,
+ "name": "Q4",
+ "type": "phone",
+ "description": "Guide to construct scene",
+ "auto": true,
+ "duration": 60
+ },
+ {
+ "id": 5,
+ "name": "Q5",
+ "type": "phone",
+ "description": "Guide to call",
+ "duration": 60,
+ "auto": true
+ },
+ {
+ "id": 6,
+ "name": "Q6",
+ "type": "space",
+ "description": "Ending",
+ "audioFile": "assets/q6.mp3"
+ }
+ ]
+}
+
\ No newline at end of file
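The new `cuelist.json` drives the cue sequence: each cue has a `type` (`space`, `headphone`, or `phone`) and either an `audioFile` to play or a `duration` in seconds to wait out, plus optional `auto` and `loop` flags (presumably auto-advance and looped playback). A hedged sketch of how a player might consume this schema — the function names are illustrative, not part of this diff:

```js
// Illustrative cue loader/player for the schema above; not part of this diff.
async function loadCuelist() {
  const res = await fetch('/cuelist.json');
  const { cuelist } = await res.json();
  return cuelist;
}

function playCue(cue, onDone) {
  if (cue.audioFile) {
    const audio = new Audio(cue.audioFile);
    audio.loop = Boolean(cue.loop);
    // Non-looping audio cues signal completion when playback ends.
    if (!cue.loop) audio.addEventListener('ended', onDone);
    audio.play().catch(console.error);
  } else if (cue.duration) {
    // Timed cues (Q4, Q5) carry no audio; wait out the duration instead.
    setTimeout(onDone, cue.duration * 1000);
  }
}
```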
diff --git a/vite/src/App.css b/vite/src/App.css
index a461c50..44ccbc2 100644
--- a/vite/src/App.css
+++ b/vite/src/App.css
@@ -1 +1,12 @@
-@import "tailwindcss";
\ No newline at end of file
+@import "tailwindcss";
+
+#root{
+ @apply flex flex-col h-screen;
+}
+.checkbox{
+ @apply flex flex-row items-center gap-1;
+}
+
+main{
+ @apply flex-1 flex flex-col gap-4 justify-start p-8 overflow-y-auto;
+}
\ No newline at end of file
diff --git a/vite/src/App.jsx b/vite/src/App.jsx
index dd7430a..ac106c3 100644
--- a/vite/src/App.jsx
+++ b/vite/src/App.jsx
@@ -1,366 +1,17 @@
import { useEffect, useRef, useState } from 'react';
import './App.css'
-import { sendChatMessage } from './util/chat';
-import SpeechRecognition, { useSpeechRecognition } from 'react-speech-recognition';
-import { textToSpeech } from './util/tts';
-import { gsap } from "gsap";
-import { SplitText } from 'gsap/SplitText';
-import { invoke } from '@tauri-apps/api/core';
-import Input from './comps/input';
-gsap.registerPlugin(SplitText);
-const BASE_URL='http://localhost:3333';
function App() {
- const [history, setHistory] = useState([]);
- const [processing, setProcessing] = useState(false);
- const [showProcessing, setShowProcessing] = useState(false);
- const [audioOutput, setAudioOutput] = useState(false);
-
- const [prompt, setPrompt] = useState([]);
-
- const refHistoryContainer= useRef(null);
- const refPrompContainer= useRef(null);
- const refInput=useRef(null);
-
- const {
- transcript,
- finalTranscript,
- listening,
- resetTranscript,
- browserSupportsSpeechRecognition,
- isMicrophoneAvailable,
- }=useSpeechRecognition();
-
-
- function restart(){
- console.log("Restarting...");
- setHistory([]);
- setPrompt([]);
- refInput.current.value = '';
- resetTranscript();
- SpeechRecognition.stopListening();
-
- // create start message
- const startTime=Date.now();
- setProcessing(true);
- sendChatMessage([]).then(response => {
- if (!response.ok) {
- throw new Error('Network response was not ok');
- }
-
- let data=response;
- console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
-
-
- // add to history
- setHistory(() => [{
- role: 'assistant',
- content: data.output_text,
- }]);
- setPrompt(()=>[
- data.prompt,
- ]);
-
- // tts
- if(!audioOutput) {
-
- setProcessing(false);
-
- }else{
- console.log('create speech:', data.output_text);
- textToSpeech(data.output_text).then(audioUrl => {
- const audio = new Audio(audioUrl);
-
- console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
-
- audio.play().catch(error => {
- console.error('Audio playback failed:', error);
- });
-
- setProcessing(false);
-
- }).catch(error => {
- console.error('TTS error:', error);
- });
-
- }
-
- });
- }
-
- function toggleAudio(value) {
- console.log("onclickAudio", listening, browserSupportsSpeechRecognition, isMicrophoneAvailable);
- if(!browserSupportsSpeechRecognition) {
- console.warn("Browser does not support speech recognition.");
- return;
- }
- if(!isMicrophoneAvailable) {
- console.warn("Microphone is not available.");
- return;
- }
-
- if(!listening && value){
- SpeechRecognition.startListening({ continuous: true, language: 'zh-TW' }).then(() => {
- console.log("Speech recognition started.");
- }).catch(error => {
- console.error("Error starting speech recognition:", error);
- });
-
- }else{
- SpeechRecognition.stopListening();
- }
- }
-
-
-
-
- function onSubmit(event) {
- event.preventDefault();
-
- if(processing) {
- console.warn("Already processing, ignoring submission.");
- return;
- }
- setProcessing(true);
- setShowProcessing(true);
-
- const input = event.target.elements.input.value;
-
- if(!input.trim()?.length) {
- console.warn("Input is empty, ignoring submission.");
- return;
- }
-
- const startTime=Date.now();
- console.log("Submit reply:", input);
-
- sendChatMessage([
- ...history,
- {
- role:'user',
- content: input,
- }
- ]).then(response => {
- if (!response.ok) {
- throw new Error('Network response was not ok');
- setProcessing(false);
- }
-
- let data=response;
- console.log('get reply: ', data, new Date(Date.now()-startTime).toISOString().slice(11, 19));
-
- // add to history
-
-
- setPrompt([
- ...prompt,
- data.prompt,
- ]);
-
-
-
- if(!audioOutput) {
-
- setHistory(prev => [...prev, {
- role: 'assistant',
- content: data.output_text,
- }]);
-
- setProcessing(false);
- setShowProcessing(false);
- }else{
- // tts
- console.log('create speech:', data.output_text);
- textToSpeech(data.output_text).then(audioUrl => {
- const audio = new Audio(audioUrl);
-
- console.log('play audio...', new Date(Date.now()-startTime).toISOString().slice(11, 19));
- setShowProcessing(false);
- setHistory(prev => [...prev, {
- role: 'assistant',
- content: data.output_text,
- }]);
-
- audio.play().catch(error => {
- console.error('Audio playback failed:', error);
- });
-
- audio.addEventListener('ended',() => {
- console.log('Audio playback ended');
- setProcessing(()=>false);
- });
-
- }).catch(error => {
- console.error('TTS error:', error);
- setProcessing(()=>false);
- });
- }
-
- });
-
- // clear input
- event.target.elements.input.value = '';
- // setProcessing(()=>false);
- setHistory(prev => [...prev, {
- role: 'user',
- content:input,
- }]);
-
-
- }
- useEffect(()=>{
- refHistoryContainer.current.scrollTop = refHistoryContainer.current.scrollHeight;
-
- // Animate the history items
- if(history.length === 0) return;
-
- let last_item=document.querySelector('.last_history');
-
- if(!last_item) return;
- if(last_item.classList.contains('user')) return;
- console.log('last_item', last_item);
-
- let split=SplitText.create(last_item, {
- type: "chars",
- aria:'hidden'
- });
- console.log('split', split);
- gsap.fromTo(split.chars, {
- opacity: 0,
- }, {
- opacity: 1,
- y: 0,
- duration: 0.5,
- ease: "steps(1)",
- stagger: 0.1
- });
-
-
-
- },[history]);
- useEffect(()=>{
- refPrompContainer.current.scrollTop = refPrompContainer.current.scrollHeight;
- },[prompt]);
-
-
- useEffect(()=>{
-
- if(listening){
- refInput.current.value = transcript;
- }
-
- },[transcript]);
-
-
- useEffect(()=>{
- if(finalTranscript){
- refInput.current.value = finalTranscript;
- console.log('Final Transcript:', finalTranscript);
-
- if(processing) return; // Prevent submission if already processing
-
-
- // Submit the final transcript
- onSubmit({
- preventDefault: () => {},
- target: {
- elements: {
- input: refInput.current
- }
- }
- });
- resetTranscript(); // Clear the transcript after submission
-
- }
- },[finalTranscript]);
-
- useEffect(()=>{
-
- console.log('window.SpeechRecognition=', window.SpeechRecognition || window.webkitSpeechRecognition);
-
- // if (navigator.getUserMedia){
-
- // navigator.getUserMedia({audio:true},
- // function(stream) {
- // // start_microphone(stream);
- // console.log('Microphone access granted.');
- // },
- // function(e) {
- // alert('Error capturing audio.');
- // }
- // );
-
- // } else { alert('getUserMedia not supported in this browser.'); }
-
-
- },[]);
-
-
return (
+    <main>
+      <table>
+        <thead>
+          <tr>
+            {/* <th>ID</th> */}
+            <th>Name</th>
+            <th>Description</th>
+            <th>Type</th>
+            <th>Auto</th>
+            <th>Audio File</th>
+            <th></th>
+          </tr>
+        </thead>
+        <tbody>
+          {cuelist.map(({ id, name, description, type, auto, audioFile }) => (
+            <tr key={id}>
+              {/* <td>{id}</td> */}
+              <td>{name}</td>
+              <td>{description}</td>
+              <td>{type == 'phone' ? '📞' : (type == 'headphone' ? '🎧' : '🔊')}</td>
+              <td>{auto ? '↩️' : ''}</td>
+              <td>{audioFile}</td>
+              <td></td>
+            </tr>
+          ))}
+        </tbody>
+      </table>
+      <p>This page is under construction.</p>
+    </main>
  );
}
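The table above maps over a `cuelist` array; a sketch of how that state might be populated from the new `public/cuelist.json` — the `useCuelist` hook name and fetch-on-mount pattern are assumptions, not shown in this diff:

```jsx
// Assumed state wiring for the cue table; the useCuelist name is hypothetical.
import { useEffect, useState } from 'react';

function useCuelist() {
  const [cuelist, setCuelist] = useState([]);
  useEffect(() => {
    // Vite serves public/cuelist.json at the site root.
    fetch('/cuelist.json')
      .then((res) => res.json())
      .then((data) => setCuelist(data.cuelist))
      .catch(console.error);
  }, []);
  return cuelist;
}
```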