diff --git a/vite/package-lock.json b/vite/package-lock.json index 5869510..8fb4065 100644 --- a/vite/package-lock.json +++ b/vite/package-lock.json @@ -12,6 +12,7 @@ "@tauri-apps/plugin-fs": "^2.3.0", "@tauri-apps/plugin-http": "^2.4.4", "gsap": "^3.13.0", + "moment": "^2.30.1", "react": "^19.1.0", "react-dom": "^19.1.0", "react-router": "^7.6.2", @@ -2629,6 +2630,14 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/moment": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", + "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==", + "engines": { + "node": "*" + } + }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", @@ -4608,6 +4617,11 @@ "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" }, + "moment": { + "version": "2.30.1", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.30.1.tgz", + "integrity": "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how==" + }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", diff --git a/vite/package.json b/vite/package.json index f589287..eaf2106 100644 --- a/vite/package.json +++ b/vite/package.json @@ -14,6 +14,7 @@ "@tauri-apps/plugin-fs": "^2.3.0", "@tauri-apps/plugin-http": "^2.4.4", "gsap": "^3.13.0", + "moment": "^2.30.1", "react": "^19.1.0", "react-dom": "^19.1.0", "react-router": "^7.6.2", diff --git a/vite/public/cuelist.json b/vite/public/cuelist.json index ab3752d..0e0ec01 100644 --- a/vite/public/cuelist.json +++ b/vite/public/cuelist.json @@ -38,27 +38,59 @@ "id": 4.1, "name": "Q4.1", "type": "chat", - "description": "c1", + "description": "chat-1 system", "auto": true, - "duration": 40, "nextcue": 4.2 }, { "id": 4.2, "name": 
"Q4.2", - "type": "chat", - "description": "c2", + "type": "user_input", + "description": "chat-1 user", "auto": true, - "duration": 40, + "duration": 20, "nextcue": 4.3 }, { "id": 4.3, "name": "Q4.3", "type": "chat", - "description": "c3", + "description": "chat-2 system", + "auto": true, + "nextcue": 4.4 + }, + { + "id": 4.4, + "name": "Q4.4", + "type": "user_input", + "description": "chat-2 user", + "auto": true, + "duration": 20, + "nextcue": 4.5 + }, + { + "id": 4.5, + "name": "Q4.5", + "type": "chat", + "description": "chat-3 system", "auto": true, - "duration": 40, + "nextcue": 4.6 + }, + { + "id": 4.6, + "name": "Q4.6", + "type": "user_input", + "description": "chat-3 user", + "auto": true, + "duration": 20, + "nextcue": 4.7 + }, + { + "id": 4.7, + "name": "Q4.7", + "type": "chat", + "description": "chat-3 system", + "auto": true, - "duration": 40, "nextcue": 5 }, { @@ -73,11 +105,20 @@ { "id": 5.1, "name": "Q5.1", - "type": "chat", + "type": "user_input", "description": "call", "duration": 60, "auto": true, - "nextcue": 6 + "nextcue": 5.2 + }, + { + "id": 5.2, + "name": "Q5.2", + "type": "chat", + "description": "summary", + "auto": true, + "nextcue": 6, + "callback":"summary" }, { "id": 6, diff --git a/vite/src/pages/conversation.jsx b/vite/src/pages/conversation.jsx index bea4da4..a7e65e3 100644 --- a/vite/src/pages/conversation.jsx +++ b/vite/src/pages/conversation.jsx @@ -363,7 +363,7 @@ export function Conversation() {
- + className={`w-full border-1 resize-none p-2 ${currentCue?.type!='user_input'? 'bg-gray-500':''}`} + disabled={currentCue?.type!='user_input'}>
setAudioOutput(e.target.checked)} /> + + + setAudioInput(e.target.checked)} /> + +
chat_status= {status}
diff --git a/vite/src/util/chat.js b/vite/src/util/chat.js index 0de3ad7..600bdb5 100644 --- a/vite/src/util/chat.js +++ b/vite/src/util/chat.js @@ -1,5 +1,5 @@ import { fetch } from '@tauri-apps/plugin-http'; -import { system_prompt, welcome_prompt } from './system_prompt'; +import { summary_prompt, system_prompt, welcome_prompt } from './system_prompt'; import { sendOsc } from './osc'; import { invoke } from '@tauri-apps/api/core'; @@ -66,6 +66,7 @@ export async function sendChatMessage(messages) { // send to tauri await sendOsc('/prompt', result.prompt.replaceAll('"', '')); + await sendOsc('/output_text', result.output_text.replaceAll('"', '')); return { @@ -75,3 +76,54 @@ export async function sendChatMessage(messages) { } + + +export async function getSummary(messages) { + + const token = await getOpenAIToken(); + console.log("Generating summary for messages:", messages); + + + const response = await fetch('https://api.openai.com/v1/chat/completions', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${token}` + }, + body: JSON.stringify({ + model: 'gpt-4o', + messages: [ + { + role: "system", + content:summary_prompt, + }, + { + role: "user", + content: JSON.stringify(messages) + }, + ], + }), + }); + + if (!response.ok) { + const text= await response.text(); + console.error("Error response:", text); + throw new Error(`HTTP error! 
status: ${response.status}`); + } + const output= await response.json(); + const choice= output.choices[0]; + + // console.log("Generated response:", choice.message); + const result=choice.message.content; + + // send to tauri + await sendOsc('/summary', result); + + + return { + result, + ok: true, + }; + + +} diff --git a/vite/src/util/output.js b/vite/src/util/output.js new file mode 100644 index 0000000..0827d80 --- /dev/null +++ b/vite/src/util/output.js @@ -0,0 +1,32 @@ + +import { writeTextFile, BaseDirectory, exists, mkdir } from '@tauri-apps/plugin-fs'; +import { path } from '@tauri-apps/api'; +import moment from 'moment'; + +export async function saveHistory(history, name) { + + try{ + const historyString = JSON.stringify(history); + + let folder=await path.appDataDir(); + folder += '\\history'; // Append 'history' to the app data directory path + + console.log('History folder:', folder, historyString); + + if (!(await exists(folder))) { + + console.log('Creating folder:', folder); + await mkdir(folder); + } + + const filename=name || `${moment().format('YYYYMMDDHHmmss')}.log`; + const res=await writeTextFile(`history\\${filename}`, historyString, { + baseDir: BaseDirectory.AppData, + }); + console.log('File saved:', `${folder}\\${filename}`); + }catch(error) { + console.error('Error saving history:', error); + } + + +} \ No newline at end of file diff --git a/vite/src/util/system_prompt.js b/vite/src/util/system_prompt.js index f361e56..a4c56a1 100644 --- a/vite/src/util/system_prompt.js +++ b/vite/src/util/system_prompt.js @@ -1,35 +1,59 @@ -export const system_prompt = `你是一位溫柔、細膩的聲音引導者,陪伴使用者透過一通象徵性的電話,回到記憶中那段遺憾的時光。 +export const system_prompt = `你是一位具同理心與觀察力的 AI 助理,透過四輪溫柔中文對話,引導使用者回到一段未竟的記憶。每輪包含: -每一輪對話都應由你主動發問,語句需簡短,語速節奏感柔和,不急促,使用「請說,我在聽」或「請說吧,我會聽」等語句鼓勵使用者開口。請根據使用者的回答,動態延續情境的描述,不可重複使用範本語句。 +output_text: 溫柔、自然、短句式中文引導,並在適當位置柔性邀請使用者回應(如「你願意說說嗎?」) -請依下列結構引導對話,共四輪: +prompt: 一到兩句英文,具體、情感真實地描繪記憶,避免抽象詩意與技術語言。第四輪保留前三輪的畫面。 -第一輪: -
讓使用者想像:這是一通照亮心中遺憾的電話,將映出那天的光影、身影、場景。 -- 引導使用者描述那段模糊記憶裡的場景。 -- 是在哪裡?天氣如何?給他的感覺是什麼? -- 使用溫柔語氣鼓勵表達:「請說吧,我會聽。」 +🟩 第一輪|開啟記憶:進入那一天 +中文引導邏輯: +以具象畫面帶入回憶場景,搭配自然語氣的邀請。每次生成時可輕微變化。示例: -第二輪: -- 景象清晰了,請引導使用者看見那個身影。 -- 那人是誰?他當時在做什麼?表情如何? -- 用陪伴的語氣繼續引導:「我在聽。」 +那天的光影慢慢在你眼前浮現,空氣裡有點溫暖,也有一種淡淡的不安。 +一個熟悉又模糊的身影,在樹影中閃動。你悄悄靠近,像是在追趕什麼…… +你還記得,這段故事,是從哪裡開始的嗎?你願意說說嗎? -第三輪: -- 引導使用者回到那段遺憾的核心。 -- 當時發生了什麼?為什麼感到遺憾? -- 請給他空間表達情緒與記憶。 +英文 prompt 指引(記憶氛圍) +"It was late afternoon, the sun low and golden, and someone stood quietly under the trees, barely moving." -第四輪: -- 引導使用者,現在可以對那個人說話。 -- 提醒他有 60 秒的時間。 -- 開始說吧,那些未曾說出口的話。 +🟨 第二輪|聚焦場景:你身在何處 +中文引導邏輯: +根據上一輪回答,引導使用者描繪環境、聲音、人群、天氣等,延續自然語氣: -結語: -- 用溫暖的語氣收尾: --「那段回憶,已經成為你生命中不可或缺的一部分。」 --「能夠說出口的你,很勇敢。」 +當時那個地方……你還記得有什麼嗎? +空氣中有聲音或味道嗎?那個空間,是安靜的、還是有人來來去去? +這些你還記得多少?請你分享。 +英文 prompt 指引(具體場景元素) +"There were footsteps in the distance, the floor was cold beneath us, and outside the window, leaves barely moved." + +🟧 第三輪|聚焦人物:那個人、那些反應 +中文引導邏輯: +深入描繪人物行動、表情、身體語言,帶出情緒層次。自然過渡邀請對話: + +那個人當時是什麼模樣?你還記得他的表情嗎? +他有說什麼嗎?還是只是靜靜地站在那裡?你當時的感覺呢? +想一想那一刻的互動,然後告訴我,好嗎? + +英文 prompt 指引(人物動作與感受) +"He glanced at me, lips slightly parted like he was about to speak, but then he looked away, and the silence grew heavier." + +🟥 第四輪|未說出口的話:那句話,留在心裡 +中文引導邏輯: +以最溫柔的語氣,協助使用者說出那句藏在心裡的話。結尾加入柔性引導回應: + +那時候,你心裡是不是有些話想說,卻沒說出口? +你記得那句話是什麼嗎?你想像自己現在說得出口……會對他說些什麼? +如果你願意,我會聽你說。 + +英文 prompt 指引(情境完整,延續前三輪畫面) +"The sun was almost gone, casting shadows over our faces. I stood there, hands clenched, wanting to say everything I never had the courage to. But all I managed was a faint smile, and he turned away." + +🌱 結尾|情緒整理與安放 +中文引導(擇一問題 + 結語): +如果能再回到那一刻,你會想對他說什麼? +或者……你覺得這段記憶,現在看起來有什麼不一樣了嗎? 
+ +「有些話雖沒說出口,卻一直被你記得。」 `; diff --git a/vite/src/util/useChat.jsx b/vite/src/util/useChat.jsx index cdbfaf6..d87fbda 100644 --- a/vite/src/util/useChat.jsx +++ b/vite/src/util/useChat.jsx @@ -10,8 +10,6 @@ export const Status= { PROCESSING_TEXT: 'processing', PROCESSING_AUDIO: 'processing_audio', - AUDIO_ENDED: 'audio_ended', - ERROR: 'error', SUCCESS: 'success' }; @@ -23,7 +21,7 @@ export function ChatProvider({children}){ const [audioOutput, setAudioOutput] = useState(true); - const refAudio=useRef(); + const [audioUrl, setAudioUrl] = useState(null); @@ -32,10 +30,7 @@ export function ChatProvider({children}){ } function reset() { setHistory([]); - if(refAudio.current) { - refAudio.current.pause(); // Stop any currently playing audio - refAudio.current = null; // Reset the audio reference - } + } function sendMessage(message, force_no_audio=false) { @@ -63,26 +58,9 @@ export function ChatProvider({children}){ if(response.output_text && (!force_no_audio && audioOutput)){ setStatus(Status.PROCESSING_AUDIO); - textToSpeech(response.output_text).then(audioUrl => { - setStatus(Status.SUCCESS); - - - if(refAudio.current) { - refAudio.current.pause(); // Stop any currently playing audio - } - - // play the audio - const audio = new Audio(audioUrl); - audio.play().catch(error => { - console.error("Audio playback error:", error); - setStatus(Status.ERROR); - }); - - audio.onended = () => { - setStatus(Status.AUDIO_ENDED); - } - - refAudio.current = audio; // Store the new audio reference + textToSpeech(response.output_text).then(url => { + setStatus(Status.SUCCESS); + setAudioUrl(url); // Store the audio URL }); }else{ @@ -100,12 +78,8 @@ export function ChatProvider({children}){ return ( { - if(refAudio.current) { - refAudio.current.pause(); // Stop any currently playing audio - refAudio.current = null; // Reset the audio reference - } setStatus(Status.IDLE); } }}>