diff --git a/vite/public/assets/0721/onyx/q4-2.mp3 b/vite/public/assets/0721/onyx/q4-2.mp3
index e0cdfd1..48e1804 100644
Binary files a/vite/public/assets/0721/onyx/q4-2.mp3 and b/vite/public/assets/0721/onyx/q4-2.mp3 differ
diff --git a/vite/public/assets/0721/shimmer/q4-2.mp3 b/vite/public/assets/0721/shimmer/q4-2.mp3
index dc51477..be39439 100644
Binary files a/vite/public/assets/0721/shimmer/q4-2.mp3 and b/vite/public/assets/0721/shimmer/q4-2.mp3 differ
diff --git a/vite/public/assets/0721/shimmer/q4.mp3 b/vite/public/assets/0721/shimmer/q4.mp3
index 4d8c829..91e321a 100644
Binary files a/vite/public/assets/0721/shimmer/q4.mp3 and b/vite/public/assets/0721/shimmer/q4.mp3 differ
diff --git a/vite/public/cuelist_free.json b/vite/public/cuelist_free.json
index 42aac57..39a1b86 100644
--- a/vite/public/cuelist_free.json
+++ b/vite/public/cuelist_free.json
@@ -30,7 +30,7 @@
{
"id": 4,
"name": "Q4",
- "type": "phone",
+ "type": "headphone",
"description": "引導撥號",
"auto": false,
"audioFile": "assets/0721/onyx/q4.mp3",
@@ -54,7 +54,8 @@
"auto": true,
"audioFile": "assets/0721/onyx/q4-2.mp3",
"nextcue": 4.3,
- "status":"intro"
+ "status":"intro",
+ "callback":"fade_out_light"
},
{
"id": 4.3,
@@ -73,28 +74,19 @@
"description": "對話收尾",
"auto": true,
"nextcue": 5.1
- },
+ },
{
"id": 5.1,
"name": "Q5.1",
- "type": "phone",
- "description": "引導打給遺憾對象",
- "auto": true,
- "audioFile": "assets/0721/onyx/q5.mp3",
- "nextcue": 5.2
- },
- {
- "id": 5.2,
- "name": "Q5.2",
"type": "user_input",
"description": "call",
"duration": 60,
"auto": true,
- "nextcue": 5.3
+ "nextcue": 5.2
},
{
- "id": 5.3,
- "name": "Q5.3",
+ "id": 5.2,
+ "name": "Q5.2",
"type": "summary",
"description": "summary",
"auto": true,
@@ -107,7 +99,8 @@
"type": "space",
"description": "Ending",
"audioFile": "assets/q6.mp3",
- "status":"end"
+ "status":"end",
+ "callback":"fade_in_light"
}
]
}
diff --git a/vite/public/default.json b/vite/public/default.json
index 353cf97..a5c5b5c 100644
--- a/vite/public/default.json
+++ b/vite/public/default.json
@@ -11,7 +11,7 @@
"speech_idle_time":3000,
"sd_prompt_prefix":"a hazy memory of a {{ ",
- "sd_prompt_suffix":" }}, seen through soft atmospheric blur, distant silhouettes and faded contours, pastel light and cinematic haze, (analog film texture), (shallow depth of field:1.3), shallow depth of field, memory fragment effect, light leak, subtle grain, chromatic aberration, surreal glow, in muted warm tones, cinematic framing,",
+ "sd_prompt_suffix":"}}, soft atmospheric blur, centered ghostly silhouette, fading contours, pastel glow, cinematic haze, (analog film grain), (shallow depth of field:1.3), impressionist style, ethereal light, dreamlike mood, memory fragment haze",
"sd_negative_propmt":"photorealism, digital art, sharp details, hard edges, CGI, anime, cartoon, studio light"
}
\ No newline at end of file
diff --git a/vite/src/comps/debug.jsx b/vite/src/comps/debug.jsx
index a25acbb..7acaf4d 100644
--- a/vite/src/comps/debug.jsx
+++ b/vite/src/comps/debug.jsx
@@ -3,19 +3,22 @@ import { useData } from '../util/useData.jsx';
const TEST_PROMPT='a hazy memory of a {{ Scene }}, seen through soft atmospheric blur, distant silhouettes and faded contours, pastel light and cinematic haze, (analog film texture), (shallow depth of field:1.3), shallow depth of field, memory fragment effect, light leak, subtle grain, chromatic aberration, surreal glow, in muted warm tones, cinematic framing,';
-export function DebugControl(){
+export function DebugControl({refLight}){
const {data} = useData();
function sendPrompt(prompt_raw) {
console.log('Sending prompt:', prompt_raw);
const prompt = `${data?.sd_prompt_prefix || ''}${prompt_raw.replaceAll('"', '')}${data?.sd_prompt_suffix || ''}`;
- updatePrompt(prompt);
+ updatePrompt(prompt);
}
return (
- sendOsc(OSC_ADDRESS.STATUS, 'reset')}>reset
+ {
+ sendOsc(OSC_ADDRESS.STATUS, 'reset');
+ refLight.current.set(1);
+ }}>reset
sendOsc(OSC_ADDRESS.STATUS, 'intro')}>intro
sendOsc(OSC_ADDRESS.STATUS, 'go')}>go
sendOsc(OSC_ADDRESS.STATUS, 'end')}>end
diff --git a/vite/src/comps/light.jsx b/vite/src/comps/light.jsx
index 2ed898b..a855fba 100644
--- a/vite/src/comps/light.jsx
+++ b/vite/src/comps/light.jsx
@@ -9,24 +9,35 @@ export const Light=forwardRef((props, ref)=>{
const refVal=useRef({val: 0});
const refInput=useRef();
const refContainer=useRef();
+ const refGsap=useRef();
function fade(from, to){
const time= parseFloat(refInput.current.value) || FADE_TIME;
- gsap.fromTo(refVal.current,{val: from}, {
+
+ gsap.killTweensOf(refVal.current); // Kill all tweens of refVal
+
+    refGsap.current=gsap.fromTo(refVal.current,{val: from}, {
val: to,
duration: time,
onUpdate: () => {
// console.log(refVal.current.val);
// sendOsc(OSC_ADDRESS.LIGHT, refVal.current.val.toString());
- invoke('send_dmx_message', {
- message: Math.floor(refVal.current.val * 255).toString(),
- }).then(() => {
- console.log(`dmx message sent: ${Math.floor(refVal.current.val * 255)}`);
- }).catch((error) => {
- console.error('Error sending DMX message:', error);
- });
+ // invoke('send_dmx_message', {
+ // message: Math.floor(refVal.current.val * 255).toString(),
+ // }).then(() => {
+ // console.log(`dmx message sent: ${Math.floor(refVal.current.val * 255)}`);
+ // }).catch((error) => {
+ // console.error('Error sending DMX message:', error);
+ // });
+
+ invoke('send_osc_message', {
+ key: '/light',
+ message: refVal.current.val.toString(),
+ host:`0.0.0.0:0`,
+ target: '127.0.0.1:8888',
+ });
if(refContainer.current)
refContainer.current.style.background= `rgba(0, 255, 0, ${refVal.current.val})`; // Update background color based on value
@@ -41,7 +52,33 @@ export const Light=forwardRef((props, ref)=>{
fadeOut: ()=>{
console.log('fadeOut');
fade(1, 0);
- }
+ },
+ reset:()=>{
+ console.log('reset');
+ refVal.current.val=0;
+ refContainer.current.style.background= `rgba(0, 255, 0, 0)`; // Reset background color
+ refInput.current.value=FADE_TIME; // Reset input value
+
+ gsap.killTweensOf(refVal.current); // Kill all tweens of refVal
+
+ invoke('send_osc_message', {
+ key: '/light',
+ message: '0',
+ host:`0.0.0.0:0`,
+ target: '127.0.0.1:8888',
+ });
+ },
+ set: (value)=>{
+ console.log('set', value);
+ refVal.current.val=value;
+ refContainer.current.style.background= `rgba(0, 255, 0, ${value})`; // Update background color based on value
+ invoke('send_osc_message', {
+ key: '/light',
+ message: value.toString(),
+ host:`0.0.0.0:0`,
+ target: '127.0.0.1:8888',
+ });
+ },
}));
useEffect(()=>{
@@ -58,6 +95,8 @@ export const Light=forwardRef((props, ref)=>{
*/}
fade(0,1)}>fadeIn
fade(1,0)}>fadeOut
+ ref.current.set(1)}>on
+ ref.current.set(0)}>off
{/* */}
)
diff --git a/vite/src/pages/conversation.jsx b/vite/src/pages/conversation.jsx
index f25f0e3..6da6b71 100644
--- a/vite/src/pages/conversation.jsx
+++ b/vite/src/pages/conversation.jsx
@@ -136,7 +136,7 @@ export function Conversation() {
const input = event.target.elements?.input.value || refInput.current?.value;
- if(!input.trim()?.length) {
+    if(!input?.trim()?.length && !isLastMessage) {
console.warn("Input is empty, ignoring submission.");
return;
}
diff --git a/vite/src/pages/flow_free.jsx b/vite/src/pages/flow_free.jsx
index 2a59fc1..a9f2723 100644
--- a/vite/src/pages/flow_free.jsx
+++ b/vite/src/pages/flow_free.jsx
@@ -19,6 +19,7 @@ const EmojiType={
headphone: '🎧',
speaker: '🔊',
chat: '🤖',
+ chat_end: '🤖',
user_input: '💬',
}
@@ -162,10 +163,12 @@ export function FreeFlow(){
console.log('User input cue, setting chat status to User');
setChatStatus(ChatStatus.User); // Set chat status to User for user input cues
resetTranscript(); // Reset transcript for user input
- break;
+ break;
}
+    if(cue.callback=='fade_in_light') refLight.current.fadeIn(); // Fade light back in (used by the ending cue)
+    if(cue.callback=='fade_out_light') refLight.current.fadeOut(); // Fade light out (used by the intro cue)
if(cue.audioFile){
@@ -182,6 +185,9 @@ export function FreeFlow(){
// control unity
if(cue.status){
sendOsc(OSC_ADDRESS.STATUS, cue.status); // Send OSC status message
+ if(cue.status=='reset') {
+ refLight.current.set(1);
+ }
}
if(cue.type=='chat' || cue.type=='user_input') {
sendOsc(OSC_ADDRESS.COUNTDOWN, cue.duration || '0'); // Send OSC countdown message
@@ -200,9 +206,7 @@ export function FreeFlow(){
console.log('onCueEnd:', cue.id);
- if(cue.callback=='start_conversation') refLight.current.fadeOut(); // Fade in light for conversation start
- if(cue.callback=='summary') refLight.current.fadeIn(); // Fade out light for conversation end
-
+
resetTranscript(); // Reset transcript after cue ends
if(cue.auto) {
@@ -247,8 +251,10 @@ export function FreeFlow(){
},[userId]);
function onSpeechEnd(){
+
+
+    if(currentCue?.type!='chat') return; // Only process while a chat cue is active
console.log('onSpeechEnd:', finalTranscript);
- if(currentCue?.type!='chat') return; // Only process if current cue is user input
if(autoSend && transcript.trim().length > 0) {
console.log('Auto sending transcript:', transcript);
@@ -351,7 +357,7 @@ export function FreeFlow(){
return (
-
+
{refCurrentCue.current?.name}
@@ -373,33 +379,35 @@ export function FreeFlow(){
}}>Save Log
-
+
{/* ID */}
+
Name
Description
Type
Auto
Audio / Due
-
+ Action
{cuelist?.map(({id, name, description, type, auto, audioFile,...props}, index) => (
{/* {id} */}
- {name}
- {description}
- {EmojiType[type]}
- {auto ? '⤵️' : ''}
- {audioFile || props.duration} {props.callback && `<${props.callback}>`}
{
playCue({id, name, description, type, auto, audioFile, ...props});
}}>go
+ {name}
+ {description}
+ {EmojiType[type]}
+ {auto ? '⤵️' : ''}
+ {audioFile || props.duration}
+ {props.callback && `<${props.callback}>`}{props.status && `(${props.status})`}
))}
diff --git a/vite/src/util/chat.js b/vite/src/util/chat.js
index a2adaae..58338df 100644
--- a/vite/src/util/chat.js
+++ b/vite/src/util/chat.js
@@ -79,7 +79,7 @@ export async function sendChatMessage(messages, data, isLastMessage = false) {
},
"prompt": {
"type": "string",
- "description": "The generated image prompt based on the user's input and the system's guidance."
+ "description": "The generated image prompt based on the user's input and the system's guidance. Less than 60 English characters."
}
},
required: ["output_text", "prompt"],