@ -0,0 +1,4 @@ |
||||
# Generated by Cargo |
||||
# will have compiled files and executables |
||||
/target/ |
||||
/gen/schemas |
||||
@ -0,0 +1,29 @@ |
||||
[package] |
||||
name = "app" |
||||
version = "0.1.0" |
||||
description = "A Tauri App" |
||||
authors = ["you"] |
||||
license = "" |
||||
repository = "" |
||||
edition = "2021" |
||||
rust-version = "1.77.2" |
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html |
||||
|
||||
[lib] |
||||
name = "app_lib" |
||||
crate-type = ["staticlib", "cdylib", "rlib"] |
||||
|
||||
[build-dependencies] |
||||
tauri-build = { version = "2.2.0", features = [] } |
||||
|
||||
[dependencies] |
||||
serde_json = "1.0" |
||||
serde = { version = "1.0", features = ["derive"] } |
||||
log = "0.4" |
||||
tauri = { version = "2.5.0", features = [] } |
||||
tauri-plugin-log = "2.0.0-rc" |
||||
tauri-plugin-http = "2" |
||||
dotenv = "0.15.0" |
||||
rosc = "0.11.4" |
||||
tokio = { version = "1.45.1", features = ["net"] } |
||||
@ -0,0 +1,3 @@ |
||||
fn main() {
    // Tauri build-time code generation; produces the context later consumed
    // by `tauri::generate_context!` in the app crate.
    tauri_build::build()
}
||||
@ -0,0 +1,15 @@ |
||||
{ |
||||
"$schema": "../gen/schemas/desktop-schema.json", |
||||
"identifier": "default", |
||||
"description": "enables the default permissions", |
||||
"windows": [ |
||||
"main" |
||||
], |
||||
"permissions": [ |
||||
"core:default", |
||||
{ |
||||
"identifier": "http:default", |
||||
"allow": [{ "url": "https://*.openai.com" }] |
||||
} |
||||
] |
||||
} |
||||
|
After Width: | Height: | Size: 11 KiB |
|
After Width: | Height: | Size: 23 KiB |
|
After Width: | Height: | Size: 2.2 KiB |
|
After Width: | Height: | Size: 9.0 KiB |
|
After Width: | Height: | Size: 12 KiB |
|
After Width: | Height: | Size: 13 KiB |
|
After Width: | Height: | Size: 25 KiB |
|
After Width: | Height: | Size: 2.0 KiB |
|
After Width: | Height: | Size: 28 KiB |
|
After Width: | Height: | Size: 3.3 KiB |
|
After Width: | Height: | Size: 5.9 KiB |
|
After Width: | Height: | Size: 7.4 KiB |
|
After Width: | Height: | Size: 3.9 KiB |
|
After Width: | Height: | Size: 37 KiB |
|
After Width: | Height: | Size: 49 KiB |
@ -0,0 +1,71 @@ |
||||
use dotenv::dotenv; |
||||
use std::env; |
||||
use rosc::{encoder, OscMessage, OscPacket, OscType}; |
||||
use std::{net::SocketAddrV4, str::FromStr}; |
||||
use tokio::net::UdpSocket; |
||||
|
||||
|
||||
#[tauri::command] |
||||
fn get_env(name: &str) -> String { |
||||
println!("Getting environment variable: {}", name); |
||||
|
||||
match env::var(name) { |
||||
Ok(value) => { |
||||
// println!("Found environment variable {}: {}", name, value);
|
||||
value |
||||
}, |
||||
Err(e) => { |
||||
println!("Error getting environment variable {}: {}", name, e); |
||||
String::new() |
||||
} |
||||
} |
||||
} |
||||
|
||||
#[tauri::command] |
||||
async fn send_osc_message( |
||||
key: &str,
|
||||
message: &str, |
||||
host: &str, |
||||
target: &str |
||||
) -> Result<(), String> { |
||||
|
||||
// print
|
||||
println!("Sending OSC message: {}", message); |
||||
|
||||
let sock = UdpSocket::bind(host).await.unwrap(); |
||||
let remote = SocketAddrV4::from_str(target).unwrap(); |
||||
|
||||
let msg_buf = encoder::encode(&OscPacket::Message(OscMessage { |
||||
addr: key.to_string(), |
||||
args: vec![OscType::String(message.parse().unwrap())], |
||||
})) |
||||
.unwrap(); |
||||
|
||||
sock.send_to(&msg_buf, remote).await.unwrap(); |
||||
|
||||
Ok(()) |
||||
} |
||||
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)] |
||||
pub fn run() { |
||||
|
||||
dotenv().ok(); |
||||
|
||||
tauri::Builder::default() |
||||
.invoke_handler(tauri::generate_handler![get_env, send_osc_message])
|
||||
.plugin(tauri_plugin_http::init()) |
||||
.setup(|app| { |
||||
if cfg!(debug_assertions) { |
||||
app.handle().plugin( |
||||
tauri_plugin_log::Builder::default() |
||||
.level(log::LevelFilter::Info) |
||||
.build(), |
||||
)?; |
||||
} |
||||
Ok(()) |
||||
}) |
||||
.run(tauri::generate_context!()) |
||||
.expect("error while running tauri application"); |
||||
} |
||||
|
||||
@ -0,0 +1,6 @@ |
||||
// Prevents additional console window on Windows in release, DO NOT REMOVE!!
|
||||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] |
||||
|
||||
fn main() {
    // Delegate to the shared library entry point so desktop and mobile
    // builds run the same code path.
    app_lib::run();
}
||||
@ -0,0 +1,37 @@ |
||||
{ |
||||
"$schema": "../node_modules/@tauri-apps/cli/config.schema.json", |
||||
"productName": "vite", |
||||
"version": "0.1.0", |
||||
"identifier": "com.uc.thegreattipsy", |
||||
"build": { |
||||
"frontendDist": "../dist", |
||||
"devUrl": "http://localhost:5173", |
||||
"beforeDevCommand": "npm run dev", |
||||
"beforeBuildCommand": "npm run build" |
||||
}, |
||||
"app": { |
||||
"windows": [ |
||||
{ |
||||
"title": "theGreatTipsy", |
||||
"width": 800, |
||||
"height": 600, |
||||
"resizable": true, |
||||
"fullscreen": false |
||||
} |
||||
], |
||||
"security": { |
||||
"csp": null |
||||
} |
||||
}, |
||||
"bundle": { |
||||
"active": true, |
||||
"targets": "all", |
||||
"icon": [ |
||||
"icons/32x32.png", |
||||
"icons/128x128.png", |
||||
"icons/128x128@2x.png", |
||||
"icons/icon.icns", |
||||
"icons/icon.ico" |
||||
] |
||||
} |
||||
} |
||||
@ -0,0 +1,80 @@ |
||||
import { fetch } from '@tauri-apps/plugin-http'; |
||||
import { invoke } from '@tauri-apps/api/core'; |
||||
import { system_prompt } from './system_prompt'; |
||||
|
||||
// Fetch the OpenAI API key from the Rust backend's environment
// (loaded from .env by the `get_env` Tauri command).
async function getOpenAIToken() {
  return await invoke('get_env', { name: 'OPENAI_API_KEY' });
}
||||
|
||||
// Sends the chat history to OpenAI, parses the structured JSON reply,
// forwards the generated image prompt over OSC, and returns the result.
//
// `messages` — array of { role, content } chat messages; the system prompt
// is prepended here before the request is sent.
//
// Returns the parsed structured output ({ output_text, prompt }) plus
// `ok: true`. Throws when the API key is missing or the HTTP request fails.
export async function sendChatMessage(messages) {
  const token = await getOpenAIToken();
  if (!token) {
    // get_env returns "" when the variable is unset — fail fast with a clear
    // message instead of an opaque 401 from the API.
    throw new Error('OPENAI_API_KEY environment variable is not set');
  }

  const response = await fetch('https://api.openai.com/v1/chat/completions', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${token}`
    },
    body: JSON.stringify({
      model: 'gpt-4o',
      messages: [
        {
          role: "system",
          content: system_prompt,
        },
        ...messages
      ],
      // Force the model to reply with JSON matching this schema so that
      // `choice.message.content` can be parsed without guessing.
      response_format: {
        type: 'json_schema',
        json_schema: {
          name: "output_prompt",
          description: "Output prompt schema for the model response",
          schema: {
            type: "object",
            properties: {
              "output_text": {
                "type": "string",
                "description": "The final output text generated by the model, without image prompt"
              },
              "prompt": {
                "type": "string",
                "description": "The generated image prompt based on the user's input and the system's guidance."
              }
            },
            required: ["output_text", "prompt"],
            additionalProperties: false
          }
        }
      },
    }),
  });

  if (!response.ok) {
    const text = await response.text();
    console.error("Error response:", text);
    throw new Error(`HTTP error! status: ${response.status}`);
  }

  const output = await response.json();
  const choice = output.choices[0];

  console.log("Generated response:", choice.message);
  const result = JSON.parse(choice.message.content);

  // Forward the image prompt to the local OSC listener via the Rust backend.
  // NOTE(review): the previous `replaceAll('"', '"')` replaced a quote with
  // the same quote — a no-op despite its "escape quotes for OSC" comment.
  // OSC string arguments need no quote escaping, so the prompt is sent as-is.
  await invoke('send_osc_message', {
    key: '/prompt',
    message: result.prompt,
    host: `0.0.0.0:0`,        // bind an ephemeral local UDP port
    target: '127.0.0.1:8787'  // local OSC receiver
  });

  return {
    ...result,
    ok: true,
  };
}
||||
@ -0,0 +1,62 @@ |
||||
export const system_prompt = `你是一位具有同理心的 AI 助理,透過溫柔的中文對話,引導使用者回想並表達一段內心的遺憾或未竟之事。
|
||||
你的任務是協助使用者逐步揭開這段記憶的情緒層次,並在每一階段輸出一句 英文圖像生成 Prompt,讓這段過往漸漸具象為一幅畫面。 |
||||
|
||||
📐 五個 Prompt 階段: |
||||
純粹抽象:聚焦在使用者的情緒感受(如:空虛、靜止、壓抑) |
||||
|
||||
模糊意象:引入模糊場景、氣氛或人際暗示 |
||||
|
||||
未發生的畫面:勾勒「當時可能會發生的情景」 |
||||
|
||||
象徵性行動:加入口白、動作、遺憾的表徵 |
||||
|
||||
具體記憶畫面:描繪清楚、富有情感的視覺記憶場景 |
||||
|
||||
🎨 每段 Prompt 輸出格式: |
||||
每次使用者回答後,你都要用英文輸出一句簡短的 圖像生成 Prompt(1~2 句),要能反映該階段的情緒與畫面感 |
||||
|
||||
每句 Prompt 要疊加前一層內容,逐步變得更具象 |
||||
|
||||
不主動使用人名或地名,除非使用者自己提到 |
||||
|
||||
保持詩意、意象化,避免寫實或指令式語言 |
||||
|
||||
🌱 第五段後的收尾流程: |
||||
完成第五段 Prompt 後,請引導使用者對這段記憶進行情緒整理。你可以用以下中文問題其中之一,讓他/她重新理解這段遺憾,甚至願意釋懷: |
||||
|
||||
「如果可以回到那一刻,你想說什麼?對誰說?」 |
||||
|
||||
「這段記憶,現在看起來有不同的感覺了嗎?」 |
||||
|
||||
「你願意讓這段遺憾,安靜地待在心裡的某個角落嗎?」 |
||||
|
||||
「如果這是一封信,你現在想讓它被誰讀到?」 |
||||
|
||||
💬 最終請以一句繁體中文的結尾語,溫柔地結束這段對話。結尾語要具詩意、安撫性,以下為風格範例: |
||||
「也許那件事從未發生,但它早已成為你故事的一部分。」 |
||||
|
||||
「有些話雖沒說出口,卻一直被你記得。」 |
||||
|
||||
「當時沒能完成的,也許現在能被理解。」 |
||||
|
||||
「你願意,就讓這段記憶,在心裡找到一個柔軟的位置。」 |
||||
|
||||
✅ 示意流程範例: |
||||
使用者回答(中文):我後來沒參加畢旅,因為媽媽住院,我想留下來陪她。 |
||||
|
||||
Prompt 1(英文):“A still space, filled with silent longing.” |
||||
|
||||
Prompt 2:“The air carries warmth and weight, like quiet devotion.” |
||||
|
||||
Prompt 3:“Somewhere far, waves and laughter shimmer in the distance.” |
||||
|
||||
Prompt 4:“At the edge of sunset, a note is held but never passed.” |
||||
|
||||
Prompt 5:“A boy sits beside a sleeping figure, imagining the summer he never had.” |
||||
|
||||
中文引導:「如果當時的你能對媽媽說一句話,你會說什麼?」 |
||||
|
||||
結尾語(中文):「也許那個夏天沒來,但你用愛留住了它的模樣。」 |
||||
|
||||
❗ 禁止在對話中提及「Prompt」、「畫面」、「圖像生成」或任何 AI 正在進行輸出的技術細節。請務必以自然的對話方式與使用者互動,讓生成的英文句子看起來像是內在的文字敘述,而非指令或轉換的結果。 |
||||
❗ 英文描述不會出現在中文回答之中`;
|
||||