From e4d173b0cd03886861ac368bdbb7eca67a496ab7 Mon Sep 17 00:00:00 2001
From: reng
Date: Thu, 5 Jun 2025 11:33:24 +0800
Subject: [PATCH] update

---
 index.js         | 86 +++++++++++++++++++++++++++++++++++++++++-------
 system_prompt.js |  5 ++-
 vite/src/App.jsx |  6 ++--
 3 files changed, 81 insertions(+), 16 deletions(-)

diff --git a/index.js b/index.js
index 1f50f88..48b0244 100644
--- a/index.js
+++ b/index.js
@@ -11,18 +11,18 @@ config(); // Load environment variables from .env file
 
 const Output = {
   "type": "object",
   "properties": {
-    "prompt": {
-      "type": "string",
-      "description": "The generated image prompt based on the user's input and the system's guidance.",
-    },
     "output_text": {
       "type": "string",
       "description": "The final output text generated by the model, without image prompt",
+    },
+    "prompt": {
+      "type": "string",
+      "description": "The generated image prompt based on the user's input and the system's guidance.",
     }
   },
   "additionalProperties": false,
   "required": [
-    "prompt", "output_text"
+    "output_text","prompt"
   ]
 }
@@ -53,13 +53,73 @@ const port = process.env.PORT || 3000;
 app.use(express.json());
 app.use(cors());
 
+app.post("/generate_stream", async (req, res) => {
+  const { input } = req.body;
+
+
+  try {
+    const response = await client.responses.create({
+      model: "gpt-4.1",
+      input: [
+        {
+          role: "system",
+          content: [
+            {
+              type:'input_text',
+              text: system_prompt,
+            }
+          ]
+        },
+        ...input
+      ],
+      text:{
+        format:{
+          type:'json_schema',
+          name:"output_prompt",
+          schema: Output,
+        }
+      },
+      stream:true,
+    });
+
+    for await (const event of response){
+      console.log(event);
+      if(event.type=='response.output_text.delta'){
+
+        // console.log(event.delta);
+
+      }else if(event.type=='response.output_item.done'){
+
+        console.log("Generated response:", event.item.content);
+        const json=JSON.parse(event.item.content[0].text);
+
+        // send prompt to TD
+        osc_client.send('/prompt', json.prompt, (error) => {
+          if (error) {
+            console.error('Error sending OSC message:', error);
+          } else {
+            console.log('OSC message sent successfully');
+          }
+        });
+
+        res.json(json);
+      }
+
+    }
+
+
+
+
+  } catch (error) {
+    console.error("Error generating response:", error);
+    res.status(500).json({ error: "Failed to generate response" });
+  }
+});
+
+
 app.post("/generate", async (req, res) => {
   const { input } = req.body;
-  // console.log(input[input.length-1], 'input received');
-  // osc_client.send('/prompt', input[input.length-1]?.content[0]?.text, (err) => {
-  //   console.log('OSC', err ? `Error: ${err}` : 'Success');
-  // });
-  // return;
+
 
   try {
     const response = await client.responses.create({
@@ -82,10 +142,11 @@ app.post("/generate", async (req, res) => {
           name:"output_prompt",
           schema: Output,
         }
-      }
+      },
     });
 
-    console.log("Generated response:", response);
+
+    console.log("Generated response:", response.output_text);
     const json=JSON.parse(response.output_text);
 
     // send prompt to TD
@@ -98,6 +159,7 @@ app.post("/generate", async (req, res) => {
     });
 
     res.json(json);
+
 
 
   } catch (error) {
diff --git a/system_prompt.js b/system_prompt.js
index 91d3231..d052bc6 100644
--- a/system_prompt.js
+++ b/system_prompt.js
@@ -56,4 +56,7 @@ Prompt 5:“A boy sits beside a sleeping figure, imagining the summer he never
 
 中文引導:「如果當時的你能對媽媽說一句話,你會說什麼?」
 
-結尾語(中文):「也許那個夏天沒來,但你用愛留住了它的模樣。」`;
\ No newline at end of file
+結尾語(中文):「也許那個夏天沒來,但你用愛留住了它的模樣。」
+
+❗ 禁止在對話中提及「Prompt」、「畫面」、「圖像生成」或任何 AI 正在進行輸出的技術細節。請務必以自然的對話方式與使用者互動,讓生成的英文句子看起來像是內在的文字敘述,而非指令或轉換的結果。
+❗ 英文描述不會出現在中文回答之中`;
\ No newline at end of file
diff --git a/vite/src/App.jsx b/vite/src/App.jsx
index daf8ea9..1ccf62c 100644
--- a/vite/src/App.jsx
+++ b/vite/src/App.jsx
@@ -103,7 +103,7 @@ function App() {
           history.map((item, index) => (
             {item.content.map((content, idx) => (
-                {content.text}
+                {content.text}
             ))}
           ))
@@ -111,8 +111,8 @@ function App() {
-
-
+
+
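
Usage sketch for the new /generate_stream route added above. The helper name generateStream and the history/userText parameters are illustrative, not part of the patch; the sketch assumes the Express server is reachable at http://localhost:3000 (the PORT fallback in index.js, with cors() already enabled) and that each input item uses the { role, content: [{ type: 'input_text', text }] } shape the existing /generate route receives. Although the handler sets stream: true on the OpenAI request, it currently replies once with the parsed { output_text, prompt } JSON, so a plain fetch is sufficient on the client side.

    // Illustrative client helper for the /generate_stream endpoint added in this patch.
    // Assumes the server from index.js is running on localhost:3000.
    async function generateStream(history, userText) {
      // Append the new user turn to the prior conversation items.
      const input = [
        ...history,
        { role: 'user', content: [{ type: 'input_text', text: userText }] },
      ];

      const res = await fetch('http://localhost:3000/generate_stream', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ input }),
      });
      if (!res.ok) throw new Error(`Request failed with status ${res.status}`);

      // The route currently returns a single JSON object: { output_text, prompt }.
      return res.json();
    }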