1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import OpenAI from "npm:openai";
// Reads OPENAI_API_KEY from the environment by default.
const openai = new OpenAI();
import process from "node:process";
// Define our assistant.
// NOTE(review): this runs at module load, so every cold start creates a
// brand-new assistant via the API; nothing visible here deletes old ones —
// confirm whether assistants should be reused/cleaned up.
const assistant = await openai.beta.assistants.create({
name: "Val Tutor",
instructions: `You are a personal Val tutor.
You help brainstorm ideas for fun Vals to write on Val Town.
You only suggest ideas that can be implemented on https://val.town.
You keep your responses brief and to the point. `,
model: "gpt-4o",
});
// Create a thread to chat in. All messages below are appended to this
// single thread, so the assistant sees the full conversation history.
const thread = await openai.beta.threads.create();
// These are the messages we'll send to the assistant, in order.
const messages = ["What should I build today?", "Very cool. Can you make it even cooler?"];
/**
 * HTTP handler that streams a scripted conversation with the assistant
 * back to the client as it is generated.
 *
 * For each entry in the module-level `messages` array it: echoes the user
 * message, prints "loading" dots until the assistant starts answering,
 * then relays the assistant's text deltas. The stream closes after the
 * last scripted message, or stops early if the client disconnects.
 *
 * @param req Incoming request; only used to short-circuit /favicon.ico.
 * @returns A streaming Response (Content-Type: text/event-stream).
 */
export default async function(req: Request): Promise<Response> {
  const url = new URL(req.url);
  if (url.pathname === "/favicon.ico") {
    return new Response(null, { status: 404 });
  }
  // Set when the client disconnects so we stop writing to a closed stream.
  let ended = false;
  // Handle for the loading-dots timer; typed so clearInterval is well-formed.
  let interval: ReturnType<typeof setInterval> | undefined;
  // One encoder for all writes instead of allocating one per chunk.
  const encoder = new TextEncoder();
  const body = new ReadableStream({
    async start(controller) {
      const write = (str: string) => {
        if (!ended) controller.enqueue(encoder.encode(str));
      };
      for (let i = 0; i < messages.length; i++) {
        if (ended) break;
        write("\nuser > " + messages[i] + "\n");
        // Print dots to indicate that we're loading.
        interval = setInterval(() => write("."), 100);
        // Append the user message to the thread before starting the run.
        // Awaited directly (not inside a Promise executor) so a failure
        // errors the stream instead of hanging it.
        await openai.beta.threads.runs && await openai.beta.threads.messages.create(
          thread.id,
          { role: "user", content: messages[i] },
        );
        // Wait for this run's reply to finish before sending the next
        // message; reject on stream errors so they surface to the client.
        await new Promise<void>((resolve, reject) => {
          openai.beta.threads.runs.stream(thread.id, {
            assistant_id: assistant.id,
            truncation_strategy: { type: "auto" },
          })
            .on("textCreated", () => {
              clearInterval(interval);
              write("\nassistant > ");
            })
            .on("textDelta", (textDelta) => write(textDelta.value ?? ""))
            .on("textDone", () => resolve())
            .on("error", (err) => {
              clearInterval(interval);
              reject(err);
            });
        });
      }
      // Close after the final message. (Previously hard-coded to i === 1,
      // which broke whenever the messages array wasn't exactly length 2.)
      if (!ended) controller.close();
    },
    cancel() {
      // Stop chatting if the request is terminated.
      ended = true;
      clearInterval(interval);
    },
  });
  return new Response(body, {
    headers: {
      "Content-Type": "text/event-stream",
    },
  });
}