Search results: Code (2,386 matches)
// @ts-ignore
// Using the official npm package for maximum stability
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";
import { Hono } from "npm:hono@4.4.12";
// ...
app.use("/api/*", cors());
// ...
// Helper function to safely create an OpenAI client
function getOpenAIClient() {
  return new OpenAI();
}
// ...
  try {
    const openai = getOpenAIClient();
    const now = new Date().toISOString();
    const conversationId = crypto.randomUUID();
    // ...
  });
  // ...
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: INITIAL_PROMPT }],
    // ...
  try {
    const openai = getOpenAIClient();
    const convResult = await sqlite.execute({
      sql: `SELECT user_id, history, latitude, longitude FROM ${T.conversations} WHERE id = :id`,
      // ...
    if (historyArr.length >= 7) {
      // --- Summarize the conversation ---
      const completion = await openai.chat.completions.create({
        model: "gpt-4o",
        messages: [
          // ...
    } else {
      // --- Continue the conversation ---
      const completion = await openai.chat.completions.create({
        model: "gpt-4o",
        messages: [
*/
import { OpenAI } from "https://esm.town/v/std/openai";
import { Octokit } from "https://esm.sh/@octokit/rest@20.0.2";
import { WebClient } from "https://esm.sh/@slack/web-api@7.0.2";
// ...
async function isBugReportLLM(text: string): Promise<boolean> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      console.warn("OpenAI API key not found - bug detection disabled");
      return false;
    }
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      messages: [
        {
// ...
async function findRelatedIssues(slackMessage: string, issues: any[]): Promise<any[]> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      return [];
    }
    // ...
  }).join("\n\n");
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    messages: [
      {
1. When a new message is posted in a configured Slack channel (e.g. #bugs or #support), Slack sends an event to this val.
2. The val makes an OpenAI call to determine whether the message is a bug report (a minimal sketch of this call follows the list).
3. If it is, it searches GitHub for semantically related open issues with a separate OpenAI call.
4. It posts a comment in the Slack thread with links to the related GitHub issues, each with a "Relevance Score".
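Step 2 can be a small yes/no classification call. The sketch below assumes the std OpenAI wrapper used elsewhere on this page; the helper name, prompt wording, and gpt-4o model choice are illustrative, not taken from the val itself.

import { OpenAI } from "https://esm.town/v/std/openai";

// Hypothetical helper: ask the model whether a Slack message reads like a bug report.
async function looksLikeBugReport(message: string): Promise<boolean> {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
      {
        role: "system",
        content: "You are a triage assistant. Answer only 'yes' or 'no': is the following Slack message a bug report?",
      },
      { role: "user", content: message },
    ],
    max_tokens: 3,
  });
  // Treat anything that does not start with "yes" as "not a bug report".
  return completion.choices[0]?.message?.content?.trim().toLowerCase().startsWith("yes") ?? false;
}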
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";
import { Hono } from "npm:hono@4.4.12";
// ...
  }
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// ...
  }
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { stream, streamText } from "npm:hono/streaming";
import { Hono } from "npm:hono@4.4.12";
// ...
app.post("/journey", async (c) => {
  const { birthData, tripType, tripDuration } = await c.req.json();
  const openai = new OpenAI();
  const astroData = astrocartography.getPlanetaryLines(birthData);
  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [{ role: "system", content: GENERATION_STYLE_GUIDE }, { role: "user", content: prompt }],
// @ts-ignore
import { blob } from "https://esm.town/v/std/blob?v=11";
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";
import { Hono } from "npm:hono@4.4.12";
// ...
  const promptData = rows[0][0];
  const openai = new OpenAI();
  const scriptCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// ...
  if (!scriptText) throw new Error("AI did not generate a valid script.");
  const audioResponse = await openai.audio.speech.create({
    model: "tts-1-hd",
    voice: TTS_VOICE,
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";
import { Hono } from "npm:hono@4.4.12";
// ...
    </main>
    <footer>
      <p>Powered by <a href="${safeSourceUrl}" target="_blank">Val Town</a> & OpenAI</p>
    </footer>
  </div>
// ...
  }
  const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { sqlite } from "https://esm.town/v/std/sqlite?v=4";
// ...
  }
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { Hono } from "npm:hono@4.4.12";
// ...
  if (!userText) throw new Error("User description is required.");
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// ...
  }
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
// ...
  }
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
export class ApiLogger {
  /**
   * Log OpenAI API response with headers and request ID
   */
  static logOpenAIResponse(response: Response, data: Record<string, unknown>, context: string): void {
    const respId = response.headers.get("x-request-id") || response.headers.get("resp-id") || "unknown";
    const model = data.model || "unknown";
    console.log(`OpenAI ${context}: model=${model} resp_id=${respId} status=${response.status}`);
    console.log("Response headers:", Object.fromEntries(response.headers.entries()));
    console.log("Response data:", JSON.stringify(data, null, 2));
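A hedged usage sketch: assuming this logger is paired with a raw fetch to the OpenAI REST endpoint, a call site might look like the following (the request payload and the "chat.completions" context label are illustrative, not taken from this val).

// Hypothetical call site for ApiLogger; the endpoint and payload are the standard REST shapes.
const response = await fetch("https://api.openai.com/v1/chat/completions", {
  method: "POST",
  headers: {
    "Authorization": `Bearer ${Deno.env.get("OPENAI_API_KEY")}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({ model: "gpt-4o", messages: [{ role: "user", content: "ping" }] }),
});
const data = await response.json() as Record<string, unknown>;
ApiLogger.logOpenAIResponse(response, data, "chat.completions");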
reconsumeralization
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
/**
* Practical Implementation of Collective Content Intelligence
* Bridging advanced AI with collaborative content creation
*/
exp
kwhinnery_openai
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";
export default async function(req: Request): Promise<Response> {
if (req.method === "OPTIONS") {
return new Response(null, {
headers: {
"Access-Control-Allow-Origin": "*",