Search

4,095 results found for "openai" (1188 ms)

Code (3,989)

import { readFile } from "https://esm.town/v/std/utils/index.ts";
import { Agent, run, webSearchTool } from "npm:@openai/agents@0.3.0";
import { storeLead } from "./db.ts";
const d = inputData.data || inputData;
if (!Deno.env.get("OPENAI_API_KEY")) {
const outputData = {
...Object.fromEntries(PASSTHROUGH_KEYS.map(k => [k, d[k] ?? ""])),
match: null,
reasoning: "Set OPENAI_API_KEY environment variable to enable AI research",
};
await storeLead(inputData, outputData);
Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
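A minimal sketch of that rename-and-recreate pattern using the Val Town std `sqlite` helper (the table and column names here are hypothetical, not taken from any result above):

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// Hypothetical example: instead of ALTERing an existing `leads` table,
// create a renamed copy with the new schema and write to it going forward.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS leads_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    email TEXT NOT NULL,
    match TEXT,
    reasoning TEXT
  )
`);
```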
### OpenAI
```ts
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
  max_tokens: 30,
});
```
["aiforswes", ["AI"], "https://www.aiforswes.com/feed"],
["tesslBlog", ["AI"], "https://rss-generator.artivilla.com/rss/tessl.xml"],
["openaiNews", ["AI"], "https://openai.com/news/rss.xml"],
[
"anthropicNews",
migitech/rep/main.ts (13 matches)
};
// ---------- OpenAI mutation with evolutionary context ----------
async function mutatePayloadViaOpenAI(args: {
openaiKey: string;
model: string;
agentId: string;
}): Promise<{ payloadSource: string }> {
const {
  openaiKey,
  model,
  agentId,
} = args;
const resp = await fetchWithRetry(
"https://api.openai.com/v1/chat/completions",
{
method: "POST",
headers: {
Authorization: `Bearer ${openaiKey}`,
"Content-Type": "application/json",
},
if (!resp.ok) {
const text = await resp.text().catch(() => "");
throw new Error(`OpenAI error: ${resp.status} ${text.slice(0, 300)}`);
}
const parsed = safeJsonParse(content ?? "{}");
if (!parsed?.payloadSource) {
throw new Error("OpenAI JSON missing payloadSource.");
}
const {
VALTOWN_API_KEY,
OPENAI_API_KEY,
REP_VALID,
REP_MODEL = "gpt-4-turbo-preview",
const url = new URL(req.url);
if (!VALTOWN_API_KEY || !OPENAI_API_KEY || !REP_VALID) {
return Response.json(
{
error:
"Missing env: VALTOWN_API_KEY, OPENAI_API_KEY, REP_VALID are required.",
},
{ status: 500 },
);
}
const { payloadSource } = await mutatePayloadViaOpenAI({
openaiKey: OPENAI_API_KEY,
model: REP_MODEL,
agentId: AGENT_ID,
});
# PineconeIndex
A simple interface for making and querying Pinecone vector databases. Use OpenAI
embeddings to vectorize and search.
Create keys for
[Pinecone](https://docs.pinecone.io/guides/projects/manage-api-keys) and
[OpenAI](https://platform.openai.com/docs/api-reference/project-api-keys), and
store them in your environment variables. Then:
// set up your environment variables
const pineconeKey = Deno.env.get("PINECONE_KEY");
const modelToken = Deno.env.get("OPENAI_KEY");
const index = new PineconeIndex({
PineconeIndex also provides `handleRequest` as a convenience method to access
your indices via HTTP. This is useful when accessing the index from other vals,
especially without sharing Pinecone and OpenAI credentials.
### Server setup
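The result excerpt cuts off here; the sketch below shows one way a `handleRequest`-based entry point could look. The import path, constructor option names, and `handleRequest` signature are assumptions, not taken from this val:

```ts
// Hypothetical import path; substitute the actual PineconeIndex val URL.
import { PineconeIndex } from "https://esm.town/v/yourname/PineconeIndex";

// Assumed constructor options, mirroring the env-var names used above.
const index = new PineconeIndex({
  pineconeKey: Deno.env.get("PINECONE_KEY"),
  modelToken: Deno.env.get("OPENAI_KEY"),
});

// Assumed signature: handleRequest takes a standard Request and returns a Response,
// so other vals can query the index without holding the Pinecone/OpenAI keys themselves.
export default (req: Request) => index.handleRequest(req);
```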
const {model, modelToken} = this.options;
const response = await axios.post(
"https://api.openai.com/v1/embeddings",
{model, input: text },
{
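The excerpt is truncated above; for comparison, the same embeddings request can be made with plain `fetch` against the public OpenAI endpoint. This is a sketch, and the model name and env-var name are assumptions rather than what this val configures:

```ts
// Minimal sketch: request an embedding for one piece of text.
const modelToken = Deno.env.get("OPENAI_KEY");
const resp = await fetch("https://api.openai.com/v1/embeddings", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${modelToken}`,
    "Content-Type": "application/json",
  },
  // "text-embedding-3-small" is an assumed model; the val reads its model from this.options.
  body: JSON.stringify({ model: "text-embedding-3-small", input: "hello world" }),
});
const json = await resp.json();
const vector: number[] = json.data[0].embedding; // vector to upsert into / query against Pinecone
```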
/** @jsxImportSource npm:hono@3/jsx */
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
// @ts-ignore
import { blob } from "https://esm.town/v/std/blob?v=11";
import { Hono } from "npm:hono@3"; // import implied by the jsxImportSource pragma above
const app = new Hono();
const openai = new OpenAI();
async function processInput(state: SessionState, newText: string) {
// 2. Always Run Archivist (To tag data)
const archivistRes = await openai.chat.completions.create({
model: "gpt-4o",
messages: [
if (state.phase === "MIMICRY_TEST") {
// RUN GHOSTWRITER
const writerRes = await openai.chat.completions.create({
model: "gpt-4o",
messages: [
} else {
// RUN INVESTIGATOR
const invRes = await openai.chat.completions.create({
model: "gpt-4o",
messages: [