Search
autoGPT_Test
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
export let autoGPT_Test = (async () => {
const { Configuration, OpenAIApi } = await import("npm:openai@3.2.1");
const configuration = new Configuration({
apiKey: process.env.openai,
const openai = new OpenAIApi(configuration);
const completion = await openai.createChatCompletion({
model: "gpt-4",
getModelBuilder
@bluemsn
An interactive, runnable TypeScript val by bluemsn
Script
export async function getModelBuilder(spec: {
provider?: "openai" | "huggingface";
} = { type: "llm", provider: "openai" }, options?: any) {
if (spec?.provider === "openai")
args.openAIApiKey = process.env.OPENAI_API_KEY;
matches({ type: "llm", provider: "openai" }),
const { OpenAI } = await import("npm:langchain/llms/openai");
return new OpenAI(args);
matches({ type: "chat", provider: "openai" }),
const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
sqliteExplorerApp
@nbbaier
SQLite Explorer View and interact with your Val Town SQLite data. It's based on Steve's excellent SQLite Admin val, adding the ability to run SQLite queries directly in the interface. This new version has a revised UI that's heavily inspired by LibSQL Studio by invisal . This is now more of an SPA, with tables, queries and results showing up on the same page. Install Install the latest stable version (v86) by forking this val: Authentication Login to your SQLite Explorer with password authentication with your Val Town API Token as the password. Todos / Plans [ ] improve error handling [ ] improve table formatting [ ] sticky table headers [x] add codemirror [ ] add loading indication to the run button (initial version shipped) [ ] add ability to favorite queries [ ] add saving of last query run for a table (started) [ ] add visible output for non-query statements [ ] add schema viewing [ ] add refresh to table list sidebar after CREATE/DROP/ALTER statements [ ] add automatic execution of initial select query on double click [x] add views to the sidebar [ ] add triggers to sidebar [ ] add upload from SQL, CSV and JSON [ ] add ability to connect to a non-val town Turso database [x] fix wonky sidebar separator height problem (thanks to @stevekrouse) [x] make result tables scrollable [x] add export to CSV, and JSON (CSV and JSON helper functions written in this val . Thanks to @pomdtr for merging the initial version!) [x] add listener for cmd+enter to submit query
HTTP
- [x] make result tables scrollable
- [x] add export to CSV, and JSON (CSV and JSON helper functions written in [this val](https://www.val.town/v/nbbaier/sqliteExportHelpers). Thanks to @pomdtr for merging the initial version!)
- [x] add listener for cmd+enter to submit query
chatAgentWithCustomPrompt
@jacoblee93
An interactive, runnable TypeScript val by jacoblee93
Script
export const chatAgentWithCustomPrompt = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain/chat_models/openai"
const { initializeAgentExecutorWithOptions } = await import(
"https://esm.sh/langchain/tools/calculator"
const model = new ChatOpenAI({
temperature: 0,
openAIApiKey: process.env.OPENAI_API_KEY,
const tools = [
modelSampleChatCall
@webup
An interactive, runnable TypeScript val by webup
Script
const builder = await getModelBuilder({
type: "chat",
provider: "openai",
const model = await builder();
const { SystemMessage, HumanMessage } = await import("npm:langchain/schema");
GreetingCard
@banebot
Generate a greeting card! This script exposes two endpoints: one serving a form to capture information used to create a greeting card with OpenAI, and the other generating the greeting card based on the query parameters provided.
HTTP
# Generate a greeting card!
This script exposes two endpoints: one serving a form to capture information used to create a greeting card with OpenAI, and the other generating the greeting card based on the query parameters provided.
import { OpenAI } from "https://esm.town/v/std/openai";
import { Hono } from "npm:hono";
import { cors } from "npm:hono/cors";
const openai = new OpenAI(),
chat = openai.chat,
chatCompletions = chat.completions;
<script>
function submitForm(event) {
event.preventDefault();
web_whzyD0pbS0
@dhvanil
An interactive, runnable TypeScript val by dhvanil
HTTP
export async function web_whzyD0pbS0(req) {
return new Response(`<!DOCTYPE html>
<html>
langchainEx
@jacoblee93
// Forked from @stevekrouse.langchainEx
Script
export const langchainEx = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain/chat_models/openai"
const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
const { LLMChain } = await import("https://esm.sh/langchain/chains");
const model = new ChatOpenAI({
temperature: 0.9,
openAIApiKey: process.env.OPENAI_API_KEY,
verbose: true,
braveAgent
@jacoblee93
// Shows how to use the Brave Search tool in a LangChain agent
Script
export const braveAgent = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain/chat_models/openai"
const { BraveSearch } = await import("https://esm.sh/langchain/tools");
"https://esm.sh/langchain/agents"
const model = new ChatOpenAI({
temperature: 0,
openAIApiKey: process.env.OPENAI_API_KEY,
const tools = [
emeraldRaccoon
@willthereader
ChatGPT Implemented in Val Town Demonstrates how to use assistants and threads with the OpenAI SDK and how to stream the response with Server-Sent Events
HTTP
Demonstrates how to use assistants and threads with the OpenAI SDK and how to stream the response with Server-Sent Events
import OpenAI from "npm:openai";
const openai = new OpenAI();
document.getElementById("input").addEventListener("submit", function(event) {
eventSource.onmessage = function(event) {
eventSource.onerror = function() {
const thread = await openai.beta.threads.create();
const assistant = await openai.beta.assistants.create({
await openai.beta.threads.messages.create(
const run = openai.beta.threads.runs.stream(threadId, {
conversationalRetrievalQAChainSummaryMemory
@jacoblee93
An interactive, runnable TypeScript val by jacoblee93
Script
export const conversationalRetrievalQAChainSummaryMemory = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain/chat_models/openai"
const { OpenAIEmbeddings } = await import(
"https://esm.sh/langchain/embeddings/openai"
const { ConversationSummaryMemory } = await import(
"https://esm.sh/langchain/chains"
const chatModel = new ChatOpenAI({
openAIApiKey: process.env.OPENAI_API_KEY,
/* Create the vectorstore */
[{ id: 2 }, { id: 1 }, { id: 3 }],
new OpenAIEmbeddings({
openAIApiKey: process.env.OPENAI_API_KEY,
/* Create the chain */
debugValEmbeddings
@janpaul123
An interactive, runnable TypeScript val by janpaul123
Script
import _ from "npm:lodash";
import OpenAI from "npm:openai";
const openai = new OpenAI();
const queryEmbedding = (await openai.embeddings.create({
model: "text-embedding-3-small",
console.log(queryEmbedding.slice(0, 4));
const embedding = await openai.embeddings.create({
model: "text-embedding-3-small",
import { email } from "https://esm.town/v/std/email?v=12";
export default async function(interval: Interval) {
const dynamiclandWebsiteHash = await blob.getJSON("dynamiclandWebsiteHash");
console.log("Hash is the same, no email sent.", { dynamiclandWebsiteHash });
const queryEmbeddingVal = (await openai.embeddings.create({
model: "text-embedding-3-small",
multipleKeysAndMemoryConversationChainExample
@jacoblee93
An interactive, runnable TypeScript val by jacoblee93
Script
export const multipleKeysAndMemoryConversationChainExample = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain/chat_models/openai"
const { BufferMemory } = await import("https://esm.sh/langchain/memory");
const { ConversationChain } = await import("https://esm.sh/langchain/chains");
const llm = new ChatOpenAI({
modelName: "gpt-3.5-turbo",
openAIApiKey: process.env.OPENAI_API_KEY,
temperature: 0,
cronprompt
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
HTTP
import { OpenAI } from "https://esm.town/v/std/openai";
export async function cronprompt(prompt: string) {
const openai = new OpenAI();
const functionExpression = await openai.chat.completions.create({
messages: [
max_tokens: 30,
return functionExpression.choices[0].message.content;
export default async function(req: Request): Promise<Response> {
const query = new URL(req.url).searchParams;
connect4_agent
@saolsen
An interactive, runnable TypeScript val by saolsen
Script
export type Agent = (state: connect4.State) => connect4.Action;
export type AsyncAgent = (state: connect4.State) => Promise<connect4.Action>;
export function connect4_agent(agent: Agent | AsyncAgent) {
const app = new Hono();
app.get("/", (c) => c.json("connect4 agent"));