Search
key_value_api
@brianleroux
An interactive, runnable TypeScript val by brianleroux
HTTP
let res = await sqlite.execute(`select key, value from kv`);
return c.json(res);
app.post("/", async function(c) {
let res = await sqlite.execute({
sql: `insert into kv(key, value) values (:key, :value)`,
![dantaeyoung avatar](https://images.clerk.dev/oauth_github/img_2PhRJV5CizAxF2bsCR64RgR1hVx.jpeg)
weatherGPT
@dantaeyoung
If you fork this, you'll need to set OPENAI_API_KEY in your Val Town Secrets.
Cron
If you fork this, you'll need to set `OPENAI_API_KEY` in your [Val Town Secrets](https://www.val.town/settings/secrets).
import { email } from "https://esm.town/v/std/email?v=11";
import { OpenAI } from "npm:openai";
let location = "brooklyn ny";
).then(r => r.json());
const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
messages: [{
console.log(text);
export async function weatherGPT() {
await email({ subject: "Weather Today", text });
val_hrjcREMr1T
@dhvanil
An interactive, runnable TypeScript val by dhvanil
HTTP
export async function val_hrjcREMr1T(req) {
try {
// Execute the code directly and capture its result
API_URL
@std
Val Town API URL When Val Town code is run on Val Town servers we use a local URL so we can save time by skipping a roundtrip to the public internet. However, if you want to run your vals that use our API, ie std library vals, locally, you'll want to use our public API's URL, https://api.val.town. We recommend importing and using std/API_URL whenever you use our API so that you are always using the most efficient route. Example Usage import { API_URL } from "https://esm.town/v/std/API_URL";
const response = await fetch(`${API_URL}/v1/me`, {
headers: {
Authorization: `Bearer ${Deno.env.get("valtown")}`,
Accept: "application/json",
},
});
const data = await response.json();
console.log(data)
Script
function envOrUndefined(key: string): string | undefined {
// try/catch prevents crashes if the script doesn't have env access
try {
![lionad avatar](https://images.clerk.dev/oauth_github/img_2RSOoRuDk74x1wEjUOpK9laOUeb.png)
myApi
@lionad
An interactive, runnable TypeScript val by lionad
Script
export function myApi(name) {
console.email("hi " + name);
val_P5Jos9W9mM
@dhvanil
An interactive, runnable TypeScript val by dhvanil
HTTP
export async function val_P5Jos9W9mM(req) {
try {
const body = await req.text();
// Create a function from the provided code and execute it
const userFunction = async () => {
const findPrimes = (n) => {
// Execute and capture the result
const result = await userFunction();
// Handle different types of results
falDemoApp
@ascarden
@jsxImportSource https://esm.sh/react
HTTP
import { falProxyRequest } from "https://esm.town/v/stevekrouse/falProxyRequest";
function App() {
const [prompt, setPrompt] = useState("");
</div>
function client() {
createRoot(document.getElementById("root")).render(<App />);
if (typeof document !== "undefined") { client(); }
export default async function server(req: Request): Promise<Response> {
const url = new URL(req.url);
val_PJEj4CadkM
@dhvanil
An interactive, runnable TypeScript val by dhvanil
HTTP
export async function val_PJEj4CadkM(req) {
try {
// Execute the code directly and capture its result
competentCoffeeTyrannosaurus
@shivammunday
@jsxImportSource https://esm.sh/react@18.2.0
HTTP
import { createRoot } from "https://esm.sh/react-dom@18.2.0/client";
function WebsiteChatbot() {
const [messages, setMessages] = useState([
const [isLoading, setIsLoading] = useState(false);
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
</div>
function client() {
const chatbotRoot = document.createElement('div');
if (typeof document !== 'undefined') { client(); }
export default async function server(request: Request) {
if (request.method === 'POST' && new URL(request.url).pathname === '/chat') {
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();
const body = await request.json();
Be conversational, helpful, and website-specific in your responses.`;
const completion = await openai.chat.completions.create({
messages: [
![stevekrouse avatar](https://images.clerk.dev/uploaded/img_2PqHa2Gsy93xQrjh2w78Xu0cChW.jpeg)
dailyDadJoke
@stevekrouse
Daily Dad Joke How do you make a programmer laugh every morning? A dad joke cron job! Setup Fork this val Click Create fork 🤣🤣🤣🤣 API This val uses the icanhazdadjoke API. You can find more docs here, such as how to filter by type.
Cron
import { email } from "https://esm.town/v/std/email";
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON";
export async function dailyDadJoke() {
let { setup, punchline } = await fetchJSON("https://official-joke-api.appspot.com/random_joke");
return email({
genval
@andreterron
Generate a Val Uses the OpenAI API to generate code for a val based on the description given by the user. TODO: [ ] Improve code detection on GPT response [ ] Give more context on val town exclusive features like console.email or @references [ ] Enable the AI to search val town to find other vals to use
HTTP
# [Generate a Val](https://andreterron-genval.express.val.run)
Uses the OpenAI API to generate code for a val based on the description given by the user.
TODO:
const labelClass = "w-full text-sm font-bold uppercase text-emerald-800 [&>span]:pl-0.5 flex flex-col gap-2";
function parseCookies(cookie: string) {
const out: Record<string, string> = {};
const code = await generateValCode(
process.env.VT_OPENAI_KEY,
value.description,
<span>Description</span>
<input required class=${inputClass} id="description" name="description" type="text" placeholder="Function to return a random number" autocomplete="off" />
</label>
tidyRedWhale
@websrai
@jsxImportSource https://esm.sh/react
HTTP
import { createRoot } from "https://esm.sh/react-dom/client";
function AssistantChat() {
const [messages, setMessages] = useState<{role: string, content: string}[]>([
</div>
function client() {
createRoot(document.getElementById("root")).render(<AssistantChat />);
if (typeof document !== "undefined") { client(); }
export default async function server(request: Request): Promise<Response> {
if (request.method === 'POST' && new URL(request.url).pathname === '/chat') {
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();
try {
const { messages } = await request.json();
const completion = await openai.chat.completions.create({
model: "gpt-4o-mini",
![yawnxyz avatar](https://images.clerk.dev/oauth_github/img_2NnaHhpxNuH1xWRIRjQNoo16TVc.jpeg)
stringInferExample
@yawnxyz
example of inferring params from a prompt: https://x.com/yawnxyz/status/1812922642510586039 putting results in does wonders use an "extraction guide" to direct results
Script
import { modelProvider } from "https://esm.town/v/yawnxyz/ai";
// selects from xml selectors like <json></json> gets better results
export function selectFromString(str, selector = "json", returnAll = false) {
const regex = new RegExp(`<${selector}>([\\s\\S]*?)</${selector}>`, 'gm');
const matches = [...str.matchAll(regex)].map(match => match[1]);
myApi
@dialnco
An interactive, runnable TypeScript val by dialnco
HTTP
export function myApi(name) {
return "hi " + name;
![webup avatar](https://images.clerk.dev/uploaded/img_2S4HPCT6HNZNKzadzuovxeU1BRZ.webp)
pipeSampleLLMBind
@webup
An interactive, runnable TypeScript val by webup
Script
type: "chat",
provider: "openai",
const model = await mb();
const tracer = await tb();
const functionSchema = [
name: "joke",
const chain = prompt.pipe(model.bind({
functions: functionSchema,
function_call: { name: "joke" },
return await chain.invoke({ subject: "bears" }, { callbacks: [tracer] });