Search

Results include substring matches and semantically similar vals. Learn more
iamseeley avatar
testingRunAllInValTown
@iamseeley
An interactive, runnable TypeScript val by iamseeley
HTTP
import { runPythonCode } from "https://esm.town/v/iamseeley/pyodideMod";
export default async function handler(req: Request): Promise<Response> {
const pythonCode = `
import numpy as np
browniebroke avatar
myApi
@browniebroke
An interactive, runnable TypeScript val by browniebroke
Script
export function myApi(name) {
return "hi " + name;
neverstew avatar
createVal
@neverstew
An interactive, runnable TypeScript val by neverstew
Script
readme?: string;
privacy?: "public" | "unlisted" | "private"; // Added privacy option
export function createVal({ token, code, name, readme, privacy }: CreateValArgs): Promise<ValResponse> {
const body: Record<string, unknown> = {
code,
stevekrouse avatar
nakedAPIEx
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
export function nakedAPIEx(req, res) {
res.send("my returned srting");
stevekrouse avatar
runAllTests
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON";
export async function runAllTests({
names,
api,
christancula avatar
cerebras_coder
@christancula
This is an AI code assistant powered by Cerebras, running llama3.3-70b. Inspired by Hassan's Llama Coder. Setup Sign up for Cerebras Get a Cerebras API Key Save it in a Val Town environment variable called CEREBRAS_API_KEY Todos I'm looking for collaborators to help. Fork & send me PRs! [ ] Experiment with two prompt chain (started here)
HTTP
</a>
// Rest of the code remains the same, with only name changes
export default async function fiftyTwoStartupsCoder(req: Request): Promise<Response> {
// Dynamic import for SQLite to avoid client-side import
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
<meta property="og:site_name" content="52 Startups Coder">
<meta property="og:url" content="https://52startups.com"/>
<meta property="og:description" content="Turn your startup ideas into fully functional apps in less than a second – powered b
<meta property="og:type" content="website">
<meta property="og:image" content="https://stevekrouse-blob_admin.web.val.run/api/public/52StartupsCoder.jpg">
tekknolagi avatar
myApi
@tekknolagi
An interactive, runnable TypeScript val by tekknolagi
Script
export function myApi(name) {
return "hi " + name;
dhvanil avatar
val_80sL9x3ODx
@dhvanil
An interactive, runnable TypeScript val by dhvanil
HTTP
export async function val_80sL9x3ODx(req) {
try {
const body = await req.text();
// Create a function from the provided code and execute it
const userFunction = async () => {
const findPrimes = (n) => {
// Execute and capture the result
const result = await userFunction();
// Handle different types of results
ethanoscarson avatar
BrainF_Interpreter
@ethanoscarson
An interactive, runnable TypeScript val by ethanoscarson
Script
function interpret(code, options = {}) {
let {
tapeSize = 1024,
stevekrouse avatar
valwriter_react_clientside
@stevekrouse
[ ] streaming we can't stream into an actual val - we can only write a full piece of text, i guess we can stream back the code so we don't get bored while we wait [ ] send the code of the valwriter back to gpt only if it's related, might need some threads [ ] send errors, logs back to gpt [ ] get screenshots of the output back to gpt [ ] experiment with applying diffs instead of regenerating from scratch every time could also have it as a conversation as the main thing and only the diffs get applied or the whole code gets replaced, maybe tool use is the key here... which does make it seem like a custom gpt may be the better fit...
HTTP
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText";
import { chat } from "https://esm.town/v/stevekrouse/openai";
export default basicAuth(async (req) => {
"content-type": "text/html",
export function App() {
const example = examples[Math.floor(Math.random() * examples.length)];
content: `/** @jsxImportSource npm:react */
export default function() {
return <h1>{new Date().toLocaleTimeString()}</h1>;
content: await fetchText("https://esm.town/v/stevekrouse/cron2"),
export async function compile(description: string) {
const messages = [
await email({ subject: "Subject line", text: "Body of message" });
// OpenAI
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
messages: [
niek avatar
spotify
@niek
// Uncomment this line for one run of the Val to create the DB lol
HTTP
} catch (e) {
return c.json({ error: "Failed callback" + e });
async function getToken(c) {
const state = c.req.query("state") as string;
const authData = await db.select().from(table).where(eq(table.id, state)).limit(1);
lbb00 avatar
checkTensorArtWebStatus
@lbb00
An interactive, runnable TypeScript val by lbb00
Script
import { fetch } from "https://esm.town/v/std/fetch";
import process from "node:process";
export async function checkTensorArtWebStatus() {
async function sendLarkMessage(message) {
return fetch(process.env.larkTensorRobotUrl, {
method: "POST",
pomdtr avatar
brownBoar
@pomdtr
An interactive, runnable TypeScript val by pomdtr
Script
export default function(req) {
return new Response("Hello from sandbox");
namit avatar
myApi
@namit
An interactive, runnable TypeScript val by namit
Script
export function myApi(name) {
return "hi " + name;
nifei avatar
myApi
@nifei
An interactive, runnable TypeScript val by nifei
Script
export function myApi(name) {
return "hi " + name;