CodeRunner
@jeffreyyoung
A simple poe bot
HTTP
const { loadPyodide } = pyodideModule;
const pyodide = await loadPyodide();
export default serve({
async *handleMessage(req) {
const lastMessage = req.query.at(-1)?.content;
const supportedCodeBlocks: {
type: "python";
content: string;
marked.use({
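The preview cuts off mid-definition. Below is a minimal sketch of just the Pyodide piece, assuming pyodideModule comes from the npm pyodide package (the Poe serve/handleMessage and marked wiring are omitted):

import pyodideModule from "npm:pyodide/pyodide.js";

const { loadPyodide } = pyodideModule;
const pyodide = await loadPyodide();
// Execute a Python snippet and read back the value of its last expression.
const result = await pyodide.runPythonAsync("sum(range(10))");
console.log(result); // 45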
aqi
@zbeastly1
An interactive, runnable TypeScript val by zbeastly1
Script
export let aqi = async () => {
return await fetchJSON(
"https://api.openaq.org/v2/latest?" +
new URLSearchParams({
limit: "10",
page: "1",
location_id: "61498",
offset: "0",
sort: "desc",
radius: "100000",
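A plain-fetch version of the same request, using the parameters shown above; this is a sketch of the pattern (the OpenAQ v2 endpoint may no longer be available today):

export const aqiSketch = async () => {
  const params = new URLSearchParams({
    limit: "10",
    page: "1",
    location_id: "61498",
    offset: "0",
    sort: "desc",
    radius: "100000",
  });
  const res = await fetch("https://api.openaq.org/v2/latest?" + params);
  if (!res.ok) throw new Error(`OpenAQ request failed: ${res.status}`);
  return await res.json();
};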
upscaleThisUrl
@andrew
// Forked from @liamdanielduffy.upscaleThisUrl
Express (deprecated)
export const upscaleThisUrl = async (req, res) => {
const authToken = process.env.REPLICATE_API_TOKEN;
const url = "https://api.replicate.com/v1/predictions";
const jobId = req.body?.jobId;
const imageUrl = req.body?.imageUrl;
if (!jobId && !imageUrl) {
res.status(400).json({
error:
"You must pass either a jobId or an imageUrl. You did not pass either.",
if (jobId && imageUrl) {
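A sketch of the two code paths the handler appears to support, written against Replicate's predictions API with plain fetch; MODEL_VERSION is a placeholder, not the version hash the original val used:

const REPLICATE_BASE = "https://api.replicate.com/v1/predictions";
const replicateHeaders = {
  "Authorization": `Token ${process.env.REPLICATE_API_TOKEN}`,
  "Content-Type": "application/json",
};

// imageUrl path: create a prediction; its id becomes the jobId to poll later.
export async function startUpscale(imageUrl: string) {
  const res = await fetch(REPLICATE_BASE, {
    method: "POST",
    headers: replicateHeaders,
    body: JSON.stringify({
      version: "MODEL_VERSION", // placeholder for an upscaling model's version hash
      input: { image: imageUrl },
    }),
  });
  return await res.json();
}

// jobId path: poll an existing prediction for its status and output.
export async function checkUpscale(jobId: string) {
  const res = await fetch(`${REPLICATE_BASE}/${jobId}`, { headers: replicateHeaders });
  return await res.json();
}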
modelSampleLLMCall
@bluemsn
An interactive, runnable TypeScript val by bluemsn
Script
import { getModelBuilder } from "https://esm.town/v/bluemsn/getModelBuilder";
export const modelSampleLLMCall = (async () => {
  const builder = await getModelBuilder();
  const model = await builder();
  return await model.call("说明一句名言"); // prompt: "Explain a famous quote"
})();
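Without the getModelBuilder helper, a rough equivalent is a direct Chat Completions request; this sketch assumes an OPENAI_API_KEY environment variable and a generic model name:

const res = await fetch("https://api.openai.com/v1/chat/completions", {
  method: "POST",
  headers: {
    "Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "说明一句名言" }], // "Explain a famous quote"
  }),
});
const data = await res.json();
console.log(data.choices?.[0]?.message?.content);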
deletebio
@yawnxyz
// at ms2, we're collecting mge vs. bacteria data over time, to build models that help us create, read, update, and delete bacteria from anywhere.
HTTP
const app = new Hono();
app.use('*', cors({
origin: '*',
allowMethods: ['GET', 'POST'],
allowHeaders: ['Content-Type'],
const markdown = marked(`
- [x] <s>read bacteria.</s>
- [x] <s>update bacteria.</s>
- [ ] delete bacteria.
- [ ] create bacteria.
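A minimal runnable sketch of the same setup, with the imports and route handler the preview omits; the import sources, versions, and route path are assumptions:

import { Hono } from "npm:hono";
import { cors } from "npm:hono/cors";
import { marked } from "npm:marked";

const app = new Hono();
app.use("*", cors({
  origin: "*",
  allowMethods: ["GET", "POST"],
  allowHeaders: ["Content-Type"],
}));

app.get("/", (c) => {
  // Render the checklist markdown to HTML and return it.
  const markdown = marked(`
- [x] <s>read bacteria.</s>
- [x] <s>update bacteria.</s>
- [ ] delete bacteria.
- [ ] create bacteria.
`);
  return c.html(markdown as string);
});

export default app.fetch;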
blackRodent
@stevekrouse
@jsxImportSource https://esm.sh/hono@latest/jsx
HTTP
/** @jsxImportSource https://esm.sh/hono@latest/jsx **/
export const projects = (c: Context) => {
return c.html(
<html>
<head>
<title>Projects</title>
<style
dangerouslySetInnerHTML={{
__html:
`:root{--slate1: hsl(200, 7%, 8.8%);--slate2: hsl(195, 7.1%, 11%);--slate3: hsl(197, 6.8%, 13.6%);--slate4: hsl(198, 6.6%, 15.8%);--slate5: hsl(199, 6.4%, 17.9%);--slate6: hsl(201, 6.2%, 20.5%);--slate7: hsl(203, 6%, 24.3%);--slate8: hsl(207, 5.6%, 31.6%);--slate9: hsl(206, 6%, 43.9%);--slate10: hsl(206, 5.2%, 49.5%);--slate11: hsl(206, 6%, 63%);--slate12: hsl(210, 6%, 93%);--blue1: hsl(212, 35%, 9.2%);--blue2: hsl(216, 50%, 11.8%);--blue3: hsl(214, 59.4%, 15.3%);--blue4: hsl(214, 65.8%, 17.9%);--blue5: hsl(213, 71.2%, 20.2%);--blue6: hsl(212, 77.4%, 23.1%);--blue7: hsl(211, 85.1%, 27.4%);--blue8: hsl(211, 89.7%, 34.1%);--blue9: hsl(206, 100%, 50%);--blue10: hsl(209, 100%, 60.6%);--blue11: hsl(210, 100%, 66.1%);--blue12: hsl(206, 98%, 95.8%)}body{font-family:system-ui,sans-serif;margin:auto;padding:20px;max-width:65ch;text-align:left;word-wrap:break-word;overflow-wrap:break-word;line-height:1.5}h1,h2,h3,h4,h5,h6,strong,b{font-weight:500}a{color:var(--blue10)}nav a{margin-right:10px}textarea{width:100%;font-size:16px}input{font-size:16px}content{line-height:1.6}table{width:100%}img{max-width:100%;height:auto}code{padding:2px 5px;background-color:var(--slate4);font-family:menlo,monospace}pre{padding:1rem}pre>code{all:unset}blockquote{border:1px solid var(--slate10);color:var(--slate11);padding:2px 0 2px 20px;margin:0;font-style:italic}a[data-astro-cid-eimmu3lg]{display:inline-block;text-decoration:none}a[data-astro-cid-eimmu3lg].active{font-weight:600;text-decoration:underline}header[data-astro-cid-3ef6ksr2]{margin:0 0 2em}h2[data-astro-cid-3ef6ksr2]{margin:.5em 0}`,
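Stripped of the long inline stylesheet, the pattern is a Hono handler that returns server-rendered JSX. A sketch wired into a Hono app so it can run as an HTTP val; the route name and markup here are illustrative, not the original's:

/** @jsxImportSource https://esm.sh/hono@latest/jsx */
import { Hono } from "https://esm.sh/hono@latest";
import type { Context } from "https://esm.sh/hono@latest";

const app = new Hono();

// Same shape as the original handler: build a full HTML document from JSX.
app.get("/projects", (c: Context) =>
  c.html(
    <html>
      <head>
        <title>Projects</title>
      </head>
      <body>
        <h1>Projects</h1>
      </body>
    </html>,
  )
);

export default app.fetch;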
examplebot_commands
@mattx
An interactive, runnable TypeScript val by mattx
Script
export const examplebot_commands = register_discord_commands(
process.env.discord_appid,
process.env.discord_guildid,
process.env.discord_token,
name: "ping",
description: "Responds with something from val.town",
name: "eval",
description: "Evaluates something on val.town",
options: [
type: 3,
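The register_discord_commands helper isn't shown; the same registration can be done directly against Discord's bulk-overwrite endpoint. In this sketch the /eval option's name and description are placeholders, since the preview only shows type: 3 (a STRING option):

const appId = process.env.discord_appid;
const guildId = process.env.discord_guildid;
const token = process.env.discord_token;

const commands = [
  { name: "ping", description: "Responds with something from val.town" },
  {
    name: "eval",
    description: "Evaluates something on val.town",
    options: [
      { type: 3, name: "code", description: "Code to evaluate", required: true }, // type 3 = STRING
    ],
  },
];

// Bulk-overwrite the guild's application commands.
const res = await fetch(
  `https://discord.com/api/v10/applications/${appId}/guilds/${guildId}/commands`,
  {
    method: "PUT",
    headers: {
      "Authorization": `Bot ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(commands),
  },
);
console.log(res.status, await res.json());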
hiapi
@dnomadb
An interactive, runnable TypeScript val by dnomadb
Script
export let hiapi = (a) => {
  console.log(a);
  // Return 100 [input, index] pairs.
  return [...new Array(100)].map((_, i) => {
    return [a, i];
  });
};
pythonValExample
@iamseeley
An interactive, runnable TypeScript val by iamseeley
Script
import { runPythonCode } from "https://deno.land/x/py_town@v0.0.2/mod.ts";
const pythonCode = `
def greet(name):
    return f"Hello, {name}!"
greet("Val Town")
`;
runPythonCode(pythonCode).then(result => {
  console.log("Python Code Result:", result); // Should log: Hello, Val Town!
}).catch(error => {
  console.error("Error running Python code:", error);
});
untitled4965473
@snm
An interactive, runnable TypeScript val by snm
Script
import { trackOpenAiFreeUsage } from "https://esm.town/v/snm/trackOpenAiFreeUsage";
import { openAiTextCompletion } from "https://esm.town/v/patrickjm/openAiTextCompletion?v=8";
import { openAiModeration } from "https://esm.town/v/snm/openAiModeration";
import { openAiFreeQuotaExceeded } from "https://esm.town/v/patrickjm/openAiFreeQuotaExceeded?v=2";
import { openAiFreeUsageConfig } from "https://esm.town/v/snm/openAiFreeUsageConfig";
/**
 * OpenAI text completion. https://platform.openai.com/docs/api-reference/completions
 * To see if the quota has been met, you can run @patrickjm.openAiFreeQuotaExceeded()
 * For full REST API access, see @patrickjm.openAiTextCompletion
 */
openAiKey?: string,
const apiKey = params.openAiKey ?? openAiFreeUsageConfig.key;
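The helpers above wrap OpenAI's text-completion endpoint, falling back to a shared free-usage key when no key is passed. A sketch of the underlying request, assuming your own OPENAI_API_KEY instead of the shared quota:

const res = await fetch("https://api.openai.com/v1/completions", {
  method: "POST",
  headers: {
    "Authorization": `Bearer ${process.env.OPENAI_API_KEY}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    model: "gpt-3.5-turbo-instruct",
    prompt: "Write a one-line greeting.",
    max_tokens: 32,
  }),
});
const data = await res.json();
console.log(data.choices?.[0]?.text);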
untitled_silverPinniped
@jacoblee93
An interactive, runnable TypeScript val by jacoblee93
Script
const { ChatPromptTemplate } = await import("npm:langchain/prompts");
const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
const { StringOutputParser } = await import(
["human", "{input}"],
const model = new ChatOpenAI({
modelName: "gpt-4",
temperature: 0.2,
openAIApiKey: process.env.OPENAI_API_KEY,
// Output parser converts the chat message into a raw string. Also works with streaming.
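A fuller sketch of the same pipeline (prompt, then ChatOpenAI, then a string output parser). The system message, the StringOutputParser import path, and the version pin are assumptions; the original imports unpinned npm:langchain, whose 0.1+ releases relocated these entrypoints:

const { ChatPromptTemplate } = await import("npm:langchain@0.0.177/prompts");
const { ChatOpenAI } = await import("npm:langchain@0.0.177/chat_models/openai");
const { StringOutputParser } = await import("npm:langchain@0.0.177/schema/output_parser");

const prompt = ChatPromptTemplate.fromMessages([
  ["system", "You are a helpful assistant."],
  ["human", "{input}"],
]);

const model = new ChatOpenAI({
  modelName: "gpt-4",
  temperature: 0.2,
  openAIApiKey: process.env.OPENAI_API_KEY,
});

// Output parser converts the chat message into a raw string. Also works with streaming.
const chain = prompt.pipe(model).pipe(new StringOutputParser());
console.log(await chain.invoke({ input: "Say hello in one sentence." }));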
untitled_chocolateSquid
@jacoblee93
An interactive, runnable TypeScript val by jacoblee93
Script
export const untitled_chocolateSquid = (async () => {
const { ChatOpenAI } = await import(
"https://esm.sh/langchain@0.0.146/chat_models/openai"
const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");
["human", humanTemplate],
const chat = new ChatOpenAI({
temperature: 0,
openAIApiKey: process.env.OPENAI_API_KEY,
const chain = new LLMChain({
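A sketch of a complete LLMChain call against the same pinned esm.sh build, assuming it exposes ChatPromptTemplate.fromMessages with message tuples (as the ["human", humanTemplate] line in the preview suggests); the templates and inputs here are illustrative, not the original val's:

export const chocolateSquidSketch = (async () => {
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain@0.0.146/chat_models/openai"
  );
  const { ChatPromptTemplate } = await import(
    "https://esm.sh/langchain@0.0.146/prompts"
  );
  const { LLMChain } = await import("https://esm.sh/langchain@0.0.146/chains");

  const systemTemplate = "You translate {input_language} to {output_language}.";
  const humanTemplate = "{text}";
  const prompt = ChatPromptTemplate.fromMessages([
    ["system", systemTemplate],
    ["human", humanTemplate],
  ]);

  const chat = new ChatOpenAI({
    temperature: 0,
    openAIApiKey: process.env.OPENAI_API_KEY,
  });

  const chain = new LLMChain({ llm: chat, prompt });
  const result = await chain.call({
    input_language: "English",
    output_language: "French",
    text: "I love programming.",
  });
  console.log(result.text);
})();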
untitled_orangeImpala
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON";
export const untitled_orangeImpala = fetchJSON(
  "https://api.openai.com/v1/usage?date=2023-11-01",
  {
    headers: {
      authorization: "Bearer " + Deno.env.get("openai"),
    },
  },
);