Search

Results include substring matches and semantically similar vals.
poe_bot
@jeffreyyoung
// copied from https://github.com/poe-platform/fastapi_poe/blob/72e0ffdd00553d24ef23755138ca9b8d5f08b201/src/fastapi_poe/types.py
Script
function: {
export function createBot(bot: BotBase<ValTownCtx>): { handleRequest: Handler } {
async function handleRequest(req: Request): Promise<Response> {
if (typeof bot.settings === "function") {
(async function() {
export function sleep(timeInMs: number) {
function encodeEvent(event: string, data: any = {}) {
export default function serve(bot: BotBase<ValTownCtx>): Handler {
summary
@kj9
// await sqlite.execute("CREATE TABLE IF NOT EXISTS tweets (published TEXT PRIMARY KEY, entry TEXT NOT NULL)");
HTTP
maxOutputTokens: 8192,
responseMimeType: "text/plain",
async function run() {
const chatSession = model.startChat({
generationConfig,
only return html, with no code enclosure \`\`\`,no yapping.`);
return result.response.text();
export default async function(req: Request): Promise<Response> {
const res = await run();
// return res as html Response
valle_tmp_50677281064121176482256801591227
@janpaul123
// Improvements Made:
HTTP
// - Updated form elements to handle common form submission patterns more gracefully.
// - Improved existing functions and refactored to maintain a clean structure while keeping it consistent with the existing code.
import valleGetValsContextWindow from "https://esm.town/v/janpaul123/valleGetValsContextWindow";
import _ from "npm:lodash";
import OpenAI from "npm:openai";
import { renderToString } from "npm:react-dom/server";
const app = new Hono();
// Helper Function: Render JSX to HTML Response
const jsxResponse = (jsx) => {
<script>
function openTab(tab) {
const tabButtonCode = document.getElementById("tab-button-code");
const scrollingElement = document.getElementById("conversation-container");
const callback = function (mutationsList, observer) {
scrollingElement.scrollTo({ left: 0, top: scrollingElement.scrollHeight, behavior: "instant" });
const contextWindow = await valleGetValsContextWindow(model);
const openai = new OpenAI();
const stream = await openai.chat.completions.create({
model,
let fullStr = "";
window.addToken = function(str) {
fullStr += str;
aleister_chatley
@scio
An interactive, runnable TypeScript val by scio
Script
aleister_chatley_countdown = 8 + Math.floor(Math.random() * 6);
const { OpenAI } = await import("https://deno.land/x/openai/mod.ts");
const openAI = new OpenAI(process.env.OPENAI_KEY);
const chatCompletion = await openAI.createChatCompletion(
prompts.philip_k_dick,
blob_admin
@stevekrouse
Blob Admin This is a lightweight Blob Admin interface to view and debug your Blob data. Versions 0-17 of this val were done with Hono and server-rendering. Versions 18+ were generated with Townie and use client-side React. To use this val, fork it to your account. Authentication is handled by @stevekrouse/lastlogin, so only the owner of the val will be able to see and edit their own blobs.
HTTP
content: string;
function Tooltip({ children, content }: TooltipProps) {
const [isVisible, setIsVisible] = useState(false);
</div>
function formatBytes(bytes: number, decimals = 2) {
if (bytes === 0) return "0 Bytes";
return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + " " + sizes[i];
function copyToClipboard(text: string) {
navigator.clipboard.writeText(text).then(() => {
console.error("Failed to copy text: ", err);
function ActionMenu({ blob, onDownload, onRename, onDelete, onMoveToPublic, onMoveOutOfPublic }) {
const [isOpen, setIsOpen] = useState(false);
useEffect(() => {
function handleClickOutside(event) {
if (menuRef.current && !menuRef.current.contains(event.target)) {
</div>
function BlobItem({ blob, onSelect, isSelected, onDownload, onRename, onDelete, onMoveToPublic, onMoveOutOfPublic }) {
const [isLoading, setIsLoading] = useState(false);
lastModified: string;
function App({ initialEmail, initialProfile }) {
const encodeKey = (key: string) => encodeURIComponent(key);
</div>
function client() {
const initialEmail = document.getElementById("root").getAttribute("data-email");
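The blob_admin result above only shows fragments of its formatBytes helper. A minimal self-contained version of that common pattern, consistent with the fragments shown (a sketch, not necessarily the val's exact code):

```ts
// Sketch: human-readable byte formatting, completing the fragments shown above.
function formatBytes(bytes: number, decimals = 2): string {
  if (bytes === 0) return "0 Bytes";
  const k = 1024;
  const dm = decimals < 0 ? 0 : decimals;
  const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + " " + sizes[i];
}
```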
textToSpeechApp
@g
Client-side Text-to-Speech Application. Uses Microsoft Edge TTS service to generate speech. Supports English (Connor Neural) voice for now.
HTTP
import escapeXML from 'npm:xml-escape';
function html() {
<!DOCTYPE html>
</html>
function css() {
body {
flex-direction: column;
function js() {
const ttsForm = document.getElementById('ttsForm');
languageSelect.dispatchEvent(new Event('change'));
async function onPaste() {
const text = textInput.value.trim();
submitBtn.classList.remove('generating');
function selectNodes(data) {
if (Array.isArray(data)) {
memorySampleSummary
@toowired
// Initialize the database
Script
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
import { OpenAI } from "https://esm.town/v/std/openai";
const KEY = new URL(import.meta.url).pathname.split("/").at(-1);
const SCHEMA_VERSION = 1;
const openai = new OpenAI();
// Initialize the database
async function initDB() {
await sqlite.execute(`
// Generate embedding for a given text
async function generateEmbedding(text: string): Promise<number[]> {
const response = await openai.embeddings.create({
model: "text-embedding-ada-002",
// Add a new memory to the bank
export async function addMemory(content: string): Promise<void> {
await initDB();
// Retrieve similar memories
export async function getSimilarMemories(query: string, limit: number = 5): Promise<string[]> {
await initDB();
// Cosine similarity calculation
function cosineSimilarity(vecA: number[], vecB: number[]): number {
const dotProduct = vecA.reduce((sum, a, i) => sum + a * vecB[i], 0);
// Example usage
export async function memoryBankDemo() {
// Add some memories
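The memorySampleSummary snippet embeds text with text-embedding-ada-002 and ranks memories by cosine similarity, but only the dot-product line is visible. A minimal sketch of the full similarity calculation, assuming equal-length vectors:

```ts
// Sketch: cosine similarity between two embedding vectors,
// completing the dot-product fragment shown above.
function cosineSimilarity(vecA: number[], vecB: number[]): number {
  const dotProduct = vecA.reduce((sum, a, i) => sum + a * vecB[i], 0);
  const magnitudeA = Math.sqrt(vecA.reduce((sum, a) => sum + a * a, 0));
  const magnitudeB = Math.sqrt(vecB.reduce((sum, b) => sum + b * b, 0));
  return dotProduct / (magnitudeA * magnitudeB);
}
```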
exampleTopHackerNewsDailyEmail
@charlypoly
An interactive, runnable TypeScript val by charlypoly
Cron
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
export default async function(interval: Interval) {
// we create an OpenAI Tool that takes our schema as an argument
type: "function",
function: {
const openai = new OpenAI();
// We ask OpenAI to extract the content from the given web page.
const completion = await openai.chat.completions.create({
// we retrieve the serialized arguments generated by OpenAI
const result = completion.choices[0].message.tool_calls![0].function.arguments;
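The snippet above follows the standard OpenAI tool-calling pattern: declare a function tool built from a schema, then read the serialized arguments off the first tool call. A minimal sketch of that pattern; the model name, tool name, and schema below are illustrative assumptions, not the val's actual values:

```ts
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

const openai = new OpenAI();

const completion = await openai.chat.completions.create({
  model: "gpt-4-turbo", // assumption: the snippet does not show which model the val uses
  messages: [{
    role: "user",
    content: "Extract the titles of the top stories from the following page: ...",
  }],
  tools: [{
    type: "function",
    function: {
      // hypothetical tool: the real val derives `parameters` from its own schema
      name: "extract_stories",
      description: "Extract structured story data from a web page",
      parameters: {
        type: "object",
        properties: {
          stories: { type: "array", items: { type: "string" } },
        },
        required: ["stories"],
      },
    },
  }],
});

// As in the snippet above, the model returns the arguments as a JSON string.
const args = completion.choices[0].message.tool_calls![0].function.arguments;
const stories = JSON.parse(args);
```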
pubmed
@yawnxyz
Pubmed Search. Search PubMed using a public pubmedisearch endpoint: https://yawnxyz-pubmed.web.val.run?query=phage therapy
HTTP
import { fetch } from "https://esm.town/v/std/fetch";
const app = new Hono();
// PubMed search function (modified to accept more parameters)
async function pubmedSearch(query, userId, additionalParams = {}) {
const url = 'https://www.pubmedisearch.com/api/fetch_articles';
const options = {
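Per the description, the pubmed val is a public HTTP endpoint that takes a query parameter. A minimal sketch of calling it, assuming the endpoint returns JSON:

```ts
// Sketch: calling the public pubmed search val with a query string.
const url = new URL("https://yawnxyz-pubmed.web.val.run");
url.searchParams.set("query", "phage therapy");

const res = await fetch(url);
const results = await res.json(); // assumption: the endpoint responds with JSON
console.log(results);
```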
claude_96d58357_f689_4969_90e1_10f04f012b9d
@adagradschool
An interactive, runnable TypeScript val by adagradschool
HTTP
export default function handler(req) {
<div class=\"role\">Human</div>\n <div class=\"content\">export default function handler(req) {\n return new
headers: {
"Content-Type": "text/html",
gpt4Example
@kyutarou
GPT4 Example. This uses the brand new gpt-4-1106-preview. To use this, set OPENAI_API_KEY in your Val Town Secrets (https://www.val.town/settings/secrets).
Script
import { OpenAI } from "npm:openai";
Deno.env.get("OPENAI_API_KEY");
const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
messages: [{
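The gpt4Example snippet boils down to the standard npm:openai chat-completion call with OPENAI_API_KEY read from the environment. A runnable sketch of that pattern; the prompt is illustrative, not the val's own:

```ts
import { OpenAI } from "npm:openai";

// Read the API key from the environment, as the description instructs.
const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });

const chatCompletion = await openai.chat.completions.create({
  model: "gpt-4-1106-preview",
  messages: [{
    role: "user",
    content: "Say hello from Val Town", // illustrative prompt
  }],
});

console.log(chatCompletion.choices[0].message.content);
```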
valleGetValsContextWindow
@janpaul123
An interactive, runnable TypeScript val by janpaul123
Script
export default async function getValsContextWindow(model: any) {
prompt: "Write a val that uses OpenAI",
code: `import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
export default async function(req: Request) {
function tsResponse(code) {
Our Blob SDK also includes some utility functions to make working with blobs easier.
"\n```ts\nexport default async function (req: Request): Promise<Response> {\n return Response.json(\"Hello world\")\
"\n```ts\nexport default async function(req: Request): Promise<Response> {\n const query = new URL(req.url).searchPa
slackScout
@sarahxc
Slack Scout sends a Slack notification every time your keywords are mentioned on Twitter, Hacker News, or Reddit. Get notified whenever you, your company, or topics of interest are mentioned online. Built with Browserbase. Inspired by f5bot.com. Full code tutorial. Getting Started: to run Slack Scout, you'll need a Browserbase API key, a Slack Webhook URL (setup here), and a Twitter Developer API key. Browserbase: Browserbase is a developer platform to run, manage, and monitor headless browsers at scale. We'll use Browserbase to navigate to, and scrape, our different news sources. We'll also use Browserbase's Proxies to ensure we simulate authentic user interactions across multiple browser sessions. Get started with Browserbase for free here. Twitter: we've decided to use the Twitter API to include Twitter post results. It costs $100/month to have a Basic Twitter Developer account. If you decide to use Browserbase, we can lend our token; comment below for access. Once you have the SLACK_WEBHOOK_URL, BROWSERBASE_API_KEY, and TWITTER_BEARER_TOKEN, input all of these as Val Town Environment Variables. Project created by Sarah Chieng and Alex Phan 💌
Cron
export default async function(interval: Interval): Promise<void> {
async function createTable(): Promise<void> {
async function fetchHackerNewsResults(topic: string): Promise<Website[]> {
async function fetchTwitterResults(topic: string): Promise<Website[]> {
async function fetchRedditResults(topic: string): Promise<Website[]> {
function formatSlackMessage(website: Website): string {
async function sendSlackMessage(message: string): Promise<Response> {
async function isURLInTable(url: string): Promise<boolean> {
async function addWebsiteToTable(website: Website): Promise<void> {
async function processResults(results: Website[]): Promise<void> {
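The slackScout setup steps come down to storing the three environment variables and posting matches to a Slack incoming webhook. A minimal sketch of the webhook step, matching the sendSlackMessage signature shown above (Slack incoming webhooks accept a JSON body with a text field):

```ts
// Sketch: post a formatted message to the Slack incoming webhook
// configured via the SLACK_WEBHOOK_URL environment variable.
async function sendSlackMessage(message: string): Promise<Response> {
  const webhookUrl = Deno.env.get("SLACK_WEBHOOK_URL");
  if (!webhookUrl) throw new Error("SLACK_WEBHOOK_URL is not set");
  return fetch(webhookUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ text: message }),
  });
}
```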
textToGenerate
@KALPESHPATEL
@jsxImportSource https://esm.sh/react@18.2.0
HTTP
import React, { useState } from "https://esm.sh/react@18.2.0";
function App() {
const [prompt, setPrompt] = useState("");
textDecoration: "none",
function client() {
createRoot(document.getElementById("root")).render(<App />);
if (typeof document !== "undefined") { client(); }
export default function server(request: Request) {
const app = new Hono();
const { prompt, mode } = await c.req.json();
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();
if (mode === "image") {
const response = await openai.images.generate({
model: "dall-e-3",
} else {
const response = await openai.chat.completions.create({
messages: [{ role: "user", content: prompt }],
valwriter
@stevekrouse
[ ] streaming
[ ] send the code of the valwriter back to gpt (only if it's related, might need some threads, maybe a custom gpt would be a better fix, of course, could do it as a proxy...)
[ ] make it easy to send errors back to gpt
[ ] make it easy to get screenshots of the output back to gpt
HTTP
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText";
import { chat } from "https://esm.town/v/stevekrouse/openai";
import cronstrue from "npm:cronstrue";
content: `/** @jsxImportSource npm:react */
export default function() {
return <h1>{new Date().toLocaleTimeString()}</h1>;
</html>,
export async function compile(description: string) {
const messages = [
await email({ subject: "Subject line", text: "Body of message" });
// OpenAI
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
messages: [