modelSampleLLMCall
@webup
An interactive, runnable TypeScript val by webup
Script
import { getModelBuilder } from "https://esm.town/v/webup/getModelBuilder";
export const modelSampleLLMCall = (async () => {
const builder = await getModelBuilder();
const model = await builder();
return await model.call("Tell me a famous saying");
})();
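A minimal usage sketch, assuming the standard esm.town import convention used throughout this list; the export is the promise produced by the immediately-invoked function, so it can be awaited directly:
import { modelSampleLLMCall } from "https://esm.town/v/webup/modelSampleLLMCall";
// Logs the model's reply once the call resolves
console.log(await modelSampleLLMCall);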
testExaJs
@eugenechantk
Testing Exa's JS SDK
Script
import Exa from "npm:exa-js";
const exa = new Exa("f2e3bc4c-a68d-4dcb-abf6-a34ee090a576", "https://api-internal.exa.sh");
const result = await exa.getContents([
  "https://aisera.com/",
  "https://savavo.com/",
  "https://www.gridspace.com/",
  "https://www.cien.ai/",
  "https://www.cognitivescale.com/",
  "https://decagon.ai/",
  "https://www.talla.com/",
]);
aiSimpleGroq
@yawnxyz
// set Deno.env.get("GROQ_API_KEY")
Script
import { ai } from "https://esm.town/v/yawnxyz/ai";
// set Deno.env.get("GROQ_API_KEY")
// console.log(await ai("tell me a joke in Spanish"))
console.log(await ai("tell me a reddit joke", {
provider: "anthropic",
model: "claude-3-haiku-20240307",
specialBlueGopher
@jeffreyyoung
An interactive, runnable TypeScript val by jeffreyyoung
HTTP
import Replicate from "npm:replicate";
const replicate = new Replicate({
  auth: Deno.env.get("REPLICATE_API_KEY"),
});
export default serve({
async *handleMessage(req) {
const lastMsg = req.query.at(-1);
const imgUrl = lastMsg?.attachments?.at?.(0)?.url;
const maskUrl = lastMsg?.attachments?.at?.(1)?.url;
const prompt = lastMsg?.content?.trim();
if (!imgUrl || !maskUrl || !prompt) {
yield "Please include a prompt, an image and a mask";
gptApiFramework
@vlad
Allows for automatic generation of a Hono API compatible with GPTs. Endpoints' inputs and outputs are specified via types, from which the OpenAPI spec is generated automatically and made available via the /gpt/schema endpoint.
Usage example:
import { GptApi } from "https://esm.town/v/xkonti/gptApiFramework";
import { z } from "npm:zod";
/**
* COMMON TYPES
*/
const ResponseCommandSchema = z.object({
feedback: z.string().describe("Feedback regarding submitted action"),
command: z.string().describe("The command for the Mediator AI to follow strictly"),
data: z.string().optional().describe("Additional data related to the given command"),
}).describe("Contains feedback and further instructions to follow");
export type ResponseCommand = z.infer<typeof ResponseCommandSchema>;
/**
* INITIALIZE API
*/
const api = new GptApi({
url: "https://xkonti-planoverseerai.web.val.run",
title: "Overseer AI API",
description: "The API for interacting with the Overseer AI",
version: "1.0.0",
});
/**
* REQUIREMENTS GATHERING ENDPOINTS
*/
api.nothingToJson<ResponseCommand>({
verb: "POST",
path: "/newproblem",
operationId: "new-problem",
desc: "Endpoint for informing Overseer AI about a new problem presented by the User",
requestSchema: null,
requestDesc: null,
responseSchema: ResponseCommandSchema,
responseDesc: "Instruction on how to proceed with the new problem",
}, async (ctx) => {
return {
feedback: "User input downloaded. Problem analysis is required.",
command: await getPrompt("analyze-problem"),
data: "",
};
});
export default api.serve();
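The description says the generated OpenAPI spec is served from the /gpt/schema endpoint; a quick sketch of retrieving it from the URL configured above:
// Fetch the auto-generated OpenAPI spec for this API
const res = await fetch("https://xkonti-planoverseerai.web.val.run/gpt/schema");
console.log(await res.text());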
Script
export interface ApiInfo {
url: string;
title: string;
description: string;
assertiveBeigeCarp
@jeffreyyoung
An interactive, runnable TypeScript val by jeffreyyoung
HTTP
import Replicate from "npm:replicate";
const replicate = new Replicate({
  auth: Deno.env.get("REPLICATE_API_KEY"),
});
export default serve({
async *handleMessage(req) {
const lastMsg = req.query.at(-1);
const lastAttachment = lastMsg?.attachments?.at?.(0);
const url = lastAttachment?.url;
const content = lastMsg?.content?.trim();
if (!url) {
yield "Please send a image";
ask_ai_web
@pomdtr
An interactive, runnable TypeScript val by pomdtr
Script
import { Hono } from "npm:hono";
const app = new Hono();
aiStreamingExample
@yawnxyz
An interactive, runnable TypeScript val by yawnxyz
HTTP
export default async (req) => {
const { readable, writable } = new TransformStream();
const writer = writable.getWriter();
const textEncoder = new TextEncoder();
// Get the prompt from the URL query
const url = new URL(req.url);
const prompt = url.searchParams.get("prompt") || "tell me a joke";
if (!prompt || prompt.trim() === "") {
return new Response("Please provide a prompt in the URL query.", {
headers: { "Content-Type": "text/plain" },
});
}
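The preview stops before the streaming part; a hedged sketch of how the TransformStream set up above is typically returned as a streamed response (the chunks written here are illustrative):
// Write chunks in the background, then close the writer
(async () => {
  for (const chunk of ["streamed ", "response ", "for: ", prompt]) {
    await writer.write(textEncoder.encode(chunk));
  }
  await writer.close();
})();
// Return the readable side immediately so the client receives chunks as they arrive
return new Response(readable, {
  headers: { "Content-Type": "text/plain; charset=utf-8" },
});
};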
scribbleToDrawing
@jeffreyyoung
An interactive, runnable TypeScript val by jeffreyyoung
HTTP
import Replicate from "npm:replicate";
const replicate = new Replicate({
  auth: Deno.env.get("REPLICATE_API_KEY"),
});
export default serve({
async *handleMessage(req) {
const lastMsg = req.query.at(-1);
const lastAttachment = lastMsg?.attachments?.at?.(0);
const url = lastAttachment?.url;
const content = lastMsg?.content?.trim();
if (!url) {
yield "Please send a image";
falSDXLExample
@isidentical
An interactive, runnable TypeScript val by isidentical
Script
import * as fal from "npm:@fal-ai/serverless-client";
fal.config({
// Can also be auto-configured using environment variables:
credentials: Deno.env.get("FAL_KEY"),
});
const prompt = "a cute and happy dog";
const result: any = await fal.run("fal-ai/fast-lightning-sdxl", { input: { prompt } });
console.log(result.images[0].url);
oura
@just_be
An interactive, runnable TypeScript val by just_be
Script
import { Oura } from "https://deno.land/x/oura_api@0.3.1/mod.ts";
export const oura = (ouraPersonalAccessToken) => new Oura(ouraPersonalAccessToken);
proxyCall
@eric
An interactive, runnable TypeScript val by eric
Script
import { objectExperiment } from "https://esm.town/v/eric/objectExperiment";
export let proxyCall = objectExperiment().b
aiBasicExample
@yawnxyz
// basic text generation
Script
import { ModelProvider, modelProvider } from "https://esm.town/v/yawnxyz/ai";
// basic text generation
let response = await modelProvider.gen({
prompt: 'hello, who am I speaking to?',
provider: 'google',
});
console.log('res:', response);
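A hedged variant of the same call routed through a different provider, reusing the provider/model options shown in the aiSimpleGroq val above (whether gen accepts a model option here is an assumption):
let claudeResponse = await modelProvider.gen({
  prompt: 'hello, who am I speaking to?',
  provider: 'anthropic',
  model: 'claude-3-haiku-20240307',
});
console.log('res:', claudeResponse);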
gemini2FlashExample
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
import { GoogleGenerativeAI } from "npm:@google/generative-ai";
const prompt = "What is the meaning of life?";
const genAI = new GoogleGenerativeAI(Deno.env.get("GOOGLE_GENERATIVE_AI"));
const model = genAI.getGenerativeModel(
{ model: "gemini-2.0-flash-exp" },
const result = await model.generateContent(prompt);
console.log(result.response.text());
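For longer outputs, the same model object can stream text as it is generated; a short sketch using the SDK's streaming call:
// Stream the reply chunk by chunk instead of waiting for the full response
const streamed = await model.generateContentStream(prompt);
for await (const chunk of streamed.stream) {
  console.log(chunk.text());
}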
openAQLocation
@stevekrouse
An interactive, runnable TypeScript val by stevekrouse
Script
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON";
export let openAQLocation = async ({ lat, lon }: {
lat: number;
lon: number;
}) => {
const { results } = await fetchJSON(
"https://api.openaq.org/v2/locations?"
+ new URLSearchParams({
coordinates: lat.toPrecision(8) + "," + lon.toPrecision(8),
order_by: "distance",
sort: "asc",
radius: "25000",