import { getVectorStoreBuilder } from "https://esm.town/v/webup/getVectorStoreBuilder";
import { getSampleDocuments } from "https://esm.town/v/webup/getSampleDocuments";
import { getModelBuilder } from "https://esm.town/v/webup/getModelBuilder";

export const pipeSampleLLMRetrieverConversation = (async () => {
  const { PromptTemplate } = await import("npm:langchain/prompts");
  const { RunnableSequence, RunnablePassthrough } = await import(
    "npm:langchain/schema/runnable"
  );
  const { StringOutputParser } = await import(
    "npm:langchain/schema/output_parser"
  );
  const { Document } = await import("npm:langchain/document");

  // Build the chat model, load the sample documents, and expose the vector
  // store as a retriever.
  const modelBuilder = await getModelBuilder();
  const model = await modelBuilder();
  const docs = await getSampleDocuments();
  const vectorBuilder = await getVectorStoreBuilder(docs);
  const vector = await vectorBuilder();
  const retriever = vector.asRetriever();

  // Prompt that condenses the chat history plus the follow-up question into a
  // standalone question.
  const condenseQuestionTemplate =
    `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
Chat History:
{chat_history}
Follow Up Input: {question}
Standalone question:`;
  const CONDENSE_QUESTION_PROMPT = PromptTemplate.fromTemplate(
    condenseQuestionTemplate,
  );

  // Prompt that answers the standalone question using only the retrieved context.
  const answerTemplate =
    `Answer the question based only on the following context:
{context}
Question: {question}
`;
  const ANSWER_PROMPT = PromptTemplate.fromTemplate(answerTemplate);

  // Join the retrieved documents into a single context string.
  const combineDocumentsFn = (docs, separator = "\n\n") => {
    const serializedDocs = docs.map((doc) => doc.pageContent);
    return serializedDocs.join(separator);
  };

  // Render [human, assistant] turns as plain text for the condense prompt.
  const formatChatHistory = (chatHistory: [string, string][]) => {
    const formattedDialogueTurns = chatHistory.map((dialogueTurn) =>
      `Human: ${dialogueTurn[0]}\nAssistant: ${dialogueTurn[1]}`
    );
    return formattedDialogueTurns.join("\n");
  };

  type ConversationalRetrievalQAChainInput = {
    question: string;
    chat_history: [string, string][];
  };

  // Step 1: rewrite the follow-up question into a standalone question.
  const standaloneQuestionChain = RunnableSequence.from([
    {
      question: (input: ConversationalRetrievalQAChainInput) => input.question,
      chat_history: (input: ConversationalRetrievalQAChainInput) =>
        formatChatHistory(input.chat_history),
    },
    CONDENSE_QUESTION_PROMPT,
    model,
    new StringOutputParser(),
  ]);

  // Step 2: retrieve context for the standalone question and answer from it.
  const answerChain = RunnableSequence.from([
    {
      context: retriever.pipe(combineDocumentsFn),
      question: new RunnablePassthrough(),
    },
    ANSWER_PROMPT,
    model,
  ]);

  // Full conversational retrieval chain: condense, then retrieve and answer.
  const conversationalRetrievalQAChain = standaloneQuestionChain.pipe(
    answerChain,
  );

  // First turn: no history, so the question passes through essentially unchanged.
  const result1 = await conversationalRetrievalQAChain.invoke({
    question: "What is pinecone?",
    chat_history: [],
  });
  console.log(result1);

  // Second turn: the follow-up is condensed against the seeded chat history
  // before retrieval.
  return await conversationalRetrievalQAChain.invoke({
    question: "Is pinecone a db?",
    chat_history: [
      [
        "What is pinecone?",
        "Pinecone is the woody fruiting body of a pine tree.",
      ],
    ],
  });
})();
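
Because the val exports an already-invoked async function, a consumer only needs to await the exported promise to get the final chain output. Below is a minimal usage sketch; the esm.town import URL assumes this val is published under the same webup handle as the helpers it imports, which is an assumption rather than something stated above.

// Minimal usage sketch. Assumption: this val is published at
// webup/pipeSampleLLMRetrieverConversation on esm.town.
import { pipeSampleLLMRetrieverConversation } from "https://esm.town/v/webup/pipeSampleLLMRetrieverConversation";

// The export is a promise of the second invoke's result: the model's answer to
// "Is pinecone a db?" given the seeded chat history.
const answer = await pipeSampleLLMRetrieverConversation;
console.log(answer);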
October 23, 2023