// ollama — simple terminal chat client (original listing: 77 lines, 1.8 KB)
1import * as readline from "readline";2
3const model = "llama2";4type Message = {5role: "assistant" | "user" | "system";6content: string;7}
8const messages: Message[] = [{9role: "system",10content: "You are a helpful AI agent."11}]12
13const rl = readline.createInterface({14input: process.stdin,15output: process.stdout16})17
18async function chat(messages: Message[]): Promise<Message> {19const body = {20model: model,21messages: messages22}23
24const response = await fetch("http://localhost:11434/api/chat", {25method: "POST",26body: JSON.stringify(body)27})28
29const reader = response.body?.getReader()30if (!reader) {31throw new Error("Failed to read response body")32}33let content = ""34while (true) {35const { done, value } = await reader.read()36if (done) {37break;38}39const rawjson = new TextDecoder().decode(value);40const json = JSON.parse(rawjson)41
42if (json.done === false) {43process.stdout.write(json.message.content);44content += json.message.content45}46
47}48return { role: "assistant", content: content };49}
50
51async function askQuestion(): Promise<void> {52return new Promise<void>((resolve) => {53rl.question("\n\nAsk a question: (press enter alone to quit)\n\n", async (user_input) => {54if (user_input.trim() === "") {55rl.close();56console.log("Thankyou. Goodbye.\n")57console.log("=======\nHere is the message history that was used in this conversation.\n=======\n")58messages.forEach(message => {59console.log(message)60})61resolve();62} else {63console.log();64messages.push({ role: "user", content: user_input });65messages.push(await chat(messages));66await askQuestion(); // Ask the next question67}68});69});70}
71
72async function main() {73await askQuestion();74
75}
76
77main();