import { Ollama } from 'langchain/llms/ollama';
import * as readline from "readline";
/**
 * Interactive CLI: asks the user one question on stdin, streams the
 * model's answer to stdout chunk-by-chunk, then releases stdin and exits.
 *
 * @returns {Promise<void>} resolves once the question prompt has been issued;
 *   the streaming itself completes inside the readline callback.
 */
async function main() {
  const ollama = new Ollama({
    model: 'mistral',
    // other parameters can be found at https://js.langchain.com/docs/api/llms_ollama/classes/Ollama
  });

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
  });

  rl.question("What is your question: \n", async (user_input) => {
    try {
      // stream() resolves to an async iterable that yields response
      // chunks as they arrive from the model.
      const stream = await ollama.stream(user_input);
      for await (const chunk of stream) {
        process.stdout.write(chunk);
      }
      // Streamed chunks carry no final newline; add one so the shell
      // prompt does not end up glued to the answer.
      process.stdout.write('\n');
    } catch (err) {
      // Without this catch, a failed stream() (e.g. Ollama server not
      // running) would be an unhandled rejection inside the callback.
      console.error('Failed to stream response:', err);
      process.exitCode = 1;
    } finally {
      rl.close(); // always release stdin, even when streaming throws
    }
  });
}
|
|
|
|
|
|
|
|
main();
|