// main.ts
  1. import { Ollama } from 'langchain/llms/ollama';
  2. import * as readline from "readline";
  3. async function main() {
  4. const ollama = new Ollama({
  5. model: 'mistral'
  6. // other parameters can be found at https://js.langchain.com/docs/api/llms_ollama/classes/Ollama
  7. });
  8. const rl = readline.createInterface({
  9. input: process.stdin,
  10. output: process.stdout,
  11. });
  12. rl.question("What is your question: \n", async (user_input) => {
  13. const stream = await ollama.stream(user_input);
  14. for await (const chunk of stream) {
  15. process.stdout.write(chunk);
  16. }
  17. rl.close();
  18. })
  19. }
  20. main();