interstellar_ai/app/backend/ai_api.ts
2024-09-18 11:52:40 +02:00

12 lines
369 B
TypeScript

import ollama from 'ollama'
/**
 * Streams a chat completion from an Ollama model, writing each chunk of the
 * response to stdout as it arrives (no trailing newline is added).
 *
 * @param model  - Name of the Ollama model to query (e.g. "llama3").
 * @param prompt - The user's message content.
 * @param system - System prompt steering the model's behavior.
 * @returns Resolves once the full streamed response has been written.
 */
async function name(model: string, prompt: string, system: string): Promise<void> {
    // The system message must precede the user message so the model treats it
    // as steering context for the prompt; the original order appended it after
    // the user turn, where chat models may ignore it.
    const messages = [
        { role: 'system', content: system },
        { role: 'user', content: prompt },
    ]
    const response = await ollama.chat({ model, messages, stream: true })
    for await (const part of response) {
        process.stdout.write(part.message.content)
    }
}