interstellar_ai/app/backend/ai_api.ts

13 lines
369 B
TypeScript
Raw Normal View History

2024-09-18 11:52:40 +02:00
import ollama from 'ollama'
/**
 * Streams a chat completion from a local Ollama model directly to stdout.
 *
 * @param model  Name of the Ollama model to query (e.g. "llama3").
 * @param prompt The user's message content.
 * @param system The system prompt that steers the model's behavior.
 * @returns Resolves once the full streamed response has been written.
 */
async function name(model: string, prompt: string, system: string): Promise<void> {
  // The system message must precede the user message: Ollama consumes the
  // array as ordered conversation history, and a system prompt placed after
  // the user turn is not applied as the steering instruction.
  const messages = [
    { role: 'system', content: system },
    { role: 'user', content: prompt },
  ]
  const response = await ollama.chat({ model, messages, stream: true })
  // Write each streamed chunk as it arrives instead of buffering the reply.
  for await (const part of response) {
    process.stdout.write(part.message.content)
  }
}