from mistralai import Mistral
from openai import OpenAI
import anthropic
import ollama


class AI:
    """Streaming helpers for several chat-completion backends.

    Each method streams a chat completion and incrementally appends the
    generated text to ``return_class.ai_response[access_token]``, guarded by
    ``return_class.ai_response_lock`` so concurrent readers see a consistent
    partial response.
    """

    @staticmethod
    def process_local(model, messages, return_class, access_token):
        """Stream a completion from a local Ollama model.

        Args:
            model: Ollama model name.
            messages: chat history in the provider's message-dict format.
            return_class: object exposing ``ai_response`` (dict) and
                ``ai_response_lock`` (threading lock) shared with readers.
            access_token: key under which the streamed text accumulates.
        """
        stream = ollama.chat(
            model=model,
            messages=messages,
            stream=True,
            options={"temperature": 0.5},
        )

        # Reset any previous response for this token before streaming.
        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream:
            with return_class.ai_response_lock:
                return_class.ai_response[access_token] += chunk['message']['content']

    @staticmethod
    def process_mistralai(model, messages, return_class, access_token, api_key):
        """Stream a completion from the Mistral API.

        Args:
            model: Mistral model name.
            messages: chat history in the provider's message format.
            return_class: shared-state holder (see :meth:`process_local`).
            access_token: key under which the streamed text accumulates.
            api_key: Mistral API key.
        """
        client = Mistral(api_key=api_key)

        stream_response = client.chat.stream(
            model=model,
            messages=messages
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            # Non-text stream events can carry a None/empty delta; appending
            # None to a str would raise TypeError, so skip those chunks.
            content = chunk.data.choices[0].delta.content
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_openai(model, messages, return_class, access_token, api_key):
        """Stream a completion from the OpenAI API.

        Args:
            model: OpenAI model name.
            messages: chat history in the provider's message format.
            return_class: shared-state holder (see :meth:`process_local`).
            access_token: key under which the streamed text accumulates.
            api_key: OpenAI API key.
        """
        client = OpenAI(api_key=api_key)

        stream_response = client.chat.completions.create(
            model=model,
            messages=messages,
            stream=True
        )

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        for chunk in stream_response:
            # The role-announcement chunk and the final chunk of an OpenAI
            # stream have delta.content == None; concatenating that would
            # raise TypeError, so guard before appending.
            content = chunk.choices[0].delta.content
            if content:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += content

    @staticmethod
    def process_anthropic(model, messages, return_class, access_token, api_key):
        """Stream a completion from the Anthropic API.

        Args:
            model: Anthropic model name.
            messages: chat history in the provider's message format.
            return_class: shared-state holder (see :meth:`process_local`).
            access_token: key under which the streamed text accumulates.
            api_key: Anthropic API key.
        """
        client = anthropic.Anthropic(api_key=api_key)

        with return_class.ai_response_lock:
            return_class.ai_response[access_token] = ""

        # text_stream already yields only text deltas, so no None guard needed.
        with client.messages.stream(
                max_tokens=1024,
                model=model,
                messages=messages,
        ) as stream:
            for text in stream.text_stream:
                with return_class.ai_response_lock:
                    return_class.ai_response[access_token] += text