diff --git a/py/api.py b/py/api.py
index 8ed3afb..413f951 100644
--- a/py/api.py
+++ b/py/api.py
@@ -2,6 +2,7 @@
 import requests
 import json
 from transformers import AutoTokenizer, LlamaForCausalLM
+
 class API:
     @staticmethod
     def process_text_transformers(prompt, model):
@@ -46,4 +47,4 @@ class API:
             answer = self.process_text_transformers(message, "meta-llama/Meta-Llama-3.1-8B")
         else:
             return "Invalid choice"
-        return answer
\ No newline at end of file
+        return answer
diff --git a/py/static/index.html b/py/static/index.html
index 087940a..d688702 100644
--- a/py/static/index.html
+++ b/py/static/index.html
@@ -4,7 +4,7 @@
-
+
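For context, a minimal sketch of what `API.process_text_transformers` might look like given the `transformers` imports touched in the first hunk; the method body is outside this diff, so the loading step, `max_new_tokens`, and the decode handling below are assumptions rather than the actual implementation.

```python
# Hypothetical sketch only -- the real body of process_text_transformers is not shown in this diff.
from transformers import AutoTokenizer, LlamaForCausalLM


def process_text_transformers(prompt, model):
    # Load the tokenizer and model weights for the requested checkpoint
    # (e.g. "meta-llama/Meta-Llama-3.1-8B", as routed in the second hunk).
    tokenizer = AutoTokenizer.from_pretrained(model)
    lm = LlamaForCausalLM.from_pretrained(model)

    # Tokenize the prompt and generate a continuation; max_new_tokens is an assumed default.
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = lm.generate(**inputs, max_new_tokens=256)

    # Return only the newly generated text, dropping the echoed prompt tokens.
    new_tokens = output_ids[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)
```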