DAMHelper / repository /ollama.py
enricorampazzo's picture
first E2E working implementation
0fadcb9
raw
history blame
No virus
908 Bytes
import ollama
from ollama import Options
from llm.llm import ModelRoles, Model
class OllamaRepository:
    """Thin wrapper around the `ollama` chat API.

    Maintains a running conversation (``message_history``) that is always
    seeded with a system message, and sends prompts with temperature 0 so
    responses are as deterministic as the backend allows.
    """

    def __init__(self, model: Model, system_msg: str):
        """Create a repository bound to *model*, seeding history with *system_msg*.

        Args:
            model: project ``Model`` descriptor; provides the model ``name``
                and the role labels (``system_role``/``user_role``/``ai_role``)
                used in chat messages.
            system_msg: system instruction placed first in the history.
        """
        self.model: Model = model
        self.system_msg: str = system_msg
        # History always starts with the system message so every chat call
        # carries the same base instructions.
        self.message_history: list[dict[str, str]] = [
            {"role": self.model.roles.system_role, "content": system_msg}
        ]

    def send_prompt(self, prompt: str, add_to_history: bool = True) -> dict[str, str]:
        """Send *prompt* to the model and return the assistant's reply message.

        Args:
            prompt: user message to send.
            add_to_history: when True, both the prompt and the reply are kept
                in ``message_history``; when False, neither is kept (the
                history is restored to its pre-call state).

        Returns:
            A ``{"role": ..., "content": ...}`` dict holding the reply.

        Raises:
            Whatever ``ollama.chat`` raises; ``message_history`` is left
            unchanged in that case.
        """
        options: Options = Options(temperature=0)  # deterministic output
        self.message_history.append(
            {"role": self.model.roles.user_role, "content": prompt}
        )
        try:
            response = ollama.chat(self.model.name, self.message_history, options=options)
        except Exception:
            # Bug fix: don't leave the orphaned user prompt in the history
            # when the chat call fails — it would corrupt later conversations.
            self.message_history.pop()
            raise
        answer = {"role": self.model.roles.ai_role, "content": response["message"]["content"]}
        if add_to_history:
            self.message_history.append(answer)
        else:
            self.message_history.pop()
        return answer