# DAMHelper/repository/ondemand.py
import os
from pathlib import Path

import requests

from repository.repository_abc import Repository, Model, ModelRoles


class OndemandRepository(Repository):
    """Repository backed by the on-demand.io chat API."""

    session_url = "https://api.on-demand.io/chat/v1/sessions"

    def __init__(self, model_info: Model, system_message: str | None = None,
                 log_to_file: Path | None = None):
        self.model_info = model_info
        self.system_message = system_message
        self.log_to_file = log_to_file
        self.session_id = None

    def init(self):
        # Create a chat session once and cache its id for later queries.
        if not self.session_id:
            headers = {"apiKey": os.getenv("API_KEY")}
            session_body = {"pluginIds": [], "externalUserId": "virtualDAM"}
            response = requests.post(self.session_url, headers=headers, json=session_body)
            response.raise_for_status()
            self.session_id = response.json()["data"]["id"]

    def get_model_roles(self) -> ModelRoles:
        return self.model_info.roles

    def get_model_info(self) -> Model:
        return self.model_info

    def send_prompt(self, prompt: str, add_to_history: bool | None = None) -> dict[str, str]:
        # Run a synchronous query against the cached session.
        headers = {"apiKey": os.getenv("API_KEY")}
        body = {"endpointId": "predefined-openai-gpt3.5turbo", "query": prompt,
                "pluginIds": [], "responseMode": "sync"}
        url = f"{self.session_url}/{self.session_id}/query"
        response = requests.post(url, headers=headers, json=body)
        response.raise_for_status()
        return {"content": response.json()["data"]["answer"]}

    def get_message_history(self) -> list[dict[str, str]]:
        # History is kept server-side per session; nothing is tracked locally.
        return []
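

# Usage sketch (hypothetical): the Model constructor arguments below are
# assumptions -- the real signature lives in repository.repository_abc and
# may differ. An on-demand.io API key is expected in the API_KEY env var.
#
#   from repository.ondemand import OndemandRepository
#   from repository.repository_abc import Model
#
#   model = Model("predefined-openai-gpt3.5turbo")  # hypothetical args
#   repo = OndemandRepository(model)
#   repo.init()                                     # opens the chat session
#   reply = repo.send_prompt("Summarise the asset metadata.")
#   print(reply["content"])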