# src/llm_interface.py
import logging
from typing import Optional

import llama_cpp

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')


class LLMInterface:
    """Thin wrapper around a local llama.cpp model for single-turn prompting."""

    def __init__(self, model_path: str, default_system_prompt: str = ""):
        # Load the GGUF model from disk via llama-cpp-python.
        self.model = llama_cpp.Llama(model_path=model_path)
        self.default_system_prompt = default_system_prompt

    def send_message(self, message: str, system_prompt: Optional[str] = None, max_tokens: int = 512, temperature: float = 0.7, top_p: float = 0.95) -> str:
        """Send a single user message and return the stripped completion text."""
        if system_prompt is None:
            system_prompt = self.default_system_prompt
        # Plain chat-style template; adjust to the prompt format your model expects.
        prompt = f"{system_prompt}\nUser: {message}\nAssistant: "
        response = self.model(prompt, max_tokens=max_tokens, temperature=temperature, top_p=top_p)
        return response['choices'][0]['text'].strip()
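

# Minimal usage sketch. The model path below is a hypothetical placeholder,
# not part of this repo; point it at any GGUF file you have locally.
if __name__ == "__main__":
    llm = LLMInterface(
        model_path="models/model.gguf",  # hypothetical path, adjust to your setup
        default_system_prompt="You are a helpful assistant.",
    )
    reply = llm.send_message("What is the capital of France?", max_tokens=64)
    logging.info("Model reply: %s", reply)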