from openai import OpenAI


class LlaMa3:
    def __init__(self) -> None:
        self.client = OpenAI(
            base_url="https://integrate.api.nvidia.com/v1",
            api_key="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"
        )
        self.name = "Llama3"
        # Initial greeting and request for decision topic
        self.initial_prompt = """
        Hello! I can assist you in making a decision. What decision would you like to make today?
        Please describe the decision and provide any relevant details to help me understand.
        """

    def chat(self, messages):
        # If this is the first message, use the initial prompt to greet and ask for the decision topic
        if len(messages) == 0:  # Initial conversation step
            messages.append({"role": "system", "content": self.initial_prompt})

        # Call the API to get the model's response
        completion = self.client.chat.completions.create(
            model="nvidia/llama-3.1-nemotron-70b-instruct",
            messages=messages,
            temperature=0.5,
            top_p=1,
            max_tokens=1024,
            stream=True
        )

        # Accumulate the streamed chunks into a single response string
        response = ""
        for chunk in completion:
            if chunk.choices[0].delta.content is not None:
                response += chunk.choices[0].delta.content
        return response
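

# Example usage (a minimal sketch, not part of the original file): runs two
# conversational turns with the LlaMa3 wrapper above. Assumes the hard-coded
# NVIDIA API key is valid and that network access to the endpoint is available.
if __name__ == "__main__":
    bot = LlaMa3()
    history = []

    # An empty history makes chat() inject the greeting system prompt first
    greeting = bot.chat(history)
    print(f"{bot.name}: {greeting}")
    history.append({"role": "assistant", "content": greeting})

    # One follow-up turn from the user (hypothetical example input)
    history.append({"role": "user", "content": "I need to choose between two job offers."})
    reply = bot.chat(history)
    print(f"{bot.name}: {reply}")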