from openai import OpenAI
from langchain_openai import ChatOpenAI
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

def generate_response_openai(prompt: str, stream: bool = False, model: str = "gpt-4o-mini") -> str:
    """Send a single-turn prompt to the OpenAI chat-completions API.

    Args:
        prompt: The user message to send.
        stream: If True, return a generator yielding response text chunks
            instead of a single string (matches the contract of
            generate_response_via_langchain).
        model: OpenAI model identifier.

    Returns:
        The complete response text, or a generator of text chunks when
        stream=True.
    """
    client = OpenAI()
    response = client.chat.completions.create(
        model=model,
        messages=[
            {"role": "user", "content": prompt}
        ],
        stream=stream,
    )

    if stream:
        # Bug fix: with stream=True the API returns an iterator of chunks,
        # so the original `response.choices[0].message.content` raised.
        # Streamed chunks expose `.delta` (not `.message`); yield only
        # non-empty text pieces.
        return (
            chunk.choices[0].delta.content
            for chunk in response
            if chunk.choices and chunk.choices[0].delta.content is not None
        )

    return response.choices[0].message.content


def generate_response_via_langchain(query: str, stream: bool = False, model: str = "gpt-4o-mini") :
    """Answer *query* with an OpenAI chat model via a LangChain pipeline.

    Args:
        query: The user question, passed through verbatim as the prompt.
        stream: If True, return a generator of streamed text chunks;
            otherwise return the complete response string.
        model: OpenAI model identifier.
    """
    # Build the whole pipeline in one piped expression:
    # prompt (identity template) -> chat model -> plain-string output.
    chain = (
        PromptTemplate.from_template("{query}")
        | ChatOpenAI(model=model)
        | StrOutputParser()
    )

    payload = {"query": query}

    # Streamed generator or fully materialized string, per the caller's choice.
    return chain.stream(payload) if stream else chain.invoke(payload)