### Vectorstores

```python
from langchain_community.vectorstores import Chroma  # vector store used to index the vocabulary documents
import openai

def retrieve_relevant_context(vectordb, input_text, num_documents=5):
    # Use the vector store to retrieve the most relevant documents
    results = vectordb.similarity_search(query=input_text, k=num_documents)  # Without MMR
    # Extract the page content of the retrieved documents
    context = "\n".join([result.page_content for result in results])
    return context

def generate_response_with_context(input_text, context):
    # Build a prompt that combines the retrieved context with the incoming message
    prompt = (
        f"You're a GenAI-powered assistant for users of communication platforms such as "
        f"WhatsApp, Instagram, Facebook Messenger, etc. "
        f"Generate a response to the incoming text using the Words, their Meanings, and the "
        f"Usages contained in the context: {context}\n\n"
        f"Input text: {input_text}\nResponse:"
    )
    response = openai.completions.create(
        model="gpt-3.5-turbo-instruct",
        prompt=prompt,
        max_tokens=150,
        n=1,                      # Generate one completion per prompt
        stop=None,
        temperature=0.5,
        frequency_penalty=0.5,
        presence_penalty=0.5,
        best_of=1,
    )
    return response.choices[0].text.strip()
```
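The two functions above assume a Chroma vector store that has already been populated with vocabulary documents. The sketch below shows one way to wire everything together; the sample entries, the `OpenAIEmbeddings` embedding model, and the example incoming message are illustrative assumptions rather than part of the original setup.

```python
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_core.documents import Document

# Hypothetical vocabulary entries: each document holds a word, its meaning, and a usage example.
entries = [
    "Word: ghosting | Meaning: abruptly cutting off all contact | Usage: 'He ghosted me after the second date.'",
    "Word: flex | Meaning: to show off | Usage: 'Stop flexing your new phone.'",
]
docs = [Document(page_content=entry) for entry in entries]

# Build an in-memory Chroma index over the vocabulary documents.
vectordb = Chroma.from_documents(docs, embedding=OpenAIEmbeddings())

# Retrieve context for an incoming message and generate a reply.
incoming = "Why did she suddenly stop replying to my messages?"
context = retrieve_relevant_context(vectordb, incoming, num_documents=2)
print(generate_response_with_context(incoming, context))
```

Because `similarity_search` is used without MMR, the retrieved documents can be near-duplicates; switching to `max_marginal_relevance_search` is an option when the vocabulary contains many closely related entries.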