import gradio as gr
from paperqa import Docs, SentenceTransformerEmbeddingModel
from langchain_anthropic import ChatAnthropic

MODEL_NAME = "claude-3-5-sonnet-20240620"


# Subclass so the model name is exposed as a class attribute for paper-qa.
class MyChatAnthropic(ChatAnthropic):
    model_name = MODEL_NAME


llm = MyChatAnthropic(
    model=MODEL_NAME,
    temperature=0.2,
    max_tokens=4096,
)


# Adapt the sentence-transformers embedding model to the async interface
# paper-qa expects; the client argument is unused here, so pass None.
class MyEmb(SentenceTransformerEmbeddingModel):
    async def aembed_documents(self, texts):
        return await self.embed_documents(None, texts)


emb = MyEmb(model_name="mixedbread-ai/mxbai-embed-large-v1")

docs = Docs(llm="langchain", embedding="langchain", embedding_client=emb, client=llm)
docs.max_concurrent = 1
docs.add("knowledge_extraction.csv", disable_check=True)


# gr.ChatInterface calls the function with (message, history).
def respond(message, history):
    return docs.query(message).answer


"""
For information on how to customize the ChatInterface, peruse the gradio docs:
https://www.gradio.app/docs/chatinterface
(a commented sketch of a customized interface follows at the end of this file)
"""
demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()
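# A minimal, hedged sketch of customizing the ChatInterface referenced above.
# title, description, and examples are standard gr.ChatInterface keyword
# arguments; the title text and example question below are illustrative
# placeholders, not part of the original script.
#
# demo = gr.ChatInterface(
#     respond,
#     title="PaperQA Chat",
#     description="Ask questions about the ingested knowledge_extraction.csv.",
#     examples=["What methods were used in the extracted papers?"],
# )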