import hydra
from omegaconf import DictConfig
from demo import App
from llm4scilit_gradio_interface import LLM4SciLitChatInterface
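
# Note: `demo.App` and `LLM4SciLitChatInterface` are project-local modules. From
# their use below, App appears to wrap the retrieval/QA pipeline (set up via
# `_bootstrap`, queried via `ask_chat`), and LLM4SciLitChatInterface appears to be
# a custom Gradio chat UI whose handler returns an answer followed by the
# retrieved source documents.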


def echo(text, history):
    """Stand-in handler for testing the Gradio interface without the backend.

    Echoes the user's text back as the answer, followed by a few dummy
    "source" strings (one per character of ``dummy``).
    """
    dummy = "asdf"
    values = [f"{x}\n{x * 2}" for x in dummy]
    return text, *values


@hydra.main(version_base=None, config_path="../config", config_name="gradio_config")
def main(cfg: DictConfig) -> None:
    # Always disable document parsing for the demo, regardless of the loaded config.
    cfg.document_parsing['enabled'] = False

    app = App(cfg)
    app._bootstrap()

    def wrapped_ask_chat(text, history):
        """Adapt App.ask_chat to the chat UI: return the answer first, then one
        formatted string per retrieved source document."""
        result = app.ask_chat(text, history)
        sources = [
            f"{x.metadata['paper_title']}\n{x.page_content}"
            for x in result['source_documents']
        ]
        return result['result'], *sources

    LLM4SciLitChatInterface(wrapped_ask_chat, title="LLM4SciLit").launch()
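
    # Earlier interface experiments, kept commented out for reference.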
    # LLM4SciLitChatInterface(echo, title="LLM4SciLit").launch()

    # textbox = gr.Textbox(placeholder="Ask a question about scientific literature", lines=2, label="Question", elem_id="textbox")
    # chatbot = gr.Chatbot(label="LLM4SciLit", elem_id="chat")
    # gr.Interface(fn=echo, inputs=[textbox, chatbot], outputs=[chatbot], title="LLM4SciLit").launch()

    # with gr.Blocks() as demo:
    #     chatbot = gr.Chatbot()
    #     msg = gr.Textbox(container=False)
    #     clear = gr.ClearButton([msg, chatbot])
    #
    #     def respond(message, chat_history):
    #         bot_message = "How are you?"
    #         chat_history.append((message, bot_message))
    #         return "", chat_history
    #
    #     msg.submit(respond, [msg, chatbot], [msg, chatbot])

    # with gr.Blocks(title="LLM4SciLit") as demo:
    #     with gr.Row():
    #         with gr.Column(scale=5):
    #             with gr.Row():
    #                 gr.Chatbot(fn=echo)
    #             with gr.Row():
    #                 gr.Button("Submit")
    #         with gr.Column(scale=5):
    #             with gr.Accordion("Retrieved documents"):
    #                 gr.Label("Document 1")

    # webapp = gr.ChatInterface(fn=app.ask_chat, examples=["hello", "hola", "merhaba"], title="LLM4SciLit")
    # webapp = gr.ChatInterface(fn=echo, examples=["hello", "hola", "merhaba"], title="LLM4SciLit")
    # demo.launch()
    # webapp.launch(share=True)


if __name__ == "__main__":
    main()  # pylint: disable=no-value-for-parameter
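

# Example local run (a sketch; assumes the config referenced by @hydra.main exists
# at <repo root>/config/gradio_config.yaml, as implied by config_path="../config"):
#
#   python src/gradio_app.py
#
# Hydra-style key=value overrides can be appended on the command line; note that
# main() forces document_parsing.enabled to False regardless of the config.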