#!/usr/bin/env python
# -*- coding:utf-8 -*-
# ==================================================================
# [CreatedDate] : Thursday, 1970-01-01 08:00:00
# [Author] : shixiaofeng
# [Descriptions] : Gradio front-end for the TableChat table question-answering service.
# ==================================================================
# [ChangeLog]:
# [Date] [Author] [Comments]
# ------------------------------------------------------------------
import json
import logging
import time
import gradio as gr
import requests
logger = logging.getLogger('gradio')
gr.close_all()
host = "127.0.0.1:9172"
def tableqa(input_question, history=""):
logger.info("run tableqa")
if history == "":
history_sql = None
else:
history_sql = json.loads(history.replace("\'", "\""))
data = {"raw_data": {'question': input_question, 'history_sql': history_sql}}
ts = time.time()
r = requests.post(f"http://{host}/tableqa", json=data)
response = json.loads(r.text)
print("response", response)
te = time.time()
print("run inference_mask_sam success [{}], time_Cost is [{}]".format(response["code"] == 200, te-ts))
if response["code"] == 200:
df_value = response["result"]["select_df"]
df = {"data": df_value["rows"], "headers": df_value["header_name"]}
return [df, response["result"]["sql_string"], response["result"]["sql_query"], response["result"]["history"], response["result"]["query_result"]]
else:
return ["1", "2", "3", "4",]
example_iface = [
    ["长江流域的小型水库的库容总量是多少?", ""],
    ["那平均值是多少?", "{'agg': [5], 'cond_conn_op': 1, 'conds': [[3, 2, '小型'], [4, 2, '长江']], 'from': ['reservoir'], 'sel': [2]}"],
    ["那水库的名称呢?", "{'agg': [1], 'cond_conn_op': 1, 'conds': [[3, 2, '小型'], [4, 2, '长江']], 'from': ['reservoir'], 'sel': [2]}"],
    ["汕尾市的水库有吗", "{'agg': [0], 'cond_conn_op': 1, 'conds': [[3, 2, '小型'], [4, 2, '长江']], 'from': ['reservoir'], 'sel': [0]}"],
    ["", ""],
    ["上个月收益率超过3的有几个基金?", ""],
    ["这是哪只基金呢?并且它什么类型的呢?", "{'agg': [4], 'cond_conn_op': 0, 'conds': [[5, 0, '3']], 'from': ['fund'], 'sel': [1]}"],
    ["", ""],
    ["有哪些型号的SUV油耗高于8?", ""],
    ["他们是多大排量的", "{'agg': [0], 'cond_conn_op': 1, 'conds': [[1, 2, 'suv'], [2, 0, '8']], 'from': ['car'], 'sel': [0]}"],
    ["", ""],
    ["本部博士生中平均身高是多少?", ""],
    ["他们是什么专业的?", "{'agg': [1], 'cond_conn_op': 1, 'conds': [[2, 2, '博士'], [7, 2, '本部']], 'from': ['student'], 'sel': [5]}"],
]
# iface = gr.Interface(fn=greet, inputs="text", outputs=["输出sql语句","输出可执行sql语句","执行结果"])
iface = gr.Interface(
    fn=tableqa,
    inputs=[
        gr.Textbox(label="input_question", info="请输入想要查询的问题."),
        gr.Textbox(label="history sql", info="上下文对话历史信息."),
    ],
    outputs=[
        gr.DataFrame(label="索引到的数据库"),
        gr.Textbox(label="输出sql语句"),
        gr.Textbox(label="输出可执行sql语句"),
        gr.Textbox(label="多轮对话历史sql"),
        gr.Textbox(label="SQL执行结果"),
    ],
    examples=example_iface,
    examples_per_page=len(example_iface),
    allow_flagging="auto",
    # cache_examples=True runs every example through tableqa() at startup,
    # so the /tableqa backend must already be reachable when this app launches.
    cache_examples=True,
    description=(
        "<p>"
        "Choose an example below &#128293; &#128293; &#128293; "
        "Or, give question by yourself: <br>"
        "</p>"
    ),
)
title = "TableChat: Chat Model deployment on Table <br>"
demo = gr.TabbedInterface([iface], ['TableChat_V0'], title=title)
# iface.launch(enable_queue=False, server_name="0.0.0.0", server_port=9176, debug=True)
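# Binds to 0.0.0.0:9176; share=True additionally requests a public Gradio share link.
# Note: enable_queue is accepted by older (3.x) Gradio launch(); newer versions use demo.queue() instead.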
demo.launch(enable_queue=False, server_name="0.0.0.0", server_port=9176, share=True)