import time
import streamlit as st
import logging
from json import JSONDecodeError
from markdown import markdown
import random
from typing import List, Dict, Any, Tuple, Optional
from annotated_text import annotation
from urllib.parse import unquote
from haystack_utils import start_haystack, set_state_if_absent, load_questions
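
# haystack_utils is the project-local module that builds the Haystack pipeline
# (document store + retriever + reader) and loads the predefined questions.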
INDEX_DIR = 'data/index'
QUESTIONS_PATH = 'data/questions.txt'
RETRIEVER_MODEL = "sentence-transformers/multi-qa-mpnet-base-dot-v1"
RETRIEVER_MODEL_FORMAT = "sentence_transformers"
READER_MODEL = "deepset/roberta-base-squad2"
READER_CONFIG_THRESHOLD = 0.15
RETRIEVER_TOP_K = 10
READER_TOP_K = 5
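
# RETRIEVER_TOP_K and READER_TOP_K are the values passed to the pipeline at
# query time (see main below); the model names and paths above are assumed to
# be the ones used by haystack_utils when loading the index and the models.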
# start_app is a cached wrapper for start_haystack, which loads the document
# store, retriever and reader and builds the pipeline.
# Caching makes the index and the models load only once, at startup.
@st.cache(hash_funcs={"builtins.SwigPyObject": lambda _: None},
          allow_output_mutation=True)
def start_app():
    return start_haystack()


@st.cache()
def load_questions_wrapper():
    return load_questions()
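
# Build (or fetch from Streamlit's cache) the ready-to-use QA pipeline.
# The SwigPyObject hash_func above is needed because the document store
# presumably wraps a SWIG/FAISS index object that Streamlit cannot hash.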
pipe = start_app()

# the pipeline is not passed as a parameter of the following function,
# because Streamlit cannot hash/cache it reliably
@st.cache(persist=True, allow_output_mutation=True)
def query(question: str, retriever_top_k: int = 10, reader_top_k: int = 5):
    """Run the query through the pipeline and return the answers."""
    params = {"Retriever": {"top_k": retriever_top_k},
              "Reader": {"top_k": reader_top_k}}
    results = pipe.run(question, params=params)
    return results
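
# For illustration, a sketch of how main() consumes the output of query()
# (assuming Haystack 1.x Answer objects; the keys match those accessed below):
#
#   results = query("Who killed Laura Palmer?")
#   first = results["answers"][0].to_dict()
#   first["answer"], first["score"], first["context"]
#   first["meta"]["name"], first["meta"]["url"]
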
def main():
    questions = load_questions_wrapper()

    # Persistent state
    set_state_if_absent('question', "Where is Twin Peaks?")
    set_state_if_absent('answer', '')
    set_state_if_absent('results', None)
    set_state_if_absent('raw_json', None)
    set_state_if_absent('random_question_requested', False)

    # Small callback to reset the interface when the text of the question changes
    def reset_results(*args):
        st.session_state.answer = None
        st.session_state.results = None
        st.session_state.raw_json = None
    # sidebar style
    st.markdown(
        """
<style>
[data-testid="stSidebar"][aria-expanded="true"] > div:first-child{
    width: 350px;
}
[data-testid="stSidebar"][aria-expanded="false"] > div:first-child{
    width: 350px;
    margin-left: -350px;
}
</style>
""",
        unsafe_allow_html=True,
    )
    # Title
    st.write("# Who killed Laura Palmer?")
    st.write("### The first Twin Peaks Question Answering system!")
    st.markdown("""
Ask any question about [Twin Peaks](https://twinpeaks.fandom.com/wiki/Twin_Peaks)
and see if the AI can find an answer...

*Note: do not use keywords, but full-fledged questions.*
""")
    # Sidebar
    st.sidebar.header("Who killed Laura Palmer?")
    st.sidebar.image(
        "https://upload.wikimedia.org/wikipedia/it/3/39/Twin-peaks-1990.jpg")
    st.sidebar.markdown('<p align="center"><b>Twin Peaks Question Answering system</b></p>',
                        unsafe_allow_html=True)
    st.sidebar.markdown(f"""
<style>
a {{
    text-decoration: none;
}}
.haystack-footer {{
    text-align: center;
}}
.haystack-footer h4 {{
    margin: 0.1rem;
    padding: 0;
}}
footer {{
    opacity: 0;
}}
.haystack-footer img {{
    display: block;
    margin-left: auto;
    margin-right: auto;
    width: 85%;
}}
</style>
<div class="haystack-footer">
    <p><a href="https://github.com/anakin87/who-killed-laura-palmer">GitHub</a> -
    Built with <a href="https://github.com/deepset-ai/haystack/">Haystack</a><br/>
    <small>Data crawled from <a href="https://twinpeaks.fandom.com/wiki/Twin_Peaks_Wiki">
    Twin Peaks Wiki</a>.</small>
    </p>
    <img src='https://static.wikia.nocookie.net/twinpeaks/images/e/ef/Laura_Palmer%2C_the_Queen_Of_Hearts.jpg'/>
    <br/>
</div>
""", unsafe_allow_html=True)

    # spotify webplayer
    st.sidebar.markdown("""
<p align="center">
    <iframe style="border-radius:12px" src="https://open.spotify.com/embed/playlist/38rrtWgflrw7grB37aMlsO?utm_source=generator" width="85%" height="380" frameBorder="0" allowfullscreen="" allow="autoplay; clipboard-write; encrypted-media; fullscreen; picture-in-picture"></iframe>
</p>""", unsafe_allow_html=True)
    # Search bar
    question = st.text_input("",
                             value=st.session_state.question,
                             max_chars=100,
                             on_change=reset_results
                             )
    col1, col2 = st.columns(2)
    col1.markdown(
        "<style>.stButton button {width:100%;}</style>", unsafe_allow_html=True)
    col2.markdown(
        "<style>.stButton button {width:100%;}</style>", unsafe_allow_html=True)
    # Run button
    run_pressed = col1.button("Run")

    # Get the next random question from the predefined list
    if col2.button("Random question"):
        reset_results()
        question = random.choice(questions)
        # Avoid picking the same question twice (the change is not visible in the UI)
        while question == st.session_state.question:
            question = random.choice(questions)
        st.session_state.question = question
        st.session_state.random_question_requested = True
        # Re-run the script, setting the random question as the textbox value.
        # Unfortunately necessary, as the Random question button is _below_ the textbox.
        raise st.script_runner.RerunException(
            st.script_request_queue.RerunData(None))
    else:
        st.session_state.random_question_requested = False

    run_query = (run_pressed or question != st.session_state.question) \
        and not st.session_state.random_question_requested
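
    # Run the search when the Run button was pressed or the question text
    # changed, but not as a side effect of the Random question flow above.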
    # Get results for query
    if run_query and question:
        time_start = time.time()
        reset_results()
        st.session_state.question = question
        with st.spinner("Performing neural search on documents..."):
            try:
                st.session_state.results = query(
                    question, RETRIEVER_TOP_K, READER_TOP_K)
                time_end = time.time()
                print(f'elapsed time: {time_end - time_start}')
            except JSONDecodeError as je:
                st.error(
                    "An error occurred reading the results. Is the document store working?")
                return
            except Exception as e:
                logging.exception(e)
                st.error("An error occurred during the request.")
                return
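
    # Display the answers returned by the Reader, highlighting the answer span
    # inside its context and linking back to the source wiki page.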
    if st.session_state.results:
        st.write("## Results:")
        alert_irrelevance = True
        if len(st.session_state.results['answers']) == 0:
            st.info("Haystack is unsure whether any of the documents contain an answer to your question. Try to reformulate it!")

        for count, result in enumerate(st.session_state.results['answers']):
            result = result.to_dict()
            if result["answer"]:
                if alert_irrelevance and result['score'] < 0.50:
                    alert_irrelevance = False
                    st.write("""
                        <h4 style='color: darkred'>Attention, the
                        following answers have low relevance:</h4>""",
                             unsafe_allow_html=True)
                answer, context = result["answer"], result["context"]
                start_idx = context.find(answer)
                end_idx = start_idx + len(answer)
                # Hack due to this bug: https://github.com/streamlit/streamlit/issues/3190
                st.write(markdown("- ..." + context[:start_idx] +
                                  str(annotation(answer, "ANSWER", "#3e1c21")) + context[end_idx:] + "..."),
                         unsafe_allow_html=True)
                name = unquote(result['meta']['name']).replace('_', ' ')
                url = result['meta']['url']
                source = f"[{name}]({url})"
                st.markdown(
                    f"**Score:** {result['score']:.2f} - **Source:** {source}")
main()