Galatea007 committed on
Commit 44c6810
1 Parent(s): 5545e81

Update app.py

Files changed (1): app.py +24 -13
app.py CHANGED
@@ -121,28 +121,37 @@ hf_retriever = asyncio.run(run())
 """
 ### 1. DEFINE STRING TEMPLATE

-from langchain_core.prompts import ChatPromptTemplate
+RAG_PROMPT_TEMPLATE = """\
+<|start_header_id|>system<|end_header_id|>
+You are a helpful assistant. You answer user questions based on provided context. If you can't answer the question with the provided context, say you don't know.<|eot_id|>

-RAG_TEMPLATE = """\
-You are a helpful and kind assistant. Use the context provided below to answer the question.
-Never reference this prompt, or the existence of context. Use the chat history to maintain continuity in the conversation
-If you do not know the answer, or are unsure, say you don't know.
-
-Query:
-{question}
+<|start_header_id|>user<|end_header_id|>
+User Query:
+{query}

 Context:
-{context}
+{context}<|eot_id|>
+
+<|start_header_id|>assistant<|end_header_id|>
 """

-rag_prompt = ChatPromptTemplate.from_template(RAG_TEMPLATE)
+rag_prompt = PromptTemplate.from_template(RAG_PROMPT_TEMPLATE)

 # -- GENERATION -- #
 """
 1. Create a HuggingFaceEndpoint for the LLM
 """
 ### 1. CREATE HUGGINGFACE ENDPOINT FOR LLM
-hf_llm = HF_LLM_ENDPOINT
+
+hf_llm = HuggingFaceEndpoint(
+    endpoint_url=HF_LLM_ENDPOINT,
+    max_new_tokens=512,
+    top_k=10,
+    top_p=0.95,
+    temperature=0.3,
+    repetition_penalty=1.15,
+    huggingfacehub_api_token=HF_TOKEN,
+)

 @cl.author_rename
 def rename(original_author: str):
@@ -169,8 +178,10 @@ async def start_chat():
     ### BUILD LCEL RAG CHAIN THAT ONLY RETURNS TEXT


-    lcel_rag_chain = {"context": itemgetter("query") | hf_retriever, "query": itemgetter("query")}| rag_prompt | hf_llm
-
+    lcel_rag_chain = (
+        {"context": itemgetter("query") | hf_retriever, "query": itemgetter("query")}
+        | rag_prompt | hf_llm
+    )

     cl.user_session.set("lcel_rag_chain", lcel_rag_chain)

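
As a quick sanity check on the new template, here is a minimal sketch of rendering it outside the app. It assumes PromptTemplate is imported from langchain_core.prompts and that RAG_PROMPT_TEMPLATE is the triple-quoted string added in this commit; neither import is visible in the hunks above.

# Sketch: render the new Llama 3 style template with dummy values.
# Assumes PromptTemplate comes from langchain_core.prompts and that
# RAG_PROMPT_TEMPLATE is the string defined in app.py above.
from langchain_core.prompts import PromptTemplate

rag_prompt = PromptTemplate.from_template(RAG_PROMPT_TEMPLATE)

rendered = rag_prompt.format(
    query="What is covered in the documents?",
    context="Retrieved chunk text would be inserted here.",
)
# `rendered` is one string carrying the <|start_header_id|>/<|eot_id|> markers,
# ending at the assistant header so the model completes from there.
print(rendered)

Note that the placeholder changed from {question} to {query}, which matches the "query" key the LCEL chain feeds into the prompt.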
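
And a minimal sketch of how the chain stored by start_chat() might be consumed in a Chainlit message handler; the handler below is hypothetical, since the rest of app.py is not part of this diff.

# Hypothetical consumption sketch -- the actual @cl.on_message handler in app.py
# is outside this diff. Assumes start_chat() stored the chain as shown above.
import chainlit as cl

@cl.on_message
async def handle_message(message: cl.Message):
    lcel_rag_chain = cl.user_session.get("lcel_rag_chain")

    # The chain's input is a dict with a "query" key, because both branches
    # of the first step pull the value out with itemgetter("query").
    msg = cl.Message(content="")
    async for chunk in lcel_rag_chain.astream({"query": message.content}):
        await msg.stream_token(chunk)
    await msg.send()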