kajila committed on
Commit
72795c3
·
verified ·
1 Parent(s): 169c0ec

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -9
app.py CHANGED
@@ -144,9 +144,10 @@ Here are some documents that are relevant to the question.
144
  ```
145
  """
146
  # Define the predict function
147
- def predict(user_input, company):
148
- filter = f"dataset/{company}-10-k-2023.pdf"
149
- relevant_document_chunks = vectorstore_persisted.similarity_search(user_input, k=5, filter={"source": filter})
 
150
 
151
  # Create context_for_query
152
  context_list = [d.page_content for d in relevant_document_chunks]
@@ -162,20 +163,36 @@ def predict(user_input, company):
162
  }
163
  ]
164
 
165
-
166
  try:
167
- # Get response from the LLM
168
- response = openai.ChatCompletion.create(
169
- model='gpt-3.5-turbo',
170
  messages=prompt,
171
  temperature=0
172
  )
173
- prediction = response['choices'][0]['message']['content']
 
174
 
175
  except Exception as e:
176
- prediction = f"Error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
 
178
  return prediction
 
179
 
180
  examples = [
181
  ["What are the company's policies and frameworks regarding AI ethics, governance, and responsible AI use as detailed in their 10-K reports?", "AWS"],
 
144
  ```
145
  """
146
  # Define the predict function
147
+ def predict(user_input,company):
148
+
149
+ filter = "dataset/"+company+"-10-k-2023.pdf"
150
+ relevant_document_chunks = vectorstore_persisted.similarity_search(user_input, k=5, filter={"source":filter})
151
 
152
  # Create context_for_query
153
  context_list = [d.page_content for d in relevant_document_chunks]
 
163
  }
164
  ]
165
 
166
+ # Get response from the LLM
167
  try:
168
+ response = client.chat.completions.create(
169
+ model='mistralai/Mixtral-8x7B-Instruct-v0.1',
 
170
  messages=prompt,
171
  temperature=0
172
  )
173
+
174
+ prediction = response.choices[0].message.content
175
 
176
  except Exception as e:
177
+ prediction = e
178
+
179
+ # While the prediction is made, log both the inputs and outputs to a local log file
180
+ # While writing to the log file, ensure that the commit scheduler is locked to avoid parallel
181
+ # access
182
+
183
+ with scheduler.lock:
184
+ with log_file.open("a") as f:
185
+ f.write(json.dumps(
186
+ {
187
+ 'user_input': user_input,
188
+ 'retrieved_context': context_for_query,
189
+ 'model_response': prediction
190
+ }
191
+ ))
192
+ f.write("\n")
193
 
194
  return prediction
195
+
196
 
197
  examples = [
198
  ["What are the company's policies and frameworks regarding AI ethics, governance, and responsible AI use as detailed in their 10-K reports?", "AWS"],