Mattral committed on
Commit
9ecaaeb
·
verified ·
1 Parent(s): f03b214

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -1
app.py CHANGED
@@ -69,7 +69,8 @@ def answer_question(question, documents):
69
  question_with_context = prompt.format(question=question, context=full_context)
70
 
71
  # Use the Hugging Face InferenceClient to generate the response
72
- response = client.query(question_with_context)
 
73
 
74
  # Assuming the response contains a 'generated_text' field with the model's output
75
  return response["generated_text"]
 
69
  question_with_context = prompt.format(question=question, context=full_context)
70
 
71
  # Use the Hugging Face InferenceClient to generate the response
72
+ # Replacing the `query` method with `predict` method
73
+ response = client.predict(question_with_context)
74
 
75
  # Assuming the response contains a 'generated_text' field with the model's output
76
  return response["generated_text"]