alfraser committed
Commit 1215037 · 1 Parent(s): 3af3634

Fixed a bug where the response from the LLM was being trimmed of content.

Files changed (1)
src/models.py +1 -3
src/models.py CHANGED
@@ -66,9 +66,7 @@ class HFLlamaChatModel:
         response = requests.post(api_url, headers=headers, json=query_payload)
         if response.status_code == 200:
             resp_json = json.loads(response.text)
-            llm_text = resp_json[0]['generated_text']
-            query_len = len(query_input)
-            llm_text = llm_text[query_len:].strip()
+            llm_text = resp_json[0]['generated_text'].strip()
             return llm_text
         else:
             error_detail = f"Error from hugging face code: {response.status_code}: {response.reason} ({response.content})"