alexkueck committed on
Commit
37e77d2
1 Parent(s): 64c5a27

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -433,7 +433,7 @@ def generate_text_zu_doc(file, prompt, k, rag_option, chatbot, history, db):
433
  #mit oder ohne RAG möglich
434
  def generate_text (prompt, chatbot, history, rag_option, model_option, openai_api_key, db, websuche, k=3, top_p=0.6, temperature=0.5, max_new_tokens=4048, max_context_length_tokens=2048, repetition_penalty=1.3, top_k=35):
435
  global splittet
436
- hugchat=False
437
  suche_im_Netz="Antwort der KI ..."
438
  print("Text pur..............................")
439
 
@@ -490,7 +490,7 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
490
  else:
491
  #splittet = False
492
  print("LLM aufrufen ohne RAG: ...........")
493
- resulti = llm_chain(llm, history_text_und_prompt, hugchat)
494
  result = resulti.strip()
495
  """
496
  #Alternativ mit API_URL - aber das model braucht 93 B Space!!!
@@ -518,7 +518,7 @@ def generate_text (prompt, chatbot, history, rag_option, model_option, openai_ap
518
  #result = create_assistant_suche_hf(llm, history_text_und_prompt)
519
  #else:
520
  #mit tavily:
521
- result = create_assistant_suche(prompt) #history_text_und_prompt)
522
 
523
 
524
  """
 
433
  #mit oder ohne RAG möglich
434
  def generate_text (prompt, chatbot, history, rag_option, model_option, openai_api_key, db, websuche, k=3, top_p=0.6, temperature=0.5, max_new_tokens=4048, max_context_length_tokens=2048, repetition_penalty=1.3, top_k=35):
435
  global splittet
436
+ #hugchat=False
437
  suche_im_Netz="Antwort der KI ..."
438
  print("Text pur..............................")
439
 
 
490
  else:
491
  #splittet = False
492
  print("LLM aufrufen ohne RAG: ...........")
493
+ resulti = llm_chain(llm, history_text_und_prompt)
494
  result = resulti.strip()
495
  """
496
  #Alternativ mit API_URL - aber das model braucht 93 B Space!!!
 
518
  #result = create_assistant_suche_hf(llm, history_text_und_prompt)
519
  #else:
520
  #mit tavily:
521
+ result = create_assistant_suche(history_text_und_prompt)
522
 
523
 
524
  """