Update app.py
app.py CHANGED
@@ -492,12 +492,14 @@ def generate_text_zu_bild(file, prompt, k):
     document_storage_chroma(splits)
     db = document_retrieval_chroma()
     #with RAG:
+    print("hier!!!!!!!!!!!!!!!!!!!!")
     neu_text_mit_chunks = rag_chain2(prompt, db, k)
     #for the chat LLM:
     #prompt = generate_prompt_with_history_openai(neu_text_mit_chunks, history)
     #as a plain prompt:
     prompt_neu = generate_prompt_with_history(neu_text_mit_chunks, history)
-
+    print("prompt hier:.......................")
+    print(prompt_neu)
     headers, payload = process_image(file, prompt_neu)
     response = requests.post("https://api.openai.com/v1/chat/completions", headers=headers, json=payload)
     #output as json