import torch
import transformers
from transformers import AutoModelForCausalLM, AutoTokenizer

# repo_id = "YOUR_USERNAME/YOUR_LEARNER_NAME"
repo_id = "jonruida/model-IC"

# Assumption: the fine-tuned model and tokenizer are loaded from the Hub repo above.
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

query_pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    torch_dtype=torch.float16,
    device_map="auto",
    max_new_tokens=200,
)

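# Hedged sketch of the vector-store setup assumed by test_rag() below: the original file
# references a persisted Chroma database (chroma_db/chroma.sqlite3) but never constructs it.
# The embedding model and the "chroma_db" persist directory are assumptions for illustration.
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import Chroma

embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
chroma_db = Chroma(persist_directory="chroma_db", embedding_function=embeddings)
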
def test_rag(pipeline, query):
    # Retrieve the most similar documents and their distance scores from the Chroma store.
    docs = chroma_db.similarity_search_with_score(query)
    context = []
    for doc, score in docs:
        # Keep only documents whose distance score is below the threshold.
        if score < 7:
            doc_details = doc.to_json()["kwargs"]
            context.append(doc_details["page_content"])
    if len(context) != 0:
        # Prompt (in Spanish): "Based on the following information: <context>
        # Answer the question in Spanish: <query>"
        messages = [{"role": "user", "content": "Basándote en la siguiente información: " + "\n".join(context) + "\n Responde en castellano a la pregunta: " + query}]
        prompt = pipeline.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
        outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
        answer = outputs[0]["generated_text"]
        # Return only the text generated after the final "[/INST]" tag, plus the retrieved docs.
        return answer[answer.rfind("[/INST]") + 8:], docs
    else:
        # "I don't have information to answer this question."
        return "No tengo información para responder a esta pregunta", docs