from dotenv import load_dotenv
import gradio as gr
import os
from llama_index.core import (
    StorageContext,
    load_index_from_storage,
    VectorStoreIndex,
    SimpleDirectoryReader,
    ChatPromptTemplate,
    Settings,
)
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# Load environment variables (such as HF_TOKEN) from a local .env file.
load_dotenv()
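
# Global LlamaIndex settings: Llama 3 8B Instruct via the Hugging Face
# Inference API for generation, and a small BGE model for embeddings.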
Settings.llm = HuggingFaceInferenceAPI(
    model_name="meta-llama/Meta-Llama-3-8B-Instruct",
    tokenizer_name="meta-llama/Meta-Llama-3-8B-Instruct",
    context_window=3000,
    token=os.getenv("HF_TOKEN"),
    max_new_tokens=512,
    generate_kwargs={"temperature": 0.1},
)
Settings.embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5"
)
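
# Where the persisted vector index and the source PDFs live.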
PERSIST_DIR = "db"
PDF_DIRECTORY = "data"

os.makedirs(PDF_DIRECTORY, exist_ok=True)
os.makedirs(PERSIST_DIR, exist_ok=True)
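
# In-memory chat history, stored as (query, response) tuples. Note this
# module-level list is shared by every user session of the app.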
current_chat_history = []
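

# Read every file in PDF_DIRECTORY, embed it into a vector index, and persist
# the index to PERSIST_DIR so queries can reload it without re-embedding.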
def data_ingestion_from_directory():
    documents = SimpleDirectoryReader(PDF_DIRECTORY).load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)


def handle_query(query):
    # QA prompt; the {context_str} and {query_str} slots are filled in by the
    # query engine at answer time.
    chat_text_qa_msgs = [
        (
            "user",
            """
            Your name is FernAI. Your aim is to provide the best service and
            information about the RedFerns Tech company. Answer based on the
            flow of the conversation; your final goal is to encourage the user
            to engage our company's services. Summarize your answers, using as
            few words as possible while staying effective.
            {context_str}
            Question:
            {query_str}
            """,
        )
    ]
    text_qa_template = ChatPromptTemplate.from_messages(chat_text_qa_msgs)
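
    # Reload the persisted index for each query. Simple, but it re-reads the
    # index from disk every time; a larger app would load it once at startup.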
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

    # Fold earlier turns into a single string, most recent turn first.
    history_str = ""
    for past_query, past_response in reversed(current_chat_history):
        if past_query.strip():
            history_str += f"User asked: '{past_query}'\nBot answered: '{past_response}'\n"

    # The {context_str} slot in the template is filled by the query engine with
    # retrieved document text, so the chat history is prepended to the query
    # string rather than passed as a template variable.
    query_engine = index.as_query_engine(text_qa_template=text_qa_template)
    query_with_history = f"{history_str}{query}" if history_str else query
    answer = query_engine.query(query_with_history)

    # query() normally returns a Response object; fall back defensively for
    # dict-like results, then to a default apology.
    if hasattr(answer, "response"):
        response = answer.response
    elif isinstance(answer, dict) and "response" in answer:
        response = answer["response"]
    else:
        response = "Sorry, I couldn't find an answer."

    current_chat_history.append((query, response))
    return response
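

# Ingest the PDFs and build the index once at startup.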
print("Processing PDF ingestion from directory:", PDF_DIRECTORY)
data_ingestion_from_directory()
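

# Gradio chat callback: answer the message and wrap the reply in HTML that
# shows the FernAI avatar beside the text.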
def predict(message, history):
    logo_html = '''
    <div class="circle-logo">
        <img src="https://rb.gy/8r06eg" alt="FernAI">
    </div>
    '''
    response = handle_query(message)
    response_with_logo = f'<div class="response-with-logo">{logo_html}<div class="response-text">{response}</div></div>'
    return response_with_logo
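

# CSS for the circular avatar and the avatar-plus-text response layout.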
css = '''
.circle-logo {
    display: inline-block;
    width: 40px;
    height: 40px;
    border-radius: 50%;
    overflow: hidden;
    margin-right: 10px;
    vertical-align: middle;
}

.circle-logo img {
    width: 100%;
    height: 100%;
    object-fit: cover;
}

.response-with-logo {
    display: flex;
    align-items: center;
    margin-bottom: 10px;
}
'''
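
# Build and launch the chat UI. The clear_btn / undo_btn / retry_btn keywords
# are Gradio 4.x ChatInterface options (removed in Gradio 5), so this assumes
# a gradio<5 install.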
gr.ChatInterface(
    predict,
    title="FernAi_chatBot",
    css=css,
    description="Ask Redfernstech any question",
    clear_btn=None, undo_btn=None, retry_btn=None,
    examples=['Tell me about Redfernstech?', 'Services in Redfernstech?'],
).launch()