Upload 3 files
deploying the streamlit app
- app.py +21 -0
- query.py +68 -0
- requirements.txt +0 -0
app.py
ADDED
@@ -0,0 +1,21 @@
+import streamlit as st
+from query import generate_response
+
+def main():
+    st.title("📚 CourseLens")
+    st.write("Ask any question about the free courses available on Analytics Vidhya!")
+
+    # User input
+    user_input = st.text_input("Enter your query:", "")
+
+    if st.button("Search"):
+        if user_input.strip():
+            with st.spinner("Searching for the best courses..."):
+                response = generate_response(user_input)
+            st.subheader("Search Results:")
+            st.write(response)
+        else:
+            st.warning("Please enter a query to search.")
+
+if __name__ == "__main__":
+    main()
query.py
ADDED
@@ -0,0 +1,68 @@
+import os
+from dotenv import load_dotenv
+from langchain_groq import ChatGroq
+from langchain.embeddings import HuggingFaceInferenceAPIEmbeddings
+from langchain_pinecone import PineconeVectorStore
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.runnables import RunnablePassthrough, RunnableParallel
+from langchain.prompts import PromptTemplate
+
+# Configuring API keys
+load_dotenv()
+groq_api_key = os.getenv('GROQ_API_KEY')
+hf_token = os.getenv('HF_TOKEN')
+pinecone_api_key = os.getenv("PINECONE_API_KEY")
+
+
+# Instantiating the LLM
+chat = ChatGroq(
+    temperature=0,
+    model="llama-3.3-70b-versatile",
+    api_key=groq_api_key
+)
+
+# Embedding model
+embedding = HuggingFaceInferenceAPIEmbeddings(api_key=hf_token)
+
+
+# Connect to Pinecone
+vectorstore = PineconeVectorStore.from_existing_index(
+    index_name='courselens',
+    embedding=embedding,
+)
+
+# Setting up the retriever
+retriever = vectorstore.as_retriever()
+
+# Prompt template
+template = '''
+You are CourseLens, an AI assistant powering a smart search system for Analytics Vidhya's free courses.
+Your primary role is to answer questions about the free courses available on Analytics Vidhya and to quickly suggest the most relevant courses based on users' natural language queries or keywords.
+Help users discover course topics, prerequisites, learning paths, and curriculum details by leveraging a RAG-based (Retrieval-Augmented Generation) approach.
+Retrieve and recommend courses from a structured knowledge base containing course links, headings, and descriptions while maintaining accuracy and relevance.
+Provide clear, concise responses, keep the search experience user-friendly and efficient, and always include the course link.
+{context}:
+{Question}:
+
+Answer:
+
+'''
+prompt_template = PromptTemplate.from_template(template=template)
+
+# Setting up the chain
+
+# 1. Retrieve context with the retriever and pass the question through unchanged
+set_ret = RunnableParallel(
+    {"context": retriever, "Question": RunnablePassthrough()}
+)
+rag_chain = set_ret | prompt_template | chat | StrOutputParser()
+
+
+
+# Defining the response function
+# Invokes the full RAG chain on the user's query text
+def generate_response(text):
+    response = rag_chain.invoke(text)
+    return response
+
+# print(generate_response("I want to learn Tableau. How do I learn it?"))
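Because query.py builds the Groq client, the embeddings, the Pinecone retriever, and the RAG chain at import time, it can be smoke-tested outside Streamlit before deploying. Below is a minimal sketch (a hypothetical helper, not part of this upload); it assumes GROQ_API_KEY, HF_TOKEN, and PINECONE_API_KEY are available in a local .env file, exactly as query.py expects, and that the 'courselens' Pinecone index already exists.

# smoke_test.py -- hypothetical helper, not included in this commit.
# Importing query constructs the full RAG pipeline as a side effect.
from query import generate_response

if __name__ == "__main__":
    # Mirrors the commented-out check at the bottom of query.py.
    answer = generate_response("I want to learn Tableau. How do I learn it?")
    print(answer)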
requirements.txt
ADDED
Binary file (3.36 kB).