Praveen76 committed (verified)
Commit 8c1038e · 1 Parent(s): 0d8e9b1

Upload 15 files

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+vector_db/chroma_v01/chroma.sqlite3 filter=lfs diff=lfs merge=lfs -text
Config/API_KEYS.yml ADDED
@@ -0,0 +1,6 @@
+OPEN_AI:
+  Key: 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
+
+TWILIO:
+  account_sid: AC106e12a4dce8d8920630a3addb6f14e2
+  auth_token: 941ad55307ae272ebd077f7c3fd7abdf
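
For reference, a short sketch of how this file is consumed, mirroring the yaml.safe_load usage in app/main.py further down; this snippet is not part of the commit itself:

    import yaml

    with open('./Config/API_KEYS.yml', 'r') as file:
        api_keys = yaml.safe_load(file)

    # Nested mapping: provider name at the top level, credential fields underneath
    openai_key = api_keys['OPEN_AI']['Key']
    account_sid = api_keys['TWILIO']['account_sid']
    auth_token = api_keys['TWILIO']['auth_token']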
Config/config.yml ADDED
@@ -0,0 +1,3 @@
+OPEN_AI:
+  Key: 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
+
Dockerfile ADDED
@@ -0,0 +1,21 @@
+# pull python base image
+FROM python:3.10
+
+ADD requirements.txt requirements.txt
+
+
+# update pip
+RUN pip install --upgrade pip
+
+# install dependencies
+RUN pip install -r requirements.txt
+
+
+# copy application files
+COPY app/. app/.
+
+# expose port for application
+EXPOSE 8001
+
+# start the Flask application
+CMD ["python", "app/main.py"]
app/__init__.py ADDED
File without changes
app/__pycache__/helper.cpython-312.pyc ADDED
Binary file (1.66 kB).
 
app/helper.py ADDED
@@ -0,0 +1,31 @@
+### Vectorstores
+from langchain_community.vectorstores import Chroma
+import openai
+
+def retrieve_relevant_context(vectordb, input_text, num_documents=5):
+    # Use the vector store to retrieve the most relevant documents
+    results = vectordb.similarity_search(query=input_text, k=num_documents)  # Without MMR
+    # Extract the page content of the retrieved documents
+    context = "\n".join([result.page_content for result in results])
+    return context
+
+def generate_response_with_context(input_text, context):
+    prompt = (
+        f"You're a GenAI powered assistant to users who're using communication platforms such as Whatsapp, Instagram, Facebook Messenger, etc. to interact with other users. "
+        f"You need to generate responses to the incoming texts on these platforms using the Words, their Meanings, and the Usages contained in the context: {context}\n\n"
+        f"Input text: {input_text}\nResponse:"
+    )
+
+    response = openai.completions.create(
+        model="gpt-3.5-turbo-instruct",
+        prompt=prompt,
+        max_tokens=150,
+        n=1,  # Generate one completion per prompt
+        stop=None,
+        temperature=0.5,
+        frequency_penalty=0.5,
+        presence_penalty=0.5,
+        best_of=1,
+    )
+
+    return response.choices[0].text.strip()
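
A minimal usage sketch for these two helpers, not part of the commit: it assumes the persisted Chroma store under ./vector_db/chroma_v01 and a valid OPENAI_API_KEY; the sample message is illustrative.

    import os
    from langchain_community.embeddings import OpenAIEmbeddings
    from langchain_community.vectorstores import Chroma
    from helper import retrieve_relevant_context, generate_response_with_context

    os.environ.setdefault("OPENAI_API_KEY", "sk-...")  # placeholder; supply a real key

    # Load the same persisted store that app/main.py uses
    vectordb = Chroma(persist_directory='./vector_db/chroma_v01',
                      embedding_function=OpenAIEmbeddings())

    # Retrieve GRE-word context for an incoming message and draft a reply
    message = "Wish me luck for my GRE exam tomorrow!"
    context = retrieve_relevant_context(vectordb, message, num_documents=3)
    print(generate_response_with_context(message, context))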
app/main.py ADDED
@@ -0,0 +1,66 @@
+from twilio.rest import Client
+import yaml
+import os
+
+from langchain_community.embeddings import OpenAIEmbeddings
+from langchain_community.vectorstores import Chroma
+from helper import retrieve_relevant_context, generate_response_with_context
+
+# Load the relevant API keys
+file_path = './Config/API_KEYS.yml'
+
+with open(file_path, 'r') as file:
+    api_keys = yaml.safe_load(file)
+
+# Extract the OpenAI key and expose it to the OpenAI client
+openai_key = api_keys['OPEN_AI']['Key']
+os.environ["OPENAI_API_KEY"] = openai_key
+
+# Extract the Twilio credentials
+account_sid = api_keys['TWILIO']['account_sid']
+auth_token = api_keys['TWILIO']['auth_token']
+
+# Define the persist directory
+persist_directory = './vector_db/chroma_v01'
+
+# Initialize the embeddings model
+embedding_model = OpenAIEmbeddings()
+
+# Load the persisted Chroma vector store
+vectordb = Chroma(persist_directory=persist_directory, embedding_function=embedding_model)
+
+# Set up the Twilio client
+client = Client(account_sid, auth_token)
+
+from flask import Flask, request
+from twilio.twiml.messaging_response import MessagingResponse
+
+print("flask app is running")
+app = Flask(__name__)
+
+@app.route("/whatsapp", methods=['GET', 'POST'])
+def incoming_sms():
+    """Send a dynamic reply to an incoming text message"""
+    # Get the message the user sent to our Twilio number
+    body = request.values.get('Body', None)
+    print("body :", body)
+
+    ##### Process incoming text #############
+    incoming_msg = (body or "").strip()
+    if not incoming_msg:
+        return str(MessagingResponse())
+
+    # Generate a response using the RAG-powered system
+    context = retrieve_relevant_context(vectordb, incoming_msg)
+    response = generate_response_with_context(incoming_msg, context)
+    print("response :", response)
+    ##### Process incoming text Done #############
+
+    # Start our TwiML response
+    resp = MessagingResponse()
+    print("TwiML resp :", resp)
+    resp.message(response)
+    return str(resp)
+
+if __name__ == "__main__":
+    app.run(port=5000, debug=True)
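
A local smoke test for the webhook, not part of this commit and only a sketch: it drives the route with Flask's built-in test client, which mimics the form-encoded Body field Twilio posts, so no public URL or signature validation is involved. It assumes it is run from the repository root with valid keys in Config/API_KEYS.yml, since importing main also loads the vector store and the reply path calls OpenAI.

    import sys
    sys.path.insert(0, "app")   # so `from helper import ...` resolves the same way as in the Dockerfile
    from main import app        # app/main.py builds the Flask app at import time

    client = app.test_client()
    reply = client.post("/whatsapp", data={"Body": "Wish me luck for my GRE prep"})
    print(reply.status_code)    # expect 200
    print(reply.data.decode())  # TwiML, e.g. <Response><Message>...</Message></Response>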
assets/FinalFullData/GRE_Word_List_Final.pdf ADDED
Binary file (137 kB).
 
assets/FinalFullData/GRE_Words_Final.pdf ADDED
Binary file (244 kB).
 
requirements.txt ADDED
@@ -0,0 +1,9 @@
+chromadb==0.5.0
+Flask==3.0.3
+gradio==4.31.5
+huggingface-hub==0.23.0
+openai==1.30.1
+langchain==0.2.0
+langchain-community==0.2.0
+fitz
+twilio
vector_db/chroma_v01/86d715ec-534d-41ba-8058-74b3ecf1b19e/data_level0.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f18abd8c514282db82706e52b0a33ed659cd534e925a6f149deb7af9ce34bd8e
+size 6284000
vector_db/chroma_v01/86d715ec-534d-41ba-8058-74b3ecf1b19e/header.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:effaa959ce2b30070fdafc2fe82096fc46e4ee7561b75920dd3ce43d09679b21
+size 100
vector_db/chroma_v01/86d715ec-534d-41ba-8058-74b3ecf1b19e/length.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:85f45bb73617241cd10561c82fb101bbc3e3777f2b8f53f1cb2332f803a0366e
+size 4000
vector_db/chroma_v01/86d715ec-534d-41ba-8058-74b3ecf1b19e/link_lists.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855
+size 0
vector_db/chroma_v01/chroma.sqlite3 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bc5cc82d9a38112824d5469c1619e39feb34a29704c9a5eefb13882f11f401e1
+size 8568832
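
Because the Chroma store is committed as Git LFS pointers, a quick check after `git lfs pull` is to load the store and run a tiny similarity search. This is a sketch, not part of the commit; it needs an OPENAI_API_KEY for the embedding function, and the query word is illustrative.

    from langchain_community.embeddings import OpenAIEmbeddings
    from langchain_community.vectorstores import Chroma

    vectordb = Chroma(persist_directory='./vector_db/chroma_v01',
                      embedding_function=OpenAIEmbeddings())

    # Non-empty results mean the .sqlite3/.bin payloads were fetched, not just the pointer files
    docs = vectordb.similarity_search("loquacious", k=2)
    print(len(docs), docs[0].page_content[:80] if docs else "store appears empty")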