Ilyas KHIAT committed
Commit fcbb419 · 2 Parent(s): 77465ae 3fd145c
Files changed (4):
  1. app.py +8 -5
  2. chat_te.py +38 -29
  3. chat_with_pps.py +5 -2
  4. partie_prenante_carte.py +1 -1
app.py CHANGED
@@ -9,7 +9,7 @@ from ActionsRSE import display_actions_rse
 from AnalyseActionsRSE import display_analyse_actions_rse
 from partiesprenantes import display_materiality_partiesprenantes
 from partie_prenante_carte import display_pp
-from pp_viz import display_viz
+
 
 # Import modifiédes fonctions liées aux scripts
 from projetRSE import display_rse_projects
@@ -22,6 +22,7 @@ from RAG_PDF_WEB import rag_pdf_web
 from prompt import get_prompts_list,prompt_execution,execute_prompt
 from chat_with_pps import display_chat
 from high_chart import test_chart
+from chat_te import display_chat_te
 
 def main():
     st.markdown(":point_left: Cliquez pour vous inspirer", unsafe_allow_html=True)
@@ -70,7 +71,8 @@ def main():
             [
                 "Audit flash RSE de vos contenus",
                 "Parties prenantes",
-                "Chatbot RSE"
+                "Chatbot partie prenante",
+                "Chatbot TE",
             ]
         )
 
@@ -87,7 +89,7 @@ def main():
         # if selected_company:
        # display_materiality_matrix(selected_company, data, bziiit_data)
 
-        elif ia_mode == "Chatbot RSE":
+        elif ia_mode == "Chatbot partie prenante":
             display_chat()
 
         elif ia_mode == "Audit flash RSE de vos contenus":
@@ -101,8 +103,9 @@ def main():
 
            # selected_prompt = prompt_execution()
            # if selected_prompt:
-            # execute_prompt(selected_prompt)
-
+            # execute_prompt(selected_prompt)
+        elif ia_mode == "Chatbot TE":
+            display_chat_te()
 
    elif section_principale == "Documentation":
        display_documentation()
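
Taken together, the app.py hunks above add the two chatbot entries to the existing ia_mode dispatch. A minimal sketch of how the affected branch is assumed to read after this merge; the surrounding menu code (widget, label, other branches) is outside this diff, so those parts are hypothetical:

    ia_mode = st.radio(
        "Mode IA",  # hypothetical label; the real one is not part of this diff
        [
            "Audit flash RSE de vos contenus",
            "Parties prenantes",
            "Chatbot partie prenante",
            "Chatbot TE",
        ],
    )

    if ia_mode == "Parties prenantes":
        display_pp()          # assumed pre-existing branch, unchanged by this commit
    elif ia_mode == "Chatbot partie prenante":
        display_chat()        # stakeholder chatbot from chat_with_pps.py (was "Chatbot RSE")
    elif ia_mode == "Chatbot TE":
        display_chat_te()     # new PDF-backed chatbot from chat_te.py
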
chat_te.py CHANGED
@@ -10,7 +10,24 @@ from langchain import hub
 from langchain_core.prompts.prompt import PromptTemplate
 from langchain_community.vectorstores import FAISS
 from langchain_community.embeddings import OpenAIEmbeddings
-load_dotenv()
+from langchain_community.document_loaders import PyPDFLoader
+from langchain_experimental.text_splitter import SemanticChunker
+load_dotenv()
+
+def get_docs_from_pdf(file):
+    loader = PyPDFLoader(file)
+    docs = loader.load_and_split()
+    return docs
+
+def get_doc_chunks(docs):
+    text_splitter = SemanticChunker(OpenAIEmbeddings())
+    chunks = text_splitter.split_documents(docs)
+    return chunks
+
+def get_vectorstore_from_docs(doc_chunks):
+    embedding = OpenAIEmbeddings()
+    vectorstore = FAISS.from_documents(documents=doc_chunks, embedding=embedding)
+    return vectorstore
 
 def get_conversation_chain(vectorstore):
     llm = ChatOpenAI(model="gpt-4o",temperature=0.5, max_tokens=2048)
@@ -26,49 +43,44 @@ def get_conversation_chain(vectorstore):
     )
     return rag_chain
 
-def get_response(user_query, chat_history):
+def create_db(file):
+    docs = get_docs_from_pdf(file)
+    doc_chunks = get_doc_chunks(docs)
+    vectorstore = get_vectorstore_from_docs(doc_chunks)
+    return vectorstore
+
+def get_response(chain,user_query, chat_history):
 
     template = """
     Chat history: {chat_history}
    User question: {user_question}
    """
-
-    embeddings = OpenAIEmbeddings()
-    db = FAISS.load_local("vectorstore_op", embeddings)
+
 
     question = ChatPromptTemplate.from_template(template)
     question = question.format(chat_history=chat_history, user_question=user_query)
 
-    chain = get_conversation_chain(db)
-
     return chain.stream(question)
 
-def display_chart():
-    if "pp_grouped" not in st.session_state or st.session_state['pp_grouped'] is None or len(st.session_state['pp_grouped']) == 0:
-        st.warning("Aucune partie prenante n'a été définie")
-        return None
-    plot = construct_plot()
-    st.plotly_chart(plot)
-
-
-def display_chat():
+def display_chat_te():
     # app config
     st.title("Chatbot")
 
     # session state
-    if "chat_history" not in st.session_state:
-        st.session_state.chat_history = [
-            AIMessage(content="Salut, voici votre cartographie des parties prenantes. Que puis-je faire pour vous?"),
+    if "chat_history_te" not in st.session_state:
+        st.session_state.chat_history_te = [
+            AIMessage(content="Salut, posez-moi vos question sur la transistion ecologique."),
         ]
-
+    if "chain" not in st.session_state:
+        db=create_db("DATA_bziiit/op.pdf")
+        chain = get_conversation_chain(db)
+        st.session_state.chain = chain
 
     # conversation
-    for message in st.session_state.chat_history:
+    for message in st.session_state.chat_history_te:
        if isinstance(message, AIMessage):
            with st.chat_message("AI"):
                st.write(message.content)
-                if "cartographie des parties prenantes" in message.content:
-                    display_chart()
        elif isinstance(message, HumanMessage):
            with st.chat_message("Moi"):
                st.write(message.content)
@@ -76,15 +88,12 @@ def display_chat():
     # user input
     user_query = st.chat_input("Par ici...")
     if user_query is not None and user_query != "":
-        st.session_state.chat_history.append(HumanMessage(content=user_query))
+        st.session_state.chat_history_te.append(HumanMessage(content=user_query))
 
         with st.chat_message("Moi"):
             st.markdown(user_query)
 
         with st.chat_message("AI"):
-
-            response = st.write_stream(get_response(user_query, st.session_state.chat_history,format_context(st.session_state['pp_grouped'],st.session_state['Nom de la marque'])))
-            if "cartographie des parties prenantes" in message.content:
-                display_chart()
-
-        st.session_state.chat_history.append(AIMessage(content=response))
+            response = st.write_stream(get_response(st.session_state.chain,user_query, st.session_state.chat_history_te))
+
+        st.session_state.chat_history_te.append(AIMessage(content=response))
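
The new helpers in chat_te.py form a small PDF RAG pipeline: PyPDFLoader reads the document, SemanticChunker splits it, FAISS indexes the chunks, and the gpt-4o chain streams answers. A minimal usage sketch outside Streamlit, assuming OPENAI_API_KEY is available via .env, DATA_bziiit/op.pdf exists as in the committed code, and the rag_chain returned by get_conversation_chain ends in a string output parser so that .stream() yields text chunks:

    from dotenv import load_dotenv
    from chat_te import create_db, get_conversation_chain, get_response

    load_dotenv()  # loads OPENAI_API_KEY from .env, as chat_te.py itself does

    vectorstore = create_db("DATA_bziiit/op.pdf")  # PyPDFLoader -> SemanticChunker -> FAISS
    chain = get_conversation_chain(vectorstore)    # RAG chain over the FAISS store

    chat_history = []
    question = "Qu'est-ce que la transition écologique ?"
    for chunk in get_response(chain, question, chat_history):
        print(chunk, end="", flush=True)           # chain.stream(...) yields incremental output

Note the design choice in display_chat_te(): the vectorstore and chain are built once and cached in st.session_state.chain, so the PDF is not re-loaded and re-embedded on every Streamlit rerun.
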
 
 
 
 
chat_with_pps.py CHANGED
@@ -68,8 +68,9 @@ def display_chat():
         st.session_state.chat_history = [
             AIMessage(content="Salut, voici votre cartographie des parties prenantes. Que puis-je faire pour vous?"),
         ]
-
-
+
+
+
     # conversation
     for message in st.session_state.chat_history:
         if isinstance(message, AIMessage):
@@ -81,6 +82,8 @@ def display_chat():
             with st.chat_message("Moi"):
                 st.write(message.content)
 
+    if "pp_grouped" not in st.session_state or st.session_state['pp_grouped'] is None or len(st.session_state['pp_grouped']) == 0:
+        return None
     # user input
     user_query = st.chat_input("Par ici...")
     if user_query is not None and user_query != "":
partie_prenante_carte.py CHANGED
@@ -15,7 +15,7 @@ from langchain.llms import HuggingFaceHub
 from langchain import hub
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.runnables import RunnablePassthrough
-from langchain_community.document_loaders import WebBaseLoader,FireCrawlLoader,PDFLoader
+from langchain_community.document_loaders import WebBaseLoader,FireCrawlLoader
 from langchain_core.prompts.prompt import PromptTemplate
 from session import set_partie_prenante
 import os