shubhamchau222 commited on
Commit
58617a7
·
1 Parent(s): 71a4d85

chatbot graph built & tested

Browse files
app.py ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
"""Application entry point for the LangGraph AgenticAI Streamlit app."""
# Fix: removed unused `import os` (the whole 5-line file is visible in this
# diff and `os` is never referenced).
from src.main import load_langgraph_agenticai_app

if __name__ == "__main__":
    load_langgraph_agenticai_app()
src/graph/graph_builder.py ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+
3
+ # Add the src directory to the Python path
4
+ import sys
5
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
6
+
7
+ from langgraph.graph import StateGraph, START,END
8
+ from langgraph.prebuilt import tools_condition,ToolNode
9
+ from langchain_core.prompts import ChatPromptTemplate
10
+ from src.state.state import graphstate
11
+ from src.nodes.basic_chatbot_nodes import BasicChatbotNodes
12
+ from src.nodes.chabot_with_tool_nodes import ChatbotWithToolNodes
13
+ from src.tools.search_tool import get_tools, create_tool_node
14
+
15
+
16
class GraphBuilder:
    """Builds and compiles LangGraph state graphs for the supported use cases."""

    def __init__(self, model):
        # model: a chat LLM instance (e.g. ChatGroq) used by every node.
        self.model = model
        self.graph_builder = StateGraph(graphstate)

    def basic_chabot_build(self):
        """
        Builds a basic chatbot graph using LangGraph.

        A single chatbot node (``BasicChatbotNodes.process``) is wired
        directly between START and END, making it both the entry and
        exit point of the graph.
        """
        self.basic_chabot_node = BasicChatbotNodes(self.model)
        self.graph_builder.add_node("chatbot", self.basic_chabot_node.process)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)

    def chatbot_with_tool_build(self):
        """
        Builds a chatbot graph with a tool node using LangGraph.

        Creates the chatbot node via ``ChatbotWithToolNodes.create_chatbot``
        with the search tools bound; ``tools_condition`` routes to the
        "tools" node whenever the model emits tool calls, and tool results
        are fed back into the chatbot node.
        """
        tools: list = get_tools()
        tool_node = create_tool_node(tools)
        llm = self.model

        # Define chatbot node (LLM with tools bound)
        obj_chatbot_with_node = ChatbotWithToolNodes(llm)
        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)

        self.graph_builder.add_node("chatbot", chatbot_node)
        self.graph_builder.add_node("tools", tool_node)

        # Define conditional and direct edges
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
        self.graph_builder.add_edge("tools", "chatbot")

    def setup_graph(self, usecase: str):
        """
        Sets up and compiles the graph for the selected use case.

        Raises:
            ValueError: if ``usecase`` is not recognized. Previously an
                unknown use case silently compiled an *empty* graph, which
                fails later with a far less clear error.
        """
        if usecase == "Basic Chatbot":
            self.basic_chabot_build()
        elif usecase == "Chatbot with Tool":
            self.chatbot_with_tool_build()
        else:
            raise ValueError(f"Unknown use case: {usecase!r}")

        return self.graph_builder.compile()
src/llms/groq_llms.py CHANGED
@@ -12,16 +12,16 @@ class GroqLLMS:
12
  """
13
  self.user_controls= user_controls_input
14
 
15
- def get_groq_model(self):
16
- try:
17
- api_key= self.user_controls["GROQ_API_KEY"]
18
- model_name= self.user_controls["selected_model"]
19
- if not api_key and os.environ["GROQ_API_KEY"]:
20
- st.error("Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ", icon="🚨")
21
- llm = ChatGroq(model=model_name, api_key= api_key)
22
- except Exception as e:
23
- st.error("Error in fetching the model " +str(e))
24
- return llm
25
 
26
 
27
 
 
12
  """
13
  self.user_controls= user_controls_input
14
 
15
+ def get_groq_model(self):
16
+ try:
17
+ api_key= self.user_controls["GROQ_API_KEY"]
18
+ model_name= self.user_controls["selected_model"]
19
+ if not api_key and os.environ["GROQ_API_KEY"]:
20
+ st.error("Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ", icon="🚨")
21
+ llm = ChatGroq(model=model_name, api_key= api_key)
22
+ except Exception as e:
23
+ st.error("Error in fetching the model " +str(e))
24
+ return llm
25
 
26
 
27
 
src/main.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import json
3
+ import os
4
+ import sys
5
+
6
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
7
+
8
+ from src.user_interface.streamlitui.loadui import LoadStreamlitUI
9
+ from src.user_interface.streamlitui.display_result import DisplayResultStreamlit
10
+ from src.llms.groq_llms import GroqLLMS
11
+ from src.graph.graph_builder import GraphBuilder
12
+
13
+
14
# MAIN Function START
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.

    This function initializes the UI, handles user input, configures the LLM
    model, sets up the graph based on the selected use case, and displays the
    output while implementing exception handling for robustness.
    """

    # Load UI
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message.
    # Bug fix: `st.session_state.IsFetchButtonClicked` raises AttributeError
    # when the key has never been set; use .get() with a safe default.
    if st.session_state.get("IsFetchButtonClicked", False):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    if user_message:
        try:
            # Configure LLM
            obj_llm_config = GroqLLMS(user_controls_input=user_input)
            model = obj_llm_config.get_groq_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Initialize and set up the graph based on use case
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return

            ### Graph Builder
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            # Bug fix: previously re-raised as ValueError, which crashes the
            # Streamlit script run; surface the error in the UI instead.
            st.error(f"Error Occurred with Exception : {e}")
src/nodes/basic_chatbot_nodes.py CHANGED
@@ -9,7 +9,7 @@ class BasicChatbotNodes:
9
  def __init__(self, model):
10
  self.llm = model
11
 
12
- def peocess(self, state: graphstate):
13
  return {"messages":
14
  self.llm.invoke(state["messages"])}
15
 
 
9
  def __init__(self, model):
10
  self.llm = model
11
 
12
+ def process(self, state: graphstate):
13
  return {"messages":
14
  self.llm.invoke(state["messages"])}
15
 
src/user_interface/streamlitui/display_result.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
3
+ import json
4
+
5
+
6
class DisplayResultStreamlit:
    """Renders the output of a compiled LangGraph graph in the Streamlit chat UI."""

    def __init__(self, usecase, graph, user_message):
        # usecase: selected use-case label; graph: compiled LangGraph graph;
        # user_message: the raw text entered by the user.
        self.usecase = usecase
        self.graph = graph
        self.user_message = user_message

    def display_result_on_ui(self):
        """
        Run the graph for the selected use case and render each message
        in the chat interface.
        """
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message

        if usecase == "Basic Chatbot":
            # Each streamed event maps node name -> state update.
            # Cleanup: removed leftover debug print() calls.
            for event in graph.stream({'messages': ("user", user_message)}):
                for value in event.values():
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        st.write(value["messages"].content)

        elif usecase == "Chatbot with Tool":
            # Prepare state and invoke the graph
            initial_state = {"messages": [user_message]}
            res = graph.invoke(initial_state)
            for message in res['messages']:
                # Idiom fix: isinstance() instead of `type(x) == T` comparisons.
                if isinstance(message, HumanMessage):
                    with st.chat_message("user"):
                        st.write(message.content)
                elif isinstance(message, ToolMessage):
                    with st.chat_message("ai"):
                        st.write("Tool Call Start")
                        st.write(message.content)
                        st.write("Tool Call End")
                elif isinstance(message, AIMessage) and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)
src/user_interface/streamlitui/loadui.py CHANGED
@@ -57,7 +57,7 @@ class LoadStreamlitUI:
57
  if self.user_controls["selected_usecase"] == 'Chatbot with Tool':
58
 
59
  #ask for Tavily API key
60
- self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("Enter your Tavily API Key", type="password")
61
  if not self.user_controls["TAVILY_API_KEY"]:
62
  st.warning("⚠️ Please enter your TAVILTY API key to proceed. Don't have? refer : https://tavily.com/")
63
 
 
57
  if self.user_controls["selected_usecase"] == 'Chatbot with Tool':
58
 
59
  #ask for Tavily API key
60
+ os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"]= st.session_state["TAVILY_API_KEY"] = st.text_input("Enter your Tavily API Key", type="password")
61
  if not self.user_controls["TAVILY_API_KEY"]:
62
  st.warning("⚠️ Please enter your TAVILTY API key to proceed. Don't have? refer : https://tavily.com/")
63