shubhamchau222 committed on
Commit 71a4d85
1 Parent(s): 796c2b8

graph nodes and llm functionality added

src/llms/__init__.py ADDED
File without changes
src/llms/groq_llms.py ADDED
@@ -0,0 +1,30 @@
+ import os
+ import sys
+ from langchain_groq import ChatGroq
+ import streamlit as st
+
+ class GroqLLMS:
+     def __init__(self, user_controls_input):
+         """
+         Validates the API key and returns the requested Groq model.
+         Args:
+             user_controls_input (dict): user controls collected from the Streamlit UI
+         """
+         self.user_controls = user_controls_input
+
+     def get_groq_model(self):
+         llm = None  # stays None if model creation fails
+         try:
+             api_key = self.user_controls["GROQ_API_KEY"]
+             model_name = self.user_controls["selected_model"]
+             # Warn when no key was entered in the UI and none is set in the environment
+             if not api_key and not os.environ.get("GROQ_API_KEY"):
+                 st.error("Please enter your GROQ API key to proceed. Don't have one? See: https://console.groq.com/keys", icon="🚨")
+             llm = ChatGroq(model=model_name, api_key=api_key)
+         except Exception as e:
+             st.error("Error in fetching the model: " + str(e))
+         return llm
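For context, a minimal usage sketch of the class above; the controls dict and the model name below are assumptions, not part of this commit:

from src.llms.groq_llms import GroqLLMS

# Hypothetical user controls; any Groq-hosted chat model name would work here
user_controls = {"GROQ_API_KEY": "gsk-...", "selected_model": "llama-3.1-8b-instant"}
llm = GroqLLMS(user_controls).get_groq_model()
if llm is not None:
    print(llm.invoke("Hello!").content)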
src/nodes/__init__.py ADDED
File without changes
src/nodes/basic_chatbot_nodes.py ADDED
@@ -0,0 +1,17 @@
+ import os
+ import sys
+
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+ from src.state.state import graphstate
+ from langgraph.graph import START, END, StateGraph
+
+ class BasicChatbotNodes:
+     def __init__(self, model):
+         self.llm = model
+
+     def process(self, state: graphstate):
+         # Invoke the LLM on the running message history and return the reply
+         return {"messages": self.llm.invoke(state["messages"])}
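A rough sketch of how this node could be wired into a LangGraph state graph; the builder code is an assumption (graph construction is not part of this commit), and llm stands for the model returned by GroqLLMS:

from langgraph.graph import StateGraph, START, END
from src.state.state import graphstate
from src.nodes.basic_chatbot_nodes import BasicChatbotNodes

builder = StateGraph(graphstate)
builder.add_node("chatbot", BasicChatbotNodes(llm).process)  # llm: a LangChain chat model
builder.add_edge(START, "chatbot")
builder.add_edge("chatbot", END)
graph = builder.compile()
result = graph.invoke({"messages": [("user", "Hi there!")]})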
src/nodes/chabot_with_tool_nodes.py ADDED
@@ -0,0 +1,29 @@
+ import sys
+ import os
+ sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
+ from src.state.state import graphstate
+
+ class ChatbotWithToolNodes:
+     def __init__(self, model):
+         self.llm = model
+
+     def process(self, state: graphstate):
+         user_input = state["messages"][-1] if state["messages"] else ""
+         llm_response = self.llm.invoke([{"role": "user", "content": user_input}])
+         # Simulate tool-specific logic
+         tools_response = f"Tool integration for: '{user_input}'"
+         return {"messages": [llm_response, tools_response]}
+
+     def create_chatbot(self, tools):
+         """
+         Returns a chatbot node function with the given tools bound to the LLM.
+         """
+         llm_with_tools = self.llm.bind_tools(tools)
+
+         def chatbot_node(state: graphstate):
+             """
+             Chatbot logic for processing the input state and returning a response.
+             """
+             return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+         return chatbot_node
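For intuition, the node returned by create_chatbot yields an AIMessage whose tool_calls field is populated whenever the bound model decides to call a tool; a hypothetical illustration (llm and tools are assumed to exist):

node = ChatbotWithToolNodes(llm).create_chatbot(tools)
out = node({"messages": [("user", "What is the latest LangGraph release?")]})
print(out["messages"][-1].tool_calls)  # non-empty when the model requests a tool call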
src/state/__init__.py ADDED
File without changes
src/state/state.py ADDED
@@ -0,0 +1,18 @@
+ from langgraph.graph import StateGraph
+ from typing import Literal, TypedDict, List, Annotated, Optional
+ from langgraph.graph.message import add_messages
+ from langchain_core.messages import AIMessage, HumanMessage
+
+
+ class graphstate(TypedDict):
+     """
+     Represents the state of the graph; any new message is appended to the messages list.
+     """
+     messages: Annotated[list, add_messages]
+     # current_step: Literal["requirements", "user_stories", "po_feedback", "generated_code", "review_feedback"]
+     # requirements: str
+     # user_stories: str
+     # po_feedback: str
+     # generated_code: str
+     # review_feedback: str
+     # decision: Optional[bool]
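The add_messages annotation makes messages an append-style channel: values returned by a node are merged into the existing list instead of replacing it. A small standalone illustration:

from langgraph.graph.message import add_messages
from langchain_core.messages import HumanMessage, AIMessage

history = add_messages([HumanMessage(content="Hi")], [AIMessage(content="Hello!")])
print(len(history))  # 2 -- the reply is appended rather than overwriting the state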
src/tools/__init__py ADDED
File without changes
src/tools/search_tool.py ADDED
@@ -0,0 +1,17 @@
+ from langchain_community.tools.tavily_search import TavilySearchResults
+ from langgraph.prebuilt import ToolNode
+
+ def get_tools():
+     """
+     Return the list of tools to be used in the chatbot.
+     """
+     tools = [TavilySearchResults(max_results=2)]
+     return tools
+
+ def create_tool_node(tools):
+     """
+     Creates and returns a tool node for the graph.
+     """
+     return ToolNode(tools=tools)
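A sketch of how these helpers might plug into the tool-enabled graph; the wiring below is an assumption, not part of this commit (tools_condition is LangGraph's prebuilt router, and llm stands for the Groq model):

from langgraph.graph import StateGraph, START
from langgraph.prebuilt import tools_condition
from src.state.state import graphstate
from src.tools.search_tool import get_tools, create_tool_node
from src.nodes.chabot_with_tool_nodes import ChatbotWithToolNodes

tools = get_tools()
builder = StateGraph(graphstate)
builder.add_node("chatbot", ChatbotWithToolNodes(llm).create_chatbot(tools))
builder.add_node("tools", create_tool_node(tools))
builder.add_edge(START, "chatbot")
builder.add_conditional_edges("chatbot", tools_condition)  # routes to "tools" or END
builder.add_edge("tools", "chatbot")
graph = builder.compile()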
src/user_interface/streamlitui/loadui.py CHANGED
@@ -63,12 +63,13 @@ class LoadStreamlitUI:
 
         if "state" not in st.session_state:
             st.session_state.state = self.initialize_session()
+         return self.user_controls
 
 
- 
- if __name__ == "__main__":
-     a = LoadStreamlitUI()
-     a.load_streamlit_ui()
-     print(a.config.get_page_title())
+ ## UI testing code
+ # if __name__ == "__main__":
+ #     a = LoadStreamlitUI()
+ #     a.load_streamlit_ui()
+ #     print(a.config.get_page_title())
 
 
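With load_streamlit_ui now returning self.user_controls, an app entry point can hand those controls straight to GroqLLMS; a hedged sketch (the main.py shown is an assumption, not part of this commit):

# Hypothetical main.py
from src.user_interface.streamlitui.loadui import LoadStreamlitUI
from src.llms.groq_llms import GroqLLMS

user_controls = LoadStreamlitUI().load_streamlit_ui()
llm = GroqLLMS(user_controls).get_groq_model()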