suryanshp1 committed
Commit 58459b2 · 1 Parent(s): 491aedf

feat: Added search tool tavily

README.md CHANGED
@@ -11,9 +11,8 @@ pinned: false
 short_description: agenticai
 license: mit
 ---
-# Welcome to Streamlit!
-
-Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
-
-If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
-forums](https://discuss.streamlit.io).
+
+### END TO END AGENTIC AI APPLICATION
+
+- Add support for memory, tools, and MCP
+- Add guardrails and Langfuse support
src/langgraphagenticai/__pycache__/main.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/__pycache__/main.cpython-312.pyc and b/src/langgraphagenticai/__pycache__/main.cpython-312.pyc differ
 
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc and b/src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-312.pyc differ
 
src/langgraphagenticai/graph/graph_builder.py CHANGED
@@ -3,20 +3,58 @@ from langgraph.prebuilt import tools_condition, ToolNode
 from langchain_core.prompts import ChatPromptTemplate
 from src.langgraphagenticai.state.state import State
 from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
+from src.langgraphagenticai.nodes.chatbot_with_tool_node import ChatbotWithToolNode
+from src.langgraphagenticai.tools.search_tool import get_tools, create_tool_node
 import datetime
 
+
 class GraphBuilder:
     def __init__(self, model):
         self.llm = model
-        self.graph_builder = StateGraph(State)
 
     def basic_chatbot_build_graph(self):
+        graph_builder = StateGraph(State)
         self.basic_chatbot_node = BasicChatbotNode(self.llm)
-        self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
-        self.graph_builder.add_edge(START, "chatbot")
-        self.graph_builder.add_edge("chatbot", END)
+        graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
+        graph_builder.add_edge(START, "chatbot")
+        graph_builder.add_edge("chatbot", END)
+        return graph_builder
+
+    def chatbot_with_tools_build_graph(self):
+        graph_builder = StateGraph(State)
+
+        # define tools and toolnode
+        tools = get_tools()
+        tool_node = create_tool_node(tools=tools)
+
+        # define llm
+        llm = self.llm
+
+        # define chatbot node
+        obj_chatbot_with_node = ChatbotWithToolNode(llm)
+        chatbot_node = obj_chatbot_with_node.create_chatbot(tools=tools)
+
+        # Add nodes
+        graph_builder.add_node("chatbot", chatbot_node)
+        graph_builder.add_node("tools", tool_node)
+
+        # Define conditional and direct edges
+        graph_builder.add_edge(START, "chatbot")
+        graph_builder.add_conditional_edges("chatbot", tools_condition)
+        graph_builder.add_edge("tools", "chatbot")
+
+        return graph_builder
 
     def setup_graph(self, usecase: str):
         if usecase == "Basic Chatbot":
-            self.basic_chatbot_build_graph()
-            return self.graph_builder.compile()
+            graph_builder = self.basic_chatbot_build_graph()
+        elif usecase == "Chatbot with Tool":
+            graph_builder = self.chatbot_with_tools_build_graph()
+        elif usecase == "AI News":
+            graph_builder = (
+                self.chatbot_with_tools_build_graph()
+            )  # AI News also uses tools
+        else:
+            raise ValueError(f"Unknown usecase: {usecase}")
+
+        return graph_builder.compile()
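
Note: the following is a minimal usage sketch, not part of the commit. It assumes `GROQ_API_KEY` and `TAVILY_API_KEY` are available, that `State`'s `messages` field uses LangGraph's messages reducer, and that any LangChain chat model supporting `.bind_tools()` (here `ChatGroq`, with an example model name) is passed in.

```python
# Illustrative only: compile and stream the tool-enabled graph built above.
import os
from langchain_groq import ChatGroq
from src.langgraphagenticai.graph.graph_builder import GraphBuilder

os.environ.setdefault("TAVILY_API_KEY", "tvly-...")  # placeholder; required by get_tools()

llm = ChatGroq(model="llama-3.1-8b-instant")  # example model name, not from the commit
graph = GraphBuilder(llm).setup_graph("Chatbot with Tool")

for event in graph.stream({"messages": [("user", "What's new in LangGraph?")]}):
    for node_name, node_output in event.items():
        print(node_name, "->", node_output["messages"][-1])
```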
src/langgraphagenticai/guardrails/__init__.py ADDED
File without changes
src/langgraphagenticai/main.py CHANGED
@@ -1,5 +1,6 @@
 import streamlit as st
 import json
+import os
 from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
 from src.langgraphagenticai.llms.groqllm import GroqLLM
 from src.langgraphagenticai.graph.graph_builder import GraphBuilder
@@ -39,6 +40,15 @@ def load_langgraph_agenticai_app():
         st.error("Error: no usecase selected")
         return
 
+    # Set TAVILY API key if needed for tool-based usecases
+    if usecase in ["Chatbot with Tool", "AI News"]:
+        tavily_key = user_input.get("TAVILY_API_KEY")
+        if tavily_key:
+            os.environ["TAVILY_API_KEY"] = tavily_key
+        else:
+            st.error("TAVILY API key is required for this usecase")
+            return
+
     # Graph Builder
     graph_builder = GraphBuilder(model)
 
src/langgraphagenticai/nodes/__pycache__/chatbot_with_tool_node.cpython-312.pyc ADDED
Binary file (1.94 kB).
 
src/langgraphagenticai/nodes/chatbot_with_tool_node.py ADDED
@@ -0,0 +1,51 @@
+from src.langgraphagenticai.state.state import State
+
+
+class ChatbotWithToolNode:
+    """
+    chatbot logic enhanced with tool integration
+    """
+    def __init__(self, model):
+        self.llm = model
+
+    def process(self, state: State) -> dict:
+        """
+        Processes the input state and generates a response with tool integration.
+        """
+        user_input = state["messages"][-1] if state["messages"] else ""
+        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])
+
+        # simulate tool-specific logic
+        tools_response = f"Tool Integration for: '{user_input}'"
+
+        return {"messages": [llm_response, tools_response]}
+
+    def create_chatbot(self, tools):
+        """
+        Returns a chatbot node function
+        """
+        llm_with_tools = self.llm.bind_tools(tools)
+
+        def chatbot_node(state: State):
+            """
+            Chatbot logic for processing the input state and returning a response
+            """
+            return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+        return chatbot_node
+
+    # def chatbot_node(state: State, llm_with_tools):
+    #     """
+    #     Chatbot logic for processing the input state and returning a response
+    #     """
+    #     return {"messages": [llm_with_tools.invoke(state["messages"])]}
+
+    # def create_chatbot(self, tools):
+    #     """
+    #     Returns a chatbot node function
+    #     """
+    #     llm_with_tools = self.llm.bind_tools(tools)
+
+    #     chatbot_node = self.chatbot_node(State, llm_with_tools)
+
+    #     return chatbot_node
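
For context, only `create_chatbot` is wired into the graph by `GraphBuilder`; the `process` method simulates tool handling and is not referenced elsewhere in this commit. Below is a hedged sketch of using the node factory directly, assuming a `.bind_tools()`-capable chat model (the `ChatGroq` model name is an example) and the API keys set in the environment.

```python
# Illustrative only: exercising ChatbotWithToolNode outside the graph.
from langchain_core.messages import HumanMessage
from langchain_groq import ChatGroq
from src.langgraphagenticai.nodes.chatbot_with_tool_node import ChatbotWithToolNode
from src.langgraphagenticai.tools.search_tool import get_tools

llm = ChatGroq(model="llama-3.1-8b-instant")  # example model name
chatbot_node = ChatbotWithToolNode(llm).create_chatbot(tools=get_tools())

# The node takes the graph state and returns a partial state update.
update = chatbot_node({"messages": [HumanMessage(content="Find the latest AI news")]})
print(update["messages"][-1].tool_calls)  # non-empty when the model chooses to call Tavily
```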
src/langgraphagenticai/tools/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (142 Bytes).
 
src/langgraphagenticai/tools/__pycache__/search_tool.cpython-312.pyc ADDED
Binary file (917 Bytes).
 
src/langgraphagenticai/tools/search_tool.py ADDED
@@ -0,0 +1,20 @@
+from langchain_community.tools.tavily_search import TavilySearchResults
+from langgraph.prebuilt import ToolNode
+import os
+
+def get_tools():
+    """
+    Return the list of tools to be used in the chatbot
+    """
+    # Check if TAVILY_API_KEY is set
+    if not os.getenv("TAVILY_API_KEY"):
+        raise ValueError("TAVILY_API_KEY environment variable is not set")
+
+    tool = TavilySearchResults(max_results=2)
+    return [tool]
+
+def create_tool_node(tools):
+    """
+    Creates and return tool node for the graph
+    """
+    return ToolNode(tools=tools)
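
A quick, hedged sanity check for the Tavily wiring follows; the key below is a placeholder, and `tavily_search_results_json` is the tool's default name in `langchain_community`, not something defined by this commit.

```python
# Illustrative only: verify the Tavily tool and ToolNode can be constructed.
import os
from src.langgraphagenticai.tools.search_tool import get_tools, create_tool_node

os.environ.setdefault("TAVILY_API_KEY", "tvly-...")  # placeholder key

tools = get_tools()
print(tools[0].name)  # "tavily_search_results_json"
# A real key is needed for an actual search; max_results=2 caps the hits returned.
# print(tools[0].invoke({"query": "What is LangGraph?"}))

tool_node = create_tool_node(tools)  # this is the node added to the graph as "tools"
```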
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-312.pyc differ
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc CHANGED
Binary files a/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc and b/src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc differ
 
src/langgraphagenticai/ui/streamlitui/display_result.py CHANGED
@@ -1,5 +1,5 @@
 import streamlit as st
-from langchain_core.messages import HumanMessage, AIMessage
+from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
 import json
 
 class DisplayResultStremlit:
@@ -20,4 +20,36 @@ class DisplayResultStremlit:
                 with st.chat_message("user"):
                     st.write(user_message)
                 with st.chat_message("assisstant"):
-                    st.write(value["messages"].content)
+                    st.write(value["messages"].content)
+
+        elif usecase == "Chatbot with Tool" or usecase == "AI News":
+            # Prepare state and invoke the graph
+            initial_state = {"messages": [HumanMessage(content=user_message)]}
+
+            # Display user message
+            with st.chat_message("user"):
+                st.write(user_message)
+
+            # Stream the graph execution
+            for event in graph.stream(initial_state):
+                for node_name, node_output in event.items():
+                    if "messages" in node_output:
+                        messages = node_output["messages"]
+                        if not isinstance(messages, list):
+                            messages = [messages]
+
+                        for message in messages:
+                            if isinstance(message, ToolMessage):
+                                with st.chat_message("assistant"):
+                                    st.write("🔍 **Tool Search Results:**")
+                                    st.write(message.content)
+                            elif isinstance(message, AIMessage):
+                                if message.tool_calls:
+                                    with st.chat_message("assistant"):
+                                        st.write("🔧 **Calling search tool...**")
+                                        for tool_call in message.tool_calls:
+                                            st.write(f"Searching for: {tool_call['args'].get('query', 'N/A')}")
+                                else:
+                                    with st.chat_message("assistant"):
+                                        st.write(message.content)
+
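
Each item yielded by `graph.stream` is a mapping from the node that just ran to its state update, which is what the loop above unpacks. The sketch below illustrates that shape; the message contents, query, and tool-call id are invented, and only the `AIMessage`/`ToolMessage` classes come from `langchain_core`.

```python
# Made-up example of the event sequence the display loop consumes.
from langchain_core.messages import AIMessage, ToolMessage

example_events = [
    {"chatbot": {"messages": [AIMessage(content="", tool_calls=[
        {"name": "tavily_search_results_json",
         "args": {"query": "latest AI news"}, "id": "call_1"}])]}},
    {"tools": {"messages": [ToolMessage(content="[{'url': '...', 'content': '...'}]",
                                        tool_call_id="call_1")]}},
    {"chatbot": {"messages": [AIMessage(content="Here is a summary of the latest AI news.")]}},
]
```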