# NOTE(review): the original lines here ("Spaces:", "Build error", "Build error")
# were hosting-platform build-log artifacts, not part of the program source.
"""
AI Customer Support Agent with Memory
-------------------------------------
Streamlit front-end for a customer-support assistant that recalls past
interactions via a memory layer (Mem0 backed by Qdrant).

Features:
- GPT-4 responses through the OpenAI API.
- Per-customer memory search and storage of every conversation turn.
- Synthetic customer-profile generation for testing.
- Sidebar views for stored memory and the generated customer profile.
"""
import json
import os
from datetime import datetime, timedelta

import streamlit as st
from mem0 import Memory
from openai import OpenAI
from qdrant_client import QdrantClient

# --- Page chrome ---
st.title("AI Customer Support Agent with Memory")
st.caption("Chat with a customer support assistant who recalls past interactions.")

# --- Credentials: the OpenAI SDK reads its key from the environment ---
openai_api_key = st.text_input("Enter OpenAI API Key", type="password")
if openai_api_key:
    os.environ['OPENAI_API_KEY'] = openai_api_key
class CustomerSupportAIAgent:
    """Customer-support agent that augments GPT-4 with a Mem0 memory store.

    The agent searches previously stored interactions for context before
    answering, and writes both sides of every exchange back to memory so
    future queries can recall them.
    """

    def __init__(self):
        # Tag written memories so they can be attributed to this app.
        self.app_id = "customer-support"

        # Verify Qdrant is reachable before wiring up the memory layer.
        try:
            self.qdrant_client = QdrantClient(host="localhost", port=6333)
        except Exception as e:
            st.error(f"Failed to connect to Qdrant: {e}")
            st.stop()

        # BUG FIX: mem0's Memory() does not accept a QdrantClient instance;
        # the vector store must be declared via Memory.from_config
        # (see the mem0 configuration documentation).
        try:
            self.memory = Memory.from_config({
                "vector_store": {
                    "provider": "qdrant",
                    "config": {"host": "localhost", "port": 6333},
                }
            })
        except Exception as e:
            st.error(f"Failed to initialize memory: {e}")
            st.stop()

        # The OpenAI client picks up OPENAI_API_KEY from the environment.
        self.client = OpenAI()

    @staticmethod
    def _extract_json(text):
        """Parse a JSON payload, tolerating markdown code fences around it.

        GPT-4 frequently wraps JSON answers in ``` fences (optionally tagged
        "json"); strip them before handing the payload to json.loads.
        Raises json.JSONDecodeError if the cleaned text is not valid JSON.
        """
        cleaned = text.strip()
        if cleaned.startswith("```"):
            # Drop the opening fence line (e.g. "```json") ...
            newline = cleaned.find("\n")
            cleaned = cleaned[newline + 1:] if newline != -1 else ""
            # ... and the closing fence, if present.
            cleaned = cleaned.rstrip()
            if cleaned.endswith("```"):
                cleaned = cleaned[:-3]
        return json.loads(cleaned)

    def handle_query(self, query, user_id):
        """Answer *query* for *user_id*, using and updating stored memory.

        Returns the assistant's reply, or a fallback apology string if any
        step (memory search, completion, memory write) fails.
        """
        try:
            # Retrieve relevant past interactions to ground the response.
            relevant_memories = self.memory.search(query=query, user_id=user_id)
            context = "\n".join(
                f"- {m['memory']}"
                for m in relevant_memories.get("results", [])
                if "memory" in m
            )
            full_prompt = f"Relevant past information:\n{context}\nCustomer: {query}\nSupport Agent:"

            # Generate the AI response.
            response = self.client.chat.completions.create(
                model="gpt-4",
                messages=[
                    {"role": "system", "content": "You are a customer support AI for TechGadgets.com."},
                    {"role": "user", "content": full_prompt},
                ],
            )
            answer = response.choices[0].message.content

            # Persist both sides of the exchange for future retrieval.
            for text, role in [(query, "user"), (answer, "assistant")]:
                self.memory.add(text, user_id=user_id, metadata={"app_id": self.app_id, "role": role})
            return answer
        except Exception as e:
            # UI boundary: surface the error and degrade gracefully.
            st.error(f"Error handling query: {e}")
            return "Sorry, I encountered an issue. Please try again."

    def generate_synthetic_data(self, user_id):
        """Generate and store a synthetic customer profile for *user_id*.

        Returns the parsed profile dict, or None on failure.
        """
        try:
            today = datetime.now()
            order_date = (today - timedelta(days=10)).strftime("%B %d, %Y")
            expected_delivery = (today + timedelta(days=2)).strftime("%B %d, %Y")
            prompt = f"""
Generate a realistic customer profile for TechGadgets.com user {user_id} with:
- Basic details
- A recent order (placed on {order_date}, delivery by {expected_delivery})
- Order history, shipping address, and past customer service interactions
- Shopping preferences
Return JSON format.
"""
            response = self.client.chat.completions.create(
                model="gpt-4",
                messages=[
                    {"role": "system", "content": "Generate realistic customer profiles in JSON."},
                    {"role": "user", "content": prompt},
                ],
            )
            # BUG FIX: the model may wrap its JSON in markdown fences, which
            # made the original bare json.loads() raise; strip fences first.
            customer_data = self._extract_json(response.choices[0].message.content)

            # Flatten the profile into individual memory entries.
            for key, value in customer_data.items():
                if isinstance(value, list):
                    for item in value:
                        self.memory.add(json.dumps(item), user_id=user_id, metadata={"app_id": self.app_id, "role": "system"})
                else:
                    self.memory.add(f"{key}: {json.dumps(value)}", user_id=user_id, metadata={"app_id": self.app_id, "role": "system"})
            return customer_data
        except Exception as e:
            st.error(f"Error generating synthetic data: {e}")
            return None
# ---------------------------------------------------------------------------
# Application wiring: agent setup, sidebar actions, and the chat interface.
# ---------------------------------------------------------------------------
if openai_api_key:
    support_agent = CustomerSupportAIAgent()

    # Sidebar: customer selection and data actions.
    st.sidebar.title("Customer ID")
    customer_id = st.sidebar.text_input("Enter Customer ID")

    if customer_id:
        # Synthetic data generation for testing.
        if st.sidebar.button("Generate Synthetic Data"):
            with st.spinner("Generating data..."):
                st.session_state.customer_data = support_agent.generate_synthetic_data(customer_id)
            # FIX: use a plain if/else instead of a conditional expression
            # evaluated only for its side effects.
            if st.session_state.customer_data:
                st.sidebar.success("Data Generated!")
            else:
                st.sidebar.error("Generation Failed.")

        # View the stored customer profile (if one was generated).
        if st.sidebar.button("View Profile"):
            st.sidebar.json(st.session_state.get("customer_data", "No data available."))

        # View everything stored in memory for this customer.
        if st.sidebar.button("View Memory"):
            memories = support_agent.memory.get_all(user_id=customer_id)
            st.sidebar.write("\n".join(f"- {m['memory']}" for m in memories.get("results", []) if "memory" in m))
    else:
        st.sidebar.error("Enter a Customer ID.")

    # Replay chat history on each rerun.
    if "messages" not in st.session_state:
        st.session_state.messages = []
    for msg in st.session_state.messages:
        with st.chat_message(msg["role"]):
            st.markdown(msg["content"])

    query = st.chat_input("How can I assist you today?")
    if query:
        if not customer_id:
            # BUG FIX: a message typed without a Customer ID was previously
            # discarded with no feedback; tell the user why nothing happened.
            st.warning("Please enter a Customer ID before chatting.")
        else:
            st.session_state.messages.append({"role": "user", "content": query})
            with st.chat_message("user"):
                st.markdown(query)
            with st.spinner("Generating response..."):
                answer = support_agent.handle_query(query, user_id=customer_id)
            st.session_state.messages.append({"role": "assistant", "content": answer})
            with st.chat_message("assistant"):
                st.markdown(answer)
else:
    st.warning("Enter OpenAI API key to use the agent.")