"""Gradio front end for the LangChain Runnables chatbot."""

import gradio as gr

from utils.chatbot_logic import create_chatbot_chain

# Build the Runnable chain once at import time so it is reused across requests.
chatbot_chain = create_chatbot_chain()


def chat_interface(message, history):
    """Process a user message and return the chatbot response."""
    try:
        # The chain is expected to return a dict containing an "output" key.
        response = chatbot_chain.invoke({"input": message})
        return response["output"]
    except Exception as e:
        return f"Error: {str(e)}"


with gr.Blocks(theme=gr.themes.Soft()) as demo:
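    # UI layout: header text, chat history panel, message box, and a clear button.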
    gr.Markdown(
        """
        # 🤖 LangChain Runnables Chatbot

        A modular chatbot built using **LangChain Runnables** with multiple response strategies.

        **Features:**
        - Fact retrieval
        - Joke generation
        - Conversational memory
        - Modular and extensible design
        """
    )

    chatbot = gr.Chatbot(
        label="Chat History",
        height=400,
        type="messages",
    )

    msg = gr.Textbox(
        label="Your Message",
        placeholder="Ask me anything...",
        lines=2,
    )

    clear = gr.Button("Clear Chat")

    def respond(message, chat_history):
        bot_message = chat_interface(message, chat_history)
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": bot_message})
        return "", chat_history

    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: None, None, chatbot, queue=False)


if __name__ == "__main__":
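    # Starts a local Gradio server; share=True can be passed to launch() if a
    # temporary public link is needed.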
    demo.launch()