Spaces status: Runtime error
| import os | |
| import gradio as gr | |
| from smolagents import CodeAgent, ToolCallingAgent, MCPClient, InferenceClientModel | |
| from openai import OpenAI | |
# --- Configuration for a Modal-hosted, OpenAI-compatible vLLM endpoint ---
model_name = None  # NOTE(review): never read in this file — leftover or set elsewhere; confirm
workspace = "imessam"  # Modal workspace that owns the deployed app
environment = None  # optional Modal environment; omitted from the URL prefix when None
app_name = "example-vllm-openai-compatible"  # deployed Modal app name
function_name = "serve"  # Modal function exposing the OpenAI-compatible API
# assumes MODAL_API_KEY is set in the environment — TODO confirm; OpenAI() raises if key is None
api_key = os.getenv("MODAL_API_KEY")
client = OpenAI(api_key=api_key)
# Modal run URLs are "<workspace>[-<environment>]--<app>-<function>.modal.run";
# the "/v1" suffix selects the OpenAI-compatible route.
prefix = workspace + (f"-{environment}" if environment else "")
client.base_url = (
    f"https://{prefix}--{app_name}-{function_name}.modal.run/v1"
)
# base_url is an httpx.URL, which exposes .host — printed as an endpoint sanity check
print(str(client.base_url.host))
# Query the live endpoint for its served models; takes the first (presumably only) one.
model = client.models.list().data[0]
model_id = model.id  # NOTE(review): model_id is not used below in this chunk — verify against callers
def generate_podcast(prompt: str, history: list) -> str:
    """Run a web-search-capable agent on *prompt* and return its answer as text.

    Connects to a remote Gradio MCP server that provides web-search tools,
    runs a smolagents CodeAgent with those tools, and always disconnects the
    MCP client afterwards.

    Args:
        prompt: The user's request (e.g. a podcast topic).
        history: Chat history supplied by gr.ChatInterface (unused here).

    Returns:
        The agent's final answer, stringified; "" if the agent produced nothing.
    """
    response = ""
    # This is the MCP Server we created in the previous section.
    mcp_client = MCPClient(
        {
            "url": "https://agents-mcp-hackathon-websearch.hf.space/gradio_api/mcp/sse",
            "transport": "sse",
        }
    )
    try:
        tools = mcp_client.get_tools()
        model = InferenceClientModel()
        agent = CodeAgent(tools=[*tools], model=model)
        response = str(agent.run(prompt))
    finally:
        # BUGFIX: the original had this try/finally commented out, so any
        # exception from get_tools()/agent.run() skipped disconnect() and
        # leaked the SSE connection. Always release it.
        mcp_client.disconnect()
    return response
# Gradio chat UI wired to the agent. type="messages" selects the
# openai-style message-dict history format (the modern ChatInterface API).
demo = gr.ChatInterface(
    fn=generate_podcast,
    type="messages",
    examples=["Generate a podcast about AI"],  # one-click example prompt shown in the UI
    title="Podcast Generator Agent and MCP Server",
    description="This is an agent that uses MCP tools to generate a podcast, and can be used as an MCP server.",
)
# Script entry point: serve the chat UI and simultaneously expose this app
# as an MCP server so other agents can call it as a tool.
if __name__ == "__main__":
    demo.launch(mcp_server=True)