import smolagents
print("smolagents version:", smolagents.__version__)

from smolagents import CodeAgent, WebSearchTool, PythonInterpreterTool, InferenceClientModel
import requests
import os

# --- 1. GAIA API ---
HF_TOKEN = os.getenv("token_curso")  # read for potential authenticated calls; unused below

# BASE must be the root of the Space, without /api
BASE = "http://localhost:7860"
QUESTIONS_URL = f"{BASE}/questions"
SUBMIT_URL = f"{BASE}/submit"
HF_USERNAME = "jbaselga"
AGENT_CODE_URL = "https://huggingface.co/spaces/jbaselga/agentes-unit4/tree/main"

def fetch_gaia_questions():
    resp = requests.get(QUESTIONS_URL, timeout=30)
    resp.raise_for_status()  # fail loudly on HTTP errors
    return resp.json()
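# Expected /questions response shape (inferred from the checks in main() below):
# a JSON list of dicts like {"task_id": "...", "question": "..."}.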
def submit_answers(answers: dict):
    payload = {
        "username": HF_USERNAME,
        "agent_code": AGENT_CODE_URL,
        "answers": [
            {"task_id": tid, "submitted_answer": ans}
            for tid, ans in answers.items()
        ],
    }
    resp = requests.post(SUBMIT_URL, json=payload, timeout=60)
    resp.raise_for_status()
    return resp.json()
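# Illustrative /submit payload (placeholder values; real task_ids come
# from the /questions endpoint):
# {
#   "username": "jbaselga",
#   "agent_code": "https://huggingface.co/spaces/jbaselga/agentes-unit4/tree/main",
#   "answers": [{"task_id": "<task-id>", "submitted_answer": "42"}]
# }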
# --- 2. Agent configuration ---
#model = InferenceClientModel(model_id="mistralai/Mistral-7B-Instruct-v0.2")
model = InferenceClientModel(model_id="HuggingFaceH4/zephyr-7b-beta")
# model = InferenceClientModel(model_id="Qwen/Qwen1.5-7B-Chat")

tools = [
    WebSearchTool(),          # web search (DuckDuckGo by default)
    PythonInterpreterTool(),  # executes Python code (including calculations)
]

agent = CodeAgent(tools=tools, model=model, add_base_tools=True, max_steps=10)
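# Optional smoke test, illustrative only (assumes the HF Inference API is
# reachable with the ambient credentials); uncomment to try the agent
# outside the GAIA flow:
# print(agent.run("What is 2 + 2? Answer with only the number."))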
# --- 3. Question-answering function ---
def answer_question(qid: str, question: str) -> str:
    prompt = f"{question}\nAnswer with ONLY the final answer, no explanations or justification."
    try:
        out = agent.run(prompt)
        if out is None:
            print(f"❌ [qid={qid}] The model returned None.")
            return "ERROR"
        # Return only the first line, just in case the model adds extra text
        return str(out).strip().split('\n')[0]
    except Exception as e:
        print(f"❌ [qid={qid}] Error while generating the answer: {e}")
        return "ERROR"
# --- 4. Main flow ---
def main():
    qs = fetch_gaia_questions()
    print("DEBUG qs:", qs)
    answers = {}
    for item in qs:
        print("DEBUG item:", item)
        if isinstance(item, dict) and "task_id" in item and "question" in item:
            answers[item["task_id"]] = answer_question(item["task_id"], item["question"])
        else:
            print("Unexpected item format:", item)
    result = submit_answers(answers)
    print("🧪 GAIA results:", result)

if __name__ == "__main__":
    main()