# app.py — GAIA agent for the Hugging Face Space "jbaselga/agentes-unit4"
# (Hugging Face page chrome removed from this header: "raw / history / blame",
#  commit 401aee4 (verified), file size 2.05 kB)
from smolagents import CodeAgent, WebSearchTool, PythonInterpreterTool, InferenceClientModel
import requests
import os
# --- 1. GAIA API configuration ---
HF_TOKEN = os.getenv("token_curso")  # HF token read from the Space secret "token_curso"; None if unset
BASE = "https://jbaselga-agentes-unit4.hf.space/api"  # scoring API base URL
QUESTIONS_URL = f"{BASE}/questions"  # GET endpoint: list of GAIA questions
SUBMIT_URL = f"{BASE}/submit"  # POST endpoint: submit answers for scoring
HF_USERNAME = "jbaselga"  # leaderboard username sent with the submission
AGENT_CODE_URL = "https://huggingface.co/spaces/jbaselga/agentes-unit4/tree/main"  # public link to this agent's code
def fetch_gaia_questions():
    """Fetch the GAIA question list from the scoring API.

    Returns:
        The decoded JSON payload. main() expects a list of dicts, each with
        ``task_id`` and ``question`` keys.

    Raises:
        requests.HTTPError: if the API responds with a 4xx/5xx status.
        requests.Timeout: if the request exceeds the timeout.
    """
    resp = requests.get(
        QUESTIONS_URL,
        headers={"Authorization": f"Bearer {HF_TOKEN}"},
        timeout=30,  # don't hang forever on a stalled connection
    )
    # Fail loudly on HTTP errors instead of trying to JSON-decode an error page.
    resp.raise_for_status()
    return resp.json()
def submit_answers(answers: dict):
    """Submit answers to the GAIA scoring endpoint.

    Args:
        answers: mapping of task_id -> answer string.

    Returns:
        The decoded JSON response from the scoring API.

    Raises:
        requests.HTTPError: if the API responds with a 4xx/5xx status.
        requests.Timeout: if the request exceeds the timeout.
    """
    payload = {
        "username": HF_USERNAME,
        "agent_code": AGENT_CODE_URL,
        "answers": [
            {"task_id": tid, "submitted_answer": ans}
            for tid, ans in answers.items()
        ],
    }
    resp = requests.post(
        SUBMIT_URL,
        json=payload,
        headers={"Authorization": f"Bearer {HF_TOKEN}"},
        timeout=60,  # scoring can be slow; still bound the wait
    )
    resp.raise_for_status()  # surface HTTP errors instead of decoding an error body
    return resp.json()
# --- 2. Agent configuration ---
model = InferenceClientModel()  # default smolagents inference-client model (no args)
tools = [
    WebSearchTool(), # web search (uses DuckDuckGo)
    PythonInterpreterTool() # runs Python code (including calculations)
]
# CodeAgent with base tools enabled and a cap of 10 reasoning/tool steps per run.
agent = CodeAgent(tools=tools, model=model, add_base_tools=True, max_steps=10)
# --- 3. Question-answering helper ---
def answer_question(qid: str, question: str) -> str:
    """Ask the agent one GAIA question and return the bare answer string.

    The (Spanish) prompt instructs the model to reply with ONLY the answer,
    since GAIA scoring compares answers literally.

    Args:
        qid: GAIA task identifier, embedded in the prompt for traceability.
        question: the question text.

    Returns:
        The agent's answer with surrounding whitespace stripped.
    """
    prompt = f"Pregunta ID:{qid}. Responde SOLO la respuesta, sin explicaciones:\n{question}"
    out = agent.run(prompt)
    # agent.run may return a non-string result (e.g. a number computed by the
    # Python tool); coerce to str so .strip() cannot raise AttributeError.
    return str(out).strip()
# --- 4. Main flow ---
def main():
    """Fetch GAIA questions, answer each one, and submit the results.

    Items that don't match the expected ``{"task_id": ..., "question": ...}``
    shape are logged and skipped. If no answers are produced, the submit call
    is skipped entirely rather than posting an empty answer list.
    """
    qs = fetch_gaia_questions()
    print("DEBUG qs:", qs)  # inspect the raw API payload structure
    answers = {}
    for item in qs:
        print("DEBUG item:", item)
        if isinstance(item, dict) and "task_id" in item and "question" in item:
            answers[item["task_id"]] = answer_question(item["task_id"], item["question"])
        else:
            print("Formato inesperado:", item)
    if not answers:
        # Nothing usable came back; avoid a pointless/invalid submission.
        print("No answers produced; skipping submission.")
        return
    result = submit_answers(answers)
    print("🧪 Resultados GAIA:", result)  # emoji was mojibake ("馃И") in the original
# Run the full fetch/answer/submit pipeline only when executed as a script.
if __name__ == "__main__":
    main()