maxxxi100 committed on
Commit
6a250ce
·
verified ·
1 Parent(s): b861a93

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -84
app.py CHANGED
@@ -1,104 +1,72 @@
1
- import os
2
  import gradio as gr
3
- from fastapi import FastAPI
4
- from fastapi.middleware.cors import CORSMiddleware
5
- from fastapi.responses import HTMLResponse
6
- import uvicorn
 
7
 
8
- # ===============================
9
- # 1️⃣ Función de Respuesta LLM (Mock)
10
- # ===============================
11
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Mock LLM reply: assemble the prompt context, then answer from canned rules.

    Returns a single assistant message in OpenAI-style dict form, as expected
    by ``gr.ChatInterface(type="messages")``.
    """
    # Build the conversation the way a real client would send it to an LLM.
    # The mock below never reads it, but the structure documents the contract.
    messages = [{"role": "system", "content": system_message}]
    messages.extend(history)
    messages.append({"role": "user", "content": message})

    normalized = message.lower()
    if normalized.strip() in ("hola", "hi"):
        response = "Hola Max 000! Soc un chatbot basat en LLM. Com et puc ajudar amb la teva salut avui?"
    elif "informació" in normalized:
        response = "La informació que cerques es pot trobar a la secció d'informes o diagnòstics."
    else:
        response = f"He rebut el teu missatge: '{message}'. Prova preguntant sobre cites o historial clínic."

    return [{"role": "assistant", "content": response}]
27
 
28
- # ===============================
29
- # 2️⃣ Gradio Chat Interface
30
- # ===============================
31
# ===============================
# 2️⃣ Gradio chat interface
# ===============================
# Chat UI wired to the mock respond() function; the additional inputs map
# positionally onto respond()'s (system_message, max_tokens, temperature, top_p).
_system_prompt = gr.Textbox(
    value="Ets l'assistent sanitari de My health. Respon en català, de manera concisa i útil.",
    label="Missatge del sistema",
)
chatbot_llm = gr.ChatInterface(
    respond,
    textbox=gr.Textbox(placeholder="Escriu la teva pregunta al LLM...", container=False, scale=7),
    theme="soft",
    title="Asistente LLM (Hugging Face Client)",
    type="messages",  # messages-format history avoids gradio's tuples deprecation warning
    additional_inputs=[
        _system_prompt,
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Tokens màxims"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperatura"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (mostreig nucli)"),
    ],
)
44
 
45
- # ===============================
46
- # 3️⃣ Función para cargar HTML Dashboard
47
- # ===============================
48
# ===============================
# 3️⃣ Static dashboard HTML
# ===============================
# Path to the static dashboard page served on "/".
HTML_FILE_PATH = "My_health.html"


def load_html_content():
    """Return the dashboard HTML, or an inline error page on any failure."""
    try:
        if not os.path.exists(HTML_FILE_PATH):
            return f"<h1>Error: Archivo {HTML_FILE_PATH} no encontrado.</h1>"
        with open(HTML_FILE_PATH, "r", encoding="utf-8") as page:
            content = page.read()
        return content
    except Exception as e:
        # Best-effort: surface the failure in the page rather than crashing the app.
        return f"<h1>Error al cargar el HTML:</h1><p>{e}</p>"
58
 
59
- # ===============================
60
- # 4️⃣ FastAPI App para API y HTML
61
- # ===============================
62
# ===============================
# 4️⃣ FastAPI app for the API and the HTML page
# ===============================
app = FastAPI()

# Wide-open CORS so any frontend (including the floating HTML widget hosted
# elsewhere) can call the /chat endpoint.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
71
 
72
@app.get("/", response_class=HTMLResponse)
async def home():
    """Serve the static dashboard page at the site root."""
    page = load_html_content()
    return page
 
 
75
 
76
@app.post("/chat")
async def chat_endpoint(payload: dict):
    """JSON chat API for external frontends.

    Expects ``{"message": str, "system_message"?: str}`` and returns
    ``{"response": str}`` produced by the mock respond() function.
    """
    message = payload.get("message", "")
    default_system = "Ets l'assistent sanitari de My health. Respon en català, de manera concisa i útil."
    system_message = payload.get("system_message", default_system)
    result = respond(
        message,
        history=[],
        system_message=system_message,
        max_tokens=512,
        temperature=0.7,
        top_p=0.95,
    )
    # respond() returns a one-element list of assistant messages.
    return {"response": result[0]["content"]}
82
 
83
- # ===============================
84
- # 5️⃣ Gradio + FastAPI Integration
85
- # ===============================
86
# ===============================
# 5️⃣ Gradio + FastAPI integration
# ===============================
# BUG FIX: the original wrapped the Blocks app in fastapi's WSGIMiddleware,
# but a gradio Blocks is an ASGI application, not a WSGI one, so requests to
# /gradio would fail at runtime. gr.mount_gradio_app is gradio's supported
# way to mount a Blocks app onto an existing FastAPI instance.

gradio_app = gr.Blocks()
with gradio_app:
    gr.Markdown("# Aplicació My Health - Integració Gradio/HTML")
    with gr.Tabs():
        with gr.TabItem("Dashboard LMS (UI Estàtica amb Chat Flotant)"):
            gr.HTML(value=load_html_content())
        with gr.TabItem("Chat LLM (Accés Directe a Model)"):
            chatbot_llm.render()

app = gr.mount_gradio_app(app, gradio_app, path="/gradio")
 
 
98
 
99
- # ===============================
100
- # 6️⃣ Launch Uvicorn (Red / Contenedor)
101
- # ===============================
102
# ===============================
# 6️⃣ Entry point: run under uvicorn (network / container friendly)
# ===============================
if __name__ == "__main__":
    # Honour a container-provided PORT, defaulting to 8000 for local runs.
    listen_port = int(os.environ.get("PORT", 8000))
    uvicorn.run(app, host="0.0.0.0", port=listen_port)
 
 
1
  import gradio as gr
2
+ import os
3
+
4
+ # -------------------------
5
+ # 1️⃣ Lógica del chat
6
+ # -------------------------
7
 
8
# -------------------------
# 1️⃣ Chat logic
# -------------------------

# Module-level transcript of the whole conversation.
# NOTE(review): this list is shared by every caller/session of the app — fine
# for a single-user demo, but not safe for concurrent users; confirm before
# deploying multi-user.
chat_history = []


def respond(message, history, system_message="Ets l'assistent sanitari de My Health. Respon en català, de manera concisa i útil."):
    """Mock LLM reply used by both the Gradio UI and api_chat().

    Appends the user message and the generated reply to the shared
    ``chat_history`` transcript and returns the reply string.
    ``history`` and ``system_message`` are accepted for ChatInterface
    compatibility, but the canned rules below do not consult them.
    """
    # FIX: dropped the redundant `global chat_history` statement — the list is
    # only mutated in place (append), never rebound, so `global` had no effect.
    chat_history.append({"role": "user", "content": message})

    if message.lower().strip() in ["hola", "hi"]:
        reply = "Hola! Soc un chatbot basat en LLM. Com et puc ajudar amb la teva salut avui?"
    elif "informació" in message.lower():
        reply = "La informació que cerques es pot trobar a la secció d'informes o diagnòstics."
    else:
        reply = f"He rebut el teu missatge: '{message}'. Pots provar amb una pregunta sobre el teu historial clínic."

    chat_history.append({"role": "assistant", "content": reply})
    return reply
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
+ # -------------------------
25
+ # 2️⃣ Función para API (HTML flotante)
26
+ # -------------------------
 
27
 
28
# -------------------------
# 2️⃣ API hook for the floating HTML widget
# -------------------------

def api_chat(message):
    """Entry point the embedded HTML can call; delegates to respond()."""
    reply = respond(message, chat_history)
    return reply
 
 
 
 
 
 
30
 
31
+ # -------------------------
32
+ # 3️⃣ Cargar HTML
33
+ # -------------------------
 
34
 
35
# -------------------------
# 3️⃣ Load the static dashboard HTML
# -------------------------

# Path to the static dashboard page rendered in the first tab.
HTML_FILE_PATH = "My_health.html"


def load_html_content():
    """Return the dashboard HTML, or an inline error page if it can't be read.

    FIX: restored the exception handling the previous revision had — an
    unreadable file (permissions, bad encoding) would otherwise crash the
    Blocks UI at construction time.
    """
    if not os.path.exists(HTML_FILE_PATH):
        return "<h1>Error: archivo HTML no encontrado.</h1>"
    try:
        with open(HTML_FILE_PATH, "r", encoding="utf-8") as f:
            return f.read()
    except (OSError, UnicodeDecodeError) as e:
        return f"<h1>Error al cargar el HTML:</h1><p>{e}</p>"
42
 
43
+ # -------------------------
44
+ # 4️⃣ Interfaz Gradio
45
+ # -------------------------
 
 
 
46
 
47
# -------------------------
# 4️⃣ Gradio chat interface
# -------------------------

chatbot_llm = gr.ChatInterface(
    fn=respond,
    textbox=gr.Textbox(placeholder="Escriu la teva pregunta al LLM...", container=False, scale=7),
    theme="soft",
    title="Asistent LLM",
    # FIX: restore type="messages" (present in the previous revision precisely
    # to silence gradio's tuples-format deprecation warning; it was dropped in
    # this rewrite). respond() ignores `history`, so the format change is safe.
    type="messages",
    additional_inputs=[
        gr.Textbox(value="Ets l'assistent sanitari de My Health. Respon en català, de manera concisa i útil.", label="Missatge del sistema"),
    ],
)
56
 
57
# Page layout: three tabs inside a single Blocks app.
with gr.Blocks() as demo:
    gr.Markdown("# My Health Dashboard - HTML + LLM")
    with gr.Tabs():
        # Tab 1: the static dashboard with its floating chat widget.
        with gr.TabItem("Dashboard HTML + Chat Flotant"):
            gr.HTML(load_html_content())
        # Tab 2: direct access to the (mock) LLM chat.
        with gr.TabItem("Chat LLM"):
            chatbot_llm.render()
        # Tab 3: note documenting the simulated API hook for the HTML widget.
        with gr.TabItem("API Endpoint Simulado"):
            gr.Markdown(
                "Aquest endpoint es pot cridar des del teu HTML flotant: `api_chat(message)`"
            )

# -------------------------
# 5️⃣ Launch
# -------------------------

if __name__ == "__main__":
    # share=True additionally opens a public gradio.live tunnel.
    demo.launch(server_name="0.0.0.0", server_port=7860, share=True)