Update app.py
app.py
CHANGED
@@ -1725,7 +1725,8 @@ def create_app():
                 show_copy_button=True,
                 show_label=False,
                 avatar_images=(None, "https://upload.wikimedia.org/wikipedia/commons/0/04/ChatGPT_logo.svg"),
-                elem_id="chat-window"
+                elem_id="chat-window",
+                type="messages"  # use the new format
             )

             with gr.Row():
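For reference, type="messages" switches the Chatbot to the OpenAI-style message format: the history becomes a list of role/content dicts instead of (user, assistant) tuples. A minimal sketch under that assumption (the component and variable names here are illustrative, not taken from app.py):

import gradio as gr

# With type="messages", chat history is a list of role/content dicts
# rather than the legacy list of (user, assistant) tuples.
history = [
    {"role": "user", "content": "Hello!"},
    {"role": "assistant", "content": "Hi! How can I help?"},
]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot(value=history, type="messages", show_label=False)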
@@ -2299,12 +2300,6 @@ def create_app():
        return gr.update(choices=all_models, value=default_model)


-    def refresh_groq_models_list():
-        """Refresh the list of Groq models"""
-        global GROQ_MODELS
-        GROQ_MODELS = fetch_groq_models()
-        return gr.update(choices=list(GROQ_MODELS.keys()))
-
    def get_current_model(provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, ovh_model, cerebras_model, googleai_model):
        """Get the currently selected model based on provider"""
        if provider == "OpenRouter":
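For context, Gradio dropdowns are refreshed by returning gr.update(choices=...) from an event handler, which is the pattern both the removed Groq-specific helper and the remaining generic refresh path use. A minimal, illustrative sketch (refresh_models_list and fetch_fn are placeholder names, not functions from app.py):

import gradio as gr

def refresh_models_list(fetch_fn):
    # Illustrative only: re-fetch the available models and push them into a
    # dropdown via gr.update, mirroring the shape of the helpers in this diff.
    models = fetch_fn()  # assumed to return a dict of {display_name: model_id}
    names = list(models.keys())
    return gr.update(choices=names, value=names[0] if names else None)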
@@ -2327,6 +2322,7 @@ def create_app():
            return googleai_model
        return None

+
    # Process uploaded images
    image_upload_btn.upload(
        fn=lambda files: files,
@@ -2338,17 +2334,17 @@ def create_app():
    provider_choice.change(
        fn=toggle_model_dropdowns,
        inputs=provider_choice,
-        outputs=
-        openrouter_model
-        openai_model
-        hf_model
-        groq_model
-        cohere_model
-        together_model
-        ovh_model
-        cerebras_model
-        googleai_model
-
+        outputs=[
+            openrouter_model,
+            openai_model,
+            hf_model,
+            groq_model,
+            cohere_model,
+            together_model,
+            ovh_model,
+            cerebras_model,
+            googleai_model
+        ]
    ).then(
        fn=update_context_for_provider,
        inputs=[provider_choice, openrouter_model, openai_model, hf_model, groq_model, cohere_model, together_model, ovh_model, cerebras_model, googleai_model],
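A handler wired to an event with several outputs has to return one value per component listed in outputs, in the same order. toggle_model_dropdowns itself is not shown in this diff; a plausible sketch, assuming it simply shows the active provider's dropdown and hides the rest:

import gradio as gr

PROVIDERS = ["OpenRouter", "OpenAI", "HuggingFace", "Groq", "Cohere",
             "Together", "OVH", "Cerebras", "GoogleAI"]

def toggle_model_dropdowns(provider):
    # Illustrative: one gr.update per dropdown in the outputs list,
    # in the same order as the components are listed.
    return [gr.update(visible=(provider == p)) for p in PROVIDERS]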
@@ -2373,20 +2369,19 @@ def create_app():
        outputs=image_upload_container
    )

-    # Set up model search event -
-    # We'll now have the correct function that returns model dropdown updates
+    # Set up model search event - return model dropdown updates
    model_search.change(
-        fn=lambda provider, search:
-        "OpenRouter"
-        "OpenAI"
-        "HuggingFace"
-        "Groq"
-        "Cohere"
-        "Together"
-        "OVH"
-        "Cerebras"
-        "GoogleAI"
-
+        fn=lambda provider, search: [
+            search_openrouter_models(search) if provider == "OpenRouter" else gr.update(),
+            search_openai_models(search) if provider == "OpenAI" else gr.update(),
+            search_hf_models(search) if provider == "HuggingFace" else gr.update(),
+            search_groq_models(search) if provider == "Groq" else gr.update(),
+            search_cohere_models(search) if provider == "Cohere" else gr.update(),
+            search_together_models(search) if provider == "Together" else gr.update(),
+            search_ovh_models(search) if provider == "OVH" else gr.update(),
+            search_cerebras_models(search) if provider == "Cerebras" else gr.update(),
+            search_googleai_models(search) if provider == "GoogleAI" else gr.update()
+        ],
        inputs=[provider_choice, model_search],
        outputs=[
            openrouter_model, openai_model, hf_model, groq_model,
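The lambda above relies on gr.update() with no arguments acting as a no-op, so only the active provider's dropdown changes while the other eight outputs are left as they are. The per-provider search helpers are not shown in this diff; a hedged sketch of the shape they presumably have (search_models and all_models are illustrative names):

import gradio as gr

def search_models(search_term, all_models):
    # Illustrative: filter a model dict by substring and return a dropdown update.
    term = (search_term or "").lower()
    filtered = [name for name in all_models if term in name.lower()]
    return gr.update(choices=filtered)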
@@ -2556,6 +2551,28 @@ def create_app():
        inputs=googleai_model,
        outputs=image_upload_container
    )
+
+    def handle_search(provider, search_term):
+        """Handle search based on provider"""
+        if provider == "OpenRouter":
+            return search_openrouter_models(search_term)
+        elif provider == "OpenAI":
+            return search_openai_models(search_term)
+        elif provider == "HuggingFace":
+            return search_hf_models(search_term)
+        elif provider == "Groq":
+            return search_groq_models(search_term)
+        elif provider == "Cohere":
+            return search_cohere_models(search_term)
+        elif provider == "Together":
+            return search_together_models(search_term)
+        elif provider == "OVH":
+            return search_ovh_models(search_term)
+        elif provider == "Cerebras":
+            return search_cerebras_models(search_term)
+        elif provider == "GoogleAI":
+            return search_googleai_models(search_term)
+        return None

    # Set up submission event
    def submit_message(message, history, provider, openrouter_model, openai_model, hf_model, groq_model, cohere_model,
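handle_search returns a single dropdown update for whichever provider is active, or None for an unknown provider. The same dispatch can also be written as a lookup table; the sketch below only illustrates that design choice and is not code from app.py:

# Illustrative alternative to the if/elif chain above: table-driven dispatch.
# The search_* callables are the per-provider helpers referenced in the diff;
# any mapping of provider name -> search function works here.
def make_search_dispatcher(handlers):
    def handle_search(provider, search_term):
        handler = handlers.get(provider)
        return handler(search_term) if handler else None
    return handle_search

# Example wiring (handlers assumed to exist in app.py):
# handle_search = make_search_dispatcher({
#     "OpenRouter": search_openrouter_models,
#     "Groq": search_groq_models,
#     # ... remaining providers ...
# })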