app.py CHANGED

@@ -382,7 +382,11 @@ def get_model_options(pipeline_type):
     Returns:
         list: A list of model IDs.
     """
-    if pipeline_type == "
+    if pipeline_type == "faster-batched":
+        return ["cstr/whisper-large-v3-turbo-int8_float32", "SYSTRAN/faster-whisper-large-v1", "GalaktischeGurke/primeline-whisper-large-v3-german-ct2"]
+    elif pipeline_type == "faster-sequenced":
+        return ["SYSTRAN/faster-whisper-large-v1", "GalaktischeGurke/primeline-whisper-large-v3-german-ct2"]
+    elif pipeline_type == "transformers":
         return ["openai/whisper-large-v3", "openai/whisper-large-v2", "openai/whisper-medium", "openai/whisper-small"]
     else:
         return []
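For reference, a minimal sketch of what `get_model_options` looks like after this change. Only the branches visible in the hunk are reproduced; the one-line docstring summary and the inline comments are my own additions and not part of the original file, and any docstring text above the `Returns:` section is omitted because the diff does not show it.

```python
def get_model_options(pipeline_type):
    """
    Return the model IDs offered for a given pipeline type.

    Returns:
        list: A list of model IDs.
    """
    # faster-whisper (CTranslate2) checkpoints for the batched pipeline.
    if pipeline_type == "faster-batched":
        return ["cstr/whisper-large-v3-turbo-int8_float32",
                "SYSTRAN/faster-whisper-large-v1",
                "GalaktischeGurke/primeline-whisper-large-v3-german-ct2"]
    # faster-whisper checkpoints for the sequential pipeline.
    elif pipeline_type == "faster-sequenced":
        return ["SYSTRAN/faster-whisper-large-v1",
                "GalaktischeGurke/primeline-whisper-large-v3-german-ct2"]
    # Original Hugging Face Transformers Whisper checkpoints.
    elif pipeline_type == "transformers":
        return ["openai/whisper-large-v3", "openai/whisper-large-v2",
                "openai/whisper-medium", "openai/whisper-small"]
    # Unknown pipeline type: nothing to offer.
    else:
        return []
```

With this version, `get_model_options("faster-batched")` returns the three newly added CTranslate2 model IDs, `get_model_options("transformers")` keeps returning the original openai/whisper checkpoints, and any unrecognized pipeline type falls through to an empty list.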