Shuwei Hou committed
Commit 07709c8
1 Parent(s): 67d6834
update_model_loading

preprocess.py  +18 -0
preprocess.py  CHANGED
@@ -125,6 +125,8 @@ def process_audio_file(input_audio_file, num_speakers, device="cuda"):
     print("Loading CrisperWhisper model...")
     device_str = "cuda:0" if torch.cuda.is_available() else "cpu"
     torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
+
+    """ Use local Crisper Whisper Model
     local_model_dir = "./CrisperWhisper_local"

     cw_model = AutoModelForSpeechSeq2Seq.from_pretrained(
@@ -137,6 +139,22 @@ def process_audio_file(input_audio_file, num_speakers, device="cuda"):

     processor = AutoProcessor.from_pretrained(local_model_dir)

+    """
+    hf_model_id = "nyrahealth/CrisperWhisper"
+
+    cw_model = AutoModelForSpeechSeq2Seq.from_pretrained(
+        hf_model_id,
+        torch_dtype=torch_dtype,
+        low_cpu_mem_usage=True,
+        use_safetensors=True,
+        token=token
+    )
+    cw_model.to(device_str)
+
+    processor = AutoProcessor.from_pretrained(hf_model_id, token=token)
+
+
+
     asr_pipeline = pipeline(
         "automatic-speech-recognition",
         model=cw_model,
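
For context, below is a minimal, self-contained sketch of the loading path this commit switches to. It is an illustration assembled from the hunks above, not code from the repository: the HF_TOKEN environment variable, the sample.wav input, and the pipeline arguments after model= (the diff is cut off there) are assumptions.

    # Sketch of the new Hub-based loading path (assumptions marked below).
    import os

    import torch
    from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, pipeline

    token = os.environ.get("HF_TOKEN")  # assumed token source, not shown in the diff
    device_str = "cuda:0" if torch.cuda.is_available() else "cpu"
    torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

    hf_model_id = "nyrahealth/CrisperWhisper"

    # Load the model from the Hub instead of the local ./CrisperWhisper_local copy.
    cw_model = AutoModelForSpeechSeq2Seq.from_pretrained(
        hf_model_id,
        torch_dtype=torch_dtype,
        low_cpu_mem_usage=True,
        use_safetensors=True,
        token=token,
    )
    cw_model.to(device_str)

    processor = AutoProcessor.from_pretrained(hf_model_id, token=token)

    # Pipeline arguments beyond model= are assumed; the diff truncates here.
    asr_pipeline = pipeline(
        "automatic-speech-recognition",
        model=cw_model,
        tokenizer=processor.tokenizer,
        feature_extractor=processor.feature_extractor,
        torch_dtype=torch_dtype,
        device=device_str,
    )

    result = asr_pipeline("sample.wav")  # placeholder audio path
    print(result["text"])

The dtype choice mirrors the existing logic (float16 on CUDA, float32 on CPU), and the old local-directory loading is not deleted but disabled by wrapping it in a triple-quoted string literal.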