Allow CTranslate2 backend to choose device and compute types.

This commit is contained in:
Chris Margach
2025-04-07 14:47:29 +09:00
parent 2399487e45
commit 9c9c179684

View File

@@ -105,8 +105,9 @@ class FasterWhisperASR(ASRBase):
model_size_or_path = modelsize
else:
raise ValueError("Either modelsize or model_dir must be set")
device = "cuda" if torch and torch.cuda.is_available() else "cpu"
compute_type = "float16" if device == "cuda" else "float32"
device = "auto" # Let CTranslate2 choose an available device
compute_type = "auto" # Let CTranslate2 choose the fastest supported compute type
model = WhisperModel(
model_size_or_path,