Update config.py

Change the inference device on Mac (from MPS to CPU) to accelerate inference and reduce memory leaks
This commit is contained in:
XXXXRT666 2024-02-20 15:57:58 +00:00 committed by GitHub
parent a16de2e7c6
commit 3180294710
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@@ -20,7 +20,7 @@ python_exec = sys.executable or "python"
 if torch.cuda.is_available():
     infer_device = "cuda"
 elif torch.backends.mps.is_available():
-    infer_device = "mps"
+    infer_device = "cpu"
 else:
     infer_device = "cpu"