import sys

from const import EnumInferenceTypes
from voice_changer.RVC.inferencer.EasyVCInferencerONNX import EasyVCInferencerONNX
from voice_changer.RVC.inferencer.Inferencer import Inferencer
from voice_changer.RVC.inferencer.OnnxRVCInferencer import OnnxRVCInferencer
from voice_changer.RVC.inferencer.OnnxRVCInferencerNono import OnnxRVCInferencerNono
from voice_changer.RVC.inferencer.RVCInferencer import RVCInferencer
from voice_changer.RVC.inferencer.RVCInferencerNono import RVCInferencerNono
from voice_changer.RVC.inferencer.RVCInferencerv2 import RVCInferencerv2
from voice_changer.RVC.inferencer.RVCInferencerv2Nono import RVCInferencerv2Nono
from voice_changer.RVC.inferencer.WebUIInferencer import WebUIInferencer
from voice_changer.RVC.inferencer.WebUIInferencerNono import WebUIInferencerNono

class InferencerManager:
    # Most recently created inferencer, cached at class level.
    currentInferencer: Inferencer | None = None

    @classmethod
    def getInferencer(
        cls,
        inferencerType: EnumInferenceTypes,
        file: str,
        gpu: int,
        inferencerTypeVersion: str | None = None,
    ) -> Inferencer:
        # Build the inferencer for the requested type, cache it, and return it.
        cls.currentInferencer = cls.loadInferencer(inferencerType, file, gpu, inferencerTypeVersion)
        return cls.currentInferencer

    @classmethod
    def loadInferencer(
        cls,
        inferencerType: EnumInferenceTypes,
        file: str,
        gpu: int,
        inferencerTypeVersion: str | None = None,
    ) -> Inferencer:
        # Each branch accepts either the enum member or its raw string value.
        if inferencerType == EnumInferenceTypes.pyTorchRVC or inferencerType == EnumInferenceTypes.pyTorchRVC.value:
            return RVCInferencer().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.pyTorchRVCNono or inferencerType == EnumInferenceTypes.pyTorchRVCNono.value:
            return RVCInferencerNono().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.pyTorchRVCv2 or inferencerType == EnumInferenceTypes.pyTorchRVCv2.value:
            return RVCInferencerv2().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.pyTorchVoRASbeta or inferencerType == EnumInferenceTypes.pyTorchVoRASbeta.value:
            # VoRAS is not supported on macOS, so its import is deferred to non-macOS platforms.
            if not sys.platform.startswith("darwin"):
                from voice_changer.RVC.inferencer.VorasInferencebeta import VoRASInferencer

                return VoRASInferencer().loadModel(file, gpu)
            else:
                raise RuntimeError("[Voice Changer] VoRAS is not supported on macOS")
        elif inferencerType == EnumInferenceTypes.pyTorchRVCv2Nono or inferencerType == EnumInferenceTypes.pyTorchRVCv2Nono.value:
            return RVCInferencerv2Nono().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.pyTorchWebUI or inferencerType == EnumInferenceTypes.pyTorchWebUI.value:
            return WebUIInferencer().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.pyTorchWebUINono or inferencerType == EnumInferenceTypes.pyTorchWebUINono.value:
            return WebUIInferencerNono().loadModel(file, gpu)
        elif inferencerType == EnumInferenceTypes.onnxRVC or inferencerType == EnumInferenceTypes.onnxRVC.value:
            return OnnxRVCInferencer().loadModel(file, gpu, inferencerTypeVersion)
        elif inferencerType == EnumInferenceTypes.onnxRVCNono or inferencerType == EnumInferenceTypes.onnxRVCNono.value:
            return OnnxRVCInferencerNono().loadModel(file, gpu, inferencerTypeVersion)
        elif inferencerType == EnumInferenceTypes.easyVC or inferencerType == EnumInferenceTypes.easyVC.value:
            return EasyVCInferencerONNX().loadModel(file, gpu)
        else:
            raise RuntimeError("[Voice Changer] Inferencer not found", inferencerType)
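

# Usage sketch (illustrative only, not part of the original module): how a caller
# might request an inferencer. The model path and GPU index below are hypothetical
# placeholders.
#
#     inferencer = InferencerManager.getInferencer(
#         EnumInferenceTypes.pyTorchRVCv2,
#         "model.pth",
#         gpu=0,
#     )
#     # `inferencer` is the loaded RVCInferencerv2 instance, also cached on
#     # InferencerManager.currentInferencer.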