voice-changer/server/voice_changer/VoiceChangerManager.py

from typing import Any

import numpy as np

from voice_changer.VoiceChanger import VoiceChanger


class VoiceChangerManager():

    @classmethod
    def get_instance(cls, params):
        # Singleton access: the first call creates the manager and its VoiceChanger.
        if not hasattr(cls, "_instance"):
            cls._instance = cls()
            cls._instance.voiceChanger = VoiceChanger(params)
        return cls._instance

    def loadModel(self, config, model, onnx_model, clusterTorchModel):
        # Delegate model loading to VoiceChanger and report success.
        info = self.voiceChanger.loadModel(config, model, onnx_model, clusterTorchModel)
        info["status"] = "OK"
        return info

    def get_info(self):
        if hasattr(self, "voiceChanger"):
            info = self.voiceChanger.get_info()
            info["status"] = "OK"
            return info
        else:
            return {"status": "ERROR", "msg": "no model loaded"}

    def update_setteings(self, key: str, val: Any):
        # Spelling follows VoiceChanger.update_setteings.
        if hasattr(self, "voiceChanger"):
            info = self.voiceChanger.update_setteings(key, val)
            info["status"] = "OK"
            return info
        else:
            return {"status": "ERROR", "msg": "no model loaded"}

    def changeVoice(self, receivedData: Any):
        if hasattr(self, "voiceChanger"):
            return self.voiceChanger.on_request(receivedData)
        else:
            print("Voice Changer is not loaded. Did you load a correct model?")
            # Return one int16 sample of silence and an empty performance list.
            return np.zeros(1).astype(np.int16), []
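
Minimal usage sketch (not part of the file): the params keys, model paths, and input buffer below are placeholders assumed for illustration; the formats actually accepted are defined by VoiceChanger.loadModel and VoiceChanger.on_request.

import numpy as np

from voice_changer.VoiceChangerManager import VoiceChangerManager

# Placeholder startup parameters and model paths (not shipped with the repository).
params = {"hubert": "pretrain/hubert.pt"}
manager = VoiceChangerManager.get_instance(params)
print(manager.loadModel("models/config.json", "models/model.pth", None, None))

# Feed a block of audio; an int16 NumPy buffer is assumed here, but the real
# expected input format is whatever VoiceChanger.on_request accepts.
audio_in = np.zeros(4096, dtype=np.int16)
audio_out, perf = manager.changeVoice(audio_in)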