from typing import Any, Protocol

import torch
import onnxruntime

from const import EnumInferenceTypes


class Inferencer(Protocol):
    """Common interface for inference backends (PyTorch / ONNX Runtime)."""

    inferencerType: EnumInferenceTypes = EnumInferenceTypes.pyTorchRVC
    file: str
    isHalf: bool = True
    gpu: int = 0

    # Loaded backend model: an onnxruntime session, a torch module, or None
    # until loadModel() has been called.
    model: onnxruntime.InferenceSession | Any | None = None

    def loadModel(self, file: str, gpu: int):
        ...

    def infer(
        self,
        feats: torch.Tensor,
        pitch_length: torch.Tensor,
        pitch: torch.Tensor | None,
        pitchf: torch.Tensor | None,
        sid: torch.Tensor,
    ) -> torch.Tensor:
        """Run the model on content features, optional pitch data, and a speaker id."""
        ...

    def setProps(
        self,
        inferencerType: EnumInferenceTypes,
        file: str,
        isHalf: bool,
        gpu: int,
    ):
        self.inferencerType = inferencerType
        self.file = file
        self.isHalf = isHalf
        self.gpu = gpu

    def getInferencerInfo(self):
        return {
            "inferencerType": self.inferencerType.value,
            "file": self.file,
            "isHalf": self.isHalf,
            "gpu": self.gpu,
        }
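

# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the original module: a minimal class
# that satisfies this Protocol by subclassing it explicitly, inheriting
# setProps() / getInferencerInfo() and overriding loadModel() / infer().
# The class name and the torch.nn.Identity stand-in "model" are assumptions
# for demonstration; a real implementation would load an RVC checkpoint or an
# onnxruntime.InferenceSession instead.
# ---------------------------------------------------------------------------
class _ExampleIdentityInferencer(Inferencer):
    def loadModel(self, file: str, gpu: int):
        # Record the properties and install a pass-through "model".
        self.setProps(EnumInferenceTypes.pyTorchRVC, file, isHalf=False, gpu=gpu)
        self.model = torch.nn.Identity()
        return self

    def infer(
        self,
        feats: torch.Tensor,
        pitch_length: torch.Tensor,
        pitch: torch.Tensor | None,
        pitchf: torch.Tensor | None,
        sid: torch.Tensor,
    ) -> torch.Tensor:
        # Pass-through: returns the input features unchanged.
        return self.model(feats)


# Usage sketch (hypothetical file name and tensor shapes):
#   inferencer: Inferencer = _ExampleIdentityInferencer().loadModel("model.pth", gpu=0)
#   out = inferencer.infer(
#       torch.zeros(1, 100, 256), torch.tensor([100]), None, None, torch.tensor([0])
#   )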