import { DefaultVoiceChangerOptions, OnnxExecutionProvider, Protocol, Framework, fileSelector, getInfo, loadModel } from "@dannadori/voice-changer-client-js"
import React, { useMemo, useState } from "react"

export type UseServerSettingProps = {
    uploadFile: (baseUrl: string, file: File, onprogress: (progress: number, end: boolean) => void) => Promise<void>
    changeOnnxExcecutionProvider: (baseUrl: string, provider: OnnxExecutionProvider) => Promise<void>
}

export type ServerSettingState = {
    serverSetting: JSX.Element;
    mmvcServerUrl: string;
    pyTorchModel: File | null;
    configFile: File | null;
    onnxModel: File | null;
    framework: string;
    onnxExecutionProvider: OnnxExecutionProvider;
    protocol: Protocol;
}

export const useServerSetting = (props: UseServerSettingProps): ServerSettingState => {
    const [mmvcServerUrl, setMmvcServerUrl] = useState<string>(DefaultVoiceChangerOptions.mmvcServerUrl)
    const [pyTorchModel, setPyTorchModel] = useState<File | null>(null)
    const [configFile, setConfigFile] = useState<File | null>(null)
    const [onnxModel, setOnnxModel] = useState<File | null>(null)
    const [protocol, setProtocol] = useState<Protocol>("sio")
    const [onnxExecutionProvider, setOnnxExecutionProvider] = useState<OnnxExecutionProvider>("CPUExecutionProvider")
    const [framework, setFramework] = useState<Framework>("PyTorch")

    // Server URL row: "set" reads the text input and stores its value as the active MMVC server URL.
    const mmvcServerUrlRow = useMemo(() => {
        const onSetServerClicked = async () => {
            const input = document.getElementById("mmvc-server-url") as HTMLInputElement
            setMmvcServerUrl(input.value)
        }
        return (
            <div className="body-row">
                <div className="body-item-title">MMVC Server</div>
                <div className="body-input-container">
                    <input type="text" id="mmvc-server-url" defaultValue={DefaultVoiceChangerOptions.mmvcServerUrl} />
                </div>
                <div className="body-button-container">
                    <div className="body-button" onClick={onSetServerClicked}>set</div>
                </div>
            </div>
        )
    }, [])
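    // Model uploader flow (as implemented below): the user picks a PyTorch model (.pth) and/or an
    // ONNX model (.onnx) plus a config (.json); "upload" sends each selected file to the server via
    // props.uploadFile, then calls getInfo and loadModel so the server loads the uploaded model.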
    const uploadeModelRow = useMemo(() => {
        const onPyTorchFileLoadClicked = async () => {
            const file = await fileSelector("")
            if (!file.name.endsWith(".pth")) {
                alert("The model file extension must be .pth.")
                return
            }
            setPyTorchModel(file)
        }
        const onConfigFileLoadClicked = async () => {
            const file = await fileSelector("")
            if (!file.name.endsWith(".json")) {
                alert("The config file extension must be .json.")
                return
            }
            setConfigFile(file)
        }
        const onOnnxFileLoadClicked = async () => {
            const file = await fileSelector("")
            if (!file.name.endsWith(".onnx")) {
                alert("The model file extension must be .onnx.")
                return
            }
            setOnnxModel(file)
        }
        const onModelUploadClicked = async () => {
            if (!pyTorchModel && !onnxModel) {
                alert("At least one of a PyTorch model or an ONNX model must be specified.")
                return
            }
            if (!configFile) {
                alert("A config file must be specified.")
                return
            }
            // Upload whichever models were selected, then the config, then ask the server to load them.
            if (pyTorchModel) {
                await props.uploadFile(mmvcServerUrl, pyTorchModel, (progress: number, end: boolean) => {
                    console.log(progress, end)
                })
            }
            if (onnxModel) {
                await props.uploadFile(mmvcServerUrl, onnxModel, (progress: number, end: boolean) => {
                    console.log(progress, end)
                })
            }
            await props.uploadFile(mmvcServerUrl, configFile, (progress: number, end: boolean) => {
                console.log(progress, end)
            })
            const res = await getInfo(mmvcServerUrl)
            console.log(res)
            const res2 = await loadModel(mmvcServerUrl, configFile, pyTorchModel, onnxModel)
            console.log(res2)
        }
        return (
            <>
                <div className="body-row">
                    <div className="body-item-title">Model Uploader</div>
                </div>
                <div className="body-row">
                    <div className="body-item-title">PyTorch(.pth)</div>
                    <div className="body-item-text">{pyTorchModel?.name}</div>
                    <div className="body-button-container">
                        <div className="body-button" onClick={onPyTorchFileLoadClicked}>select</div>
                    </div>
                </div>
                <div className="body-row">
                    <div className="body-item-title">Config(.json)</div>
                    <div className="body-item-text">{configFile?.name}</div>
                    <div className="body-button-container">
                        <div className="body-button" onClick={onConfigFileLoadClicked}>select</div>
                    </div>
                </div>
                <div className="body-row">
                    <div className="body-item-title">Onnx(.onnx)</div>
                    <div className="body-item-text">{onnxModel?.name}</div>
                    <div className="body-button-container">
                        <div className="body-button" onClick={onOnnxFileLoadClicked}>select</div>
                    </div>
                </div>
                <div className="body-row">
                    <div className="body-button-container">
                        <div className="body-button" onClick={onModelUploadClicked}>upload</div>
                    </div>
                </div>
            </>
        )
    }, [pyTorchModel, configFile, onnxModel, mmvcServerUrl, props.uploadFile])
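    // Transport protocol between the browser client and the MMVC server. The option list below is an
    // assumption based on the library's Protocol type (typically "sio" for Socket.IO streaming and
    // "rest" for plain HTTP requests).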
    const protocolRow = useMemo(() => {
        const onProtocolChanged = async (val: Protocol) => {
            setProtocol(val)
        }
        return (
            <div className="body-row">
                <div className="body-item-title">Protocol</div>
                <div className="body-select-container">
                    <select value={protocol} onChange={(e) => { onProtocolChanged(e.target.value as Protocol) }}>
                        {["sio", "rest"].map(x => <option key={x} value={x}>{x}</option>)}
                    </select>
                </div>
            </div>
        )
    }, [protocol])
    const frameworkRow = useMemo(() => {
        const onFrameworkChanged = async (val: Framework) => {
            setFramework(val)
        }
        return (
            <div className="body-row">
                <div className="body-item-title">Framework</div>
                <div className="body-select-container">
                    <select value={framework} onChange={(e) => { onFrameworkChanged(e.target.value as Framework) }}>
                        {["PyTorch", "ONNX"].map(x => <option key={x} value={x}>{x}</option>)}
                    </select>
                </div>
            </div>
        )
    }, [framework])
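    // ONNX execution provider selector, rendered only while the ONNX framework is selected; a change is
    // pushed to the server immediately via props.changeOnnxExcecutionProvider. The provider option list
    // below is an assumption; the library's OnnxExecutionProvider type is the source of truth.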
    const onnxExecutionProviderRow = useMemo(() => {
        if (framework !== "ONNX") {
            return <></>
        }
        const onOnnxExecutionProviderChanged = async (val: OnnxExecutionProvider) => {
            await props.changeOnnxExcecutionProvider(mmvcServerUrl, val)
            setOnnxExecutionProvider(val)
        }
        return (
            <div className="body-row">
                <div className="body-item-title">OnnxExecutionProvider</div>
                <div className="body-select-container">
                    <select value={onnxExecutionProvider} onChange={(e) => { onOnnxExecutionProviderChanged(e.target.value as OnnxExecutionProvider) }}>
                        {["CPUExecutionProvider", "CUDAExecutionProvider"].map(x => <option key={x} value={x}>{x}</option>)}
                    </select>
                </div>
            </div>
        )
    }, [onnxExecutionProvider, framework, mmvcServerUrl])
    const serverSetting = useMemo(() => {
        return (
            <>
                <div className="body-row">
                    <div className="body-section-title">Server Setting</div>
                </div>
                {mmvcServerUrlRow}
                {uploadeModelRow}
                {frameworkRow}
                {onnxExecutionProviderRow}
                {protocolRow}
            </>
        )
    }, [mmvcServerUrlRow, uploadeModelRow, frameworkRow, onnxExecutionProviderRow, protocolRow])

    return {
        serverSetting,
        mmvcServerUrl,
        pyTorchModel,
        configFile,
        onnxModel,
        framework,
        onnxExecutionProvider,
        protocol,
    }
}
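// Usage sketch (illustrative only): the ServerSettingPanel component name and the prop
// implementations below are assumptions, not part of this module. A host component is expected
// to supply uploadFile / changeOnnxExcecutionProvider (e.g. thin wrappers around the
// @dannadori/voice-changer-client-js client) and render the returned serverSetting element.
//
// const ServerSettingPanel = () => {
//     const serverSettingState = useServerSetting({
//         uploadFile: async (baseUrl, file, onprogress) => {
//             // e.g. POST the file to the server and report progress
//             onprogress(100, true)
//         },
//         changeOnnxExcecutionProvider: async (baseUrl, provider) => {
//             // e.g. ask the server to switch its ONNX execution provider
//         },
//     })
//     return <div>{serverSettingState.serverSetting}</div>
// }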