Improve web edition GUI

This commit is contained in:
w-okada 2023-11-23 06:20:54 +09:00
parent b895bdec4f
commit 6fd61b9591
4 changed files with 207 additions and 195 deletions

View File

@ -125,10 +125,10 @@ const f0ModelUrl: { [modelType in VoiceChangerType]: { [inputLength in InputLeng
}; };
export const useWebInfo = (props: UseWebInfoProps): WebInfoStateAndMethod => { export const useWebInfo = (props: UseWebInfoProps): WebInfoStateAndMethod => {
const initVoiceChangerType: VoiceChangerType = "rvcv1"; const initVoiceChangerType: VoiceChangerType = "rvcv2";
const initInputLength: InputLengthKey = "24000"; const initInputLength: InputLengthKey = "24000";
const initUseF0 = false; const initUseF0 = false;
const initSampleRate: ModelSampleRateStr = "40k"; const initSampleRate: ModelSampleRateStr = "32k";
const progressCallback = (data: ProgreeeUpdateCallbcckInfo) => { const progressCallback = (data: ProgreeeUpdateCallbcckInfo) => {
if (data.progressUpdateType === ProgressUpdateType.loadPreprocessModel) { if (data.progressUpdateType === ProgressUpdateType.loadPreprocessModel) {

View File

@ -142,21 +142,37 @@ export const CharacterArea = (_props: CharacterAreaProps) => {
throw new Error("invalid webModelLoadingState"); throw new Error("invalid webModelLoadingState");
} }
} else { } else {
return ( if (webEdition) {
<div className="character-area-control"> return (
<div className="character-area-control-buttons"> <div className="character-area-control">
<div onClick={onStartClicked} className={startClassName}> <div className="character-area-control-buttons">
start <div onClick={onStartClicked} className={startClassName}>
</div> start
<div onClick={onStopClicked} className={stopClassName}> </div>
stop <div onClick={onStopClicked} className={stopClassName}>
</div> stop
<div onClick={onPassThroughClicked} className={passThruClassName}> </div>
passthru
</div> </div>
</div> </div>
</div> );
); } else {
return (
<div className="character-area-control">
<div className="character-area-control-buttons">
<div onClick={onStartClicked} className={startClassName}>
start
</div>
<div onClick={onStopClicked} className={stopClassName}>
stop
</div>
<div onClick={onPassThroughClicked} className={passThruClassName}>
passthru
</div>
</div>
</div>
);
}
} }
}, [guiState.isConverting, start, stop, serverSetting.serverSetting, serverSetting.updateServerSettings, webInfoState.progressLoadPreprocess, webInfoState.progressLoadVCModel, webInfoState.progressWarmup, webInfoState.webModelLoadingState]); }, [guiState.isConverting, start, stop, serverSetting.serverSetting, serverSetting.updateServerSettings, webInfoState.progressLoadPreprocess, webInfoState.progressLoadVCModel, webInfoState.progressWarmup, webInfoState.webModelLoadingState]);

View File

@ -4,6 +4,7 @@ import { fileSelectorAsDataURL, useIndexedDB } from "@dannadori/voice-changer-cl
import { useGuiState } from "../001_GuiStateProvider"; import { useGuiState } from "../001_GuiStateProvider";
import { AUDIO_ELEMENT_FOR_PLAY_MONITOR, AUDIO_ELEMENT_FOR_PLAY_RESULT, AUDIO_ELEMENT_FOR_TEST_CONVERTED, AUDIO_ELEMENT_FOR_TEST_CONVERTED_ECHOBACK, AUDIO_ELEMENT_FOR_TEST_ORIGINAL, INDEXEDDB_KEY_AUDIO_MONITR, INDEXEDDB_KEY_AUDIO_OUTPUT } from "../../../const"; import { AUDIO_ELEMENT_FOR_PLAY_MONITOR, AUDIO_ELEMENT_FOR_PLAY_RESULT, AUDIO_ELEMENT_FOR_TEST_CONVERTED, AUDIO_ELEMENT_FOR_TEST_CONVERTED_ECHOBACK, AUDIO_ELEMENT_FOR_TEST_ORIGINAL, INDEXEDDB_KEY_AUDIO_MONITR, INDEXEDDB_KEY_AUDIO_OUTPUT } from "../../../const";
import { isDesktopApp } from "../../../const"; import { isDesktopApp } from "../../../const";
import { useAppRoot } from "../../../001_provider/001_AppRootProvider";
export type DeviceAreaProps = {}; export type DeviceAreaProps = {};
@ -19,8 +20,19 @@ export const DeviceArea = (_props: DeviceAreaProps) => {
const { getItem, setItem } = useIndexedDB({ clientType: null }); const { getItem, setItem } = useIndexedDB({ clientType: null });
const [outputRecordingStarted, setOutputRecordingStarted] = useState<boolean>(false); const [outputRecordingStarted, setOutputRecordingStarted] = useState<boolean>(false);
const { appGuiSettingState } = useAppRoot();
const webEdition = appGuiSettingState.edition.indexOf("web") >= 0;
// (1) Audio Mode // (1) Audio Mode
const deviceModeRow = useMemo(() => { const deviceModeRow = useMemo(() => {
if (webEdition) {
return (
<div className="config-sub-area-control">
<div className="config-sub-area-control-title">AUDIO:</div>
<div className="config-sub-area-control-field"></div>
</div>
);
}
const enableServerAudio = serverSetting.serverSetting.enableServerAudio; const enableServerAudio = serverSetting.serverSetting.enableServerAudio;
const clientChecked = enableServerAudio == 1 ? false : true; const clientChecked = enableServerAudio == 1 ? false : true;
const serverChecked = enableServerAudio == 1 ? true : false; const serverChecked = enableServerAudio == 1 ? true : false;

View File

@ -1,207 +1,191 @@
export const RequestType = { export const RequestType = {
voice: "voice", voice: "voice",
config: "config", config: "config",
start: "start", start: "start",
stop: "stop", stop: "stop",
trancateBuffer: "trancateBuffer", trancateBuffer: "trancateBuffer",
} as const; } as const;
export type RequestType = (typeof RequestType)[keyof typeof RequestType]; export type RequestType = (typeof RequestType)[keyof typeof RequestType];
export const ResponseType = { export const ResponseType = {
volume: "volume", volume: "volume",
inputData: "inputData", inputData: "inputData",
start_ok: "start_ok", start_ok: "start_ok",
stop_ok: "stop_ok", stop_ok: "stop_ok",
} as const; } as const;
export type ResponseType = (typeof ResponseType)[keyof typeof ResponseType]; export type ResponseType = (typeof ResponseType)[keyof typeof ResponseType];
export type VoiceChangerWorkletProcessorRequest = { export type VoiceChangerWorkletProcessorRequest = {
requestType: RequestType; requestType: RequestType;
voice: Float32Array; voice: Float32Array;
numTrancateTreshold: number; numTrancateTreshold: number;
volTrancateThreshold: number; volTrancateThreshold: number;
volTrancateLength: number; volTrancateLength: number;
}; };
export type VoiceChangerWorkletProcessorResponse = { export type VoiceChangerWorkletProcessorResponse = {
responseType: ResponseType; responseType: ResponseType;
volume?: number; volume?: number;
recordData?: Float32Array[]; recordData?: Float32Array[];
inputData?: Float32Array; inputData?: Float32Array;
}; };
class VoiceChangerWorkletProcessor extends AudioWorkletProcessor { class VoiceChangerWorkletProcessor extends AudioWorkletProcessor {
private BLOCK_SIZE = 128; private BLOCK_SIZE = 128;
private initialized = false; private initialized = false;
private volume = 0; private volume = 0;
// private numTrancateTreshold = 100; // private numTrancateTreshold = 100;
// private volTrancateThreshold = 0.0005 // private volTrancateThreshold = 0.0005
// private volTrancateLength = 32 // private volTrancateLength = 32
// private volTrancateCount = 0 // private volTrancateCount = 0
private isRecording = false; private isRecording = false;
playBuffer: Float32Array[] = []; playBuffer: Float32Array[] = [];
unpushedF32Data: Float32Array = new Float32Array(0); unpushedF32Data: Float32Array = new Float32Array(0);
/** /**
* @constructor * @constructor
*/ */
constructor() { constructor() {
super(); super();
console.log("[AudioWorkletProcessor] created."); console.log("[AudioWorkletProcessor] created.");
this.initialized = true; this.initialized = true;
this.port.onmessage = this.handleMessage.bind(this); this.port.onmessage = this.handleMessage.bind(this);
}
calcVol = (data: Float32Array, prevVol: number) => {
const sum = data.reduce((prev, cur) => {
return prev + cur * cur;
}, 0);
const rms = Math.sqrt(sum / data.length);
return Math.max(rms, prevVol * 0.95);
};
trancateBuffer = () => {
console.log("[worklet] Buffer truncated");
while (this.playBuffer.length > 2) {
this.playBuffer.shift();
}
};
handleMessage(event: any) {
const request = event.data as VoiceChangerWorkletProcessorRequest;
if (request.requestType === "config") {
// this.numTrancateTreshold = request.numTrancateTreshold;
// this.volTrancateLength = request.volTrancateLength
// this.volTrancateThreshold = request.volTrancateThreshold
console.log("[worklet] worklet configured", request);
return;
} else if (request.requestType === "start") {
if (this.isRecording) {
console.warn("[worklet] recoring is already started");
return;
}
this.isRecording = true;
const startResponse: VoiceChangerWorkletProcessorResponse = {
responseType: "start_ok",
};
this.port.postMessage(startResponse);
return;
} else if (request.requestType === "stop") {
if (!this.isRecording) {
console.warn("[worklet] recoring is not started");
return;
}
this.isRecording = false;
const stopResponse: VoiceChangerWorkletProcessorResponse = {
responseType: "stop_ok",
};
this.port.postMessage(stopResponse);
return;
} else if (request.requestType === "trancateBuffer") {
this.trancateBuffer();
return;
} }
const f32Data = request.voice; calcVol = (data: Float32Array, prevVol: number) => {
// if (this.playBuffer.length > this.numTrancateTreshold) { const sum = data.reduce((prev, cur) => {
// console.log(`[worklet] Truncate ${this.playBuffer.length} > ${this.numTrancateTreshold}`); return prev + cur * cur;
// this.trancateBuffer(); }, 0);
// } const rms = Math.sqrt(sum / data.length);
if (this.playBuffer.length > (f32Data.length / this.BLOCK_SIZE) * 1.5) { return Math.max(rms, prevVol * 0.95);
console.log(
`[worklet] Truncate ${this.playBuffer.length} > ${
f32Data.length / this.BLOCK_SIZE
}`
);
this.trancateBuffer();
}
const concatedF32Data = new Float32Array(
this.unpushedF32Data.length + f32Data.length
);
concatedF32Data.set(this.unpushedF32Data);
concatedF32Data.set(f32Data, this.unpushedF32Data.length);
const chunkNum = Math.floor(concatedF32Data.length / this.BLOCK_SIZE);
for (let i = 0; i < chunkNum; i++) {
const block = concatedF32Data.slice(
i * this.BLOCK_SIZE,
(i + 1) * this.BLOCK_SIZE
);
this.playBuffer.push(block);
}
this.unpushedF32Data = concatedF32Data.slice(chunkNum * this.BLOCK_SIZE);
}
pushData = (inputData: Float32Array) => {
const volumeResponse: VoiceChangerWorkletProcessorResponse = {
responseType: ResponseType.inputData,
inputData: inputData,
}; };
this.port.postMessage(volumeResponse);
};
process( trancateBuffer = () => {
_inputs: Float32Array[][], console.log("[worklet] Buffer truncated");
outputs: Float32Array[][], while (this.playBuffer.length > 2) {
_parameters: Record<string, Float32Array> this.playBuffer.shift();
) { }
if (!this.initialized) { };
console.warn("[worklet] worklet_process not ready"); handleMessage(event: any) {
return true; const request = event.data as VoiceChangerWorkletProcessorRequest;
if (request.requestType === "config") {
// this.numTrancateTreshold = request.numTrancateTreshold;
// this.volTrancateLength = request.volTrancateLength
// this.volTrancateThreshold = request.volTrancateThreshold
console.log("[worklet] worklet configured", request);
return;
} else if (request.requestType === "start") {
if (this.isRecording) {
console.warn("[worklet] recoring is already started");
return;
}
this.isRecording = true;
const startResponse: VoiceChangerWorkletProcessorResponse = {
responseType: "start_ok",
};
this.port.postMessage(startResponse);
return;
} else if (request.requestType === "stop") {
if (!this.isRecording) {
console.warn("[worklet] recoring is not started");
return;
}
this.isRecording = false;
const stopResponse: VoiceChangerWorkletProcessorResponse = {
responseType: "stop_ok",
};
this.port.postMessage(stopResponse);
return;
} else if (request.requestType === "trancateBuffer") {
this.trancateBuffer();
return;
}
const f32Data = request.voice;
// if (this.playBuffer.length > this.numTrancateTreshold) {
// console.log(`[worklet] Truncate ${this.playBuffer.length} > ${this.numTrancateTreshold}`);
// this.trancateBuffer();
// }
if (this.playBuffer.length > (f32Data.length / this.BLOCK_SIZE) * 1.5) {
console.log(`[worklet] Truncate ${this.playBuffer.length} > ${f32Data.length / this.BLOCK_SIZE}`);
this.trancateBuffer();
}
const concatedF32Data = new Float32Array(this.unpushedF32Data.length + f32Data.length);
concatedF32Data.set(this.unpushedF32Data);
concatedF32Data.set(f32Data, this.unpushedF32Data.length);
const chunkNum = Math.floor(concatedF32Data.length / this.BLOCK_SIZE);
for (let i = 0; i < chunkNum; i++) {
const block = concatedF32Data.slice(i * this.BLOCK_SIZE, (i + 1) * this.BLOCK_SIZE);
this.playBuffer.push(block);
}
this.unpushedF32Data = concatedF32Data.slice(chunkNum * this.BLOCK_SIZE);
} }
if (this.isRecording) { pushData = (inputData: Float32Array) => {
if (_inputs.length > 0 && _inputs[0].length > 0) { const volumeResponse: VoiceChangerWorkletProcessorResponse = {
this.pushData(_inputs[0][0]); responseType: ResponseType.inputData,
} inputData: inputData,
} };
this.port.postMessage(volumeResponse);
};
if (this.playBuffer.length === 0) { process(_inputs: Float32Array[][], outputs: Float32Array[][], _parameters: Record<string, Float32Array>) {
// console.log("[worklet] no play buffer"); if (!this.initialized) {
return true; console.warn("[worklet] worklet_process not ready");
} return true;
// console.log("[worklet] play buffer"); }
//// 一定期間無音状態が続いている場合はスキップ。
// let voice: Float32Array | undefined
// while (true) {
// voice = this.playBuffer.shift()
// if (!voice) {
// break
// }
// this.volume = this.calcVol(voice, this.volume)
// if (this.volume < this.volTrancateThreshold) {
// this.volTrancateCount += 1
// } else {
// this.volTrancateCount = 0
// }
// // V.1.5.0よりsilent skipで音飛びするようになったので無効化 if (this.isRecording) {
// if (this.volTrancateCount < this.volTrancateLength || this.volTrancateLength < 0) { if (_inputs.length > 0 && _inputs[0].length > 0) {
// break this.pushData(_inputs[0][0]);
// } else { }
// break }
// // console.log("silent...skip")
// }
// }
let voice = this.playBuffer.shift();
if (voice) {
this.volume = this.calcVol(voice, this.volume);
const volumeResponse: VoiceChangerWorkletProcessorResponse = {
responseType: ResponseType.volume,
volume: this.volume,
};
this.port.postMessage(volumeResponse);
outputs[0][0].set(voice);
if (outputs[0].length == 2) {
outputs[0][1].set(voice);
}
}
return true; if (this.playBuffer.length === 0) {
} // console.log("[worklet] no play buffer");
return true;
}
// console.log("[worklet] play buffer");
//// 一定期間無音状態が続いている場合はスキップ。
// let voice: Float32Array | undefined
// while (true) {
// voice = this.playBuffer.shift()
// if (!voice) {
// break
// }
// this.volume = this.calcVol(voice, this.volume)
// if (this.volume < this.volTrancateThreshold) {
// this.volTrancateCount += 1
// } else {
// this.volTrancateCount = 0
// }
// // V.1.5.0よりsilent skipで音飛びするようになったので無効化
// if (this.volTrancateCount < this.volTrancateLength || this.volTrancateLength < 0) {
// break
// } else {
// break
// // console.log("silent...skip")
// }
// }
let voice = this.playBuffer.shift();
if (voice) {
this.volume = this.calcVol(voice, this.volume);
const volumeResponse: VoiceChangerWorkletProcessorResponse = {
responseType: ResponseType.volume,
volume: this.volume,
};
this.port.postMessage(volumeResponse);
outputs[0][0].set(voice);
if (outputs[0].length == 2) {
outputs[0][1].set(voice);
}
}
return true;
}
} }
registerProcessor( registerProcessor("voice-changer-worklet-processor", VoiceChangerWorkletProcessor);
"voice-changer-worklet-processor",
VoiceChangerWorkletProcessor
);