Mirror of https://github.com/w-okada/voice-changer.git, synced 2025-01-23 13:35:12 +03:00
EXP. remove microphone stream

commit de0be188ff (parent 11d6bd414c)
@@ -164,11 +164,24 @@ export class VoiceChangerClient {
             const voiceFocusNode = await this.currentDevice.createAudioNode(this.ctx); // vf node
             this.inputGainNode.connect(voiceFocusNode.start) // input node -> vf node
             voiceFocusNode.end.connect(this.outputNodeFromVF!)
-            this.micStream.setStream(this.outputNodeFromVF!.stream) // vf node -> mic stream
+            // this.micStream.setStream(this.outputNodeFromVF!.stream) // vf node -> mic stream
         } else {
-            const inputDestinationNodeForMicStream = this.ctx.createMediaStreamDestination()
-            this.inputGainNode.connect(inputDestinationNodeForMicStream)
-            this.micStream.setStream(inputDestinationNodeForMicStream.stream) // input device -> mic stream
+            // const inputDestinationNodeForMicStream = this.ctx.createMediaStreamDestination()
+            // this.inputGainNode.connect(inputDestinationNodeForMicStream)
+            console.log("input___ media stream", this.currentMediaStream)
+            this.currentMediaStream.getTracks().forEach(x => {
+                console.log("input___ media stream set", x.getSettings())
+                console.log("input___ media stream con", x.getConstraints())
+                console.log("input___ media stream cap", x.getCapabilities())
+            })
+            console.log("input___ media node", this.currentMediaStreamAudioSourceNode)
+            console.log("input___ gain node", this.inputGainNode.channelCount, this.inputGainNode)
+            this.inputGainNode.connect(this.vcNode)
+
+            // this.micStream.setStream(inputDestinationNodeForMicStream.stream) // input device -> mic stream
         }
         this.micStream.pipe(this.audioStreamer) // mic stream -> audio streamer
         if (!this._isVoiceChanging) {
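Note on the hunk above: with MicStream and the MediaStreamDestination path commented out, the input gain node now feeds the voice-changer worklet node directly. A minimal sketch of the resulting Web Audio graph, assuming the worklet module is already loaded; the getUserMedia wiring and the connection to ctx.destination are my assumptions, not shown in this hunk:

    // Sketch only: input device -> GainNode -> AudioWorkletNode, with no
    // MediaStreamDestination or MicStream in between.
    const setupDirectGraph = async () => {
        const ctx = new AudioContext();
        const media = await navigator.mediaDevices.getUserMedia({ audio: true });
        const source = ctx.createMediaStreamSource(media);
        const inputGainNode = ctx.createGain();
        const vcNode = new AudioWorkletNode(ctx, "voice-changer-worklet-processor");
        source.connect(inputGainNode);
        inputGainNode.connect(vcNode);   // replaces the micStream.setStream(...) path
        vcNode.connect(ctx.destination); // assumed playback path
        return { ctx, inputGainNode, vcNode };
    };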
@@ -1,5 +1,7 @@
 import { VoiceChangerWorkletProcessorRequest } from "./@types/voice-changer-worklet-processor";
 import { WorkletSetting } from "./const";
+import { io, Socket } from "socket.io-client";
+import { DefaultEventsMap } from "@socket.io/component-emitter";

 export type VoiceChangerWorkletListener = {
     notifyVolume: (vol: number) => void
@@ -8,10 +10,13 @@ export type VoiceChangerWorkletListener = {

 export class VoiceChangerWorkletNode extends AudioWorkletNode {
     private listener: VoiceChangerWorkletListener
+    private requestChunks: ArrayBuffer[] = []
+    private socket: Socket<DefaultEventsMap, DefaultEventsMap> | null = null
     constructor(context: AudioContext, listener: VoiceChangerWorkletListener) {
         super(context, "voice-changer-worklet-processor");
         this.port.onmessage = this.handleMessage.bind(this);
         this.listener = listener
+        this.createSocketIO()
         console.log(`[worklet_node][voice-changer-worklet-processor] created.`);
     }

@@ -26,17 +31,118 @@ export class VoiceChangerWorkletNode extends AudioWorkletNode {
         this.port.postMessage(req)
     }

+    private createSocketIO = () => {
+        if (this.socket) {
+            this.socket.close()
+        }
+        // if (this.setting.protocol === "sio") {
+        // this.socket = io(this.setting.serverUrl + "/test");
+        this.socket = io("/test");
+        this.socket.on('connect_error', (err) => {
+            console.log("connect exception !!!!!")
+            // this.audioStreamerListeners.notifyException(VOICE_CHANGER_CLIENT_EXCEPTION.ERR_SIO_CONNECT_FAILED, `[SIO] connection failed ${err}`)
+        })
+        this.socket.on('connect', () => {
+            // console.log(`[SIO] connect to ${this.setting.serverUrl}`)
+            console.log(`[SIO] ${this.socket?.id}`)
+        });
+        this.socket.on('response', (response: any[]) => {
+            const cur = Date.now()
+            const responseTime = cur - response[0]
+            const result = response[1] as ArrayBuffer
+            if (result.byteLength < 128 * 2) {
+                console.log("tooshort!!")
+                // this.audioStreamerListeners.notifyException(VOICE_CHANGER_CLIENT_EXCEPTION.ERR_SIO_INVALID_RESPONSE, `[SIO] received data is too short ${result.byteLength}`)
+            } else {
+                console.log("response!!!")
+                this.postReceivedVoice(response[1])
+                // this.callbacks.onVoiceReceived(response[1])
+                // this.audioStreamerListeners.notifyResponseTime(responseTime)
+            }
+        });
+        // }
+    }
+
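Note on createSocketIO above: io("/test") connects to the "/test" namespace on whatever origin served the page; the serverUrl-based variant is left commented out. A standalone sketch of the same client-side wiring, limited to the event names that appear in this diff (the logging text is mine):

    import { io, Socket } from "socket.io-client";
    import { DefaultEventsMap } from "@socket.io/component-emitter";

    const socket: Socket<DefaultEventsMap, DefaultEventsMap> = io("/test");
    socket.on("connect", () => console.log(`[SIO] connected as ${socket.id}`));
    socket.on("connect_error", (err) => console.warn("[SIO] connection failed", err));
    socket.on("response", (response: any[]) => {
        // The server replies with [timestamp, ArrayBuffer]; fewer than 256 bytes is treated as too short above.
        const [sentAt, voice] = response as [number, ArrayBuffer];
        console.log(`[SIO] ${voice.byteLength} bytes back after ${Date.now() - sentAt} ms`);
    });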
     handleMessage(event: any) {
         // console.log(`[Node:handleMessage_] `, event.data.volume);
         if (event.data.responseType === "volume") {
             this.listener.notifyVolume(event.data.volume as number)
         } else if (event.data.responseType === "recordData") {
             this.listener.notifyOutputRecordData(event.data.recordData as Float32Array[])
+        } else if (event.data.responseType === "inputData") {
+            const inputData = event.data.inputData as Float32Array
+            // console.log("receive input data", inputData)
+
+            const arrayBuffer = new ArrayBuffer(inputData.length * 2)
+            const dataView = new DataView(arrayBuffer);
+            for (let i = 0; i < inputData.length; i++) {
+                let s = Math.max(-1, Math.min(1, inputData[i]));
+                s = s < 0 ? s * 0x8000 : s * 0x7FFF
+                dataView.setInt16(i * 2, s, true);
+            }
+
+            this.requestChunks.push(arrayBuffer)
+
+            // Stop here until the request buffer holds the expected number of chunks per request.
+            if (this.requestChunks.length < 32) {
+                return
+            }
+            console.log("sending...")
+
+            // Allocate the container for the request.
+            const windowByteLength = this.requestChunks.reduce((prev, cur) => {
+                return prev + cur.byteLength
+            }, 0)
+            const newBuffer = new Uint8Array(windowByteLength);
+
+            // Copy the buffered request data into it.
+            this.requestChunks.reduce((prev, cur) => {
+                newBuffer.set(new Uint8Array(cur), prev)
+                return prev + cur.byteLength
+            }, 0)
+
+            this.sendBuffer(newBuffer)
+            console.log("sended...")
+            this.requestChunks = []
+
         } else {
             console.warn(`[worklet_node][voice-changer-worklet-processor] unknown response ${event.data.responseType}`, event.data)
         }
     }
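Note on the "inputData" branch above: each message from the worklet carries one 128-sample Float32 render quantum, which is converted to 16-bit little-endian PCM and buffered; once 32 chunks have accumulated (32 x 128 samples x 2 bytes = 8192 bytes) they are concatenated and handed to sendBuffer. A standalone sketch of the same sample conversion; the helper name is mine, not from the repo:

    // Float32 [-1, 1] -> signed 16-bit little-endian PCM, with the same scaling as the
    // diff: negative samples scaled by 0x8000, positive samples by 0x7FFF.
    const floatTo16BitPCM = (input: Float32Array): ArrayBuffer => {
        const buffer = new ArrayBuffer(input.length * 2);
        const view = new DataView(buffer);
        for (let i = 0; i < input.length; i++) {
            const s = Math.max(-1, Math.min(1, input[i]));
            view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }
        return buffer;
    };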
+
+    private sendBuffer = async (newBuffer: Uint8Array) => {
+        const timestamp = Date.now()
+        // if (this.setting.protocol === "sio") {
+        if (!this.socket) {
+            console.warn(`sio is not initialized`)
+            return
+        }
+        // console.log("emit!")
+        this.socket.emit('request_message', [
+            timestamp,
+            newBuffer.buffer]);
+        // } else {
+        //     const res = await postVoice(
+        //         this.setting.serverUrl + "/test",
+        //         timestamp,
+        //         newBuffer.buffer)
+
+        //     if (res.byteLength < 128 * 2) {
+        //         this.audioStreamerListeners.notifyException(VOICE_CHANGER_CLIENT_EXCEPTION.ERR_REST_INVALID_RESPONSE, `[REST] received data is too short ${res.byteLength}`)
+        //     } else {
+        //         this.callbacks.onVoiceReceived(res)
+        //         this.audioStreamerListeners.notifyResponseTime(Date.now() - timestamp)
+        //     }
+        // }
+    }
+
     configure = (setting: WorkletSetting) => {
         const req: VoiceChangerWorkletProcessorRequest = {
             requestType: "config",
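Wire format implied by sendBuffer: the client emits a [timestamp, ArrayBuffer] pair on 'request_message' and expects the server to answer on 'response' with a matching [timestamp, convertedVoice] pair; the REST fallback via postVoice stays commented out. A minimal sketch of the emit side, with an illustrative helper name of my own:

    import { Socket } from "socket.io-client";

    // Send one batched PCM chunk; the 'response' handler measures the round trip.
    const sendChunk = (socket: Socket, pcm: Uint8Array) => {
        socket.emit("request_message", [Date.now(), pcm.buffer]);
    };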
@@ -9,7 +9,8 @@ export type RequestType = typeof RequestType[keyof typeof RequestType]

 export const ResponseType = {
     "volume": "volume",
-    "recordData": "recordData"
+    "recordData": "recordData",
+    "inputData": "inputData"
 } as const
 export type ResponseType = typeof ResponseType[keyof typeof ResponseType]

@@ -27,6 +28,7 @@ export type VoiceChangerWorkletProcessorResponse = {
     responseType: ResponseType,
     volume?: number,
     recordData?: Float32Array[]
+    inputData?: Float32Array
 }

 class VoiceChangerWorkletProcessor extends AudioWorkletProcessor {
@@ -132,12 +134,24 @@ class VoiceChangerWorkletProcessor extends AudioWorkletProcessor {
     }

+    pushData = (inputData: Float32Array) => {
+        const volumeResponse: VoiceChangerWorkletProcessorResponse = {
+            responseType: ResponseType.inputData,
+            inputData: inputData
+        }
+        this.port.postMessage(volumeResponse);
+    }
+
     process(_inputs: Float32Array[][], outputs: Float32Array[][], _parameters: Record<string, Float32Array>) {
         if (!this.initialized) {
             console.warn("[worklet] worklet_process not ready");
             return true;
         }

+        if (_inputs.length > 0 && _inputs[0].length > 0) {
+            this.pushData(_inputs[0][0])
+        }
+
         if (this.playBuffer.length === 0) {
             // console.log("[worklet] no play buffer")
             return true
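Note on the processor-side changes above: pushData() posts every non-empty input block to the node over the worklet port, which is what feeds the "inputData" branch in handleMessage. A self-contained sketch of that tap pattern; the processor name is illustrative, not the project's registered "voice-changer-worklet-processor":

    // AudioWorkletProcessor that forwards each 128-sample input block to the main thread.
    class InputTapProcessor extends AudioWorkletProcessor {
        process(inputs: Float32Array[][], _outputs: Float32Array[][], _parameters: Record<string, Float32Array>) {
            if (inputs.length > 0 && inputs[0].length > 0) {
                this.port.postMessage({ responseType: "inputData", inputData: inputs[0][0] });
            }
            return true; // keep the processor alive
        }
    }
    registerProcessor("input-tap-processor", InputTapProcessor);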
@@ -174,6 +188,7 @@ class VoiceChangerWorkletProcessor extends AudioWorkletProcessor {
             }
             this.port.postMessage(volumeResponse);
             outputs[0][0].set(voice)
+            outputs[0][1].set(voice)
         }

         return true;