refactoring

wataru 2023-02-09 04:08:57 +09:00
parent 5b85d58b8b
commit 46e7a64185
5 changed files with 25 additions and 152 deletions

File diff suppressed because one or more lines are too long

View File

@@ -1,5 +1,3 @@
import { FixedUserData } from "../002_hooks/013_useAudioControllerState";
export const fetchTextResource = async (url: string): Promise<string> => {
const res = await fetch(url, {
method: "GET"
@@ -7,107 +5,3 @@ export const fetchTextResource = async (url: string): Promise<string> => {
const text = res.text()
return text;
}
export const postVoice = async (title: string, prefix: string, index: number, blob: Blob) => {
// const url = `./api/voice/${title}/${prefix}/${index}`
// const url = `./api/voice`
// !!!!!!!!!!! The COLAB proxy seems to pass only paths directly under root??? !!!!!!
// !!!!!!!!!!! Work around it by carrying the get/set commands in the request body. !!!!!!
const url = `/api`
const blobBuffer = await blob.arrayBuffer()
const obj = {
command: "POST_VOICE",
data: Buffer.from(blobBuffer).toString("base64"),
title: title,
prefix: prefix,
index: index
};
const body = JSON.stringify(obj);
const res = await fetch(`${url}`, {
method: "POST",
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: body
})
const receivedJson = await res.json()
const message = receivedJson["message"]
console.debug("POST VOICE RES:", message)
return
}
export const getVoice = async (title: string, prefix: string, index: number) => {
if (!title || !prefix) {
return null
}
const url = `/api`
const obj = {
command: "GET_VOICE",
title: title,
prefix: prefix,
index: index
};
const body = JSON.stringify(obj);
const res = await fetch(`${url}`, {
method: "POST",
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: body
})
const receivedJson = await res.json()
// const message = receivedJson["message"]
const dataBase64 = receivedJson["data"]
// console.log("GET VOICE RES:", message, dataBase64)
if (!dataBase64) {
return null;
}
const buf = Buffer.from(dataBase64, "base64")
const blob = new Blob([buf.buffer])
return blob
}
export const postVoice__ = async (title: string, index: number, userData: FixedUserData) => {
const url = `/api/voice/${title}/${index}`
const micWavBlob = await userData.micWavBlob!.arrayBuffer()
const vfWavBlob = await userData.vfWavBlob!.arrayBuffer()
const micF32 = await userData.micWavSamples!
const vfF32 = await userData.vfWavSamples!
const obj = {
micWavBlob: Buffer.from(micWavBlob).toString("base64"),
vfWavBlob: Buffer.from(vfWavBlob).toString("base64"),
micF32: Buffer.from(micF32).toString("base64"),
vfF32: Buffer.from(vfF32).toString("base64")
};
const body = JSON.stringify(obj);
const res = await fetch(`${url}`, {
method: "POST",
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json'
},
body: body
})
const receivedJson = await res.json()
const changedVoiceBase64 = receivedJson["changedVoiceBase64"]
const buf = Buffer.from(changedVoiceBase64, "base64")
const ab = new ArrayBuffer(buf.length);
const view = new Uint8Array(ab);
for (let i = 0; i < buf.length; ++i) {
view[i] = buf[i];
}
return ab
}
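
The helpers deleted above tunnelled every operation through the single root-level /api endpoint because of the Colab proxy limitation flagged in the comments: the real operation is named by a "command" field in the JSON body, and the audio payload travels as base64. A minimal sketch of that pattern follows; the VoiceCommand type and sendCommand helper are illustrative names, not repo code, and it assumes the same Buffer polyfill the original code relies on.

// Sketch only: VoiceCommand and sendCommand are illustrative, not part of the repository.
type VoiceCommand =
    | { command: "POST_VOICE"; title: string; prefix: string; index: number; data: string }
    | { command: "GET_VOICE"; title: string; prefix: string; index: number };

const sendCommand = async (payload: VoiceCommand) => {
    // Every operation is a POST to the single root-level path; the body names the command.
    const res = await fetch("/api", {
        method: "POST",
        headers: { "Accept": "application/json", "Content-Type": "application/json" },
        body: JSON.stringify(payload),
    });
    return res.json();
};

// Usage mirroring the removed postVoice helper: the recorded Blob is shipped as base64 in "data".
const uploadVoice = async (title: string, prefix: string, index: number, blob: Blob) => {
    const data = Buffer.from(await blob.arrayBuffer()).toString("base64");
    await sendCommand({ command: "POST_VOICE", title, prefix, index, data });
};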

View File

@@ -1,5 +1,5 @@
import { ApplicationSetting } from "../001_clients_and_managers/000_ApplicationSettingLoader"
-import { generateWavNameForLocalStorage } from "../const"
+import { generateDataNameForLocalStorage } from "../const"
import { IndexedDBStateAndMethod } from "./001_useIndexedDB"
import { FixedUserData } from "./013_useAudioControllerState"
export type UseAppStateStorageProps = {
@@ -17,13 +17,13 @@ export type AppStateStorageStateAndMethod = AppStateStorageState & {
export const useAppStateStorage = (props: UseAppStateStorageProps): AppStateStorageStateAndMethod => {
const saveUserData = async (_title: string, prefix: string, index: number, userData: FixedUserData) => {
-const { micString } = generateWavNameForLocalStorage(prefix, index)
-props.indexedDBState.setItem(micString, userData)
+const { dataName } = generateDataNameForLocalStorage(prefix, index)
+props.indexedDBState.setItem(dataName, userData)
}
const loadUserData = async (_title: string, prefix: string, index: number): Promise<FixedUserData | null> => {
-const { micString } = generateWavNameForLocalStorage(prefix, index)
-const obj = await props.indexedDBState.getItem(micString) as FixedUserData
+const { dataName } = generateDataNameForLocalStorage(prefix, index)
+const obj = await props.indexedDBState.getItem(dataName) as FixedUserData
return obj
}
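
After the rename, a take is stored under one IndexedDB key per recording instead of the separate mic/vf keys the old generateWavNameForLocalStorage produced. A small usage sketch, assuming the hook exposes saveUserData and loadUserData on its return value; the component-side code below is illustrative.

// Illustrative only: persist a take through the hook above and read it back.
const roundTripTake = async (
    storage: AppStateStorageStateAndMethod,
    prefix: string,
    index: number,
    userData: FixedUserData
) => {
    await storage.saveUserData("any-title", prefix, index, userData); // first argument (_title) is unused
    return await storage.loadUserData("any-title", prefix, index);    // FixedUserData | null
};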

View File

@@ -4,9 +4,6 @@ import { Duplex, DuplexOptions } from "readable-stream";
import MicrophoneStream from "microphone-stream";
import { useAppSetting } from "../003_provider/AppSettingProvider";
export type MediaRecorderState = {
micMediaStream: MediaStream | undefined,
vfMediaStream: MediaStream | undefined
@@ -27,6 +24,8 @@ export type MediaRecorderStateAndMethod = MediaRecorderState & {
}
}
// AudioStreamer that accumulates AudioInput data
class AudioStreamer extends Duplex {
chunks: Float32Array[] = []
SampleRate: number
@@ -45,18 +44,14 @@ class AudioStreamer extends Duplex {
this.initializeData()
}
// Convert the accumulated data to WAV and return it
getRecordedData = () => {
const sampleSize = this.chunks.reduce((prev, cur) => {
return prev + cur.length
}, 0)
const samples = new Float32Array(sampleSize);
let sampleIndex = 0
// this.chunks.forEach(floatArray => {
// floatArray.forEach(val => {
// samples[sampleIndex] = val
// sampleIndex++;
// })
// })
for (let i = 0; i < this.chunks.length; i++) {
for (let j = 0; j < this.chunks[i].length; j++) {
samples[sampleIndex] = this.chunks[i][j];
@@ -64,14 +59,6 @@ class AudioStreamer extends Duplex {
}
}
// console.log("samples:c2", this.chunks[0][0])
// console.log("samples:c2", this.chunks[0][1])
// console.log("samples:c2", this.chunks[0])
// console.log("samples:s", samples[0])
// console.log("samples:s", samples[1])
// console.log("samples:s", samples[2])
// console.log("samples:s", samples)
const writeString = (view: DataView, offset: number, string: string) => {
for (var i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
@@ -107,28 +94,14 @@ class AudioStreamer extends Duplex {
const audioBlob = new Blob([view], { type: 'audio/wav' });
const duration = samples.length / this.SampleRate
// audioBlob.arrayBuffer().then((buffer) => {
// console.log("DATALENGTH1", samples.length * 2)
// const oldView = new DataView(buffer);
// console.log("DATALENGTH2", view.getUint32(40, true))
// console.log("DATALENGTH3", oldView.getUint32(40, true))
// })
return { audioBlob, duration, samples }
// var url = URL.createObjectURL(audioBlob);
// // var a = document.createElement('a');
// // a.href = url;
// // a.download = 'test.wav';
// // a.click();
// // return this.chunks
// return url
}
// Accumulate incoming AudioInput data
public _write(chunk: AudioBuffer, _encoding: any, callback: any) {
const buffer = chunk.getChannelData(0);
-console.log("SAMPLERATE:", chunk.sampleRate, chunk.numberOfChannels, chunk.length)
+// console.log("SAMPLERATE:", chunk.sampleRate, chunk.numberOfChannels, chunk.length)
var bufferData = new Float32Array(chunk.length);
for (var i = 0; i < chunk.length; i++) {
bufferData[i] = buffer[i];
@@ -152,6 +125,7 @@ export const useMediaRecorder = (): MediaRecorderStateAndMethod => {
const [micMediaStream, setMicMediaStream] = useState<MediaStream>()
const [vfMediaStream, setVfMediaStream] = useState<MediaStream>()
// Streamer for accumulating the raw (not noise-cancelled) data
const micAudioStreamer = useMemo(() => {
return new AudioStreamer({ objectMode: true, SampleRate: applicationSetting.applicationSetting.sample_rate })
}, [])
@@ -165,6 +139,7 @@ export const useMediaRecorder = (): MediaRecorderStateAndMethod => {
return s
}, [])
// Streamer for accumulating the noise-cancelled data
const vfAudioStreamer = useMemo(() => {
return new AudioStreamer({ objectMode: true, SampleRate: applicationSetting.applicationSetting.sample_rate })
}, [])
@@ -179,6 +154,12 @@ export const useMediaRecorder = (): MediaRecorderStateAndMethod => {
}, [])
// Trigger for AudioInput device changes
useEffect(() => {
setNewAudioInputDevice(deviceManagerState.audioInputDeviceId || "")
}, [deviceManagerState.audioInputDeviceId])
// Rebuild the input pipeline when the AudioInput change is triggered
const setNewAudioInputDevice = async (deviceId: string) => {
console.log("setNewAudioInputDevice", deviceId)
let vf = voiceFocusDeviceTransformer
@@ -231,9 +212,7 @@ export const useMediaRecorder = (): MediaRecorderStateAndMethod => {
vfStream.setStream(outputNode.stream)
vfStream.pauseRecording()
}
useEffect(() => {
setNewAudioInputDevice(deviceManagerState.audioInputDeviceId || "")
}, [deviceManagerState.audioInputDeviceId])
const startRecord = () => {
console.log("start record")
@@ -242,6 +221,7 @@ export const useMediaRecorder = (): MediaRecorderStateAndMethod => {
vfAudioStreamer.clearRecordedData()
vfStream!.playRecording()
}
const pauseRecord = () => {
micStream!.pauseRecording()
vfStream!.pauseRecording()
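
getRecordedData above concatenates the accumulated Float32Array chunks into one buffer, wraps it in a WAV container via writeString and a DataView, and reports duration as samples.length / SampleRate. A simplified standalone sketch of that encoding step follows, assuming 16-bit mono PCM (which matches the samples.length * 2 byte counts hinted at in the removed debug comments); encodeWav is an illustrative name.

// Standalone sketch of the WAV-encoding step (16-bit mono PCM assumed).
const encodeWav = (samples: Float32Array, sampleRate: number): Blob => {
    const writeString = (view: DataView, offset: number, str: string) => {
        for (let i = 0; i < str.length; i++) view.setUint8(offset + i, str.charCodeAt(i));
    };
    const buffer = new ArrayBuffer(44 + samples.length * 2);
    const view = new DataView(buffer);
    writeString(view, 0, "RIFF");
    view.setUint32(4, 36 + samples.length * 2, true); // RIFF chunk size
    writeString(view, 8, "WAVE");
    writeString(view, 12, "fmt ");
    view.setUint32(16, 16, true);              // fmt chunk size
    view.setUint16(20, 1, true);               // PCM format
    view.setUint16(22, 1, true);               // mono
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * 2, true);  // byte rate
    view.setUint16(32, 2, true);               // block align
    view.setUint16(34, 16, true);              // bits per sample
    writeString(view, 36, "data");
    view.setUint32(40, samples.length * 2, true);
    for (let i = 0; i < samples.length; i++) {
        const s = Math.max(-1, Math.min(1, samples[i]));
        view.setInt16(44 + i * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true);
    }
    return new Blob([view], { type: "audio/wav" });
};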

View File

@@ -11,11 +11,10 @@ export const generateTextFileName = (prefix: string, index: number) => {
return `${prefix}${indexString}.txt`
}
-export const generateWavNameForLocalStorage = (prefix: string, index: number) => {
+export const generateDataNameForLocalStorage = (prefix: string, index: number) => {
const indexString = String(index + 1).padStart(3, '0')
-const vfString = `${prefix}${indexString}_vf`
-const micString = `${prefix}${indexString}_mic`
-return { micString, vfString }
+const dataName = `${prefix}${indexString}_mic`
+return { dataName }
}
export const generateRegionNameForLocalStorage = (prefix: string, index: number) => {
const indexString = String(index + 1).padStart(3, '0')
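
The naming helpers use a 1-based, zero-padded three-digit index, and after this commit a single _mic key replaces the old _mic/_vf pair. A quick check of the outputs; the vc_ prefix and the ./const import path are illustrative.

// Illustrative outputs; vc_ is just an example prefix.
import { generateDataNameForLocalStorage, generateTextFileName } from "./const";

console.log(generateTextFileName("vc_", 0));             // "vc_001.txt"
console.log(generateDataNameForLocalStorage("vc_", 0));  // { dataName: "vc_001_mic" }
console.log(generateDataNameForLocalStorage("vc_", 11)); // { dataName: "vc_012_mic" }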