Mirror of https://github.com/w-okada/voice-changer.git (synced 2025-01-23 05:25:01 +03:00)
Commit 22b0f83992 ("Created using Colaboratory"), parent ae52548113
@@ -421,6 +421,74 @@
" --samples samples.json\n",
"\n"
]
},
{
"cell_type": "markdown",
"source": [
"![](https://i.pinimg.com/474x/de/72/9e/de729ecfa41b69901c42c82fff752414.jpg)\n",
"![](https://i.pinimg.com/474x/de/72/9e/de729ecfa41b69901c42c82fff752414.jpg)"
],
"metadata": {
"id": "2Uu1sTSwTc7q"
}
},
{
"cell_type": "code",
"source": [
"# @title **[Optional]** Start Server **using localtunnel** (ngrok alternative | no account needed)\n",
"# @markdown This cell will start the server. The first time you run it, it downloads the models, so it can take a while (~1-2 minutes).\n",
"\n",
"# @markdown ---\n",
"!npm config set update-notifier false\n",
"!npm install -g localtunnel\n",
"print(\"\\033[92mLocalTunnel installed!\")\n",
"# @markdown If you want to automatically clear the output when the server loads, check this option.\n",
"Clear_Output = True # @param {type:\"boolean\"}\n",
"\n",
"import portpicker, subprocess, threading, time, socket, urllib.request\n",
"PORT = portpicker.pick_unused_port()\n",
"\n",
"from IPython.display import clear_output, Javascript\n",
"\n",
"def iframe_thread(port):\n",
"  while True:\n",
"    time.sleep(0.5)\n",
"    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n",
"    result = sock.connect_ex(('127.0.0.1', port))\n",
"    if result == 0:\n",
"      break\n",
"    sock.close()\n",
"  clear_output()\n",
"  print(\"Use the following endpoint to connect to localtunnel:\", urllib.request.urlopen('https://ipv4.icanhazip.com').read().decode('utf8').strip(\"\\n\"))\n",
"  p = subprocess.Popen([\"lt\", \"--port\", \"{}\".format(port)], stdout=subprocess.PIPE)\n",
"  for line in p.stdout:\n",
"    print(line.decode(), end='')\n",
"\n",
"threading.Thread(target=iframe_thread, daemon=True, args=(PORT,)).start()\n",
"\n",
"\n",
"!python3 MMVCServerSIO.py \\\n",
" -p {PORT} \\\n",
" --https False \\\n",
" --content_vec_500 pretrain/checkpoint_best_legacy_500.pt \\\n",
" --content_vec_500_onnx pretrain/content_vec_500.onnx \\\n",
" --content_vec_500_onnx_on true \\\n",
" --hubert_base pretrain/hubert_base.pt \\\n",
" --hubert_base_jp pretrain/rinna_hubert_base_jp.pt \\\n",
" --hubert_soft pretrain/hubert/hubert-soft-0d54a1f4.pt \\\n",
" --nsf_hifigan pretrain/nsf_hifigan/model \\\n",
" --crepe_onnx_full pretrain/crepe_onnx_full.onnx \\\n",
" --crepe_onnx_tiny pretrain/crepe_onnx_tiny.onnx \\\n",
" --rmvpe pretrain/rmvpe.pt \\\n",
" --model_dir model_dir \\\n",
" --samples samples.json \\\n",
" --colab True"
],
"metadata": {
"id": "Mr7325z-TTX5"
},
"execution_count": null,
"outputs": []
}
],
"metadata": {
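
For reference, the added cell follows a wait-then-tunnel pattern: it polls the server's local port until it accepts connections, then hands that port to the localtunnel CLI. Below is a minimal standalone sketch of the same pattern outside Colab, assuming the `lt` binary from `npm install -g localtunnel` is on PATH; the port number and the `wait_for_port` helper are illustrative and not part of the notebook.

import socket
import subprocess
import time


def wait_for_port(port, host="127.0.0.1", interval=0.5):
    """Block until something is listening on (host, port)."""
    while True:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            if sock.connect_ex((host, port)) == 0:
                return
        time.sleep(interval)


if __name__ == "__main__":
    port = 18888  # illustrative port; the notebook picks one with portpicker
    wait_for_port(port)
    # Stream localtunnel's output so the public URL it prints is visible.
    with subprocess.Popen(["lt", "--port", str(port)], stdout=subprocess.PIPE) as proc:
        for line in proc.stdout:
            print(line.decode(), end="")

localtunnel prints the public URL on stdout, which is why both the cell and this sketch stream the child process's output instead of discarding it.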