diff --git a/w_okada's_Voice_Changer_version_2_x.ipynb b/w_okada's_Voice_Changer_version_2_x.ipynb
index e6bfe8d3..ab6779a3 100644
--- a/w_okada's_Voice_Changer_version_2_x.ipynb
+++ b/w_okada's_Voice_Changer_version_2_x.ipynb
@@ -52,12 +52,40 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 3,
    "metadata": {
     "id": "W2GYWTHWmRIY",
-    "cellView": "form"
+    "cellView": "form",
+    "outputId": "ab23f187-546e-4156-813d-12e2f178d23a",
+    "colab": {
+     "base_uri": "https://localhost:8080/"
+    }
    },
-   "outputs": [],
+   "outputs": [
+    {
+     "metadata": {
+      "tags": null
+     },
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "GPU is available\n",
+      "GPU Name: Tesla T4\n",
+      "Welcome to ColabMod\n",
+      "Downloading the latest vcclient... \n",
+      "  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current\n",
+      "                                 Dload  Upload   Total   Spent    Left  Speed\n",
+      "100  1163  100  1163    0     0   2654      0 --:--:-- --:--:-- --:--:--  2649\n",
+      "Warning: Failed to create the file /content/vcclient_latest_for_colab: Text \n",
+      "Warning: file busy\n",
+      "  0 2965M    0 15848    0     0  16360      0 52:47:53 --:--:-- 52:47:53 16360\n",
+      "curl: (23) Failure writing output to destination\n",
+      "Download is done.\n",
+      "/content\n",
+      "Installing modules... Install is done.\n"
+     ]
+    }
+   ],
    "source": [
     "#=================Updated=================\n",
     "# @title **[1]** Clone repository and install dependencies\n",
@@ -196,14 +224,19 @@
     "# @title **[2]** Start server\n",
     "# @markdown This cell will start the server, the first time that you run it will download the models, so it can take a while (2~4 minutes)\n",
     "\n",
+    "# @markdown If you want to use ngrok, please input your token in the option section below. If you encounter a 403 error with the colab proxy, using ngrok can sometimes help to work around it.\n",
+    "# @markdown https://dashboard.ngrok.com/\n",
+    "\n",
     "\n",
     "# @markdown ### Options:\n",
     "ClearConsole = True # @param {type:\"boolean\"}\n",
     "Play_Notification = True # @param {type:\"boolean\"}\n",
+    "NgrokToken = \"\" # @param {type:\"string\"}\n",
     "\n",
     "PORT=8003\n",
+    "NGROK_URL_FILE = \"ngrok_url.txt\"\n",
     "\n",
-    "LOG_FILE = f\"/content/LOG_FILE_{PORT}\"\n",
+    "LOG_FILE = f\"/content/LOG_FILE_{PORT}.log\"\n",
     "\n",
     "from IPython.display import Audio, display\n",
     "def play_notification_sound(url):\n",
@@ -212,10 +245,14 @@
     "from google.colab.output import eval_js\n",
     "\n",
     "\n",
+    "\n",
+    "\n",
     "if mode == \"elf\":\n",
     "    # !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true &\n",
-    "\n",
-    "    get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true >{LOG_FILE} 2>&1 &')\n",
+    "    if NgrokToken ==\"\":\n",
+    "        get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False >{LOG_FILE} 2>&1 &')\n",
+    "    else:\n",
+    "        get_ipython().system_raw(f'LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False --ngrok_token {NgrokToken} --ngrok_proxy_url_file {NGROK_URL_FILE} >{LOG_FILE} 2>&1 &')\n",
     "elif mode == \"zip\":\n",
     "    !LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu ./main cui --port {PORT} --no_cui true &\n",
     "\n",
@@ -251,13 +288,26 @@
     "            print(\"--------- SERVER READY! ---------\")\n",
     "            print(f\"Your server is available. elapsed: {elapsed_time}sec\")\n",
     "            proxy = eval_js( \"google.colab.kernel.proxyPort(\" + str(PORT) + \")\" )\n",
-    "            print(f\"{proxy}\")\n",
+    "            print(f\"colab proxy: {proxy}\")\n",
+    "            if NgrokToken != \"\":\n",
+    "                with open(NGROK_URL_FILE, \"r\") as f:\n",
+    "                    ngrok_url = f.read().strip()\n",
+    "                print(f\"Ngrok URL: {ngrok_url}\")\n",
     "            print(\"---------------------------------\")\n",
     "            if Play_Notification==True:\n",
     "                play_notification_sound('https://huggingface.co/wok000/voices/resolve/main/vcclient001_vctk229_gpt-sovits_vcclient-ready.wav')\n",
     "wait_for_server()\n"
    ]
   },
+  {
+   "cell_type": "code",
+   "source": [],
+   "metadata": {
+    "id": "CRZALk96MxuH"
+   },
+   "execution_count": null,
+   "outputs": []
+  },
   {
    "cell_type": "code",
    "source": [
@@ -279,7 +329,7 @@
   "colab": {
    "provenance": [],
    "gpuType": "T4",
-   "authorship_tag": "ABX9TyPpbFBssircyl/qT8uDX2zy",
+   "authorship_tag": "ABX9TyMWd/nQ6LnQpMDqF9MqEpsm",
    "include_colab_link": true
   },
   "kernelspec": {
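
Note for reviewers (not part of the patch): the functional change in cell [2] is the new NgrokToken option. With an empty token the vcclient binary is launched as before (now with --https False); with a token, --ngrok_token and --ngrok_proxy_url_file are appended, and once the server reports ready the tunnel URL is read back from ngrok_url.txt and printed next to the Colab proxy URL. The sketch below restates that launch logic as a standalone Python snippet for clarity; build_launch_command and read_ngrok_url are hypothetical helper names, while the flags and file names are taken from the patch above.

# Sketch only: restates the launch/ngrok logic added in cell [2] outside the
# notebook. build_launch_command and read_ngrok_url are hypothetical names;
# the flags (--https, --ngrok_token, --ngrok_proxy_url_file) and file names
# mirror the patch.
import os
from typing import Optional

PORT = 8003
NGROK_URL_FILE = "ngrok_url.txt"
LOG_FILE = f"/content/LOG_FILE_{PORT}.log"

def build_launch_command(ngrok_token: str) -> str:
    # Base command shared by both branches of the patched cell.
    base = (
        "LD_LIBRARY_PATH=/usr/lib64-nvidia:/usr/lib/x86_64-linux-gnu "
        f"./vcclient_latest_for_colab cui --port {PORT} --no_cui true --https False"
    )
    if ngrok_token:
        # With a token, vcclient opens an ngrok tunnel and writes the public
        # URL to NGROK_URL_FILE so the notebook can print it once ready.
        base += f" --ngrok_token {ngrok_token} --ngrok_proxy_url_file {NGROK_URL_FILE}"
    # Output is redirected to LOG_FILE and the process is backgrounded.
    return f"{base} >{LOG_FILE} 2>&1 &"

def read_ngrok_url() -> Optional[str]:
    # Called after the server is ready; returns None when no tunnel was requested.
    if os.path.exists(NGROK_URL_FILE):
        with open(NGROK_URL_FILE, "r") as f:
            return f.read().strip()
    return None

if __name__ == "__main__":
    print(build_launch_command(""))            # Colab-proxy-only launch
    print(build_launch_command("YOUR_TOKEN"))  # launch with an ngrok tunnel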