{ "cells": [ { "cell_type": "markdown", "metadata": { "id": "view-in-github", "colab_type": "text" }, "source": [ "\"Open" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "5m_Xf_2NY6mI" }, "outputs": [], "source": [ "import torch, torchaudio\n", "import requests\n", "import IPython.display as display" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "GGiC0rT2hoik" }, "outputs": [], "source": [ "!apt-get install -y espeak libsndfile1-dev\n", "!pip install flask\n", "!pip install python-socketio\n", "!pip install eventlet\n", "!pip install unidecode\n", "!pip install phonemizer\n", "!pip install retry\n", "!pip install flask\n", "!pip install flask_cors" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "WO8XzrFMZGoj" }, "outputs": [], "source": [ "# hubert = torch.hub.load(\"bshall/hubert:main\", \"hubert_soft\").cuda()\n", "# acoustic = torch.hub.load(\"bshall/acoustic-model:main\", \"hubert_soft\").cuda()\n", "# hifigan = torch.hub.load(\"bshall/hifigan:main\", \"hifigan_hubert_soft\").cuda()" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "v7xM7CnEZMTL" }, "outputs": [], "source": [ "# with open(\"example.wav\", \"wb\") as file:\n", "# response = requests.get(\"https://drive.google.com/uc?export=preview&id=1Y3KuPAhB5VcsmIaokBVKu3LUEZOfhSu8\")\n", "# file.write(response.content)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "UZox3YDVZOya" }, "outputs": [], "source": [ "# source, sr = torchaudio.load(\"emotion059.wav\")\n", "# source = torchaudio.functional.resample(source, sr, 16000)\n", "# source = source.unsqueeze(0).cuda()" ] }, { "cell_type": "markdown", "metadata": { "id": "dEZ9_zCKnXpZ" }, "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "DSAA2CMfZY7C" }, "outputs": [], "source": [ "# with torch.inference_mode():\n", "# # Extract speech units\n", "# units = hubert.units(source)\n", "# # Generate target spectrogram\n", "# mel = acoustic.generate(units).transpose(1, 2)\n", "# # Generate audio waveform\n", "# target = hifigan(mel)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "VCwjKdIUZZoi" }, "outputs": [], "source": [ "# display.Audio(target.squeeze().cpu(), rate=16000)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "vjJs35ifZbSK" }, "outputs": [], "source": [ "# data = target.squeeze().cpu()" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "0SuFlButeKXG" }, "outputs": [], "source": [ "# display.Audio(data, rate=16000)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "vFwF5Uh0eMLV" }, "outputs": [], "source": [ "# dest = torchaudio.functional.resample(target, 16000,24000)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "eIWedhF6ebuV" }, "outputs": [], "source": [ "# display.Audio(dest.squeeze().cpu(), rate=24000)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "XkCO-j9teccu" }, "outputs": [], "source": [ "# dest" ] }, { "cell_type": "code", "execution_count": 41, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "lzo_ZWmAjaby", "outputId": "ed0af7dc-1614-4d28-e9b4-b3a600cafd88" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "fatal: destination path 'voice-changer' already exists and is not an empty directory.\n", "\u001b[0m\u001b[01;34massets\u001b[0m/ \u001b[01;32mfavicon.ico\u001b[0m* \u001b[01;32mindex.js\u001b[0m*\n", 
"\u001b[01;32mcoffee.png\u001b[0m* \u001b[01;32mindex.html\u001b[0m* \u001b[01;32mindex.js.LICENSE.txt\u001b[0m*\n" ] } ], "source": [ "# (3) リポジトリのクローン\n", "!git clone https://github.com/w-okada/voice-changer.git\n", "%ls voice-changer/frontend/dist\n" ] }, { "cell_type": "code", "execution_count": 42, "metadata": { "id": "8-z9j4e_j-Wb" }, "outputs": [], "source": [ "# (4-1) 設定ファイルの配置\n", "!cp voice-changer/template/setting_colab.json voice-changer/frontend/dist/assets/setting.json\n" ] }, { "cell_type": "code", "execution_count": 50, "metadata": { "id": "-iPiSzvAepCl" }, "outputs": [], "source": [ "# (6-1) サーバの起動\n", "PORT=8087\n", "get_ipython().system_raw(f'python3 serverFlask.py {PORT} >foo 2>&1 &')" ] }, { "cell_type": "code", "execution_count": 54, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "IiWSwDjQidc7", "outputId": "2bb83f5f-965c-4b54-ac6c-7fd407daa5dc" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Using cache found in /root/.cache/torch/hub/bshall_hubert_main\n", "Using cache found in /root/.cache/torch/hub/bshall_acoustic-model_main\n", "Using cache found in /root/.cache/torch/hub/bshall_hifigan_main\n", "[2022-09-14 06:41:12,893] INFO in serverFlask: INITIALIZE MODEL\n", "[2022-09-14 06:41:12,893] INFO in serverFlask: START APP\n", "Removing weight norm...\n", " * Serving Flask app \"serverFlask\" (lazy loading)\n", " * Environment: production\n", " WARNING: This is a development server. Do not use it in a production deployment.\n", " Use a production WSGI server instead.\n", " * Debug mode: on\n", "[2022-09-14 06:41:12,902] INFO in _internal: * Running on http://0.0.0.0:8087/ (Press CTRL+C to quit)\n", "[2022-09-14 06:41:12,903] INFO in _internal: * Restarting with stat\n" ] } ], "source": [ "# (6-2) サーバの起動確認 (Ctrl+Retで実行)\n", "!cat foo" ] }, { "cell_type": "code", "execution_count": 1, "metadata": { "colab": { "base_uri": "https://localhost:8080/", "height": 205 }, "id": "WWn3HJlpin4R", "outputId": "ab69bc12-969b-46b8-8705-6ef84c4ab34f" }, "outputs": [ { "output_type": "error", "ename": "NameError", "evalue": "ignored", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;32mfrom\u001b[0m \u001b[0mgoogle\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcolab\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0moutput\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mserve_kernel_port_as_window\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mPORT\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", "\u001b[0;31mNameError\u001b[0m: name 'PORT' is not defined" ] } ], "source": [ "# (7) プロキシを起動\n", "from google.colab import output\n", "\n", "output.serve_kernel_port_as_window(PORT)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "k9OqZ-hLjKIx", "outputId": "28f3b99d-29cc-4581-c6f8-9ff0f55359a1" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "\u001b[0m\u001b[01;34massets\u001b[0m/ \u001b[01;32mfavicon.ico\u001b[0m* \u001b[01;32mindex.js\u001b[0m*\n", "\u001b[01;32mcoffee.png\u001b[0m* \u001b[01;32mindex.html\u001b[0m* 
], "metadata": { "accelerator": "GPU", "colab": { "collapsed_sections": [], "provenance": [], "authorship_tag": "ABX9TyPxoYe+Y2QsMoX8N7iTlceN", "include_colab_link": true }, "gpuClass": "standard", "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }