├── espnet_model_zoo ├── __init__.py ├── zenodo_upload.py ├── downloader.py └── table.csv ├── ci ├── test_python.sh └── test_model.py ├── .gitignore ├── setup.cfg ├── .github ├── stale.yml └── workflows │ ├── pythonpublish.yml │ ├── unittest.yaml │ └── model_test.yaml ├── test └── test_downloader.py ├── setup.py ├── README.md └── LICENSE /espnet_model_zoo/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /ci/test_python.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -euo pipefail 4 | modules="test espnet_model_zoo setup.py ci" 5 | # black 6 | if ! black --check ${modules}; then 7 | printf 'Please apply:\n $ black %s\n' "${modules}" 8 | exit 1 9 | fi 10 | # flake8 11 | flake8 --show-source $modules 12 | # pycodestyle 13 | pycodestyle -r $modules --show-source --show-pep8 14 | pytest -q 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # general 2 | *~ 3 | *.pyc 4 | \#*\# 5 | .\#* 6 | *DS_Store 7 | out.txt 8 | espnet.egg-info/ 9 | doc/_build 10 | slurm-*.out 11 | tmp* 12 | .eggs/ 13 | .hypothesis/ 14 | .idea 15 | .pytest_cache/ 16 | __pycache__/ 17 | check_autopep8 18 | .coverage 19 | htmlcov 20 | coverage.xml* 21 | bats-core/ 22 | shellcheck* 23 | check_shellcheck* 24 | test_spm.vocab 25 | test_spm.model 26 | .vscode* 27 | *.vim 28 | *.swp 29 | 30 | # packaging related 31 | dist/ 32 | *.egg-info 33 | 34 | espnet_model_zoo/*.zip 35 | espnet_model_zoo/*.tgz 36 | espnet_model_zoo/*.tar 37 | espnet_model_zoo/*/ 38 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [tool:pytest] 5 
| addopts = --cov-config=.coveragerc --verbose --durations=0 --cov=espnet_model_zoo 6 | testpaths = test 7 | 8 | 9 | # [H238] old style class declaration, use new style (inherit from `object`) 10 | # [H102 H103] Newly contributed Source Code should be licensed under the Apache 2.0 license. All source files should have the following header:: 11 | # [W504] Line break occurred after a binary operator 12 | 13 | # Black says "W503, E203 is incompatible with PEP 8" 14 | # [W503] Line break occurred before a binary operator 15 | # [E203] whitespace before : 16 | 17 | [flake8] 18 | ignore = H102,H103,W503,H238,E203 19 | max-line-length = 88 20 | [pycodestyle] 21 | ignore = H102,H103,W503,H238,E203 22 | max-line-length = 88 23 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Number of days of inactivity before an issue becomes stale 2 | daysUntilStale: 45 3 | # Number of days of inactivity before a stale issue is closed 4 | daysUntilClose: 30 5 | # Issues with these labels will never be considered stale 6 | exemptLabels: 7 | - Roadmap 8 | - Bug 9 | # Label to use when marking an issue as stale 10 | staleLabel: Stale 11 | # Comment to post when marking an issue as stale. Set to `false` to disable 12 | markComment: > 13 | This issue has been automatically marked as stale because it has not had 14 | recent activity. It will be closed if no further activity occurs. Thank you 15 | for your contributions. 16 | unmarkComment: false 17 | # Comment to post when closing a stale issue. Set to `false` to disable 18 | closeComment: > 19 | This issue is closed. Please re-open if needed. 
20 | -------------------------------------------------------------------------------- /.github/workflows/pythonpublish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | push: 8 | tags: 9 | - 'v*' 10 | 11 | jobs: 12 | deploy: 13 | 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | - name: Set up Python 19 | uses: actions/setup-python@v1 20 | with: 21 | python-version: '3.8' 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install setuptools wheel twine 26 | - name: Build and publish 27 | env: 28 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 29 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 30 | run: | 31 | python setup.py sdist bdist_wheel 32 | twine upload dist/* 33 | -------------------------------------------------------------------------------- /.github/workflows/unittest.yaml: -------------------------------------------------------------------------------- 1 | name: Unitest 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | linter_and_test: 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | max-parallel: 20 16 | matrix: 17 | # os: [ubuntu-16.04, ubuntu-18.04] 18 | os: [ubuntu-20.04] 19 | python-version: [3.9] 20 | espnet-version: ["espnet", "git+https://github.com/espnet/espnet.git"] 21 | steps: 22 | - uses: actions/checkout@master 23 | - uses: actions/cache@v1 24 | with: 25 | path: ~/.cache/pip 26 | key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ matrix.espnet-version }}-${{ hashFiles('**/setup.py') }} 27 | - uses: actions/setup-python@v1 28 | with: 29 | python-version: ${{ 
matrix.python-version }} 30 | architecture: 'x64' 31 | - name: install dependencies 32 | run: | 33 | sudo apt-get update -qq 34 | sudo apt-get install -qq -y cmake g++-7 libsndfile1-dev bc 35 | - name: install 36 | env: 37 | ESPNET_VERSION: ${{ matrix.espnet-version }} 38 | run: | 39 | pip install -U pip setuptools wheel 40 | pip install "${ESPNET_VERSION}" 41 | pip install ".[test]" 42 | - name: test python 43 | run: | 44 | ./ci/test_python.sh 45 | - name: codecov 46 | run: | 47 | bash <(curl -s https://codecov.io/bash) 48 | -------------------------------------------------------------------------------- /.github/workflows/model_test.yaml: -------------------------------------------------------------------------------- 1 | name: Model test 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | test: 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | max-parallel: 20 16 | matrix: 17 | # os: [ubuntu-16.04, ubuntu-18.04] 18 | os: [ubuntu-18.04] 19 | python-version: [3.8] 20 | # espnet-version: ["espnet", "git+https://github.com/espnet/espnet.git"] 21 | espnet-version: ["git+https://github.com/espnet/espnet.git"] 22 | steps: 23 | - uses: actions/checkout@master 24 | - uses: actions/cache@v1 25 | with: 26 | path: ~/.cache/pip 27 | key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ matrix.espnet-version }}-${{ hashFiles('**/setup.py') }} 28 | - uses: actions/setup-python@v1 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | architecture: 'x64' 32 | - name: install dependencies 33 | run: | 34 | sudo apt-get update -qq 35 | sudo apt-get install -qq -y cmake g++-7 libsndfile1-dev bc 36 | - name: install espnet 37 | env: 38 | ESPNET_VERSION: ${{ matrix.espnet-version }} 39 | run: | 40 | pip install -U pip setuptools wheel 41 | pip install "${ESPNET_VERSION}" 42 | pip install torch 43 | pip install ".[test]" 44 | - name: install pyopenjtalk 45 | run: | 46 | mkdir pyopenjtalk 47 | cd 
pyopenjtalk 48 | wget https://raw.githubusercontent.com/espnet/espnet/master/tools/installers/install_pyopenjtalk.sh 49 | chmod +x install_pyopenjtalk.sh 50 | ./install_pyopenjtalk.sh 51 | - name: test model 52 | run: | 53 | export LD_LIBRARY_PATH=$(pwd)/pyopenjtalk/lib:${LD_LIBRARY_PATH} 54 | pytest ./ci/test_model.py 55 | -------------------------------------------------------------------------------- /test/test_downloader.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | import pytest 3 | 4 | from espnet_model_zoo.downloader import cmd_download 5 | from espnet_model_zoo.downloader import cmd_query 6 | from espnet_model_zoo.downloader import download 7 | from espnet_model_zoo.downloader import ModelDownloader 8 | 9 | 10 | def test_download(): 11 | download("http://example.com", "index.html") 12 | 13 | 14 | def test_update_model_table(tmp_path): 15 | d = ModelDownloader(tmp_path) 16 | d.update_model_table() 17 | 18 | 19 | def test_get_data_frame(): 20 | d = ModelDownloader() 21 | d.get_data_frame() 22 | 23 | 24 | def test_new_cachedir(tmp_path): 25 | ModelDownloader(tmp_path) 26 | 27 | 28 | def test_download_and_unpack_names_with_condition(): 29 | d = ModelDownloader() 30 | d.query("name", task="asr") 31 | 32 | 33 | def test_get_model_names_and_urls(): 34 | d = ModelDownloader() 35 | d.query(["name", "url"], task="asr") 36 | 37 | 38 | def test_get_model_names_non_matching(): 39 | d = ModelDownloader() 40 | assert d.query("name", task="dummy") == [] 41 | 42 | 43 | def test_download_and_unpack_with_url(): 44 | d = ModelDownloader() 45 | d.download_and_unpack("https://zenodo.org/record/3951842/files/test.zip?download=1") 46 | 47 | 48 | def test_download_and_unpack_with_name(): 49 | d = ModelDownloader() 50 | d.download_and_unpack("test") 51 | 52 | 53 | def test_download_and_unpack_no_inputting(): 54 | d = ModelDownloader() 55 | with pytest.raises(TypeError): 56 | d.download_and_unpack() 57 | 58 | 59 
| def test_download_and_unpack_non_matching(): 60 | d = ModelDownloader() 61 | with pytest.raises(RuntimeError): 62 | d.download_and_unpack(task="dummy") 63 | 64 | 65 | def test_download_and_unpack_local_file(): 66 | d = ModelDownloader() 67 | path = d.download("test") 68 | d.download_and_unpack(path) 69 | 70 | 71 | def test_download_and_clean_cache(): 72 | d = ModelDownloader() 73 | d.download_and_unpack("test") 74 | p = d.download("test") 75 | d.clean_cache("test") 76 | assert not Path(p).exists() 77 | 78 | 79 | def test_cmd_download(): 80 | cmd_download(["test"]) 81 | 82 | 83 | def test_query(): 84 | cmd_query([]) 85 | -------------------------------------------------------------------------------- /ci/test_model.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | 4 | import numpy as np 5 | 6 | from espnet2.bin.asr_inference import Speech2Text 7 | from espnet2.bin.asr_inference_streaming import Speech2TextStreaming 8 | from espnet2.bin.tts_inference import Text2Speech 9 | from espnet_model_zoo.downloader import ModelDownloader 10 | 11 | 12 | def _asr(model_name): 13 | d = ModelDownloader("downloads") 14 | speech2text = Speech2Text(**d.download_and_unpack(model_name, quiet=True)) 15 | speech = np.zeros((10000,), dtype=np.float32) 16 | nbests = speech2text(speech) 17 | text, *_ = nbests[0] 18 | assert isinstance(text, str) 19 | 20 | 21 | def _asr_streaming(model_name): 22 | d = ModelDownloader("downloads") 23 | speech2text = Speech2TextStreaming(**d.download_and_unpack(model_name, quiet=True)) 24 | speech = np.zeros((10000,), dtype=np.float32) 25 | nbests = speech2text(speech) 26 | text, *_ = nbests[0] 27 | assert isinstance(text, str) 28 | 29 | 30 | def _tts(model_name): 31 | d = ModelDownloader("downloads") 32 | text2speech = Text2Speech(**d.download_and_unpack(model_name, quiet=True)) 33 | inputs = {"text": "foo"} 34 | if text2speech.use_speech: 35 | inputs["speech"] = np.zeros((10000,), 
dtype=np.float32) 36 | if text2speech.use_spembs: 37 | inputs["spembs"] = np.zeros((text2speech.tts.spk_embed_dim,), dtype=np.float32) 38 | if text2speech.use_sids: 39 | inputs["sids"] = np.ones((1,), dtype=np.int64) 40 | if text2speech.use_lids: 41 | inputs["lids"] = np.ones((1,), dtype=np.int64) 42 | text2speech(**inputs) 43 | 44 | 45 | def test_model(): 46 | d = ModelDownloader() 47 | tasks = ["asr", "asr_stream", "tts"] 48 | 49 | for task in tasks: 50 | for corpus in list(set(d.query("corpus", task=task))): 51 | for model_name in d.query(task=task, corpus=corpus): 52 | if d.query("valid", name=model_name)[0] == "false": 53 | continue 54 | print(f"#### Test {model_name} ####") 55 | 56 | if task == "asr": 57 | _asr(model_name) 58 | elif task == "asr_stream": 59 | _asr_streaming(model_name) 60 | elif task == "tts": 61 | _tts(model_name) 62 | else: 63 | raise NotImplementedError(f"task={task}") 64 | 65 | # NOTE(kan-bayashi): remove and recreate cache dir to reduce the disk usage. 66 | shutil.rmtree("downloads") 67 | os.makedirs("downloads") 68 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | from setuptools import find_packages 4 | from setuptools import setup 5 | 6 | 7 | requirements = { 8 | "install": [ 9 | "pandas", 10 | "requests", 11 | "tqdm", 12 | "numpy", 13 | "espnet", 14 | "huggingface_hub", 15 | "filelock", 16 | "torchaudio", 17 | ], 18 | "setup": ["pytest-runner"], 19 | "test": [ 20 | "pytest>=3.3.0", 21 | "pytest-pythonpath>=0.7.3", 22 | "pytest-cov>=2.7.1", 23 | "hacking>=1.1.0", 24 | "mock>=2.0.0", 25 | "pycodestyle", 26 | "flake8>=3.7.8", 27 | "black", 28 | ], 29 | } 30 | 31 | install_requires = requirements["install"] 32 | setup_requires = requirements["setup"] 33 | tests_require = requirements["test"] 34 | extras_require = { 35 | k: v for k, v in requirements.items() if k not in 
["install", "setup"] 36 | } 37 | 38 | dirname = os.path.dirname(__file__) 39 | setup( 40 | name="espnet_model_zoo", 41 | version="0.1.8", 42 | url="http://github.com/espnet/espnet_model_zoo", 43 | description="ESPnet Model Zoo", 44 | long_description=open(os.path.join(dirname, "README.md"), encoding="utf-8").read(), 45 | long_description_content_type="text/markdown", 46 | license="Apache Software License", 47 | packages=find_packages(include=["espnet_model_zoo*"]), 48 | package_data={"espnet_model_zoo": ["table.csv"]}, 49 | entry_points={ 50 | "console_scripts": [ 51 | "espnet_model_zoo_upload = espnet_model_zoo.zenodo_upload:main", 52 | "espnet_model_zoo_download = espnet_model_zoo.downloader:cmd_download", 53 | "espnet_model_zoo_query = espnet_model_zoo.downloader:cmd_query", 54 | ], 55 | }, 56 | install_requires=install_requires, 57 | setup_requires=setup_requires, 58 | tests_require=tests_require, 59 | extras_require=extras_require, 60 | python_requires=">=3.6.0", 61 | classifiers=[ 62 | "Programming Language :: Python", 63 | "Programming Language :: Python :: 3", 64 | "Programming Language :: Python :: 3.6", 65 | "Programming Language :: Python :: 3.7", 66 | "Programming Language :: Python :: 3.8", 67 | "Programming Language :: Python :: 3.9", 68 | "Development Status :: 5 - Production/Stable", 69 | "Intended Audience :: Science/Research", 70 | "Operating System :: POSIX :: Linux", 71 | "License :: OSI Approved :: Apache Software License", 72 | "Topic :: Software Development :: Libraries :: Python Modules", 73 | ], 74 | ) 75 | -------------------------------------------------------------------------------- /espnet_model_zoo/zenodo_upload.py: -------------------------------------------------------------------------------- 1 | """Upload files to Zenodo. 2 | 3 | You need to do as follows in order to access zenodo: 4 | 5 | 1. Sign up to Zenodo: https://zenodo.org/ 6 | 2. 
Create access_token: https://zenodo.org/account/settings/applications/tokens/new/ 7 | """ 8 | 9 | import argparse 10 | from datetime import datetime 11 | from getpass import getpass 12 | import json 13 | import os 14 | from pathlib import Path 15 | import requests 16 | from typing import Collection 17 | from typing import Union 18 | 19 | from espnet2.utils import config_argparse 20 | from espnet2.utils.types import str2bool 21 | 22 | 23 | class Zenodo: 24 | """Helper class to invoke Zenodo API 25 | 26 | REST API of zenodo: https://developers.zenodo.org/ 27 | 28 | """ 29 | 30 | def __init__(self, access_token: str, use_sandbox: bool = False): 31 | if use_sandbox: 32 | self.zenodo_url = "https://sandbox.zenodo.org" 33 | else: 34 | self.zenodo_url = "https://zenodo.org" 35 | 36 | self.params = {"access_token": access_token} 37 | self.headers = {"Content-Type": "application/json"} 38 | 39 | def create_deposition(self) -> requests.models.Response: 40 | r = requests.post( 41 | f"{self.zenodo_url}/api/deposit/depositions", 42 | params=self.params, 43 | json={}, 44 | headers=self.headers, 45 | ) 46 | if r.status_code != 201: 47 | raise RuntimeError(r.json()["message"]) 48 | return r 49 | 50 | def get_deposition(self, r: Union[requests.models.Response, int]): 51 | if isinstance(r, requests.models.Response): 52 | deposition_id = r.json()["id"] 53 | else: 54 | deposition_id = r 55 | r = requests.get( 56 | f"{self.zenodo_url}/api/deposit/depositions/{deposition_id}", 57 | params=self.params, 58 | json={}, 59 | headers=self.headers, 60 | ) 61 | if r.status_code != 200: 62 | raise RuntimeError(r.json()["message"]) 63 | return r 64 | 65 | def update_metadata( 66 | self, r: Union[requests.models.Response, int], data 67 | ) -> requests.models.Response: 68 | if isinstance(r, requests.models.Response): 69 | deposition_id = r.json()["id"] 70 | else: 71 | deposition_id = r 72 | 73 | r = requests.put( 74 | f"{self.zenodo_url}/api/deposit/depositions/{deposition_id}", 75 | 
params=self.params, 76 | data=json.dumps(data), 77 | headers=self.headers, 78 | ) 79 | if r.status_code != 200: 80 | raise RuntimeError(r.json()["message"]) 81 | return r 82 | 83 | def upload_file( 84 | self, r: Union[requests.models.Response, int], filename: Union[Path, str] 85 | ) -> requests.models.Response: 86 | if isinstance(r, int): 87 | r = requests.get( 88 | f"{self.zenodo_url}/api/deposit/depositions/{r}", headers=self.headers 89 | ) 90 | 91 | bucket_url = r.json()["links"]["bucket"] 92 | name = Path(filename).name 93 | with open(filename, "rb") as fp: 94 | r = requests.put( 95 | f"{bucket_url}/{name}", 96 | data=fp, 97 | # No headers included since it's a raw byte request 98 | params=self.params, 99 | ) 100 | if r.status_code != 200: 101 | raise RuntimeError(r.json()["message"]) 102 | return r 103 | 104 | def publish( 105 | self, r: Union[requests.models.Response, int] 106 | ) -> requests.models.Response: 107 | if isinstance(r, requests.models.Response): 108 | deposition_id = r.json()["id"] 109 | else: 110 | deposition_id = r 111 | 112 | r = requests.post( 113 | f"{self.zenodo_url}/api/deposit/depositions/" 114 | f"{deposition_id}/actions/publish", 115 | params=self.params, 116 | ) 117 | if r.status_code != 202: 118 | raise RuntimeError(r.json()["message"]) 119 | return r 120 | 121 | 122 | def upload( 123 | access_token: str, 124 | title: str, 125 | creator_name: str, 126 | description: str = "", 127 | files: Collection[Union[Path, str]] = (), 128 | affiliation: str = None, 129 | orcid: str = None, 130 | gnd: str = None, 131 | upload_type: str = "other", 132 | license: str = "CC-BY-4.0", 133 | keywords: Collection[str] = (), 134 | related_identifiers: Collection[dict] = (), 135 | community_identifer: str = None, 136 | use_sandbox: bool = True, 137 | publish: bool = False, 138 | ): 139 | zenodo = Zenodo(access_token, use_sandbox=use_sandbox) 140 | r = zenodo.create_deposition() 141 | 142 | # Update metatdata using old API 143 | creator = {"name": 
creator_name} 144 | if affiliation is not None: 145 | creator["affiliation"] = affiliation 146 | if orcid is not None: 147 | creator["orcid"] = orcid 148 | if gnd is not None: 149 | creator["gnd"] = gnd 150 | data = { 151 | "metadata": { 152 | "upload_type": upload_type, 153 | "publication_date": datetime.now().strftime("%Y-%m-%d"), 154 | "title": title, 155 | "description": description, 156 | "creators": [creator], 157 | "license": license, 158 | "keywords": list(keywords), 159 | "related_identifiers": list(related_identifiers), 160 | } 161 | } 162 | if community_identifer is not None: 163 | data["communities"] = [{"identifier": community_identifer}] 164 | zenodo.update_metadata(r, data) 165 | 166 | # Upload files using new API 167 | for f in files: 168 | # Check file existing 169 | if not Path(f).exists(): 170 | raise FileNotFoundError(f"{f} is not found") 171 | for f in files: 172 | print(f"Now uploading {f}...") 173 | zenodo.upload_file(r, f) 174 | 175 | if publish: 176 | r = zenodo.publish(r) 177 | url = r.json()["links"]["latest_html"] 178 | print(f"Successfully published. Go to {url}") 179 | else: 180 | url = r.json()["links"]["html"] 181 | print(f"Successfully uploaded, but not published yet. 
Go to {url}") 182 | 183 | 184 | def upload_espnet_model( 185 | access_token: str, 186 | title: str, 187 | creator_name: str, 188 | file: Collection[Union[Path, str]] = (), 189 | description: str = "", 190 | description_file: str = None, 191 | affiliation: str = None, 192 | license: str = "CC-BY-4.0", 193 | orcid: str = None, 194 | gnd: str = None, 195 | use_sandbox: bool = False, 196 | publish: bool = False, 197 | ): 198 | if description_file is not None: 199 | with open(description_file, "r", encoding="utf-8") as f: 200 | description = f.read() 201 | 202 | upload( 203 | access_token=access_token, 204 | title=title, 205 | description=description, 206 | creator_name=creator_name, 207 | files=file, 208 | keywords=[ 209 | "ESPnet", 210 | "deep-learning", 211 | "python", 212 | "pytorch", 213 | "speech-recognition", 214 | "speech-synthesis", 215 | "speech-translation", 216 | "machine-translation", 217 | ], 218 | related_identifiers=[ 219 | { 220 | "relation": "isSupplementTo", 221 | "identifier": "https://github.com/espnet/espnet", 222 | } 223 | ], 224 | affiliation=affiliation, 225 | license=license, 226 | orcid=orcid, 227 | gnd=gnd, 228 | use_sandbox=use_sandbox, 229 | publish=publish, 230 | ) 231 | 232 | 233 | def get_parser(): 234 | parser = config_argparse.ArgumentParser( 235 | description="Upload files to Zenodo", 236 | formatter_class=argparse.ArgumentDefaultsHelpFormatter, 237 | ) 238 | parser.add_argument( 239 | "--access_token", 240 | help="Get your access_token from " 241 | "https://zenodo.org/account/settings/applications/ or " 242 | "https://sandbox.zenodo.org/account/settings/applications/ . " 243 | "You can also give it from an environment variable 'ACCESS_TOKEN'", 244 | ) 245 | parser.add_argument( 246 | "--title", 247 | required=True, 248 | help="e.g. 
ESPnet pretrained model, MT, " 249 | "Fisher-CallHome Spanish (Es->En), Transformer", 250 | ) 251 | parser.add_argument("--creator_name", required=True, help="Your name") 252 | parser.add_argument("--file", nargs="+", required=True) 253 | group = parser.add_mutually_exclusive_group(required=True) 254 | group.add_argument("--description", help="Give the description") 255 | group.add_argument("--description_file", help="Give the description from file") 256 | parser.add_argument( 257 | "--use_sandbox", 258 | type=str2bool, 259 | default=False, 260 | help="Use zenodo sandbox for testing", 261 | ) 262 | parser.add_argument( 263 | "--publish", type=str2bool, default=False, help="Publish after uploading" 264 | ) 265 | parser.add_argument("--license", default="CC-BY-4.0") 266 | parser.add_argument("--affiliation") 267 | parser.add_argument("--orcid") 268 | parser.add_argument("--gnd") 269 | return parser 270 | 271 | 272 | def main(cmd=None): 273 | # espnet_model_zoo_upload 274 | 275 | parser = get_parser() 276 | args = parser.parse_args(cmd) 277 | 278 | # If --access_token is not given, get from "ACCESS_TOKEN" 279 | if args.access_token is None: 280 | args.access_token = os.environ.get("ACCESS_TOKEN") 281 | 282 | # If neither is given, input from stdin 283 | if args.access_token is None: 284 | if args.use_sandbox: 285 | zenodo_url = "https://sandbox.zenodo.org" 286 | else: 287 | zenodo_url = "https://zenodo.org" 288 | args.access_token = getpass( 289 | "Input Zenodo API Token\n" 290 | "(You can create it from " 291 | f"{zenodo_url}/account/settings/applications/tokens/new/): " 292 | ) 293 | 294 | kwargs = vars(args) 295 | kwargs.pop("config") 296 | upload_espnet_model(**kwargs) 297 | 298 | 299 | if __name__ == "__main__": 300 | main() 301 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ESPnet Model Zoo 2 | 3 | [![PyPI 
version](https://badge.fury.io/py/espnet-model-zoo.svg)](https://badge.fury.io/py/espnet-model-zoo) 4 | [![Python Versions](https://img.shields.io/pypi/pyversions/espnet_model_zoo.svg)](https://pypi.org/project/espnet_model_zoo/) 5 | [![Downloads](https://pepy.tech/badge/espnet_model_zoo)](https://pepy.tech/project/espnet_model_zoo) 6 | [![GitHub license](https://img.shields.io/github/license/espnet/espnet_model_zoo.svg)](https://github.com/espnet/espnet_model_zoo) 7 | [![Unitest](https://github.com/espnet/espnet_model_zoo/workflows/Unitest/badge.svg)](https://github.com/espnet/espnet_model_zoo/actions?query=workflow%3AUnitest) 8 | [![Model test](https://github.com/espnet/espnet_model_zoo/workflows/Model%20test/badge.svg)](https://github.com/espnet/espnet_model_zoo/actions?query=workflow%3A%22Model+test%22) 9 | [![codecov](https://codecov.io/gh/espnet/espnet_model_zoo/branch/master/graph/badge.svg)](https://codecov.io/gh/espnet/espnet_model_zoo) 10 | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 11 | 12 | Utilities managing the pretrained models created by [ESPnet](https://github.com/espnet/espnet). This function is inspired by the [Asteroid pretrained model function](https://github.com/mpariente/asteroid/blob/master/docs/source/readmes/pretrained_models.md). 13 | 14 | - **From version 0.1.0, the huggingface models can be also used**: https://huggingface.co/models?filter=espnet 15 | - Zenodo community: https://zenodo.org/communities/espnet/ 16 | - Registered models: [table.csv](espnet_model_zoo/table.csv) 17 | 18 | ## Install 19 | 20 | ``` 21 | pip install torch 22 | pip install espnet_model_zoo 23 | ``` 24 | 25 | ## Python API for inference 26 | `model_name` in the following section should be `huggingface_id` or one of the tags in the [table.csv](espnet_model_zoo/table.csv). 27 | Or you can directly provide zenodo URL (e.g., `https://zenodo.org/record/xxxxxxx/files/hogehoge.zip?download=1`). 
28 | 29 | ### ASR 30 | 31 | ```python 32 | import soundfile 33 | from espnet2.bin.asr_inference import Speech2Text 34 | speech2text = Speech2Text.from_pretrained( 35 | "model_name", 36 | # Decoding parameters are not included in the model file 37 | maxlenratio=0.0, 38 | minlenratio=0.0, 39 | beam_size=20, 40 | ctc_weight=0.3, 41 | lm_weight=0.5, 42 | penalty=0.0, 43 | nbest=1 44 | ) 45 | # Confirm the sampling rate is equal to that of the training corpus. 46 | # If not, you need to resample the audio data before inputting to speech2text 47 | speech, rate = soundfile.read("speech.wav") 48 | nbests = speech2text(speech) 49 | 50 | text, *_ = nbests[0] 51 | print(text) 52 | ``` 53 | 54 | ### TTS 55 | 56 | ```python 57 | import soundfile 58 | from espnet2.bin.tts_inference import Text2Speech 59 | text2speech = Text2Speech.from_pretrained("model_name") 60 | speech = text2speech("foobar")["wav"] 61 | soundfile.write("out.wav", speech.numpy(), text2speech.fs, "PCM_16") 62 | ``` 63 | 64 | ### Speech separation 65 | 66 | ```python 67 | import soundfile 68 | from espnet2.bin.enh_inference import SeparateSpeech 69 | separate_speech = SeparateSpeech.from_pretrained( 70 | "model_name", 71 | # for segment-wise process on long speech 72 | segment_size=2.4, 73 | hop_size=0.8, 74 | normalize_segment_scale=False, 75 | show_progressbar=True, 76 | ref_channel=None, 77 | normalize_output_wav=True, 78 | ) 79 | # Confirm the sampling rate is equal to that of the training corpus. 80 | # If not, you need to resample the audio data before inputting to speech2text 81 | speech, rate = soundfile.read("long_speech.wav") 82 | waves = separate_speech(speech[None, ...], fs=rate) 83 | ``` 84 | 85 | This API allows processing both short audio samples and long audio samples. For long audio samples, you can set the value of arguments segment_size, hop_size (optionally normalize_segment_scale and show_progressbar) to perform segment-wise speech enhancement/separation on the input speech. 
Note that the segment-wise processing is disabled by default. 86 | 87 | 88 |
For old ESPnet (<=0.10.1)
89 | 90 | ### ASR 91 | 92 | ```python 93 | import soundfile 94 | from espnet_model_zoo.downloader import ModelDownloader 95 | from espnet2.bin.asr_inference import Speech2Text 96 | d = ModelDownloader() 97 | speech2text = Speech2Text( 98 | **d.download_and_unpack("model_name"), 99 | # Decoding parameters are not included in the model file 100 | maxlenratio=0.0, 101 | minlenratio=0.0, 102 | beam_size=20, 103 | ctc_weight=0.3, 104 | lm_weight=0.5, 105 | penalty=0.0, 106 | nbest=1 107 | ) 108 | ``` 109 | 110 | ### TTS 111 | 112 | ```python 113 | import soundfile 114 | from espnet_model_zoo.downloader import ModelDownloader 115 | from espnet2.bin.tts_inference import Text2Speech 116 | d = ModelDownloader() 117 | text2speech = Text2Speech(**d.download_and_unpack("model_name")) 118 | ``` 119 | 120 | ### Speech separation 121 | 122 | ```python 123 | import soundfile 124 | from espnet_model_zoo.downloader import ModelDownloader 125 | from espnet2.bin.enh_inference import SeparateSpeech 126 | d = ModelDownloader() 127 | separate_speech = SeparateSpeech( 128 | **d.download_and_unpack("model_name"), 129 | # for segment-wise process on long speech 130 | segment_size=2.4, 131 | hop_size=0.8, 132 | normalize_segment_scale=False, 133 | show_progressbar=True, 134 | ref_channel=None, 135 | normalize_output_wav=True, 136 | ) 137 | ``` 138 |
139 | 140 | 141 | ## Instruction for ModelDownloader 142 | 143 | ```python 144 | from espnet_model_zoo.downloader import ModelDownloader 145 | d = ModelDownloader("~/.cache/espnet") # Specify cachedir 146 | d = ModelDownloader() # is used as cachedir by default 147 | ``` 148 | 149 | To obtain a model, you need to give a `huggingface_id`model` or a tag , which is listed in [table.csv](espnet_model_zoo/table.csv). 150 | 151 | ```python 152 | >>> d.download_and_unpack("kamo-naoyuki/mini_an4_asr_train_raw_bpe_valid.acc.best") 153 | {"asr_train_config": , "asr_model_file": , ...} 154 | ``` 155 | 156 | You can specify the revision if it's huggingface_id giving with `@`: 157 | 158 | ```python 159 | >>> d.download_and_unpack("kamo-naoyuki/mini_an4_asr_train_raw_bpe_valid.acc.best@") 160 | {"asr_train_config": , "asr_model_file": , ...} 161 | ``` 162 | 163 | Note that if the model already exists, you can skip downloading and unpacking. 164 | 165 | You can also get a model with certain conditions. 166 | 167 | ```python 168 | d.download_and_unpack(task="asr", corpus="wsj") 169 | ``` 170 | 171 | If multiple models are found with the condition, the last model is selected. 172 | You can also specify the condition using "version" option. 173 | 174 | ```python 175 | d.download_and_unpack(task="asr", corpus="wsj", version=-1) # Get the last model 176 | d.download_and_unpack(task="asr", corpus="wsj", version=-2) # Get previous model 177 | ``` 178 | 179 | You can also obtain it from the URL directly. 180 | 181 | ```python 182 | d.download_and_unpack("https://zenodo.org/record/...") 183 | ``` 184 | 185 | If you need to use a local model file using this API, you can also give it. 
186 | 187 | ```python 188 | d.download_and_unpack("./some/where/model.zip") 189 | ``` 190 | 191 | In this case, the contents are also expanded in the cache directory, 192 | but the model is identified by the file path, 193 | so if you move the model to somewhere and unpack again, 194 | it's treated as another model, 195 | thus the contents are expanded again at another place. 196 | 197 | ## Query model names 198 | 199 | You can view the model names from our Zenodo community, https://zenodo.org/communities/espnet/, 200 | or using `query()`. All information are written in [table.csv](espnet_model_zoo/table.csv). 201 | 202 | ```python 203 | d.query("name") 204 | ``` 205 | 206 | You can also show them with specifying certain conditions. 207 | 208 | ```python 209 | d.query("name", task="asr") 210 | ``` 211 | 212 | ## Command line tools 213 | 214 | - `espnet_model_zoo_query` 215 | 216 | ```sh 217 | # Query model name 218 | espnet_model_zoo_query task=asr corpus=wsj 219 | # Show all model name 220 | espnet_model_zoo_query 221 | # Query the other key 222 | espnet_model_zoo_query --key url task=asr corpus=wsj 223 | ``` 224 | - `espnet_model_zoo_download` 225 | 226 | ```sh 227 | espnet_model_zoo_download # Print the path of the downloaded file 228 | espnet_model_zoo_download --unpack true # Print the path of unpacked files 229 | ``` 230 | - `espnet_model_zoo_upload` 231 | 232 | ```sh 233 | export ACCESS_TOKEN= 234 | espnet_zenodo_upload \ 235 | --file \ 236 | --title \ 237 | --description <description> \ 238 | --creator_name <your-git-account> 239 | ``` 240 | 241 | ## Use pretrained model in ESPnet recipe 242 | 243 | ```sh 244 | # e.g. ASR WSJ task 245 | git clone https://github.com/espnet/espnet 246 | pip install -e . 247 | cd egs2/wsj/asr1 248 | ./run.sh --skip_data_prep false --skip_train true --download_model kamo-naoyuki/wsj 249 | ``` 250 | 251 | ## Register your model 252 | 253 | ### Huggingface 254 | 1. Upload your model using huggingface API 255 | 256 | 1. 
(if you do not have an HF hub account) Go to https://huggingface.co and create an HF account by clicking a `sign up` button below. 257 | ![image](https://user-images.githubusercontent.com/11741550/147585941-af1a7e88-934e-4e24-b30e-4b120dbc023a.png) 258 | 2. From a [new model](https://huggingface.co/new) link in the profile, create a new model repository. Please include a recipe name (e.g., aidatatang_200zh) and model info (e.g., conformer) in the repository name 259 | ![image](https://user-images.githubusercontent.com/11741550/147586093-51c98c53-6d23-45a0-b359-14a4489cc970.png) 260 | 3. In the espnet recipe, execute the following command: 261 | ``` 262 | ./run.sh --stage 15 --skip_upload_hf false --hf_repo sw005320/aidatatang_200zh_conformer 263 | ``` 264 | 4. Please follow the instruction (e.g., type the HF Username/Password) 265 | 5. If it works successfully, you can get the following messages 266 | ![image](https://user-images.githubusercontent.com/11741550/147586699-a3bb5a49-8b59-417d-b376-4d1ec270fb71.png) 267 | 268 | 1. Create a Pull Request to modify [table.csv](espnet_model_zoo/table.csv) 269 | 270 | The model can be registered in [table.csv](https://github.com/espnet/espnet_model_zoo/blob/master/espnet_model_zoo/table.csv). 271 | Then, the model will be tested in the CI. 272 | Note that, unlike the zenodo case, you don't need to add the URL because huggingface_id itself can specify the model file, so please fill the value as `https://huggingface.co/`. 273 | 274 | e.g. `table.csv` 275 | 276 | ``` 277 | ... 278 | aidatatang_200zh,asr,sw005320/aidatatang_200zh_conformer,https://huggingface.co/,16000,zh,,,,,true 279 | ``` 280 | 1. (Administrator does) Increment the third version number of [setup.py](setup.py), e.g. 0.0.3 -> 0.0.4 281 | 1. (Administrator does) Release new version 282 | 283 | 284 | ### Zenodo (Obsolete) 285 | 286 | 1. 
Upload your model to Zenodo 287 | 288 | You need to [signup to Zenodo](https://zenodo.org/) and [create an access token](https://zenodo.org/account/settings/applications/tokens/new/) to upload models. 289 | You can upload your own model by using `espnet_model_zoo_upload` command freely, 290 | but we normally upload a model using [recipes](https://github.com/espnet/espnet/blob/master/egs2/TEMPLATE). 291 | 292 | 1. Create a Pull Request to modify [table.csv](espnet_model_zoo/table.csv) 293 | 294 | You need to append your record at the last line. 295 | 1. (Administrator does) Increment the third version number of [setup.py](setup.py), e.g. 0.0.3 -> 0.0.4 296 | 1. (Administrator does) Release new version 297 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2017 Johns Hopkins University (Shinji Watanabe) 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /espnet_model_zoo/downloader.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from distutils.util import strtobool 3 | import hashlib 4 | import os 5 | from pathlib import Path 6 | import re 7 | import shutil 8 | import tempfile 9 | from typing import Dict 10 | from typing import List 11 | from typing import Sequence 12 | from typing import Tuple 13 | from typing import Union 14 | import warnings 15 | 16 | from filelock import FileLock 17 | from huggingface_hub import snapshot_download 18 | import pandas as pd 19 | import requests 20 | from tqdm import tqdm 21 | import yaml 22 | 23 | from espnet2.main_funcs.pack_funcs import find_path_and_change_it_recursive 24 | from espnet2.main_funcs.pack_funcs import get_dict_from_cache 25 | from espnet2.main_funcs.pack_funcs import unpack 26 | 27 | 28 | MODELS_URL = ( 29 | 
"https://raw.githubusercontent.com/espnet/espnet_model_zoo/master/" 30 | "espnet_model_zoo/table.csv" 31 | ) 32 | 33 | 34 | URL_REGEX = re.compile( 35 | r"^(?:http|ftp)s?://" # http:// or https:// 36 | r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)" 37 | r"+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain... 38 | r"localhost|" # localhost... 39 | r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip 40 | r"(?::\d+)?" # optional port 41 | r"(?:/?|[/?]\S+)$", 42 | re.IGNORECASE, 43 | ) 44 | 45 | 46 | def is_url(url: str) -> bool: 47 | return re.match(URL_REGEX, url) is not None 48 | 49 | 50 | def str_to_hash(string: Union[str, Path]) -> str: 51 | return hashlib.md5(str(string).encode("utf-8")).hexdigest() 52 | 53 | 54 | def download( 55 | url, output_path, retry: int = 3, chunk_size: int = 8192, quiet: bool = False 56 | ): 57 | # Set retry 58 | session = requests.Session() 59 | session.mount("http://", requests.adapters.HTTPAdapter(max_retries=retry)) 60 | session.mount("https://", requests.adapters.HTTPAdapter(max_retries=retry)) 61 | 62 | # Timeout 63 | response = session.get(url=url, stream=True, timeout=(10.0, 30.0)) 64 | file_size = int(response.headers["content-length"]) 65 | 66 | # Raise error when connection error 67 | response.raise_for_status() 68 | 69 | # Write in temporary file 70 | with tempfile.TemporaryDirectory() as d: 71 | with (Path(d) / "tmp").open("wb") as f: 72 | if quiet: 73 | for chunk in response.iter_content(chunk_size=chunk_size): 74 | if chunk: 75 | f.write(chunk) 76 | else: 77 | with tqdm( 78 | desc=url, 79 | total=file_size, 80 | unit="B", 81 | unit_scale=True, 82 | unit_divisor=1024, 83 | ) as pbar: 84 | for chunk in response.iter_content(chunk_size=chunk_size): 85 | if chunk: 86 | f.write(chunk) 87 | pbar.update(len(chunk)) 88 | 89 | Path(output_path).parent.mkdir(parents=True, exist_ok=True) 90 | shutil.move(Path(d) / "tmp", output_path) 91 | 92 | 93 | class ModelDownloader: 94 | """Download model from zenodo and unpack.""" 95 | 96 | def 
__init__(self, cachedir: Union[Path, str] = None): 97 | if cachedir is None: 98 | # The default path is the directory of this module 99 | cachedir = Path(__file__).parent 100 | # If not having write permission, fallback to homedir 101 | if not os.access(cachedir, os.W_OK): 102 | cachedir = Path.home() / ".cache" / "espnet_model_zoo" 103 | else: 104 | cachedir = Path(cachedir).expanduser().absolute() 105 | cachedir.mkdir(parents=True, exist_ok=True) 106 | 107 | csv = Path(__file__).parent / "table.csv" 108 | if not csv.exists(): 109 | download(MODELS_URL, csv) 110 | 111 | self.cachedir = cachedir 112 | self.csv = csv 113 | self.data_frame = pd.read_csv(csv, dtype=str) 114 | 115 | def get_data_frame(self): 116 | return self.data_frame 117 | 118 | def update_model_table(self): 119 | lock_file = str(self.csv) + ".lock" 120 | Path(lock_file).parent.mkdir(parents=True, exist_ok=True) 121 | with FileLock(lock_file): 122 | download(MODELS_URL, self.csv) 123 | 124 | def clean_cache(self, name: str = None, version: int = -1, **kwargs: str): 125 | url = self.get_url(name=name, version=version, **kwargs) 126 | outdir = self.cachedir / str_to_hash(url) 127 | shutil.rmtree(outdir) 128 | 129 | def query( 130 | self, key: Union[Sequence[str]] = "name", **kwargs 131 | ) -> List[Union[str, Tuple[str]]]: 132 | conditions = None 133 | for k, v in kwargs.items(): 134 | if k not in self.data_frame: 135 | warnings.warn( 136 | f"Invalid key: {k}: Available keys:\n" 137 | f"{list(self.data_frame.keys())}" 138 | ) 139 | continue 140 | condition = self.data_frame[k] == v 141 | if conditions is None: 142 | conditions = condition 143 | else: 144 | conditions &= condition 145 | 146 | if conditions is not None: 147 | df = self.data_frame[conditions] 148 | else: 149 | df = self.data_frame 150 | 151 | if len(df) == 0: 152 | return [] 153 | else: 154 | if isinstance(key, (tuple, list)): 155 | return list(zip(*[df[k] for k in key])) 156 | else: 157 | return list(df[key]) 158 | 159 | def 
get_url(self, name: str = None, version: int = -1, **kwargs: str) -> str: 160 | if name is None and len(kwargs) == 0: 161 | raise TypeError("No arguments are given") 162 | 163 | if name is not None and is_url(name): 164 | # Specify the downloading link directly. "kwargs" are ignored in this case. 165 | url = name 166 | 167 | else: 168 | if name is not None: 169 | kwargs["name"] = name 170 | 171 | conditions = None 172 | for key, value in kwargs.items(): 173 | condition = self.data_frame[key] == value 174 | if conditions is None: 175 | conditions = condition 176 | else: 177 | conditions &= condition 178 | 179 | if len(self.data_frame[conditions]) == 0: 180 | # If Specifying local file path 181 | if name is not None and Path(name).exists() and len(kwargs) == 1: 182 | url = str(Path(name).absolute()) 183 | 184 | else: 185 | return "huggingface.co" 186 | else: 187 | urls = self.data_frame[conditions]["url"] 188 | if version < 0: 189 | version = len(urls) + version 190 | url = list(urls)[version] 191 | return url 192 | 193 | @staticmethod 194 | def _get_file_name(url): 195 | ma = re.match(r"https://.*/([^/]*)\?download=[0-9]*$", url) 196 | if ma is not None: 197 | # URL e.g. 198 | # https://sandbox.zenodo.org/record/646767/files/asr_train_raw_bpe_valid.acc.best.zip?download=1 199 | a = ma.groups()[0] 200 | return a 201 | else: 202 | # If not Zenodo 203 | r = requests.head(url) 204 | if "Content-Disposition" in r.headers: 205 | # e.g. attachment; filename=asr_train_raw_bpe_valid.acc.best.zip 206 | for v in r.headers["Content-Disposition"].split(";"): 207 | if "filename=" in v: 208 | return v.split("filename=")[1].strip() 209 | 210 | # if not specified or some error happens 211 | return Path(url).name 212 | 213 | def unpack_local_file(self, name: str = None) -> Dict[str, Union[str, List[str]]]: 214 | if not Path(name).exists(): 215 | raise FileNotFoundError(f"No such file or directory: {name}") 216 | 217 | warnings.warn( 218 | "Expanding a local model to the cachedir. 
" 219 | "If you'll move the file to another path, " 220 | "it's treated as a different model." 221 | ) 222 | name = Path(name).absolute() 223 | 224 | outdir = self.cachedir / str_to_hash(name) 225 | filename = outdir / name.name 226 | outdir.mkdir(parents=True, exist_ok=True) 227 | 228 | if not filename.exists(): 229 | if filename.is_symlink(): 230 | filename.unlink() 231 | filename.symlink_to(name) 232 | 233 | # Skip unpacking if the cache exists 234 | meta_yaml = outdir / "meta.yaml" 235 | outdir.mkdir(parents=True, exist_ok=True) 236 | lock_file = str(meta_yaml) + ".lock" 237 | with FileLock(lock_file): 238 | if meta_yaml.exists(): 239 | info = get_dict_from_cache(meta_yaml) 240 | if info is not None: 241 | return info 242 | 243 | # Extract files from archived file 244 | return unpack(filename, outdir) 245 | 246 | def huggingface_download( 247 | self, name: str = None, version: int = -1, quiet: bool = False, **kwargs: str 248 | ) -> str: 249 | # Get huggingface_id from table.csv 250 | if name is None: 251 | names = self.query(key="name", **kwargs) 252 | if len(names) == 0: 253 | message = "Not found models:" 254 | for key, value in kwargs.items(): 255 | message += f" {key}={value}" 256 | raise RuntimeError(message) 257 | if version < 0: 258 | version = len(names) + version 259 | name = list(names)[version] 260 | 261 | if "@" in name: 262 | huggingface_id, revision = name.split("@", 1) 263 | else: 264 | huggingface_id = name 265 | revision = None 266 | 267 | return snapshot_download( 268 | huggingface_id, 269 | revision=revision, 270 | library_name="espnet", 271 | cache_dir=self.cachedir, 272 | ) 273 | 274 | @staticmethod 275 | def _unpack_cache_dir_for_huggingface(cache_dir: str): 276 | meta_yaml = Path(cache_dir) / "meta.yaml" 277 | lock_file = Path(cache_dir) / ".lock" 278 | flag_file = Path(cache_dir) / ".done" 279 | 280 | with meta_yaml.open("r", encoding="utf-8") as f: 281 | d = yaml.safe_load(f) 282 | assert isinstance(d, dict), type(d) 283 | 284 | 
yaml_files = d["yaml_files"] 285 | files = d["files"] 286 | assert isinstance(yaml_files, dict), type(yaml_files) 287 | assert isinstance(files, dict), type(files) 288 | 289 | # Rewrite yaml_files for first case 290 | with FileLock(lock_file): 291 | if not flag_file.exists(): 292 | for key, value in yaml_files.items(): 293 | yaml_file = Path(cache_dir) / value 294 | with yaml_file.open("r", encoding="utf-8") as f: 295 | d = yaml.safe_load(f) 296 | assert isinstance(d, dict), type(d) 297 | for name in Path(cache_dir).glob("**/*"): 298 | name = name.relative_to(Path(cache_dir)) 299 | d = find_path_and_change_it_recursive( 300 | d, name, str(Path(cache_dir) / name) 301 | ) 302 | 303 | with yaml_file.open("w", encoding="utf-8") as f: 304 | yaml.safe_dump(d, f) 305 | 306 | with flag_file.open("w"): 307 | pass 308 | 309 | retval = {} 310 | for key, value in list(yaml_files.items()) + list(files.items()): 311 | retval[key] = str(Path(cache_dir) / value) 312 | return retval 313 | 314 | def download( 315 | self, name: str = None, version: int = -1, quiet: bool = False, **kwargs: str 316 | ) -> str: 317 | url = self.get_url(name=name, version=version, **kwargs) 318 | 319 | # Support direct huggingface url specification 320 | if name is not None and name.startswith("https://huggingface.co/"): 321 | url = "https://huggingface.co/" 322 | name = name.replace("https://huggingface.co/", "") 323 | 324 | # For huggingface compatibility 325 | if url in [ 326 | "https://huggingface.co/", 327 | "https://huggingface.co", 328 | "huggingface.co", 329 | ]: 330 | # TODO(kamo): Support quiet 331 | cache_dir = self.huggingface_download(name=name, version=version, **kwargs) 332 | self._unpack_cache_dir_for_huggingface(cache_dir) 333 | return cache_dir 334 | 335 | if not is_url(url) and Path(url).exists(): 336 | return url 337 | 338 | outdir = self.cachedir / str_to_hash(url) 339 | filename = self._get_file_name(url) 340 | # Download the model file if not existing 341 | 
outdir.mkdir(parents=True, exist_ok=True) 342 | lock_file = str(outdir / filename) + ".lock" 343 | with FileLock(lock_file): 344 | if not (outdir / filename).exists(): 345 | download(url, outdir / filename, quiet=quiet) 346 | 347 | # Write the url for debugging 348 | with (outdir / "url").open("w", encoding="utf-8") as f: 349 | f.write(url) 350 | 351 | r = requests.head(url) 352 | if "Content-MD5" in r.headers: 353 | checksum = r.headers["Content-MD5"] 354 | 355 | # MD5 checksum 356 | sig = hashlib.md5() 357 | chunk_size = 8192 358 | with open(outdir / filename, "rb") as f: 359 | while True: 360 | chunk = f.read(chunk_size) 361 | if len(chunk) == 0: 362 | break 363 | sig.update(chunk) 364 | 365 | if sig.hexdigest() != checksum: 366 | Path(outdir / filename).unlink() 367 | raise RuntimeError(f"Failed to download file: {url}") 368 | else: 369 | warnings.warn("Not validating checksum") 370 | return str(outdir / filename) 371 | 372 | def download_and_unpack( 373 | self, name: str = None, version: int = -1, quiet: bool = False, **kwargs: str 374 | ) -> Dict[str, Union[str, List[str]]]: 375 | url = self.get_url(name=name, version=version, **kwargs) 376 | if not is_url(url) and Path(url).exists(): 377 | return self.unpack_local_file(url) 378 | 379 | # Support direct huggingface url specification 380 | if name is not None and name.startswith("https://huggingface.co/"): 381 | url = "https://huggingface.co/" 382 | name = name.replace("https://huggingface.co/", "") 383 | 384 | # For huggingface compatibility 385 | if url in [ 386 | "https://huggingface.co/", 387 | "https://huggingface.co", 388 | "huggingface.co", 389 | ]: 390 | # download_and_unpack and download are same if huggingface case 391 | # TODO(kamo): Support quiet 392 | cache_dir = self.huggingface_download(name=name, version=version, **kwargs) 393 | return self._unpack_cache_dir_for_huggingface(cache_dir) 394 | 395 | # Unpack to <cachedir>/<hash> in order to give an unique name 396 | outdir = self.cachedir / 
str_to_hash(url) 397 | 398 | # Skip downloading and unpacking if the cache exists 399 | meta_yaml = outdir / "meta.yaml" 400 | outdir.mkdir(parents=True, exist_ok=True) 401 | lock_file = str(meta_yaml) + ".lock" 402 | with FileLock(lock_file): 403 | if meta_yaml.exists(): 404 | info = get_dict_from_cache(meta_yaml) 405 | if info is not None: 406 | return info 407 | 408 | # Download the file to an unique path 409 | filename = self.download(url, quiet=quiet) 410 | 411 | # Extract files from archived file 412 | return unpack(filename, outdir) 413 | 414 | 415 | def str2bool(v) -> bool: 416 | return bool(strtobool(v)) 417 | 418 | 419 | def cmd_download(cmd=None): 420 | # espnet_model_zoo_download 421 | 422 | parser = argparse.ArgumentParser("Download file from Zenodo") 423 | parser.add_argument( 424 | "name", 425 | help="URL or model name in the form of <username>/<model name>. " 426 | "e.g. kamo-naoyuki/mini_an4_asr_train_raw_bpe_valid.acc.best", 427 | ) 428 | parser.add_argument( 429 | "--cachedir", 430 | help="Specify cache dir. By default, download to module root.", 431 | ) 432 | parser.add_argument( 433 | "--unpack", 434 | type=str2bool, 435 | default=False, 436 | help="Unpack the archived file after downloading.", 437 | ) 438 | args = parser.parse_args(cmd) 439 | 440 | d = ModelDownloader(args.cachedir) 441 | if args.unpack: 442 | print(d.download_and_unpack(args.name)) 443 | else: 444 | print(d.download(args.name)) 445 | 446 | 447 | def cmd_query(cmd=None): 448 | # espnet_model_zoo_query 449 | 450 | parser = argparse.ArgumentParser("Download file from Zenodo") 451 | 452 | parser.add_argument( 453 | "condition", 454 | nargs="*", 455 | default=[], 456 | help="Given desired condition in form of <key>=<value>. " 457 | "e.g. fs=16000. 
" 458 | "If no condition is given, you can view all available models", 459 | ) 460 | parser.add_argument( 461 | "--key", 462 | default="name", 463 | help="The key name you want", 464 | ) 465 | parser.add_argument( 466 | "--cachedir", 467 | help="Specify cache dir. By default, download to module root.", 468 | ) 469 | args = parser.parse_args(cmd) 470 | 471 | conditions = dict(s.split("=") for s in args.condition) 472 | d = ModelDownloader(args.cachedir) 473 | for v in d.query(args.key, **conditions): 474 | print(v) 475 | -------------------------------------------------------------------------------- /espnet_model_zoo/table.csv: -------------------------------------------------------------------------------- 1 | corpus,task,name,url,fs,lang,gender,pytorch,espnet,commit,valid 2 | ,,test,https://zenodo.org/record/3951842/files/test.zip?download=1,,,,,,,true 3 | mini_an4,asr,kamo-naoyuki/mini_an4_asr_train_raw_bpe_valid.acc.best,https://zenodo.org/record/3957940/files/asr_train_raw_bpe_valid.acc.best.zip?download=1,16000,en,,,,,true 4 | librispeech,asr,Shinji Watanabe/librispeech_asr_train_asr_transformer_e18_raw_bpe_sp_valid.acc.best,https://zenodo.org/record/4030677/files/asr_train_asr_transformer_e18_raw_bpe_sp_valid.acc.best.zip?download=1,16000,en,,,,,true 5 | jsut,tts,kan-bayashi/jsut_tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_train.loss.best,https://zenodo.org/record/3963886/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_train.loss.best.zip?download=1,24000,jp,female,,,,true 6 | jsut,tts,kan-bayashi/jsut_tts_train_fastspeech_raw_phn_jaconv_pyopenjtalk_train.loss.best,https://zenodo.org/record/3986225/files/tts_train_fastspeech_raw_phn_jaconv_pyopenjtalk_train.loss.best.zip?download=1,24000,jp,female,,,,true 7 | csmsc,tts,kan-bayashi/csmsc_tts_train_tacotron2_raw_phn_pypinyin_g2p_phone_train.loss.best,https://zenodo.org/record/3969118/files/tts_train_tacotron2_raw_phn_pypinyin_g2p_phone_train.loss.best.zip?download=1,24000,zh,female,,,,true 8 | 
csmsc,tts,kan-bayashi/csmsc_tts_train_fastspeech_raw_phn_pypinyin_g2p_phone_train.loss.best,https://zenodo.org/record/3986227/files/tts_train_fastspeech_raw_phn_pypinyin_g2p_phone_train.loss.best.zip?download=1,24000,zh,female,,,,true 9 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best,https://zenodo.org/record/3989498/files/tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,22050,en,female,,,,true 10 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best,https://zenodo.org/record/3986231/files/tts_train_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,22050,en,female,,,,true 11 | vctk,tts,kan-bayashi/vctk_tts_train_gst_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best,https://zenodo.org/record/3986237/files/tts_train_gst_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,24000,en,,,,,true 12 | vctk,tts,kan-bayashi/vctk_tts_train_gst_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best,https://zenodo.org/record/3986241/files/tts_train_gst_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,24000,en,,,,,false 13 | jsut,tts,kan-bayashi/jsut_tacotron2,https://zenodo.org/record/3963886/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_train.loss.best.zip?download=1,24000,jp,female,,,,true 14 | jsut,tts,kan-bayashi/jsut_fastspeech,https://zenodo.org/record/3986225/files/tts_train_fastspeech_raw_phn_jaconv_pyopenjtalk_train.loss.best.zip?download=1,24000,jp,female,,,,true 15 | csmsc,tts,kan-bayashi/csmsc_tacotron2,https://zenodo.org/record/3969118/files/tts_train_tacotron2_raw_phn_pypinyin_g2p_phone_train.loss.best.zip?download=1,24000,zh,female,,,,true 16 | csmsc,tts,kan-bayashi/csmsc_fastspeech,https://zenodo.org/record/3986227/files/tts_train_fastspeech_raw_phn_pypinyin_g2p_phone_train.loss.best.zip?download=1,24000,zh,female,,,,true 17 | 
ljspeech,tts,kan-bayashi/ljspeech_tacotron2,https://zenodo.org/record/3989498/files/tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,22050,en,female,,,,true 18 | ljspeech,tts,kan-bayashi/ljspeech_fastspeech,https://zenodo.org/record/3986231/files/tts_train_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,22050,en,female,,,,true 19 | vctk,tts,kan-bayashi/vctk_gst_tacotron2,https://zenodo.org/record/3986237/files/tts_train_gst_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,24000,en,,,,,true 20 | vctk,tts,kan-bayashi/vctk_gst_fastspeech,https://zenodo.org/record/3986241/files/tts_train_gst_fastspeech_raw_phn_tacotron_g2p_en_no_space_train.loss.best.zip?download=1,24000,en,,,,,false 21 | wsj,asr,kamo-naoyuki/wsj,https://zenodo.org/record/4003381/files/asr_train_asr_transformer_raw_char_valid.acc.ave.zip?download=1,16000,en,,1.6.0,0.9.1,e67a1ad,true 22 | zeroth_korean,asr,Hoon Chung/zeroth_korean_asr_train_asr_transformer5_raw_bpe_valid.acc.ave,https://zenodo.org/record/4014588/files/asr_train_asr_transformer5_raw_bpe_valid.acc.ave.zip?download=1,16000,kr,,1.2.0,0.7.0,de119e7,true 23 | jsut,tts,kan-bayashi/jsut_tts_train_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave,https://zenodo.org/record/4032224/files/tts_train_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.6.0,0.9.3,f366560,true 24 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave,https://zenodo.org/record/4032246/files/tts_train_conformer_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.6.0,0.9.3,f366560,true 25 | csmsc,tts,kan-bayashi/csmsc_tts_train_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave,https://zenodo.org/record/4031953/files/tts_train_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.6.0,0.9.3,f366560,true 26 | 
csmsc,tts,kan-bayashi/csmsc_tts_train_conformer_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave,https://zenodo.org/record/4031955/files/tts_train_conformer_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.6.0,0.9.3,f366560,true 27 | jsut,tts,kan-bayashi/jsut_fastspeech2,https://zenodo.org/record/4032224/files/tts_train_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.6.0,0.9.3,f366560,true 28 | jsut,tts,kan-bayashi/jsut_conformer_fastspeech2,https://zenodo.org/record/4032246/files/tts_train_conformer_fastspeech2_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.6.0,0.9.3,f366560,true 29 | csmsc,tts,kan-bayashi/csmsc_fastspeech2,https://zenodo.org/record/4031953/files/tts_train_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.6.0,0.9.3,f366560,true 30 | csmsc,tts,kan-bayashi/csmsc_conformer_fastspeech2,https://zenodo.org/record/4031955/files/tts_train_conformer_fastspeech2_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.6.0,0.9.3,f366560,true 31 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4036272/files/tts_train_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.6.0,0.9.3,322a5cf,true 32 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4036268/files/tts_train_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.6.0,0.9.3,322a5cf,true 33 | jsut,tts,kan-bayashi/jsut_tts_train_transformer_raw_phn_jaconv_pyopenjtalk_train.loss.ave,https://zenodo.org/record/4034121/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.3,67ca53d,true 34 | 
csmsc,tts,kan-bayashi/csmsc_tts_train_transformer_raw_phn_pypinyin_g2p_phone_train.loss.ave,https://zenodo.org/record/4034125/files/tts_train_transformer_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.5.1,0.9.3,67ca53d,true 35 | vctk,tts,kan-bayashi/vctk_tts_train_gst_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4037456/files/tts_train_gst_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.3,67ca53d,true 36 | vctk,tts,kan-bayashi/vctk_tts_train_gst_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4036266/files/tts_train_gst_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.6.0,0.9.3,322a5cf,true 37 | vctk,tts,kan-bayashi/vctk_tts_train_gst_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4036264/files/tts_train_gst_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.6.0,0.9.3,322a5cf,true 38 | ljspeech,tts,kan-bayashi/ljspeech_fastspeech2,https://zenodo.org/record/4036272/files/tts_train_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.6.0,0.9.3,322a5cf,true 39 | ljspeech,tts,kan-bayashi/ljspeech_conformer_fastspeech2,https://zenodo.org/record/4036268/files/tts_train_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.6.0,0.9.3,322a5cf,true 40 | jsut,tts,kan-bayashi/jsut_transformer,https://zenodo.org/record/4034121/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.3,67ca53d,true 41 | csmsc,tts,kan-bayashi/csmsc_transformer,https://zenodo.org/record/4034125/files/tts_train_transformer_raw_phn_pypinyin_g2p_phone_train.loss.ave.zip?download=1,24000,zh,female,1.5.1,0.9.3,67ca53d,true 42 | 
vctk,tts,kan-bayashi/vctk_gst_transformer,https://zenodo.org/record/4037456/files/tts_train_gst_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.3,67ca53d,true 43 | vctk,tts,kan-bayashi/vctk_gst_fastspeech2,https://zenodo.org/record/4036266/files/tts_train_gst_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.6.0,0.9.3,322a5cf,true 44 | vctk,tts,kan-bayashi/vctk_gst_conformer_fastspeech2,https://zenodo.org/record/4036264/files/tts_train_gst_conformer_fastspeech2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.6.0,0.9.3,322a5cf,true 45 | csj,asr,kan-bayashi/csj_asr_train_asr_transformer_raw_char_sp_valid.acc.ave,https://zenodo.org/record/4037458/files/asr_train_asr_transformer_raw_char_sp_valid.acc.ave.zip?download=1,16000,jp,,1.5.1,0.9.3,67ca53d,true 46 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4039194/files/tts_train_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.5.1,0.9.3,67ca53d,true 47 | ljspeech,tts,kan-bayashi/ljspeech_transformer,https://zenodo.org/record/4039194/files/tts_train_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,22050,en,female,1.5.1,0.9.3,67ca53d,true 48 | aishell,asr,kamo-naoyuki/aishell_conformer,https://zenodo.org/record/4105763/files/asr_train_asr_conformer3_raw_char_batch_bins4000000_accum_grad4_sp_valid.acc.ave.zip?download=1,16000,zh,,1.6.0,0.9.0,20b0c89,true 49 | wsj,asr,kamo-naoyuki/wsj_transformer2,https://zenodo.org/record/4243201/files/asr_train_asr_transformer_raw_char_valid.acc.ave.zip?download=1,16000,en,,1.6.0,0.9.0,e7d278a,true 50 | 
reverb,asr,kamo-naoyuki/reverb_asr_train_asr_transformer4_raw_char_batch_bins16000000_accum_grad1_sp_valid.acc.ave,https://zenodo.org/record/4278363/files/asr_train_asr_transformer4_raw_char_batch_bins16000000_accum_grad1_sp_valid.acc.ave.zip?download=1,16000,en,,1.5.1,0.9.5,7aad824,true 51 | timit,asr,kamo-naoyuki/timit_asr_train_asr_raw_word_valid.acc.ave,https://zenodo.org/record/4284058/files/asr_train_asr_raw_word_valid.acc.ave.zip?download=1,16000,en,,1.6.0,0.9.5,f157fcd,true 52 | jsut,asr,Hoon Chung/jsut_asr_train_asr_conformer8_raw_char_sp_valid.acc.ave,https://zenodo.org/record/4292742/files/asr_train_asr_conformer8_raw_char_sp_valid.acc.ave.zip?download=1,16000,ja,,1.2.0,0.9.0,6448047,true 53 | jsut,tts,kan-bayashi/jsut_tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4381098/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 54 | jsut,tts,kan-bayashi/jsut_tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4381096/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 55 | jsut,tts,kan-bayashi/jsut_tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4381100/files/tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 56 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4381102/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 57 | 
jsut,tts,kan-bayashi/jsut_tts_train_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4391405/files/tts_train_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 58 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave,https://zenodo.org/record/4391409/files/tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 59 | jsut,tts,kan-bayashi/jsut_tacotron2_accent,https://zenodo.org/record/4381098/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 60 | jsut,tts,kan-bayashi/jsut_transformer_accent,https://zenodo.org/record/4381096/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 61 | jsut,tts,kan-bayashi/jsut_fastspeech2_accent,https://zenodo.org/record/4381100/files/tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 62 | jsut,tts,kan-bayashi/jsut_conformer_fastspeech2_accent,https://zenodo.org/record/4381102/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,acd6957,true 63 | chime4,asr,kamo-naoyuki/chime4_asr_train_asr_transformer3_raw_en_char_sp_valid.acc.ave,https://zenodo.org/record/4414883/files/asr_train_asr_transformer3_raw_en_char_sp_valid.acc.ave.zip?download=1,16000,en,,1.4.0,0.9.6,d5ddd5e,true 64 | 
dirha_wsj,asr,kamo-naoyuki/dirha_wsj_asr_train_asr_transformer_cmvn_raw_char_rir_scpdatadirha_irwav.scp_noise_db_range10_17_noise_scpdatadirha_noisewav.scp_speech_volume_normalize1.0_num_workers2_rir_apply_prob1._sp_valid.acc.ave,https://zenodo.org/record/4415021/files/asr_train_asr_transformer_cmvn_raw_char_rir_scpdatadirha_irwav.scp_noise_db_range10_17_noise_scpdatadirha_noisewav.scp_speech_volume_normalize1.0_num_workers2_rir_apply_prob1._sp_valid.acc.ave.zip?download=1,16000,en,,1.5.1,0.9.6,c30ce88,true 65 | vctk,tts,kan-bayashi/vctk_tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4393279/files/tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 66 | vctk,tts,kan-bayashi/vctk_tts_train_gst+xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4393277/files/tts_train_gst%2Bxvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 67 | vctk,tts,kan-bayashi/vctk_tts_train_xvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4394600/files/tts_train_xvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 68 | vctk,tts,kan-bayashi/vctk_tts_train_gst+xvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4394598/files/tts_train_gst%2Bxvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 69 | vctk,tts,kan-bayashi/vctk_tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4394602/files/tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 70 | 
vctk,tts,kan-bayashi/vctk_tts_train_gst+xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4394608/files/tts_train_gst%2Bxvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 71 | libritts,tts,kan-bayashi/libritts_tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4409704/files/tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 72 | libritts,tts,kan-bayashi/libritts_tts_train_gst+xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave,https://zenodo.org/record/4409702/files/tts_train_gst%2Bxvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 73 | libritts,tts,kan-bayashi/libritts_tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss,https://zenodo.org/record/4418754/files/tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 74 | libritts,tts,kan-bayashi/libritts_tts_train_gst+xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss,https://zenodo.org/record/4418774/files/tts_train_gst%2Bxvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 75 | vctk,tts,kan-bayashi/vctk_xvector_transformer,https://zenodo.org/record/4393279/files/tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 76 | 
vctk,tts,kan-bayashi/vctk_gst+xvector_transformer,https://zenodo.org/record/4393277/files/tts_train_gst%2Bxvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 77 | vctk,tts,kan-bayashi/vctk_xvector_tacotron2,https://zenodo.org/record/4394600/files/tts_train_xvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 78 | vctk,tts,kan-bayashi/vctk_gst+xvector_tacotron2,https://zenodo.org/record/4394598/files/tts_train_gst%2Bxvector_tacotron2_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 79 | vctk,tts,kan-bayashi/vctk_xvector_conformer_fastspeech2,https://zenodo.org/record/4394602/files/tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 80 | vctk,tts,kan-bayashi/vctk_gst+xvector_conformer_fastspeech2,https://zenodo.org/record/4394608/files/tts_train_gst%2Bxvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,96ce09,true 81 | libritts,tts,kan-bayashi/libritts_xvector_transformer,https://zenodo.org/record/4409704/files/tts_train_xvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 82 | libritts,tts,kan-bayashi/libritts_gst+xvector_transformer,https://zenodo.org/record/4409702/files/tts_train_gst%2Bxvector_transformer_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 83 | libritts,tts,kan-bayashi/libritts_xvector_conformer_fastspeech2,https://zenodo.org/record/4418754/files/tts_train_xvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 84 | 
libritts,tts,kan-bayashi/libritts_gst+xvector_conformer_fastspeech2,https://zenodo.org/record/4418774/files/tts_train_gst%2Bxvector_conformer_fastspeech2_transformer_teacher_raw_phn_tacotron_g2p_en_no_space_train.loss.ave.zip?download=1,24000,en,,1.5.1,0.9.6,861431,true 85 | hkust,asr,kamo-naoyuki/hkust_asr_train_asr_transformer2_raw_zh_char_batch_bins20000000_ctc_confignore_nan_gradtrue_sp_valid.acc.ave,https://zenodo.org/record/4430974/files/asr_train_asr_transformer2_raw_zh_char_batch_bins20000000_ctc_confignore_nan_gradtrue_sp_valid.acc.ave.zip?download=1,16000,zh,,1.4.0,0.9.6,db7dfea,true 86 | reverb,asr,kamo-naoyuki/reverb_asr_train_asr_transformer2_raw_en_char_rir_scpdatareverb_rir_singlewav.scp_noise_db_range12_17_noise_scpdatareverb_noise_singlewav.scp_speech_volume_normalize1.0_num_workers2_rir_apply_prob0.999_noise_apply_prob1._sp_valid.acc.ave,https://zenodo.org/record/4441309/files/asr_train_asr_transformer2_raw_en_char_rir_scpdatareverb_rir_singlewav.scp_noise_db_range12_17_noise_scpdatareverb_noise_singlewav.scp_speech_volume_normalize1.0_num_workers2_rir_apply_prob0.999_noise_apply_prob1._sp_valid.acc.ave.zip?download=1,16000,en,,1.5.1,0.9.5,03659ca,true 87 | jsut,tts,kan-bayashi/jsut_tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4433194/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 88 | jsut,tts,kan-bayashi/jsut_tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4433196/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 89 | 
jsut,tts,kan-bayashi/jsut_tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4436450/files/tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 90 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4436448/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 91 | jsut,tts,kan-bayashi/jsut_tts_train_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4433200/files/tts_train_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 92 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave,https://zenodo.org/record/4433198/files/tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 93 | jsut,tts,kan-bayashi/jsut_tacotron2_accent_with_pause,https://zenodo.org/record/4433194/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 94 | jsut,tts,kan-bayashi/jsut_transformer_accent_with_pause,https://zenodo.org/record/4433196/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 95 | 
jsut,tts,kan-bayashi/jsut_fastspeech2_accent_with_pause,https://zenodo.org/record/4436450/files/tts_train_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 96 | jsut,tts,kan-bayashi/jsut_conformer_fastspeech2_accent_with_pause,https://zenodo.org/record/4436448/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.loss.ave.zip?download=1,24000,jp,female,1.5.1,0.9.6,18fb6e,true 97 | librispeech,asr,kamo-naoyuki/librispeech_asr_train_asr_conformer5_raw_bpe5000_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_accum_grad2_sp_valid.acc.ave,https://zenodo.org/record/4541452/files/asr_train_asr_conformer5_raw_bpe5000_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_accum_grad2_sp_valid.acc.ave.zip?download=1,16000,en,,1.7.1,0.9.6,8eff1a9,true 98 | librispeech,asr,kamo-naoyuki/librispeech_asr_train_asr_conformer5_raw_bpe5000_frontend_confn_fft400_frontend_confhop_length160_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_sp_valid.acc.ave,https://zenodo.org/record/4543003/files/asr_train_asr_conformer5_raw_bpe5000_frontend_confn_fft400_frontend_confhop_length160_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_sp_valid.acc.ave.zip?download=1,16000,en,,1.7.1,0.9.6,8eff1a9,true 99 | librispeech,asr,kamo-naoyuki/librispeech_asr_train_asr_conformer5_raw_bpe5000_frontend_confn_fft512_frontend_confhop_length256_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_sp_valid.acc.ave,https://zenodo.org/record/4543018/files/asr_train_asr_conformer5_raw_bpe5000_frontend_confn_fft512_frontend_confhop_length256_scheduler_confwarmup_steps25000_batch_bins140000000_optim_conflr0.0015_initnone_sp_valid.acc.ave.zip?download=1,16000,en,,1.7.1,0.9.6,8eff1a9,true 100 | laborotv,asr,Shinji 
Watanabe/laborotv_asr_train_asr_conformer2_latest33_raw_char_sp_valid.acc.ave,https://zenodo.org/record/4304245/files/asr_train_asr_conformer2_latest33_raw_char_sp_valid.acc.ave.zip?download=1,16000,jp,,1.4.0,0.9.5,c40d6e2,true 101 | mls,asr,ftshijt/mls_asr_transformer_valid.acc.best,https://zenodo.org/record/4458452/files/asr_transformer_valid.acc.best.zip?download=1,16000,es,,1.6.0,0.9.2,c0c3724,true 102 | librimix,enh,anogkongda/librimix_enh_train_raw_valid.si_snr.ave,https://zenodo.org/record/4480771/files/enh_train_raw_valid.si_snr.ave.zip?download=1,8000,en,,1.6.0,0.9.7,dccadcd,true 103 | wsj0_2mix,enh,Chenda Li/wsj0_2mix_enh_train_enh_conv_tasnet_raw_valid.si_snr.ave,https://zenodo.org/record/4498562/files/enh_train_enh_conv_tasnet_raw_valid.si_snr.ave.zip?download=1,8000,en,,1.5.0,0.9.7,110eca5,true 104 | wsj0_2mix,enh,Chenda Li/wsj0_2mix_enh_train_enh_rnn_tf_raw_valid.si_snr.ave,https://zenodo.org/record/4498554/files/enh_train_enh_rnn_tf_raw_valid.si_snr.ave.zip?download=1,8000,en,,1.5.1,0.9.8,110eca5,true 105 | wsj0_2mix,enh,lichenda/wsj0_2mix_skim_noncausal,https://huggingface.co/lichenda/wsj0_2mix_skim_noncausal,8000,en,,1.8.1,0.10.7a1,79b9f11,true 106 | wsj0_2mix,enh,lichenda/Chenda_Li_wsj0_2mix_enh_dprnn_tasnet,https://huggingface.co/lichenda/Chenda_Li_wsj0_2mix_enh_dprnn_tasnet,8000,en,,1.5.0,0.9.8,,true 107 | spgispeech,asr,Shinji Watanabe/spgispeech_asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_unnorm_bpe5000_valid.acc.ave,https://zenodo.org/record/4585558/files/asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_unnorm_bpe5000_valid.acc.ave.zip?download=1,16000,en_unnorm,,1.7.1,0.9.8,,true 108 | spgispeech,asr,Shinji Watanabe/spgispeech_asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_bpe5000_valid.acc.ave,https://zenodo.org/record/4585546/files/asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_bpe5000_valid.acc.ave.zip?download=1,16000,en,,1.7.1,0.9.8,,true 109 | 
librispeech,asr,kamo-naoyuki/librispeech_asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_bpe5000_scheduler_confwarmup_steps40000_optim_conflr0.0025_sp_valid.acc.ave,https://zenodo.org/record/4604066/files/asr_train_asr_conformer6_n_fft512_hop_length256_raw_en_bpe5000_scheduler_confwarmup_steps40000_optim_conflr0.0025_sp_valid.acc.ave.zip?download=1,16000,en,,1.8.0,0.9.8,2ccd176,true 110 | aishell,asr_stream,Emiru Tsunoo/aishell_asr_train_asr_streaming_transformer_raw_zh_char_sp_valid.acc.ave,https://zenodo.org/record/4604023/files/asr_train_asr_streaming_transformer_raw_zh_char_sp_valid.acc.ave.zip?download=1,16000,zh,,1.4.0,0.9.7,,true 111 | open_li52,asr,ftshijt/open_li52_asr_train_asr_raw_bpe7000_valid.acc.ave_10best,https://zenodo.org/record/4738407/files/asr_train_asr_raw_bpe7000_valid.acc.ave_10best.zip?download=1,16000,multilingual,,1.7.1,0.9.7,59bc1f7,true 112 | gigaspeech,asr,Shinji Watanabe/gigaspeech_asr_train_asr_raw_en_bpe5000_valid.acc.ave,https://zenodo.org/record/4630406/files/asr_train_asr_raw_en_bpe5000_valid.acc.ave.zip?download=1,16000,en,,1.7.1,0.9.8,,true 113 | switchboard,asr,Yuekai Zhang/swbd_asr_train_asr_cformer5_raw_bpe2000_sp_valid.acc.ave,https://zenodo.org/record/4978923/files/asr_train_asr_cformer5_raw_bpe2000_sp_valid.acc.ave.zip?download=1,8000,en,,1.7.1,0.9.8,,true 114 | dns_ins20,enh,Yen-Ju Lu/dns_ins20_enh_train_enh_blstm_tf_raw_valid.loss.best,https://zenodo.org/record/4923697/files/enh_train_enh_blstm_tf_raw_valid.loss.best.zip?download=1,16000,en,,1.4.0,0.9.9,,true 115 | jv_openslr35,asr,jv_openslr35,https://zenodo.org/record/5090139/files/asr_train_asr_raw_bpe1000_valid.acc.best.zip?download=1,16000,jv,,1.8.1,0.9.10,,true 116 | su_openslr36,asr,su_openslr36,https://zenodo.org/record/5090135/files/asr_train_asr_raw_bpe1000_valid.acc.best.zip?download=1,16000,su,,1.8.1,0.9.10,,true 117 | ksponspeech,asr,Yushi 
Ueda/ksponspeech_asr_train_asr_conformer8_n_fft512_hop_length256_raw_kr_bpe2309_valid.acc.best,https://zenodo.org/record/5154341/files/asr_train_asr_conformer8_n_fft512_hop_length256_raw_kr_bpe2309_valid.acc.best.zip?download=1,16000,kr,,1.8.1,0.10.0,538393c,true 118 | librispeech,asr,byan/librispeech_asr_train_asr_conformer_raw_bpe_batch_bins30000000_accum_grad3_optim_conflr0.001_sp,https://huggingface.co/,16000,en,,,,,true 119 | mini_librispeech,diar,Yushi Ueda/mini_librispeech_diar_train_diar_raw_max_epoch20_valid.acc.best,https://zenodo.org/record/5264020/files/diar_train_diar_raw_max_epoch20_valid.acc.best.zip?download=1,8000,,,1.9.1,0.10.2,,true 120 | jsut,tts,kan-bayashi/jsut_tts_train_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave,https://zenodo.org/record/5414980/files/tts_train_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 121 | jsut,tts,kan-bayashi/jsut_tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave,https://zenodo.org/record/5521360/files/tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 122 | jsut,tts,kan-bayashi/jsut_vits_accent_with_pause,https://zenodo.org/record/5414980/files/tts_train_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 123 | jsut,tts,kan-bayashi/jsut_full_band_vits_accent_with_pause,https://zenodo.org/record/5521360/files/tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_accent_with_pause_train.total_count.ave.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 124 | 
ljspeech,tts,kan-bayashi/ljspeech_tts_train_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5443814/files/tts_train_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a1,dee654,true 125 | ljspeech,tts,kan-bayashi/ljspeech_vits,https://zenodo.org/record/5443814/files/tts_train_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a1,dee654,true 126 | csmsc,tts,kan-bayashi/csmsc_tts_train_full_band_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave,https://zenodo.org/record/5521404/files/tts_train_full_band_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave.zip?download=1,44100,zh,female,1.7.1,0.10.3a1,dee654,true 127 | csmsc,tts,kan-bayashi/csmsc_full_band_vits,https://zenodo.org/record/5521404/files/tts_train_full_band_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave.zip?download=1,44100,zh,female,1.7.1,0.10.3a1,dee654,true 128 | csmsc,tts,kan-bayashi/csmsc_tts_train_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave,https://zenodo.org/record/5499120/files/tts_train_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave.zip?download=1,22050,zh,female,1.7.1,0.10.3a2,628b46,true 129 | csmsc,tts,kan-bayashi/csmsc_vits,https://zenodo.org/record/5499120/files/tts_train_vits_raw_phn_pypinyin_g2p_phone_train.total_count.ave.zip?download=1,22050,zh,female,1.7.1,0.10.3a2,628b46,true 130 | jsut,tts,kan-bayashi/jsut_tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave,https://zenodo.org/record/5499026/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 131 | jsut,tts,kan-bayashi/jsut_tacotron2_prosody,https://zenodo.org/record/5499026/files/tts_train_tacotron2_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 132 | 
jsut,tts,kan-bayashi/jsut_tts_train_transformer_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave,https://zenodo.org/record/5499040/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 133 | jsut,tts,kan-bayashi/jsut_transformer_prosody,https://zenodo.org/record/5499040/files/tts_train_transformer_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 134 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave,https://zenodo.org/record/5499050/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 135 | jsut,tts,kan-bayashi/jsut_conformer_fastspeech2_tacotron2_prosody,https://zenodo.org/record/5499050/files/tts_train_conformer_fastspeech2_tacotron2_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 136 | jsut,tts,kan-bayashi/jsut_tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave,https://zenodo.org/record/5499066/files/tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 137 | jsut,tts,kan-bayashi/jsut_conformer_fastspeech2_transformer_prosody,https://zenodo.org/record/5499066/files/tts_train_conformer_fastspeech2_transformer_teacher_raw_phn_jaconv_pyopenjtalk_prosody_train.loss.ave.zip?download=1,24000,jp,female,1.7.1,0.10.3a2,628b46,true 138 | vctk,tts,kan-bayashi/vctk_tts_train_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5500759/files/tts_train_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,,1.7.1,0.10.3a2,628b46,true 139 | 
vctk,tts,kan-bayashi/vctk_multi_spk_vits,https://zenodo.org/record/5500759/files/tts_train_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,,1.7.1,0.10.3a2,628b46,true 140 | vctk,tts,kan-bayashi/vctk_tts_train_full_band_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5521431/files/tts_train_full_band_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,44100,en,,1.7.1,0.10.3a2,628b46,true 141 | vctk,tts,kan-bayashi/vctk_full_band_multi_spk_vits,https://zenodo.org/record/5521431/files/tts_train_full_band_multi_spk_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,44100,en,,1.7.1,0.10.3a2,628b46,true 142 | jsut,tts,kan-bayashi/jsut_tts_train_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave,https://zenodo.org/record/5521354/files/tts_train_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 143 | jsut,tts,kan-bayashi/jsut_tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave,https://zenodo.org/record/5521340/files/tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 144 | jsut,tts,kan-bayashi/jsut_vits_prosody,https://zenodo.org/record/5521354/files/tts_train_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 145 | jsut,tts,kan-bayashi/jsut_full_band_vits_prosody,https://zenodo.org/record/5521340/files/tts_train_full_band_vits_raw_phn_jaconv_pyopenjtalk_prosody_train.total_count.ave.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 146 | 
jvs,tts,kan-bayashi/jvs_tts_finetune_jvs010_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest,https://zenodo.org/record/5521494/files/tts_finetune_jvs010_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 147 | jvs,tts,kan-bayashi/jvs_jvs010_vits_prosody,https://zenodo.org/record/5521494/files/tts_finetune_jvs010_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest.zip?download=1,22050,jp,female,1.7.1,0.10.3a1,dee654,true 148 | tsukuyomi,tts,kan-bayashi/tsukuyomi_tts_finetune_full_band_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest,https://zenodo.org/record/5521446/files/tts_finetune_full_band_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 149 | tsukuyomi,tts,kan-bayashi/tsukuyomi_full_band_vits_prosody,https://zenodo.org/record/5521446/files/tts_finetune_full_band_jsut_vits_raw_phn_jaconv_pyopenjtalk_prosody_latest.zip?download=1,44100,jp,female,1.7.1,0.10.3a1,dee654,true 150 | libritts,tts,kan-bayashi/libritts_tts_train_xvector_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5521416/files/tts_train_xvector_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,,1.7.1,0.10.3a2,628b46,true 151 | libritts,tts,kan-bayashi/libritts_xvector_vits,https://zenodo.org/record/5521416/files/tts_train_xvector_vits_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,,1.7.1,0.10.3a2,628b46,true 152 | ljspeech,tts,kan-bayashi/ljspeech_tts_train_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5498487/files/tts_train_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a2,628b46,true 153 | 
ljspeech,tts,kan-bayashi/ljspeech_tts_finetune_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave,https://zenodo.org/record/5498896/files/tts_finetune_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a2,628b46,true 154 | ljspeech,tts,kan-bayashi/ljspeech_joint_train_conformer_fastspeech2_hifigan,https://zenodo.org/record/5498487/files/tts_train_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a2,628b46,true 155 | ljspeech,tts,kan-bayashi/ljspeech_joint_finetune_conformer_fastspeech2_hifigan,https://zenodo.org/record/5498896/files/tts_finetune_joint_conformer_fastspeech2_hifigan_raw_phn_tacotron_g2p_en_no_space_train.total_count.ave.zip?download=1,22050,en,female,1.7.1,0.10.3a2,628b46,true 156 | tedlium2,asr_stream,D-Keqi/espnet_asr_train_asr_streaming_transformer_raw_en_bpe500_sp_valid.acc.ave,huggingface.co,16000,en,,,,,true 157 | speechcommands,slu,pyf98/speechcommands_12commands_conformer,https://huggingface.co/,16000,en,,,,,true 158 | speechcommands,slu,pyf98/speechcommands_35commands_conformer,https://huggingface.co/,16000,en,,,,,true 159 | chime4,enh,Wangyou-Zhang/chime4_enh_train_enh_beamformer_mvdr_raw_valid.si_snr.ave,https://zenodo.org/record/6025881/files/enh_train_enh_beamformer_mvdr_raw_valid.si_snr.ave.zip?download=1,16000,en,,1.6.0,0.9.7,,true 160 | chime4,enh,lichenda/chime4_fasnet_dprnn_tac,https://huggingface.co/lichenda/chime4_fasnet_dprnn_tac,16000,en,,1.8.1,0.10.7a1,,true 161 | chime4,enh,espnet/Wangyou_Zhang_chime4_enh_train_enh_conv_tasnet_raw,https://huggingface.co/espnet/Wangyou_Zhang_chime4_enh_train_enh_conv_tasnet_raw,16000,en,,1.7.1,0.9.9,,true 162 | chime4,enh,espnet/Wangyou_Zhang_wsj0_2mix_enh_dc_crn_mapping_snr_raw,https://huggingface.co/espnet/Wangyou_Zhang_wsj0_2mix_enh_dc_crn_mapping_snr_raw,16000,en,,1.10.2,0.10.7a1,,true 
163 | chime4,enh,espnet/Wangyou_Zhang_chime4_enh_train_enh_dc_crn_mapping_snr_raw,https://huggingface.co/espnet/Wangyou_Zhang_chime4_enh_train_enh_dc_crn_mapping_snr_raw,16000,en,,1.10.2,0.10.7a1,,true 164 | reazonspeech,asr,reazon-research/reazonspeech-espnet-v1,https://huggingface.co/,16000,ja,,1.12.1,202209,,true 165 | --------------------------------------------------------------------------------