├── .gitignore
├── LICENSE
├── README.md
├── bark
├── __init__.py
├── __main__.py
├── api.py
├── assets
│ └── prompts
│ │ ├── announcer.npz
│ │ ├── de_speaker_0.npz
│ │ ├── de_speaker_1.npz
│ │ ├── de_speaker_2.npz
│ │ ├── de_speaker_3.npz
│ │ ├── de_speaker_4.npz
│ │ ├── de_speaker_5.npz
│ │ ├── de_speaker_6.npz
│ │ ├── de_speaker_7.npz
│ │ ├── de_speaker_8.npz
│ │ ├── de_speaker_9.npz
│ │ ├── en_speaker_0.npz
│ │ ├── en_speaker_1.npz
│ │ ├── en_speaker_2.npz
│ │ ├── en_speaker_3.npz
│ │ ├── en_speaker_4.npz
│ │ ├── en_speaker_5.npz
│ │ ├── en_speaker_6.npz
│ │ ├── en_speaker_7.npz
│ │ ├── en_speaker_8.npz
│ │ ├── en_speaker_9.npz
│ │ ├── es_speaker_0.npz
│ │ ├── es_speaker_1.npz
│ │ ├── es_speaker_2.npz
│ │ ├── es_speaker_3.npz
│ │ ├── es_speaker_4.npz
│ │ ├── es_speaker_5.npz
│ │ ├── es_speaker_6.npz
│ │ ├── es_speaker_7.npz
│ │ ├── es_speaker_8.npz
│ │ ├── es_speaker_9.npz
│ │ ├── fr_speaker_0.npz
│ │ ├── fr_speaker_1.npz
│ │ ├── fr_speaker_2.npz
│ │ ├── fr_speaker_3.npz
│ │ ├── fr_speaker_4.npz
│ │ ├── fr_speaker_5.npz
│ │ ├── fr_speaker_6.npz
│ │ ├── fr_speaker_7.npz
│ │ ├── fr_speaker_8.npz
│ │ ├── fr_speaker_9.npz
│ │ ├── hi_speaker_0.npz
│ │ ├── hi_speaker_1.npz
│ │ ├── hi_speaker_2.npz
│ │ ├── hi_speaker_3.npz
│ │ ├── hi_speaker_4.npz
│ │ ├── hi_speaker_5.npz
│ │ ├── hi_speaker_6.npz
│ │ ├── hi_speaker_7.npz
│ │ ├── hi_speaker_8.npz
│ │ ├── hi_speaker_9.npz
│ │ ├── it_speaker_0.npz
│ │ ├── it_speaker_1.npz
│ │ ├── it_speaker_2.npz
│ │ ├── it_speaker_3.npz
│ │ ├── it_speaker_4.npz
│ │ ├── it_speaker_5.npz
│ │ ├── it_speaker_6.npz
│ │ ├── it_speaker_7.npz
│ │ ├── it_speaker_8.npz
│ │ ├── it_speaker_9.npz
│ │ ├── ja_speaker_0.npz
│ │ ├── ja_speaker_1.npz
│ │ ├── ja_speaker_2.npz
│ │ ├── ja_speaker_3.npz
│ │ ├── ja_speaker_4.npz
│ │ ├── ja_speaker_5.npz
│ │ ├── ja_speaker_6.npz
│ │ ├── ja_speaker_7.npz
│ │ ├── ja_speaker_8.npz
│ │ ├── ja_speaker_9.npz
│ │ ├── ko_speaker_0.npz
│ │ ├── ko_speaker_1.npz
│ │ ├── ko_speaker_2.npz
│ │ ├── ko_speaker_3.npz
│ │ ├── ko_speaker_4.npz
│ │ ├── ko_speaker_5.npz
│ │ ├── ko_speaker_6.npz
│ │ ├── ko_speaker_7.npz
│ │ ├── ko_speaker_8.npz
│ │ ├── ko_speaker_9.npz
│ │ ├── pl_speaker_0.npz
│ │ ├── pl_speaker_1.npz
│ │ ├── pl_speaker_2.npz
│ │ ├── pl_speaker_3.npz
│ │ ├── pl_speaker_4.npz
│ │ ├── pl_speaker_5.npz
│ │ ├── pl_speaker_6.npz
│ │ ├── pl_speaker_7.npz
│ │ ├── pl_speaker_8.npz
│ │ ├── pl_speaker_9.npz
│ │ ├── pt_speaker_0.npz
│ │ ├── pt_speaker_1.npz
│ │ ├── pt_speaker_2.npz
│ │ ├── pt_speaker_3.npz
│ │ ├── pt_speaker_4.npz
│ │ ├── pt_speaker_5.npz
│ │ ├── pt_speaker_6.npz
│ │ ├── pt_speaker_7.npz
│ │ ├── pt_speaker_8.npz
│ │ ├── pt_speaker_9.npz
│ │ ├── readme.md
│ │ ├── ru_speaker_0.npz
│ │ ├── ru_speaker_1.npz
│ │ ├── ru_speaker_2.npz
│ │ ├── ru_speaker_3.npz
│ │ ├── ru_speaker_4.npz
│ │ ├── ru_speaker_5.npz
│ │ ├── ru_speaker_6.npz
│ │ ├── ru_speaker_7.npz
│ │ ├── ru_speaker_8.npz
│ │ ├── ru_speaker_9.npz
│ │ ├── speaker_0.npz
│ │ ├── speaker_1.npz
│ │ ├── speaker_2.npz
│ │ ├── speaker_3.npz
│ │ ├── speaker_4.npz
│ │ ├── speaker_5.npz
│ │ ├── speaker_6.npz
│ │ ├── speaker_7.npz
│ │ ├── speaker_8.npz
│ │ ├── speaker_9.npz
│ │ ├── tr_speaker_0.npz
│ │ ├── tr_speaker_1.npz
│ │ ├── tr_speaker_2.npz
│ │ ├── tr_speaker_3.npz
│ │ ├── tr_speaker_4.npz
│ │ ├── tr_speaker_5.npz
│ │ ├── tr_speaker_6.npz
│ │ ├── tr_speaker_7.npz
│ │ ├── tr_speaker_8.npz
│ │ ├── tr_speaker_9.npz
│ │ ├── v2
│ │ ├── de_speaker_0.npz
│ │ ├── de_speaker_1.npz
│ │ ├── de_speaker_2.npz
│ │ ├── de_speaker_3.npz
│ │ ├── de_speaker_4.npz
│ │ ├── de_speaker_5.npz
│ │ ├── de_speaker_6.npz
│ │ ├── de_speaker_7.npz
│ │ ├── de_speaker_8.npz
│ │ ├── de_speaker_9.npz
│ │ ├── en_speaker_0.npz
│ │ ├── en_speaker_1.npz
│ │ ├── en_speaker_2.npz
│ │ ├── en_speaker_3.npz
│ │ ├── en_speaker_4.npz
│ │ ├── en_speaker_5.npz
│ │ ├── en_speaker_6.npz
│ │ ├── en_speaker_7.npz
│ │ ├── en_speaker_8.npz
│ │ ├── en_speaker_9.npz
│ │ ├── es_speaker_0.npz
│ │ ├── es_speaker_1.npz
│ │ ├── es_speaker_2.npz
│ │ ├── es_speaker_3.npz
│ │ ├── es_speaker_4.npz
│ │ ├── es_speaker_5.npz
│ │ ├── es_speaker_6.npz
│ │ ├── es_speaker_7.npz
│ │ ├── es_speaker_8.npz
│ │ ├── es_speaker_9.npz
│ │ ├── fr_speaker_0.npz
│ │ ├── fr_speaker_1.npz
│ │ ├── fr_speaker_2.npz
│ │ ├── fr_speaker_3.npz
│ │ ├── fr_speaker_4.npz
│ │ ├── fr_speaker_5.npz
│ │ ├── fr_speaker_6.npz
│ │ ├── fr_speaker_7.npz
│ │ ├── fr_speaker_8.npz
│ │ ├── fr_speaker_9.npz
│ │ ├── hi_speaker_0.npz
│ │ ├── hi_speaker_1.npz
│ │ ├── hi_speaker_2.npz
│ │ ├── hi_speaker_3.npz
│ │ ├── hi_speaker_4.npz
│ │ ├── hi_speaker_5.npz
│ │ ├── hi_speaker_6.npz
│ │ ├── hi_speaker_7.npz
│ │ ├── hi_speaker_8.npz
│ │ ├── hi_speaker_9.npz
│ │ ├── it_speaker_0.npz
│ │ ├── it_speaker_1.npz
│ │ ├── it_speaker_2.npz
│ │ ├── it_speaker_3.npz
│ │ ├── it_speaker_4.npz
│ │ ├── it_speaker_5.npz
│ │ ├── it_speaker_6.npz
│ │ ├── it_speaker_7.npz
│ │ ├── it_speaker_8.npz
│ │ ├── it_speaker_9.npz
│ │ ├── ja_speaker_0.npz
│ │ ├── ja_speaker_1.npz
│ │ ├── ja_speaker_2.npz
│ │ ├── ja_speaker_3.npz
│ │ ├── ja_speaker_4.npz
│ │ ├── ja_speaker_5.npz
│ │ ├── ja_speaker_6.npz
│ │ ├── ja_speaker_7.npz
│ │ ├── ja_speaker_8.npz
│ │ ├── ja_speaker_9.npz
│ │ ├── ko_speaker_0.npz
│ │ ├── ko_speaker_1.npz
│ │ ├── ko_speaker_2.npz
│ │ ├── ko_speaker_3.npz
│ │ ├── ko_speaker_4.npz
│ │ ├── ko_speaker_5.npz
│ │ ├── ko_speaker_6.npz
│ │ ├── ko_speaker_7.npz
│ │ ├── ko_speaker_8.npz
│ │ ├── ko_speaker_9.npz
│ │ ├── pl_speaker_0.npz
│ │ ├── pl_speaker_1.npz
│ │ ├── pl_speaker_2.npz
│ │ ├── pl_speaker_3.npz
│ │ ├── pl_speaker_4.npz
│ │ ├── pl_speaker_5.npz
│ │ ├── pl_speaker_6.npz
│ │ ├── pl_speaker_7.npz
│ │ ├── pl_speaker_8.npz
│ │ ├── pl_speaker_9.npz
│ │ ├── pt_speaker_0.npz
│ │ ├── pt_speaker_1.npz
│ │ ├── pt_speaker_2.npz
│ │ ├── pt_speaker_3.npz
│ │ ├── pt_speaker_4.npz
│ │ ├── pt_speaker_5.npz
│ │ ├── pt_speaker_6.npz
│ │ ├── pt_speaker_7.npz
│ │ ├── pt_speaker_8.npz
│ │ ├── pt_speaker_9.npz
│ │ ├── ru_speaker_0.npz
│ │ ├── ru_speaker_1.npz
│ │ ├── ru_speaker_2.npz
│ │ ├── ru_speaker_3.npz
│ │ ├── ru_speaker_4.npz
│ │ ├── ru_speaker_5.npz
│ │ ├── ru_speaker_6.npz
│ │ ├── ru_speaker_7.npz
│ │ ├── ru_speaker_8.npz
│ │ ├── ru_speaker_9.npz
│ │ ├── tr_speaker_0.npz
│ │ ├── tr_speaker_1.npz
│ │ ├── tr_speaker_2.npz
│ │ ├── tr_speaker_3.npz
│ │ ├── tr_speaker_4.npz
│ │ ├── tr_speaker_5.npz
│ │ ├── tr_speaker_6.npz
│ │ ├── tr_speaker_7.npz
│ │ ├── tr_speaker_8.npz
│ │ ├── tr_speaker_9.npz
│ │ ├── zh_speaker_0.npz
│ │ ├── zh_speaker_1.npz
│ │ ├── zh_speaker_2.npz
│ │ ├── zh_speaker_3.npz
│ │ ├── zh_speaker_4.npz
│ │ ├── zh_speaker_5.npz
│ │ ├── zh_speaker_6.npz
│ │ ├── zh_speaker_7.npz
│ │ ├── zh_speaker_8.npz
│ │ └── zh_speaker_9.npz
│ │ ├── zh_speaker_0.npz
│ │ ├── zh_speaker_1.npz
│ │ ├── zh_speaker_2.npz
│ │ ├── zh_speaker_3.npz
│ │ ├── zh_speaker_4.npz
│ │ ├── zh_speaker_5.npz
│ │ ├── zh_speaker_6.npz
│ │ ├── zh_speaker_7.npz
│ │ ├── zh_speaker_8.npz
│ │ └── zh_speaker_9.npz
├── cli.py
├── generation.py
├── model.py
└── model_fine.py
├── model-card.md
├── notebooks
├── fake_classifier.ipynb
├── long_form_generation.ipynb
├── memory_profiling_bark.ipynb
└── use_small_models_on_cpu.ipynb
├── pyproject.toml
└── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | suno_bark.egg-info/
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Suno, Inc
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | > Notice: Bark is Suno's open-source text-to-speech+ model. If you are looking for our text-to-music models, please visit us on our [web page](https://suno.ai) and join our community on [Discord](https://suno.ai/discord).
2 |
3 |
4 | # 🐶 Bark
5 |
6 | [](https://suno.ai/discord)
7 | [](https://twitter.com/suno_ai_)
8 |
9 | > 🔗 [Examples](https://suno.ai/examples/bark-v0) • [Suno Studio Waitlist](https://suno-ai.typeform.com/suno-studio) • [Updates](#-updates) • [How to Use](#-usage-in-python) • [Installation](#-installation) • [FAQ](#-faq)
10 |
11 | [//]:
(vertical spaces around image)
12 |
13 |
14 |
15 |
16 |
17 |
18 | Bark is a transformer-based text-to-audio model created by [Suno](https://suno.ai). Bark can generate highly realistic, multilingual speech as well as other audio - including music, background noise and simple sound effects. The model can also produce nonverbal communications like laughing, sighing and crying. To support the research community, we are providing access to pretrained model checkpoints, which are ready for inference and available for commercial use.
19 |
20 | ## ⚠ Disclaimer
21 | Bark was developed for research purposes. It is not a conventional text-to-speech model but instead a fully generative text-to-audio model, which can deviate in unexpected ways from provided prompts. Suno does not take responsibility for any output generated. Use at your own risk, and please act responsibly.
22 |
23 | ## 📖 Quick Index
24 | * [🚀 Updates](#-updates)
25 | * [💻 Installation](#-installation)
26 | * [🐍 Usage](#-usage-in-python)
27 | * [🌀 Live Examples](https://suno.ai/examples/bark-v0)
28 | * [❓ FAQ](#-faq)
29 |
30 | ## 🎧 Demos
31 |
32 | [](https://huggingface.co/spaces/suno/bark)
33 | [](https://replicate.com/suno-ai/bark)
34 | [](https://colab.research.google.com/drive/1eJfA2XUa-mXwdMy7DoYKVYHI1iTd9Vkt?usp=sharing)
35 |
36 | ## 🚀 Updates
37 |
38 | **2023.05.01**
39 | - ©️ Bark is now licensed under the MIT License, meaning it's now available for commercial use!
40 | - ⚡ 2x speed-up on GPU. 10x speed-up on CPU. We also added an option for a smaller version of Bark, which offers additional speed-up with the trade-off of slightly lower quality.
41 | - 📕 [Long-form generation](notebooks/long_form_generation.ipynb), voice consistency enhancements and other examples are now documented in a new [notebooks](./notebooks) section.
42 | - 👥 We created a [voice prompt library](https://suno-ai.notion.site/8b8e8749ed514b0cbf3f699013548683?v=bc67cff786b04b50b3ceb756fd05f68c). We hope this resource helps you find useful prompts for your use cases! You can also join us on [Discord](https://suno.ai/discord), where the community actively shares useful prompts in the **#audio-prompts** channel.
43 | - 💬 Growing community support and access to new features here:
44 |
45 | [](https://suno.ai/discord)
46 |
47 | - 💾 You can now use Bark with GPUs that have low VRAM (<4GB).
48 |
49 | **2023.04.20**
50 | - 🐶 Bark release!
51 |
52 | ## 🐍 Usage in Python
53 |
54 |
55 | 🪑 Basics
56 |
57 | ```python
58 | from bark import SAMPLE_RATE, generate_audio, preload_models
59 | from scipy.io.wavfile import write as write_wav
60 | from IPython.display import Audio
61 |
62 | # download and load all models
63 | preload_models()
64 |
65 | # generate audio from text
66 | text_prompt = """
67 | Hello, my name is Suno. And, uh — and I like pizza. [laughs]
68 | But I also have other interests such as playing tic tac toe.
69 | """
70 | audio_array = generate_audio(text_prompt)
71 |
72 | # save audio to disk
73 | write_wav("bark_generation.wav", SAMPLE_RATE, audio_array)
74 |
75 | # play text in notebook
76 | Audio(audio_array, rate=SAMPLE_RATE)
77 | ```
78 |
79 | [pizza1.webm](https://user-images.githubusercontent.com/34592747/cfa98e54-721c-4b9c-b962-688e09db684f.webm)
80 |
81 |
82 |
83 |
84 | 🌎 Foreign Language
85 |
86 | Bark supports various languages out-of-the-box and automatically determines language from input text. When prompted with code-switched text, Bark will attempt to employ the native accent for the respective languages. English quality is best for the time being, and we expect other languages to further improve with scaling.
87 |
88 |
89 |
90 | ```python
91 |
92 | text_prompt = """
93 | 추석은 내가 가장 좋아하는 명절이다. 나는 며칠 동안 휴식을 취하고 친구 및 가족과 시간을 보낼 수 있습니다.
94 | """
95 | audio_array = generate_audio(text_prompt)
96 | ```
97 | [suno_korean.webm](https://user-images.githubusercontent.com/32879321/235313033-dc4477b9-2da0-4b94-9c8b-a8c2d8f5bb5e.webm)
98 |
99 | *Note: since Bark recognizes languages automatically from input text, it is possible to use, for example, a german history prompt with english text. This usually leads to english audio with a german accent.*
100 | ```python
101 | text_prompt = """
102 | Der Dreißigjährige Krieg (1618-1648) war ein verheerender Konflikt, der Europa stark geprägt hat.
103 | This is a beginning of the history. If you want to hear more, please continue.
104 | """
105 | audio_array = generate_audio(text_prompt)
106 | ```
107 | [suno_german_accent.webm](https://user-images.githubusercontent.com/34592747/3f96ab3e-02ec-49cb-97a6-cf5af0b3524a.webm)
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 | 🎶 Music
116 | Bark can generate all types of audio, and, in principle, doesn't see a difference between speech and music. Sometimes Bark chooses to generate text as music, but you can help it out by adding music notes around your lyrics.
117 |
118 |
119 |
120 | ```python
121 | text_prompt = """
122 | ♪ In the jungle, the mighty jungle, the lion barks tonight ♪
123 | """
124 | audio_array = generate_audio(text_prompt)
125 | ```
126 | [lion.webm](https://user-images.githubusercontent.com/5068315/230684766-97f5ea23-ad99-473c-924b-66b6fab24289.webm)
127 |
128 |
129 |
130 | 🎤 Voice Presets
131 |
132 | Bark supports 100+ speaker presets across [supported languages](#supported-languages). You can browse the library of supported voice presets [HERE](https://suno-ai.notion.site/8b8e8749ed514b0cbf3f699013548683?v=bc67cff786b04b50b3ceb756fd05f68c), or in the [code](bark/assets/prompts). The community also often shares presets in [Discord](https://discord.gg/J2B2vsjKuE).
133 |
134 | > Bark tries to match the tone, pitch, emotion and prosody of a given preset, but does not currently support custom voice cloning. The model also attempts to preserve music, ambient noise, etc.
135 |
136 | ```python
137 | text_prompt = """
138 | I have a silky smooth voice, and today I will tell you about
139 | the exercise regimen of the common sloth.
140 | """
141 | audio_array = generate_audio(text_prompt, history_prompt="v2/en_speaker_1")
142 | ```
143 |
144 | [sloth.webm](https://user-images.githubusercontent.com/5068315/230684883-a344c619-a560-4ff5-8b99-b4463a34487b.webm)
145 |
146 |
147 | ### 📃 Generating Longer Audio
148 |
149 | By default, `generate_audio` works well with around 13 seconds of spoken text. For an example of how to do long-form generation, see 👉 **[Notebook](notebooks/long_form_generation.ipynb)** 👈
150 |
151 |
152 | Click to toggle example long-form generations (from the example notebook)
153 |
154 | [dialog.webm](https://user-images.githubusercontent.com/2565833/235463539-f57608da-e4cb-4062-8771-148e29512b01.webm)
155 |
156 | [longform_advanced.webm](https://user-images.githubusercontent.com/2565833/235463547-1c0d8744-269b-43fe-9630-897ea5731652.webm)
157 |
158 | [longform_basic.webm](https://user-images.githubusercontent.com/2565833/235463559-87efe9f8-a2db-4d59-b764-57db83f95270.webm)
159 |
160 |
161 |
162 |
163 | ## Command line
164 | ```commandline
165 | python -m bark --text "Hello, my name is Suno." --output_filename "example.wav"
166 | ```
167 |
168 | ## 💻 Installation
169 | *‼️ CAUTION ‼️ Do NOT use `pip install bark`. It installs a different package, which is not managed by Suno.*
170 | ```bash
171 | pip install git+https://github.com/suno-ai/bark.git
172 | ```
173 |
174 | or
175 |
176 | ```bash
177 | git clone https://github.com/suno-ai/bark
178 | cd bark && pip install .
179 | ```
180 |
181 |
182 | ## 🤗 Transformers Usage
183 |
184 | Bark is available in the 🤗 Transformers library from version 4.31.0 onwards, requiring minimal dependencies
185 | and additional packages. Steps to get started:
186 |
187 | 1. First install the 🤗 [Transformers library](https://github.com/huggingface/transformers) from main:
188 |
189 | ```
190 | pip install git+https://github.com/huggingface/transformers.git
191 | ```
192 |
193 | 2. Run the following Python code to generate speech samples:
194 |
195 | ```py
196 | from transformers import AutoProcessor, BarkModel
197 |
198 | processor = AutoProcessor.from_pretrained("suno/bark")
199 | model = BarkModel.from_pretrained("suno/bark")
200 |
201 | voice_preset = "v2/en_speaker_6"
202 |
203 | inputs = processor("Hello, my dog is cute", voice_preset=voice_preset)
204 |
205 | audio_array = model.generate(**inputs)
206 | audio_array = audio_array.cpu().numpy().squeeze()
207 | ```
208 |
209 | 3. Listen to the audio samples either in an ipynb notebook:
210 |
211 | ```py
212 | from IPython.display import Audio
213 |
214 | sample_rate = model.generation_config.sample_rate
215 | Audio(audio_array, rate=sample_rate)
216 | ```
217 |
218 | Or save them as a `.wav` file using a third-party library, e.g. `scipy`:
219 |
220 | ```py
221 | import scipy
222 |
223 | sample_rate = model.generation_config.sample_rate
224 | scipy.io.wavfile.write("bark_out.wav", rate=sample_rate, data=audio_array)
225 | ```
226 |
227 | For more details on using the Bark model for inference using the 🤗 Transformers library, refer to the
228 | [Bark docs](https://huggingface.co/docs/transformers/main/en/model_doc/bark) or the hands-on
229 | [Google Colab](https://colab.research.google.com/drive/1dWWkZzvu7L9Bunq9zvD-W02RFUXoW-Pd?usp=sharing).
230 |
231 |
232 | ## 🛠️ Hardware and Inference Speed
233 |
234 | Bark has been tested and works on both CPU and GPU (`pytorch 2.0+`, CUDA 11.7 and CUDA 12.0).
235 |
236 | On enterprise GPUs and PyTorch nightly, Bark can generate audio in roughly real-time. On older GPUs, default colab, or CPU, inference time might be significantly slower. For older GPUs or CPU you might want to consider using smaller models. Details can be found in our tutorial sections here.
237 |
238 | The full version of Bark requires around 12GB of VRAM to hold everything on GPU at the same time.
239 | To use a smaller version of the models, which should fit into 8GB VRAM, set the environment flag `SUNO_USE_SMALL_MODELS=True`.
240 |
241 | If you don't have hardware available or if you want to play with bigger versions of our models, you can also sign up for early access to our model playground [here](https://suno-ai.typeform.com/suno-studio).
242 |
243 | ## ⚙️ Details
244 |
245 | Bark is a fully generative text-to-audio model developed for research and demo purposes. It follows a GPT style architecture similar to [AudioLM](https://arxiv.org/abs/2209.03143) and [Vall-E](https://arxiv.org/abs/2301.02111) and a quantized Audio representation from [EnCodec](https://github.com/facebookresearch/encodec). It is not a conventional TTS model, but instead a fully generative text-to-audio model capable of deviating in unexpected ways from any given script. Different to previous approaches, the input text prompt is converted directly to audio without the intermediate use of phonemes. It can therefore generalize to arbitrary instructions beyond speech such as music lyrics, sound effects or other non-speech sounds.
246 |
247 | Below is a list of some known non-speech sounds, but we are finding more every day. Please let us know if you find patterns that work particularly well on [Discord](https://suno.ai/discord)!
248 |
249 | - `[laughter]`
250 | - `[laughs]`
251 | - `[sighs]`
252 | - `[music]`
253 | - `[gasps]`
254 | - `[clears throat]`
255 | - `—` or `...` for hesitations
256 | - `♪` for song lyrics
257 | - CAPITALIZATION for emphasis of a word
258 | - `[MAN]` and `[WOMAN]` to bias Bark toward male and female speakers, respectively
259 |
260 | ### Supported Languages
261 |
262 | | Language | Status |
263 | | --- | :---: |
264 | | English (en) | ✅ |
265 | | German (de) | ✅ |
266 | | Spanish (es) | ✅ |
267 | | French (fr) | ✅ |
268 | | Hindi (hi) | ✅ |
269 | | Italian (it) | ✅ |
270 | | Japanese (ja) | ✅ |
271 | | Korean (ko) | ✅ |
272 | | Polish (pl) | ✅ |
273 | | Portuguese (pt) | ✅ |
274 | | Russian (ru) | ✅ |
275 | | Turkish (tr) | ✅ |
276 | | Chinese, simplified (zh) | ✅ |
277 |
278 | Requests for future language support [here](https://github.com/suno-ai/bark/discussions/111) or in the **#forums** channel on [Discord](https://suno.ai/discord).
279 |
280 | ## 🙏 Appreciation
281 |
282 | - [nanoGPT](https://github.com/karpathy/nanoGPT) for a dead-simple and blazing fast implementation of GPT-style models
283 | - [EnCodec](https://github.com/facebookresearch/encodec) for a state-of-the-art implementation of a fantastic audio codec
284 | - [AudioLM](https://github.com/lucidrains/audiolm-pytorch) for related training and inference code
285 | - [Vall-E](https://arxiv.org/abs/2301.02111), [AudioLM](https://arxiv.org/abs/2209.03143) and many other ground-breaking papers that enabled the development of Bark
286 |
287 | ## © License
288 |
289 | Bark is licensed under the MIT License.
290 |
291 | ## 📱 Community
292 |
293 | - [Twitter](https://twitter.com/suno_ai_)
294 | - [Discord](https://suno.ai/discord)
295 |
296 | ## 🎧 Suno Studio (Early Access)
297 |
298 | We’re developing a playground for our models, including Bark.
299 |
300 | If you are interested, you can sign up for early access [here](https://suno-ai.typeform.com/suno-studio).
301 |
302 | ## ❓ FAQ
303 |
304 | #### How do I specify where models are downloaded and cached?
305 | * Bark uses Hugging Face to download and store models. You can find more info [here](https://huggingface.co/docs/huggingface_hub/package_reference/environment_variables#hfhome).
306 |
307 |
308 | #### Bark's generations sometimes differ from my prompts. What's happening?
309 | * Bark is a GPT-style model. As such, it may take some creative liberties in its generations, resulting in higher-variance model outputs than traditional text-to-speech approaches.
310 |
311 | #### What voices are supported by Bark?
312 | * Bark supports 100+ speaker presets across [supported languages](#supported-languages). You can browse the library of speaker presets [here](https://suno-ai.notion.site/8b8e8749ed514b0cbf3f699013548683?v=bc67cff786b04b50b3ceb756fd05f68c). The community also shares presets in [Discord](https://suno.ai/discord). Bark also supports generating unique random voices that fit the input text. Bark does not currently support custom voice cloning.
313 |
314 | #### Why is the output limited to ~13-14 seconds?
315 | * Bark is a GPT-style model, and its architecture/context window is optimized to output generations with roughly this length.
316 |
317 | #### How much VRAM do I need?
318 | * The full version of Bark requires around 12Gb of memory to hold everything on GPU at the same time. However, even smaller cards down to ~2Gb work with some additional settings. Simply add the following code snippet before your generation:
319 |
320 | ```python
321 | import os
322 | os.environ["SUNO_OFFLOAD_CPU"] = "True"
323 | os.environ["SUNO_USE_SMALL_MODELS"] = "True"
324 | ```
325 |
326 | #### My generated audio sounds like a 1980s phone call. What's happening?
327 | * Bark generates audio from scratch. It is not meant to create only high-fidelity, studio-quality speech. Rather, outputs could be anything from perfect speech to multiple people arguing at a baseball game recorded with bad microphones.
328 |
--------------------------------------------------------------------------------
/bark/__init__.py:
--------------------------------------------------------------------------------
1 | from .api import generate_audio, text_to_semantic, semantic_to_waveform, save_as_prompt
2 | from .generation import SAMPLE_RATE, preload_models
3 |
--------------------------------------------------------------------------------
/bark/__main__.py:
--------------------------------------------------------------------------------
from .cli import cli

# Entry point for `python -m bark`. The __name__ guard keeps an accidental
# `import bark.__main__` from running the CLI as an import side effect;
# behavior under `python -m bark` is unchanged.
if __name__ == "__main__":
    cli()
4 |
--------------------------------------------------------------------------------
/bark/api.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Optional, Union
2 |
3 | import numpy as np
4 |
5 | from .generation import codec_decode, generate_coarse, generate_fine, generate_text_semantic
6 |
7 |
def text_to_semantic(
    text: str,
    history_prompt: Optional[Union[Dict, str]] = None,
    temp: float = 0.7,
    silent: bool = False,
):
    """Convert input text into an array of semantic tokens.

    Args:
        text: text to be turned into audio
        history_prompt: history choice for audio cloning
        temp: generation temperature (1.0 more diverse, 0.0 more conservative)
        silent: disable progress bar

    Returns:
        numpy semantic array to be fed into `semantic_to_waveform`
    """
    # Thin wrapper around the generation module; KV caching is always on
    # here since it only speeds up autoregressive decoding.
    return generate_text_semantic(
        text,
        history_prompt=history_prompt,
        temp=temp,
        silent=silent,
        use_kv_caching=True,
    )
33 |
34 |
def semantic_to_waveform(
    semantic_tokens: np.ndarray,
    history_prompt: Optional[Union[Dict, str]] = None,
    temp: float = 0.7,
    silent: bool = False,
    output_full: bool = False,
    fine_temp: float = 0.5,
):
    """Generate audio array from semantic input.

    Args:
        semantic_tokens: semantic token output from `text_to_semantic`
        history_prompt: history choice for audio cloning
        temp: generation temperature for the coarse stage
            (1.0 more diverse, 0.0 more conservative)
        silent: disable progress bar
        output_full: return full generation to be used as a history prompt
        fine_temp: generation temperature for the fine stage; defaults to
            0.5, the value previously hard-coded

    Returns:
        numpy audio array at sample frequency 24khz, or
        (full_generation dict, audio array) when `output_full` is True
    """
    # Stage 1: semantic tokens -> coarse acoustic tokens.
    coarse_tokens = generate_coarse(
        semantic_tokens,
        history_prompt=history_prompt,
        temp=temp,
        silent=silent,
        use_kv_caching=True,
    )
    # Stage 2: coarse -> fine acoustic tokens (now tunable via `fine_temp`).
    fine_tokens = generate_fine(
        coarse_tokens,
        history_prompt=history_prompt,
        temp=fine_temp,
    )
    # Stage 3: decode acoustic tokens to a waveform with the codec.
    audio_arr = codec_decode(fine_tokens)
    if output_full:
        full_generation = {
            "semantic_prompt": semantic_tokens,
            "coarse_prompt": coarse_tokens,
            "fine_prompt": fine_tokens,
        }
        return full_generation, audio_arr
    return audio_arr
75 |
76 |
def save_as_prompt(filepath, full_generation):
    """Save a full generation as an .npz voice prompt.

    The output file can later be passed as a `history_prompt` to the
    generation functions. `full_generation` is the dict returned by
    `generate_audio(..., output_full=True)` / `semantic_to_waveform(...,
    output_full=True)`.

    Args:
        filepath: destination path; must end in ".npz"
        full_generation: dict containing "semantic_prompt", "coarse_prompt"
            and "fine_prompt" arrays

    Raises:
        ValueError: if the path or the dict does not match the expected format
    """
    # Validate with real exceptions rather than `assert`, which is
    # silently stripped when Python runs with -O.
    if not filepath.endswith(".npz"):
        raise ValueError("filepath must end with .npz")
    if not isinstance(full_generation, dict):
        raise ValueError("full_generation must be a dict")
    missing = {"semantic_prompt", "coarse_prompt", "fine_prompt"} - full_generation.keys()
    if missing:
        raise ValueError(f"full_generation is missing keys: {sorted(missing)}")
    np.savez(filepath, **full_generation)
84 |
85 |
def generate_audio(
    text: str,
    history_prompt: Optional[Union[Dict, str]] = None,
    text_temp: float = 0.7,
    waveform_temp: float = 0.7,
    silent: bool = False,
    output_full: bool = False,
):
    """Generate audio array from input text.

    Args:
        text: text to be turned into audio
        history_prompt: history choice for audio cloning
        text_temp: generation temperature (1.0 more diverse, 0.0 more conservative)
        waveform_temp: generation temperature (1.0 more diverse, 0.0 more conservative)
        silent: disable progress bar
        output_full: return full generation to be used as a history prompt

    Returns:
        numpy audio array at sample frequency 24khz
    """
    semantic_tokens = text_to_semantic(
        text,
        history_prompt=history_prompt,
        temp=text_temp,
        silent=silent,
    )
    # `semantic_to_waveform` already returns either the bare audio array or
    # the (full_generation, audio_array) pair depending on `output_full`,
    # so its result is handed back unchanged.
    return semantic_to_waveform(
        semantic_tokens,
        history_prompt=history_prompt,
        temp=waveform_temp,
        silent=silent,
        output_full=output_full,
    )
126 |
--------------------------------------------------------------------------------
/bark/assets/prompts/announcer.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/announcer.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/de_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/de_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/en_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/en_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/es_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/es_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/fr_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/fr_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/hi_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/hi_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/it_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/it_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ja_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ja_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ko_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ko_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pl_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pl_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/pt_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/pt_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/readme.md:
--------------------------------------------------------------------------------
1 | # Example Prompts Data
2 |
3 | ## Version Two
4 | The `v2` prompts are better engineered to follow text with a consistent voice.
 5 | To use them, simply include `v2` in the prompt. For example:
6 | ```python
7 | from bark import generate_audio
8 | text_prompt = "madam I'm adam"
9 | audio_array = generate_audio(text_prompt, history_prompt="v2/en_speaker_1")
10 | ```
11 |
12 | ## Prompt Format
13 | The provided data is in the .npz format, which is a file format used in Python for storing arrays and data. The data contains three arrays: semantic_prompt, coarse_prompt, and fine_prompt.
14 |
15 | ```semantic_prompt```
16 |
17 | The semantic_prompt array contains a sequence of token IDs generated by the BERT tokenizer from Hugging Face. These tokens encode the text input and are used as input to generate the audio output. The shape of this array is (n,), where n is the number of tokens in the input text.
18 |
19 | ```coarse_prompt```
20 |
21 | The coarse_prompt array is an intermediate output of the text-to-speech pipeline, and contains token IDs generated by the first two codebooks of the EnCodec Codec from Facebook. This step converts the semantic tokens into a different representation that is better suited for the subsequent step. The shape of this array is (2, m), where m is the number of tokens after conversion by the EnCodec Codec.
22 |
23 | ```fine_prompt```
24 |
25 | The fine_prompt array is a further processed output of the pipeline, and contains 8 codebooks from the EnCodec Codec. These codebooks represent the final stage of tokenization, and the resulting tokens are used to generate the audio output. The shape of this array is (8, p), where p is the number of tokens after further processing by the EnCodec Codec.
26 |
27 | Overall, these arrays represent different stages of a text-to-speech pipeline that converts text input into synthesized audio output. The semantic_prompt array represents the tokenized input text, while coarse_prompt and fine_prompt represent the intermediate and final stages of audio tokenization, respectively.
28 |
29 |
30 |
31 |
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/ru_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/ru_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/tr_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/tr_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/de_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/de_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/en_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/en_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/es_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/es_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/fr_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/fr_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/hi_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/hi_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/it_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/it_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ja_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ja_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ko_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ko_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pl_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pl_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/pt_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/pt_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/ru_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/ru_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/tr_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/tr_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/v2/zh_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/v2/zh_speaker_9.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_0.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_0.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_1.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_1.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_2.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_2.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_3.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_3.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_4.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_4.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_5.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_5.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_6.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_6.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_7.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_7.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_8.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_8.npz
--------------------------------------------------------------------------------
/bark/assets/prompts/zh_speaker_9.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/suno-ai/bark/f4f32d4cd480dfec1c245d258174bc9bde3c2148/bark/assets/prompts/zh_speaker_9.npz
--------------------------------------------------------------------------------
/bark/cli.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | from typing import Dict, Optional, Union
3 | import os
4 |
5 | from scipy.io.wavfile import write as write_wav
6 | from .api import generate_audio
7 | from .generation import SAMPLE_RATE
8 |
9 |
def cli():
    """Command-line interface: turn text into audio and save it as a WAV file.

    Prints the output path on success; on failure prints a best-effort error
    message instead of a traceback (this is a CLI boundary).
    """

    def _str2bool(value):
        # BUG FIX: argparse's `type=bool` treats every non-empty string as
        # True (bool("False") is True), so `--silent False` used to *enable*
        # the flag.  Parse the text explicitly instead.
        return value.lower() in ("true", "1", "t", "yes")

    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument("--text", type=str, help="text to be turned into audio")
    parser.add_argument(
        "--output_filename",
        type=str,
        default="bark_generation.wav",
        help="output audio file name",
    )
    parser.add_argument("--output_dir", type=str, default=".", help="directory to save the outputs")
    parser.add_argument(
        "--history_prompt",
        type=str,
        default=None,
        help="history choice for audio cloning, be path to the .npz file.",
    )
    parser.add_argument(
        "--text_temp",
        default=0.7,
        type=float,
        help="generation temperature (1.0 more diverse, 0.0 more conservative)",
    )
    parser.add_argument(
        "--waveform_temp",
        default=0.7,
        type=float,
        help="generation temperature (1.0 more diverse, 0.0 more conservative)",
    )
    parser.add_argument("--silent", default=False, type=_str2bool, help="disable progress bar")
    parser.add_argument(
        "--output_full",
        default=False,
        type=_str2bool,
        help="return full generation to be used as a history prompt",
    )

    args = vars(parser.parse_args())
    input_text: str = args.get("text")
    output_filename: str = args.get("output_filename")
    output_dir: str = args.get("output_dir")
    history_prompt: str = args.get("history_prompt")
    text_temp: float = args.get("text_temp")
    waveform_temp: float = args.get("waveform_temp")
    silent: bool = args.get("silent")
    output_full: bool = args.get("output_full")

    try:
        os.makedirs(output_dir, exist_ok=True)
        result = generate_audio(
            input_text,
            history_prompt=history_prompt,
            text_temp=text_temp,
            waveform_temp=waveform_temp,
            silent=silent,
            output_full=output_full,
        )
        if output_full:
            # BUG FIX: with output_full=True, generate_audio returns
            # (full_generation, audio); the old code passed that tuple
            # straight to write_wav and crashed.  Unpack it, and persist the
            # full generation so it can be reused as a history prompt.
            full_generation, generated_audio = result
            import numpy as np  # local import: only needed to save the .npz

            history_path = os.path.join(
                output_dir, os.path.splitext(output_filename)[0] + "_history.npz"
            )
            # full_generation is assumed to be a mapping of arrays (it is
            # loaded elsewhere via np.load on .npz history prompts).
            np.savez(history_path, **full_generation)
            print(f"Full generation saved as history prompt at: '{history_path}'")
        else:
            generated_audio = result
        output_file_path = os.path.join(output_dir, output_filename)
        write_wav(output_file_path, SAMPLE_RATE, generated_audio)
        print(f"Done! Output audio file is saved at: '{output_file_path}'")
    except Exception as e:  # broad on purpose: report and exit cleanly at the CLI boundary
        print(f"Oops, an error occurred: {e}")
--------------------------------------------------------------------------------
/bark/generation.py:
--------------------------------------------------------------------------------
1 | import contextlib
2 | import gc
3 | import os
4 | import re
5 |
6 | from encodec import EncodecModel
7 | import funcy
8 | import logging
9 | import numpy as np
10 | from scipy.special import softmax
11 | import torch
12 | import torch.nn.functional as F
13 | import tqdm
14 | from transformers import BertTokenizer
15 | from huggingface_hub import hf_hub_download
16 |
17 | from .model import GPTConfig, GPT
18 | from .model_fine import FineGPT, FineGPTConfig
19 |
# Use bfloat16 autocast for CUDA inference when both the torch build
# (torch.cuda.amp.autocast present) and the hardware (is_bf16_supported)
# allow it; otherwise bind `autocast` to a no-op context manager so callers
# can write `with autocast():` unconditionally.
if (
    torch.cuda.is_available() and
    hasattr(torch.cuda, "amp") and
    hasattr(torch.cuda.amp, "autocast") and
    hasattr(torch.cuda, "is_bf16_supported") and
    torch.cuda.is_bf16_supported()
):
    autocast = funcy.partial(torch.cuda.amp.autocast, dtype=torch.bfloat16)
else:
    # Fallback: same call shape, no effect.
    @contextlib.contextmanager
    def autocast():
        yield
32 |
33 |
# hold models in global scope to lazy load
# (`global` at module scope is a no-op, but it documents that these dicts
# are mutated from functions below rather than reassigned)
global models
models = {}  # model_key -> loaded model instance

global models_devices
models_devices = {}  # model_key -> device the model should run on


CONTEXT_WINDOW_SIZE = 1024  # token context length of the GPT stages

SEMANTIC_RATE_HZ = 49.9  # semantic tokens per second of audio (per the name)
SEMANTIC_VOCAB_SIZE = 10_000  # number of distinct semantic tokens

CODEBOOK_SIZE = 1024  # entries per audio codebook
N_COARSE_CODEBOOKS = 2  # codebooks produced by the coarse stage
N_FINE_CODEBOOKS = 8  # total codebooks after the fine stage
COARSE_RATE_HZ = 75  # coarse tokens per second, per codebook (per the name)

SAMPLE_RATE = 24_000  # output audio sample rate in Hz
53 |
54 |
# (display name, two-letter code) for every language with bundled speaker prompts.
SUPPORTED_LANGS = [
    ("English", "en"),
    ("German", "de"),
    ("Spanish", "es"),
    ("French", "fr"),
    ("Hindi", "hi"),
    ("Italian", "it"),
    ("Japanese", "ja"),
    ("Korean", "ko"),
    ("Polish", "pl"),
    ("Portuguese", "pt"),
    ("Russian", "ru"),
    ("Turkish", "tr"),
    ("Chinese", "zh"),
]

# Valid built-in history-prompt names: the "announcer" voice plus ten
# numbered speakers per language, in both the top-level and "v2" prompt
# directories.
ALLOWED_PROMPTS = {"announcer"} | {
    f"{_prefix}{_lang_code}_speaker_{_idx}"
    for _, _lang_code in SUPPORTED_LANGS
    for _prefix in ("", f"v2{os.path.sep}")
    for _idx in range(10)
}
76 |
77 |
# Module-level logger; handlers/levels are left to the host application.
logger = logging.getLogger(__name__)


# Absolute directory containing this file (for locating bundled assets).
CUR_PATH = os.path.dirname(os.path.abspath(__file__))


# Checkpoints are cached under $XDG_CACHE_HOME/suno/bark_v0,
# defaulting to ~/.cache/suno/bark_v0 when XDG_CACHE_HOME is unset.
default_cache_dir = os.path.join(os.path.expanduser("~"), ".cache")
CACHE_DIR = os.path.join(os.getenv("XDG_CACHE_HOME", default_cache_dir), "suno", "bark_v0")
86 |
87 |
88 | def _cast_bool_env_var(s):
89 | return s.lower() in ('true', '1', 't')
90 |
91 |
# Behavior flags, read once at import time from SUNO_* environment variables.
USE_SMALL_MODELS = _cast_bool_env_var(os.environ.get("SUNO_USE_SMALL_MODELS", "False"))  # prefer the "_small" checkpoints
GLOBAL_ENABLE_MPS = _cast_bool_env_var(os.environ.get("SUNO_ENABLE_MPS", "False"))  # opt in to the Apple MPS device
OFFLOAD_CPU = _cast_bool_env_var(os.environ.get("SUNO_OFFLOAD_CPU", "False"))  # presumably parks idle models on CPU — used further down; confirm there
95 |
96 |
# Hugging Face location of each checkpoint.  Keys without the "_small"
# suffix are the full-size models; "_small" keys are the lighter variants.
_MODEL_REPO_ID = "suno/bark"
REMOTE_MODEL_PATHS = {
    model_key: {"repo_id": _MODEL_REPO_ID, "file_name": checkpoint_file}
    for model_key, checkpoint_file in [
        ("text_small", "text.pt"),
        ("coarse_small", "coarse.pt"),
        ("fine_small", "fine.pt"),
        ("text", "text_2.pt"),
        ("coarse", "coarse_2.pt"),
        ("fine", "fine_2.pt"),
    ]
}
123 |
124 |
# Warn CUDA users whose torch build lacks fused scaled_dot_product_attention
# (flash attention): everything still works, just slower.
# FIX: corrected the grammar of the user-facing message
# ("by upgrade torch to newest" -> "by upgrading torch to the newest").
if not hasattr(torch.nn.functional, 'scaled_dot_product_attention') and torch.cuda.is_available():
    logger.warning(
        "torch version does not support flash attention. You will get faster"
        " inference speed by upgrading torch to the newest nightly version."
    )
130 |
131 |
132 | def _grab_best_device(use_gpu=True):
133 | if torch.cuda.device_count() > 0 and use_gpu:
134 | device = "cuda"
135 | elif torch.backends.mps.is_available() and use_gpu and GLOBAL_ENABLE_MPS:
136 | device = "mps"
137 | else:
138 | device = "cpu"
139 | return device
140 |
141 |
def _get_ckpt_path(model_type, use_small=False):
    """Return the local cache path for a model checkpoint.

    The "_small" checkpoint is chosen when either `use_small` or the
    module-wide USE_SMALL_MODELS flag is set.
    """
    model_key = f"{model_type}_small" if (use_small or USE_SMALL_MODELS) else model_type
    checkpoint_file = REMOTE_MODEL_PATHS[model_key]["file_name"]
    return os.path.join(CACHE_DIR, checkpoint_file)
147 |
148 |
def _download(from_hf_path, file_name):
    """Download `file_name` from the Hugging Face repo `from_hf_path` into CACHE_DIR."""
    # Ensure the cache directory exists before hf_hub_download writes into it.
    os.makedirs(CACHE_DIR, exist_ok=True)
    hf_hub_download(repo_id=from_hf_path, filename=file_name, local_dir=CACHE_DIR)
152 |
153 |
class InferenceContext:
    """Context manager that pins `torch.backends.cudnn.benchmark` for a block.

    Sets the flag on entry and restores the previous value on exit.
    Benchmarking defaults to off because inputs vary in length between calls,
    which defeats cuDNN autotuning.
    """

    def __init__(self, benchmark=False):
        # we can't expect inputs to be the same length, so disable benchmarking by default
        self._target_benchmark = benchmark
        self._saved_benchmark = None  # previous flag value, captured on __enter__

    def __enter__(self):
        self._saved_benchmark = torch.backends.cudnn.benchmark
        torch.backends.cudnn.benchmark = self._target_benchmark

    def __exit__(self, exc_type, exc_value, exc_traceback):
        torch.backends.cudnn.benchmark = self._saved_benchmark
166 |
167 |
# On CUDA machines, allow TF32 math in matmuls and cuDNN convolutions —
# faster on hardware that supports it, at a reduced-precision cost accepted
# here for inference.
if torch.cuda.is_available():
    torch.backends.cuda.matmul.allow_tf32 = True
    torch.backends.cudnn.allow_tf32 = True
171 |
172 |
@contextlib.contextmanager
def _inference_mode():
    """Combined inference context: stable cuDNN benchmark setting
    (InferenceContext), torch inference mode + no-grad, and the module's
    `autocast` (bf16 on supported CUDA, no-op otherwise)."""
    with InferenceContext(), torch.inference_mode(), torch.no_grad(), autocast():
        yield
177 |
178 |
179 | def _clear_cuda_cache():
180 | if torch.cuda.is_available():
181 | torch.cuda.empty_cache()
182 | torch.cuda.synchronize()
183 |
184 |
def clean_models(model_key=None):
    """Drop loaded model(s) from the module-level cache and reclaim memory.

    With no argument every cached model is removed; otherwise only
    `model_key` is dropped (missing keys are ignored).  The CUDA cache is
    cleared after each successful removal, and a GC pass runs at the end.
    """
    global models
    targets = list(models.keys()) if model_key is None else [model_key]
    for key in targets:
        try:
            del models[key]
        except KeyError:
            continue
        _clear_cuda_cache()
    gc.collect()
193 |
194 |
def _load_model(ckpt_path, device, use_small=False, model_type="text"):
    """Load a bark GPT checkpoint onto `device`, downloading it if missing.

    Args:
        ckpt_path: local path to the checkpoint file.
        device: torch device (string) to map the weights onto.
        use_small: load the "small" variant if True (or USE_SMALL_MODELS set).
        model_type: one of "text", "coarse", "fine".

    Returns:
        For "text": a dict with "model" and "tokenizer"; otherwise the model.
    """
    if model_type == "text":
        ConfigClass = GPTConfig
        ModelClass = GPT
    elif model_type == "coarse":
        ConfigClass = GPTConfig
        ModelClass = GPT
    elif model_type == "fine":
        ConfigClass = FineGPTConfig
        ModelClass = FineGPT
    else:
        raise NotImplementedError()
    model_key = f"{model_type}_small" if use_small or USE_SMALL_MODELS else model_type
    model_info = REMOTE_MODEL_PATHS[model_key]
    if not os.path.exists(ckpt_path):
        logger.info(f"{model_type} model not found, downloading into `{CACHE_DIR}`.")
        _download(model_info["repo_id"], model_info["file_name"])
    # NOTE(review): torch.load unpickles arbitrary objects; checkpoints come
    # from the configured hub repo, but treat other paths as untrusted.
    checkpoint = torch.load(ckpt_path, map_location=device)
    # this is a hack: older checkpoints store a single "vocab_size"; split it
    # into the separate input/output sizes the config classes expect
    model_args = checkpoint["model_args"]
    if "input_vocab_size" not in model_args:
        model_args["input_vocab_size"] = model_args["vocab_size"]
        model_args["output_vocab_size"] = model_args["vocab_size"]
        del model_args["vocab_size"]
    gptconf = ConfigClass(**checkpoint["model_args"])
    model = ModelClass(gptconf)
    state_dict = checkpoint["model"]
    # fixup checkpoint: strip the torch.compile wrapper prefix from key names
    unwanted_prefix = "_orig_mod."
    for k, v in list(state_dict.items()):
        if k.startswith(unwanted_prefix):
            state_dict[k[len(unwanted_prefix) :]] = state_dict.pop(k)
    # ".attn.bias" entries are mask buffers rebuilt at construction, so any
    # mismatch on them is expected and ignored
    extra_keys = set(state_dict.keys()) - set(model.state_dict().keys())
    extra_keys = set([k for k in extra_keys if not k.endswith(".attn.bias")])
    missing_keys = set(model.state_dict().keys()) - set(state_dict.keys())
    missing_keys = set([k for k in missing_keys if not k.endswith(".attn.bias")])
    if len(extra_keys) != 0:
        raise ValueError(f"extra keys found: {extra_keys}")
    if len(missing_keys) != 0:
        raise ValueError(f"missing keys: {missing_keys}")
    model.load_state_dict(state_dict, strict=False)
    n_params = model.get_num_params()
    val_loss = checkpoint["best_val_loss"].item()
    logger.info(f"model loaded: {round(n_params/1e6,1)}M params, {round(val_loss,3)} loss")
    model.eval()
    model.to(device)
    del checkpoint, state_dict
    _clear_cuda_cache()
    if model_type == "text":
        # the text stage also needs a tokenizer, so return both together
        tokenizer = BertTokenizer.from_pretrained("bert-base-multilingual-cased")
        return {
            "model": model,
            "tokenizer": tokenizer,
        }
    return model
250 |
251 |
def _load_codec_model(device):
    """Instantiate the pretrained 24 kHz Encodec codec model on `device`."""
    model = EncodecModel.encodec_model_24khz()
    # fix the codec's target bitrate at 6 kbps
    model.set_target_bandwidth(6.0)
    model.eval()
    model.to(device)
    _clear_cuda_cache()
    return model
259 |
260 |
def load_model(use_gpu=True, use_small=False, force_reload=False, model_type="text"):
    """Return (and cache in `models`) one of the bark models, loading on demand.

    With OFFLOAD_CPU set, weights stay on CPU and the intended device is
    remembered in `models_devices` so generation code can move them as needed.
    """
    _load_model_f = funcy.partial(_load_model, model_type=model_type, use_small=use_small)
    if model_type not in ("text", "coarse", "fine"):
        raise NotImplementedError()
    global models
    global models_devices
    device = _grab_best_device(use_gpu=use_gpu)
    model_key = f"{model_type}"
    if OFFLOAD_CPU:
        # remember the "real" device but keep the weights on CPU for now
        models_devices[model_key] = device
        device = "cpu"
    if model_key not in models or force_reload:
        ckpt_path = _get_ckpt_path(model_type, use_small=use_small)
        clean_models(model_key=model_key)
        model = _load_model_f(ckpt_path, device)
        models[model_key] = model
    if model_type == "text":
        # the text entry is a {"model", "tokenizer"} container, not a bare module
        models[model_key]["model"].to(device)
    else:
        models[model_key].to(device)
    return models[model_key]
282 |
283 |
def load_codec_model(use_gpu=True, force_reload=False):
    """Return (and cache in `models`) the Encodec codec model, loading on demand."""
    global models
    global models_devices
    device = _grab_best_device(use_gpu=use_gpu)
    if device == "mps":
        # encodec doesn't support mps
        device = "cpu"
    model_key = "codec"
    if OFFLOAD_CPU:
        # remember the "real" device but keep the weights on CPU for now
        models_devices[model_key] = device
        device = "cpu"
    if model_key not in models or force_reload:
        clean_models(model_key=model_key)
        model = _load_codec_model(device)
        models[model_key] = model
    models[model_key].to(device)
    return models[model_key]
301 |
302 |
def preload_models(
    text_use_gpu=True,
    text_use_small=False,
    coarse_use_gpu=True,
    coarse_use_small=False,
    fine_use_gpu=True,
    fine_use_small=False,
    codec_use_gpu=True,
    force_reload=False,
):
    """Load all the necessary models for the pipeline."""
    wants_gpu = text_use_gpu or coarse_use_gpu or fine_use_gpu or codec_use_gpu
    if wants_gpu and _grab_best_device() == "cpu":
        logger.warning("No GPU being used. Careful, inference might be very slow!")
    # load the three GPT stages in pipeline order, then the codec
    for model_type, use_gpu, use_small in (
        ("text", text_use_gpu, text_use_small),
        ("coarse", coarse_use_gpu, coarse_use_small),
        ("fine", fine_use_gpu, fine_use_small),
    ):
        load_model(
            model_type=model_type,
            use_gpu=use_gpu,
            use_small=use_small,
            force_reload=force_reload,
        )
    load_codec_model(use_gpu=codec_use_gpu, force_reload=force_reload)
331 |
332 |
333 | ####
334 | # Generation Functionality
335 | ####
336 |
337 |
338 | def _tokenize(tokenizer, text):
339 | return tokenizer.encode(text, add_special_tokens=False)
340 |
341 |
342 | def _detokenize(tokenizer, enc_text):
343 | return tokenizer.decode(enc_text)
344 |
345 |
346 | def _normalize_whitespace(text):
347 | return re.sub(r"\s+", " ", text).strip()
348 |
349 |
# Token-id layout for the text -> semantic model's input/output space.
TEXT_ENCODING_OFFSET = 10_048  # text token ids are shifted above the semantic vocab
SEMANTIC_PAD_TOKEN = 10_000  # pads semantic history; its logit is also read as eos during sampling
TEXT_PAD_TOKEN = 129_595  # pads the 256-token text context
SEMANTIC_INFER_TOKEN = 129_599  # appended marker that starts semantic generation
354 |
355 |
356 | def _load_history_prompt(history_prompt_input):
357 | if isinstance(history_prompt_input, str) and history_prompt_input.endswith(".npz"):
358 | history_prompt = np.load(history_prompt_input)
359 | elif isinstance(history_prompt_input, str):
360 | # make sure this works on non-ubuntu
361 | history_prompt_input = os.path.join(*history_prompt_input.split("/"))
362 | if history_prompt_input not in ALLOWED_PROMPTS:
363 | raise ValueError("history prompt not found")
364 | history_prompt = np.load(
365 | os.path.join(CUR_PATH, "assets", "prompts", f"{history_prompt_input}.npz")
366 | )
367 | elif isinstance(history_prompt_input, dict):
368 | assert("semantic_prompt" in history_prompt_input)
369 | assert("coarse_prompt" in history_prompt_input)
370 | assert("fine_prompt" in history_prompt_input)
371 | history_prompt = history_prompt_input
372 | else:
373 | raise ValueError("history prompt format unrecognized")
374 | return history_prompt
375 |
376 |
def generate_text_semantic(
    text,
    history_prompt=None,
    temp=0.7,
    top_k=None,
    top_p=None,
    silent=False,
    min_eos_p=0.2,
    max_gen_duration_s=None,
    allow_early_stop=True,
    use_kv_caching=False,
):
    """Generate semantic tokens from text.

    Args:
        text: input text; whitespace-normalized and truncated to 256 tokens.
        history_prompt: optional voice prompt (path, name, or dict) whose
            "semantic_prompt" array conditions the generation.
        temp: softmax temperature used when sampling.
        top_k: if set, sample only among the k most likely tokens.
        top_p: if set, nucleus-sample from the smallest token set whose
            cumulative probability exceeds top_p.
        silent: disable the progress bar.
        min_eos_p: stop early once the eos probability reaches this value.
        max_gen_duration_s: optional hard cap on generated duration.
        allow_early_stop: whether eos may terminate generation before the
            step budget is exhausted.
        use_kv_caching: reuse attention key/value state between steps.

    Returns:
        1-D numpy array of semantic token ids.
    """
    assert isinstance(text, str)
    text = _normalize_whitespace(text)
    assert len(text.strip()) > 0
    if history_prompt is not None:
        history_prompt = _load_history_prompt(history_prompt)
        semantic_history = history_prompt["semantic_prompt"]
        assert (
            isinstance(semantic_history, np.ndarray)
            and len(semantic_history.shape) == 1
            and len(semantic_history) > 0
            and semantic_history.min() >= 0
            and semantic_history.max() <= SEMANTIC_VOCAB_SIZE - 1
        )
    else:
        semantic_history = None
    # load models if not yet exist
    global models
    global models_devices
    if "text" not in models:
        preload_models()
    model_container = models["text"]
    model = model_container["model"]
    tokenizer = model_container["tokenizer"]
    encoded_text = np.array(_tokenize(tokenizer, text)) + TEXT_ENCODING_OFFSET
    if OFFLOAD_CPU:
        model.to(models_devices["text"])
    device = next(model.parameters()).device
    # the model context holds exactly 256 text tokens; truncate then pad
    if len(encoded_text) > 256:
        p = round((len(encoded_text) - 256) / len(encoded_text) * 100, 1)
        logger.warning(f"warning, text too long, lopping of last {p}%")
        encoded_text = encoded_text[:256]
    encoded_text = np.pad(
        encoded_text,
        (0, 256 - len(encoded_text)),
        constant_values=TEXT_PAD_TOKEN,
        mode="constant",
    )
    if semantic_history is not None:
        semantic_history = semantic_history.astype(np.int64)
        # lop off if history is too long, pad if needed
        semantic_history = semantic_history[-256:]
        semantic_history = np.pad(
            semantic_history,
            (0, 256 - len(semantic_history)),
            constant_values=SEMANTIC_PAD_TOKEN,
            mode="constant",
        )
    else:
        semantic_history = np.array([SEMANTIC_PAD_TOKEN] * 256)
    # model input: [256 text tokens | 256 semantic-history tokens | infer marker]
    x = torch.from_numpy(
        np.hstack([
            encoded_text, semantic_history, np.array([SEMANTIC_INFER_TOKEN])
        ]).astype(np.int64)
    )[None]
    assert x.shape[1] == 256 + 256 + 1
    with _inference_mode():
        x = x.to(device)
        n_tot_steps = 768
        # custom tqdm updates since we don't know when eos will occur
        pbar = tqdm.tqdm(disable=silent, total=n_tot_steps)
        pbar_state = 0
        tot_generated_duration_s = 0
        kv_cache = None
        for n in range(n_tot_steps):
            # with a warm kv cache, only the newest token needs to be fed
            if use_kv_caching and kv_cache is not None:
                x_input = x[:, [-1]]
            else:
                x_input = x
            logits, kv_cache = model(
                x_input, merge_context=True, use_cache=use_kv_caching, past_kv=kv_cache
            )
            relevant_logits = logits[0, 0, :SEMANTIC_VOCAB_SIZE]
            if allow_early_stop:
                relevant_logits = torch.hstack(
                    (relevant_logits, logits[0, 0, [SEMANTIC_PAD_TOKEN]]) # eos
                )
            if top_p is not None:
                # faster to convert to numpy
                original_device = relevant_logits.device
                relevant_logits = relevant_logits.detach().cpu().type(torch.float32).numpy()
                sorted_indices = np.argsort(relevant_logits)[::-1]
                sorted_logits = relevant_logits[sorted_indices]
                cumulative_probs = np.cumsum(softmax(sorted_logits))
                sorted_indices_to_remove = cumulative_probs > top_p
                # shift right so the first token above the threshold survives
                sorted_indices_to_remove[1:] = sorted_indices_to_remove[:-1].copy()
                sorted_indices_to_remove[0] = False
                relevant_logits[sorted_indices[sorted_indices_to_remove]] = -np.inf
                relevant_logits = torch.from_numpy(relevant_logits)
                relevant_logits = relevant_logits.to(original_device)
            if top_k is not None:
                v, _ = torch.topk(relevant_logits, min(top_k, relevant_logits.size(-1)))
                relevant_logits[relevant_logits < v[-1]] = -float("Inf")
            probs = F.softmax(relevant_logits / temp, dim=-1)
            item_next = torch.multinomial(probs, num_samples=1).to(torch.int32)
            if allow_early_stop and (
                item_next == SEMANTIC_VOCAB_SIZE
                or (min_eos_p is not None and probs[-1] >= min_eos_p)
            ):
                # eos found, so break
                pbar.update(n - pbar_state)
                break
            x = torch.cat((x, item_next[None]), dim=1)
            tot_generated_duration_s += 1 / SEMANTIC_RATE_HZ
            if max_gen_duration_s is not None and tot_generated_duration_s > max_gen_duration_s:
                pbar.update(n - pbar_state)
                break
            if n == n_tot_steps - 1:
                pbar.update(n - pbar_state)
                break
            del logits, relevant_logits, probs, item_next

            if n > pbar_state:
                if n > pbar.total:
                    pbar.total = n
                pbar.update(n - pbar_state)
            pbar_state = n
        pbar.total = n
        pbar.refresh()
        pbar.close()
        # drop the conditioning prefix; keep only the newly sampled tokens
        out = x.detach().cpu().numpy().squeeze()[256 + 256 + 1 :]
    if OFFLOAD_CPU:
        model.to("cpu")
    assert all(0 <= out) and all(out < SEMANTIC_VOCAB_SIZE)
    _clear_cuda_cache()
    return out
515 |
516 |
def _flatten_codebooks(arr, offset_size=CODEBOOK_SIZE):
    """Interleave a 2-D (n_codebooks, T) code array into one 1-D sequence.

    When `offset_size` is set, row n is shifted by ``n * offset_size`` so each
    codebook occupies a disjoint id range; rows are then interleaved
    column-by-column (Fortran order).
    """
    assert len(arr.shape) == 2
    shifted = arr.copy()
    if offset_size is not None:
        for row in range(1, shifted.shape[0]):
            shifted[row, :] += offset_size * row
    return shifted.ravel("F")
525 |
526 |
COARSE_SEMANTIC_PAD_TOKEN = 12_048  # pads the 256-token semantic window fed to the coarse model
COARSE_INFER_TOKEN = 12_050  # separator placed between semantic context and coarse history
529 |
530 |
def generate_coarse(
    x_semantic,
    history_prompt=None,
    temp=0.7,
    top_k=None,
    top_p=None,
    silent=False,
    max_coarse_history=630, # min 60 (faster), max 630 (more context)
    sliding_window_len=60,
    use_kv_caching=False,
):
    """Generate coarse audio codes from semantic tokens.

    Args:
        x_semantic: 1-D array of semantic token ids (from
            `generate_text_semantic`).
        history_prompt: optional voice prompt supplying matched
            "semantic_prompt" / "coarse_prompt" history for conditioning.
        temp: softmax sampling temperature.
        top_k / top_p: optional top-k / nucleus sampling filters.
        silent: disable the progress bar.
        max_coarse_history: how many coarse tokens of context to keep.
        sliding_window_len: coarse tokens generated per window before the
            semantic context is re-aligned.
        use_kv_caching: reuse attention key/value state between steps.

    Returns:
        2-D numpy array of shape (N_COARSE_CODEBOOKS, T) of codebook indices.
    """
    assert (
        isinstance(x_semantic, np.ndarray)
        and len(x_semantic.shape) == 1
        and len(x_semantic) > 0
        and x_semantic.min() >= 0
        and x_semantic.max() <= SEMANTIC_VOCAB_SIZE - 1
    )
    assert 60 <= max_coarse_history <= 630
    assert max_coarse_history + sliding_window_len <= 1024 - 256
    semantic_to_coarse_ratio = COARSE_RATE_HZ / SEMANTIC_RATE_HZ * N_COARSE_CODEBOOKS
    max_semantic_history = int(np.floor(max_coarse_history / semantic_to_coarse_ratio))
    if history_prompt is not None:
        history_prompt = _load_history_prompt(history_prompt)
        x_semantic_history = history_prompt["semantic_prompt"]
        x_coarse_history = history_prompt["coarse_prompt"]
        # the two histories must be time-aligned at the expected rate ratio
        assert (
            isinstance(x_semantic_history, np.ndarray)
            and len(x_semantic_history.shape) == 1
            and len(x_semantic_history) > 0
            and x_semantic_history.min() >= 0
            and x_semantic_history.max() <= SEMANTIC_VOCAB_SIZE - 1
            and isinstance(x_coarse_history, np.ndarray)
            and len(x_coarse_history.shape) == 2
            and x_coarse_history.shape[0] == N_COARSE_CODEBOOKS
            and x_coarse_history.shape[-1] >= 0
            and x_coarse_history.min() >= 0
            and x_coarse_history.max() <= CODEBOOK_SIZE - 1
            and (
                round(x_coarse_history.shape[-1] / len(x_semantic_history), 1)
                == round(semantic_to_coarse_ratio / N_COARSE_CODEBOOKS, 1)
            )
        )
        # interleave codebooks and shift ids above the semantic vocab range
        x_coarse_history = _flatten_codebooks(x_coarse_history) + SEMANTIC_VOCAB_SIZE
        # trim histories correctly
        n_semantic_hist_provided = np.min(
            [
                max_semantic_history,
                len(x_semantic_history) - len(x_semantic_history) % 2,
                int(np.floor(len(x_coarse_history) / semantic_to_coarse_ratio)),
            ]
        )
        n_coarse_hist_provided = int(round(n_semantic_hist_provided * semantic_to_coarse_ratio))
        x_semantic_history = x_semantic_history[-n_semantic_hist_provided:].astype(np.int32)
        x_coarse_history = x_coarse_history[-n_coarse_hist_provided:].astype(np.int32)
        # TODO: bit of a hack for time alignment (sounds better)
        x_coarse_history = x_coarse_history[:-2]
    else:
        x_semantic_history = np.array([], dtype=np.int32)
        x_coarse_history = np.array([], dtype=np.int32)
    # load models if not yet exist
    global models
    global models_devices
    if "coarse" not in models:
        preload_models()
    model = models["coarse"]
    if OFFLOAD_CPU:
        model.to(models_devices["coarse"])
    device = next(model.parameters()).device
    # start loop
    n_steps = int(
        round(
            np.floor(len(x_semantic) * semantic_to_coarse_ratio / N_COARSE_CODEBOOKS)
            * N_COARSE_CODEBOOKS
        )
    )
    assert n_steps > 0 and n_steps % N_COARSE_CODEBOOKS == 0
    x_semantic = np.hstack([x_semantic_history, x_semantic]).astype(np.int32)
    x_coarse = x_coarse_history.astype(np.int32)
    base_semantic_idx = len(x_semantic_history)
    with _inference_mode():
        x_semantic_in = torch.from_numpy(x_semantic)[None].to(device)
        x_coarse_in = torch.from_numpy(x_coarse)[None].to(device)
        n_window_steps = int(np.ceil(n_steps / sliding_window_len))
        n_step = 0
        for _ in tqdm.tqdm(range(n_window_steps), total=n_window_steps, disable=silent):
            # re-anchor the semantic context to the position reached so far
            semantic_idx = base_semantic_idx + int(round(n_step / semantic_to_coarse_ratio))
            # pad from right side
            x_in = x_semantic_in[:, np.max([0, semantic_idx - max_semantic_history]) :]
            x_in = x_in[:, :256]
            x_in = F.pad(
                x_in,
                (0, 256 - x_in.shape[-1]),
                "constant",
                COARSE_SEMANTIC_PAD_TOKEN,
            )
            # window layout: [semantic context | infer marker | recent coarse tokens]
            x_in = torch.hstack(
                [
                    x_in,
                    torch.tensor([COARSE_INFER_TOKEN])[None].to(device),
                    x_coarse_in[:, -max_coarse_history:],
                ]
            )
            kv_cache = None
            for _ in range(sliding_window_len):
                if n_step >= n_steps:
                    continue
                # codebook index alternates each step; select the matching logit range
                is_major_step = n_step % N_COARSE_CODEBOOKS == 0

                if use_kv_caching and kv_cache is not None:
                    x_input = x_in[:, [-1]]
                else:
                    x_input = x_in

                logits, kv_cache = model(x_input, use_cache=use_kv_caching, past_kv=kv_cache)
                logit_start_idx = (
                    SEMANTIC_VOCAB_SIZE + (1 - int(is_major_step)) * CODEBOOK_SIZE
                )
                logit_end_idx = (
                    SEMANTIC_VOCAB_SIZE + (2 - int(is_major_step)) * CODEBOOK_SIZE
                )
                relevant_logits = logits[0, 0, logit_start_idx:logit_end_idx]
                if top_p is not None:
                    # faster to convert to numpy
                    original_device = relevant_logits.device
                    relevant_logits = relevant_logits.detach().cpu().type(torch.float32).numpy()
                    sorted_indices = np.argsort(relevant_logits)[::-1]
                    sorted_logits = relevant_logits[sorted_indices]
                    cumulative_probs = np.cumsum(softmax(sorted_logits))
                    sorted_indices_to_remove = cumulative_probs > top_p
                    sorted_indices_to_remove[1:] = sorted_indices_to_remove[:-1].copy()
                    sorted_indices_to_remove[0] = False
                    relevant_logits[sorted_indices[sorted_indices_to_remove]] = -np.inf
                    relevant_logits = torch.from_numpy(relevant_logits)
                    relevant_logits = relevant_logits.to(original_device)
                if top_k is not None:
                    v, _ = torch.topk(relevant_logits, min(top_k, relevant_logits.size(-1)))
                    relevant_logits[relevant_logits < v[-1]] = -float("Inf")
                probs = F.softmax(relevant_logits / temp, dim=-1)
                item_next = torch.multinomial(probs, num_samples=1).to(torch.int32)
                item_next += logit_start_idx
                x_coarse_in = torch.cat((x_coarse_in, item_next[None]), dim=1)
                x_in = torch.cat((x_in, item_next[None]), dim=1)
                del logits, relevant_logits, probs, item_next
                n_step += 1
            del x_in
        del x_semantic_in
    if OFFLOAD_CPU:
        model.to("cpu")
    gen_coarse_arr = x_coarse_in.detach().cpu().numpy().squeeze()[len(x_coarse_history) :]
    del x_coarse_in
    assert len(gen_coarse_arr) == n_steps
    # de-interleave and undo the per-codebook id offsets applied at sampling time
    gen_coarse_audio_arr = gen_coarse_arr.reshape(-1, N_COARSE_CODEBOOKS).T - SEMANTIC_VOCAB_SIZE
    for n in range(1, N_COARSE_CODEBOOKS):
        gen_coarse_audio_arr[n, :] -= n * CODEBOOK_SIZE
    _clear_cuda_cache()
    return gen_coarse_audio_arr
689 |
690 |
def generate_fine(
    x_coarse_gen,
    history_prompt=None,
    temp=0.5,
    silent=True,
):
    """Generate full audio codes from coarse audio codes.

    The non-causal fine model fills in the remaining codebooks over a fixed
    1024-wide window that slides in 512-token hops.

    Args:
        x_coarse_gen: 2-D (n_coarse, T) array of coarse codebook indices.
        history_prompt: optional voice prompt supplying a "fine_prompt" used
            as left context (up to 512 positions).
        temp: sampling temperature; None switches to greedy argmax.
        silent: disable the progress bar.

    Returns:
        2-D numpy array of shape (N_FINE_CODEBOOKS, T) of codebook indices.
    """
    assert (
        isinstance(x_coarse_gen, np.ndarray)
        and len(x_coarse_gen.shape) == 2
        and 1 <= x_coarse_gen.shape[0] <= N_FINE_CODEBOOKS - 1
        and x_coarse_gen.shape[1] > 0
        and x_coarse_gen.min() >= 0
        and x_coarse_gen.max() <= CODEBOOK_SIZE - 1
    )
    if history_prompt is not None:
        history_prompt = _load_history_prompt(history_prompt)
        x_fine_history = history_prompt["fine_prompt"]
        assert (
            isinstance(x_fine_history, np.ndarray)
            and len(x_fine_history.shape) == 2
            and x_fine_history.shape[0] == N_FINE_CODEBOOKS
            and x_fine_history.shape[1] >= 0
            and x_fine_history.min() >= 0
            and x_fine_history.max() <= CODEBOOK_SIZE - 1
        )
    else:
        x_fine_history = None
    n_coarse = x_coarse_gen.shape[0]
    # load models if not yet exist
    global models
    global models_devices
    if "fine" not in models:
        preload_models()
    model = models["fine"]
    if OFFLOAD_CPU:
        model.to(models_devices["fine"])
    device = next(model.parameters()).device
    # make input arr: coarse rows on top, CODEBOOK_SIZE as the "empty" fill value
    in_arr = np.vstack(
        [
            x_coarse_gen,
            np.zeros((N_FINE_CODEBOOKS - n_coarse, x_coarse_gen.shape[1]))
            + CODEBOOK_SIZE, # padding
        ]
    ).astype(np.int32)
    # prepend history if available (max 512)
    if x_fine_history is not None:
        x_fine_history = x_fine_history.astype(np.int32)
        in_arr = np.hstack(
            [
                x_fine_history[:, -512:].astype(np.int32),
                in_arr,
            ]
        )
        n_history = x_fine_history[:, -512:].shape[1]
    else:
        n_history = 0
    n_remove_from_end = 0
    # need to pad if too short (since non-causal model)
    if in_arr.shape[1] < 1024:
        n_remove_from_end = 1024 - in_arr.shape[1]
        in_arr = np.hstack(
            [
                in_arr,
                np.zeros((N_FINE_CODEBOOKS, n_remove_from_end), dtype=np.int32) + CODEBOOK_SIZE,
            ]
        )
    # we can be lazy about fractional loop and just keep overwriting codebooks
    n_loops = np.max([0, int(np.ceil((x_coarse_gen.shape[1] - (1024 - n_history)) / 512))]) + 1
    with _inference_mode():
        in_arr = torch.tensor(in_arr.T).to(device)
        for n in tqdm.tqdm(range(n_loops), disable=silent):
            # window start; clamp so the final window still spans a full 1024
            start_idx = np.min([n * 512, in_arr.shape[0] - 1024])
            start_fill_idx = np.min([n_history + n * 512, in_arr.shape[0] - 512])
            rel_start_fill_idx = start_fill_idx - start_idx
            in_buffer = in_arr[start_idx : start_idx + 1024, :][None]
            # predict each missing codebook in turn, conditioning on all filled ones
            for nn in range(n_coarse, N_FINE_CODEBOOKS):
                logits = model(nn, in_buffer)
                if temp is None:
                    # greedy decoding
                    relevant_logits = logits[0, rel_start_fill_idx:, :CODEBOOK_SIZE]
                    codebook_preds = torch.argmax(relevant_logits, -1)
                else:
                    relevant_logits = logits[0, :, :CODEBOOK_SIZE] / temp
                    probs = F.softmax(relevant_logits, dim=-1)
                    codebook_preds = torch.multinomial(
                        probs[rel_start_fill_idx:1024], num_samples=1
                    ).reshape(-1)
                codebook_preds = codebook_preds.to(torch.int32)
                in_buffer[0, rel_start_fill_idx:, nn] = codebook_preds
                del logits, codebook_preds
            # transfer over info into model_in and convert to numpy
            for nn in range(n_coarse, N_FINE_CODEBOOKS):
                in_arr[
                    start_fill_idx : start_fill_idx + (1024 - rel_start_fill_idx), nn
                ] = in_buffer[0, rel_start_fill_idx:, nn]
            del in_buffer
        gen_fine_arr = in_arr.detach().cpu().numpy().squeeze().T
        del in_arr
    if OFFLOAD_CPU:
        model.to("cpu")
    # strip the prepended history and the right-side padding again
    gen_fine_arr = gen_fine_arr[:, n_history:]
    if n_remove_from_end > 0:
        gen_fine_arr = gen_fine_arr[:, :-n_remove_from_end]
    assert gen_fine_arr.shape[-1] == x_coarse_gen.shape[-1]
    _clear_cuda_cache()
    return gen_fine_arr
798 |
799 |
def codec_decode(fine_tokens):
    """Turn quantized audio codes into audio array using encodec."""
    global models
    global models_devices
    # lazily load the codec model on first use
    if "codec" not in models:
        preload_models()
    model = models["codec"]
    if OFFLOAD_CPU:
        model.to(models_devices["codec"])
    device = next(model.parameters()).device
    codes = torch.from_numpy(fine_tokens)[None].to(device)
    codes = codes.transpose(0, 1)
    embeddings = model.quantizer.decode(codes)
    decoded = model.decoder(embeddings)
    audio_arr = decoded.detach().cpu().numpy().squeeze()
    del codes, embeddings, decoded
    if OFFLOAD_CPU:
        model.to("cpu")
    return audio_arr
821 |
--------------------------------------------------------------------------------
/bark/model.py:
--------------------------------------------------------------------------------
1 | """
2 | Much of this code is adapted from Andrej Karpathy's NanoGPT
3 | (https://github.com/karpathy/nanoGPT)
4 | """
5 | import math
6 | from dataclasses import dataclass
7 |
8 | import torch
9 | import torch.nn as nn
10 | from torch.nn import functional as F
11 |
class LayerNorm(nn.Module):
    """Layer normalization with an optional bias term.

    PyTorch's built-in LayerNorm does not allow disabling only the bias,
    so this wrapper passes `bias=None` to the functional form when disabled.
    """

    def __init__(self, ndim, bias):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(ndim))
        if bias:
            self.bias = nn.Parameter(torch.zeros(ndim))
        else:
            self.bias = None

    def forward(self, input):
        return F.layer_norm(input, self.weight.shape, self.weight, self.bias, 1e-5)
22 |
class CausalSelfAttention(nn.Module):
    """Multi-head causal self-attention with optional key/value caching.

    Uses the fused `scaled_dot_product_attention` kernel when available
    (PyTorch >= 2.0); otherwise falls back to a manual implementation with a
    precomputed lower-triangular mask buffer.
    """

    def __init__(self, config):
        super().__init__()
        assert config.n_embd % config.n_head == 0
        # key, query, value projections for all heads, but in a batch
        self.c_attn = nn.Linear(config.n_embd, 3 * config.n_embd, bias=config.bias)
        # output projection
        self.c_proj = nn.Linear(config.n_embd, config.n_embd, bias=config.bias)
        # regularization
        self.attn_dropout = nn.Dropout(config.dropout)
        self.resid_dropout = nn.Dropout(config.dropout)
        self.n_head = config.n_head
        self.n_embd = config.n_embd
        self.dropout = config.dropout
        # flash attention make GPU go brrrrr but support is only in PyTorch >= 2.0
        self.flash = hasattr(torch.nn.functional, 'scaled_dot_product_attention')
        if not self.flash:
            # causal mask to ensure that attention is only applied to the left in the input sequence
            self.register_buffer("bias", torch.tril(torch.ones(config.block_size, config.block_size))
                                        .view(1, 1, config.block_size, config.block_size))

    def forward(self, x, past_kv=None, use_cache=False):
        """Attend over x of shape (B, T, C); returns (output, present_kv or None)."""
        B, T, C = x.size() # batch size, sequence length, embedding dimensionality (n_embd)

        # calculate query, key, values for all heads in batch and move head forward to be the batch dim
        q, k, v = self.c_attn(x).split(self.n_embd, dim=2)
        k = k.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)
        q = q.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)
        v = v.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)

        if past_kv is not None:
            # prepend cached keys/values from earlier decoding steps
            past_key = past_kv[0]
            past_value = past_kv[1]
            k = torch.cat((past_key, k), dim=-2)
            v = torch.cat((past_value, v), dim=-2)

        FULL_T = k.shape[-2]

        if use_cache is True:
            present = (k, v)
        else:
            present = None

        # causal self-attention; Self-attend: (B, nh, T, hs) x (B, nh, hs, T) -> (B, nh, T, T)
        if self.flash:
            # efficient attention using Flash Attention CUDA kernels
            if past_kv is not None:
                # When `past_kv` is provided, we're doing incremental decoding and `q.shape[2] == 1`: q only contains
                # the query for the last token. scaled_dot_product_attention interprets this as the first token in the
                # sequence, so if is_causal=True it will mask out all attention from it. This is not what we want, so
                # to work around this we set is_causal=False.
                is_causal = False
            else:
                is_causal = True

            # BUGFIX: zero the dropout probability outside training — the functional
            # SDPA call does not consult the module's train/eval mode on its own.
            y = torch.nn.functional.scaled_dot_product_attention(
                q, k, v, dropout_p=self.dropout if self.training else 0, is_causal=is_causal
            )
        else:
            # manual implementation of attention
            att = (q @ k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1)))
            att = att.masked_fill(self.bias[:,:,FULL_T-T:FULL_T,:FULL_T] == 0, float('-inf'))
            att = F.softmax(att, dim=-1)
            att = self.attn_dropout(att)
            y = att @ v # (B, nh, T, T) x (B, nh, T, hs) -> (B, nh, T, hs)
        y = y.transpose(1, 2).contiguous().view(B, T, C) # re-assemble all head outputs side by side

        # output projection
        y = self.resid_dropout(self.c_proj(y))
        return (y, present)
93 |
class MLP(nn.Module):
    """Position-wise feed-forward block: 4x expansion, GELU, projection, dropout."""

    def __init__(self, config):
        super().__init__()
        hidden_dim = 4 * config.n_embd
        self.c_fc = nn.Linear(config.n_embd, hidden_dim, bias=config.bias)
        self.c_proj = nn.Linear(hidden_dim, config.n_embd, bias=config.bias)
        self.dropout = nn.Dropout(config.dropout)
        self.gelu = nn.GELU()

    def forward(self, x):
        hidden = self.gelu(self.c_fc(x))
        return self.dropout(self.c_proj(hidden))
109 |
class Block(nn.Module):
    """Pre-norm transformer block: attention then MLP, each with a residual."""

    def __init__(self, config, layer_idx):
        super().__init__()
        self.ln_1 = LayerNorm(config.n_embd, bias=config.bias)
        self.attn = CausalSelfAttention(config)
        self.ln_2 = LayerNorm(config.n_embd, bias=config.bias)
        self.mlp = MLP(config)
        self.layer_idx = layer_idx

    def forward(self, x, past_kv=None, use_cache=False):
        attn_output, prev_kvs = self.attn(self.ln_1(x), past_kv=past_kv, use_cache=use_cache)
        x = x + attn_output
        x = x + self.mlp(self.ln_2(x))
        return (x, prev_kvs)
125 |
@dataclass
class GPTConfig:
    # Hyperparameters for the causal GPT used by the text and coarse stages.
    block_size: int = 1024  # maximum sequence length (positions in wpe)
    input_vocab_size: int = 10_048
    output_vocab_size: int = 10_048
    n_layer: int = 12
    n_head: int = 12
    n_embd: int = 768
    dropout: float = 0.0
    bias: bool = True # True: bias in Linears and LayerNorms, like GPT-2. False: a bit better and faster
136 |
class GPT(nn.Module):
    """Causal GPT with separate input/output vocabularies and KV caching.

    Supports bark's "merge_context" input layout, where the first 256 text
    tokens and the following 256 semantic-history tokens are summed in
    embedding space before the rest of the sequence.
    """

    def __init__(self, config):
        super().__init__()
        assert config.input_vocab_size is not None
        assert config.output_vocab_size is not None
        assert config.block_size is not None
        self.config = config

        self.transformer = nn.ModuleDict(dict(
            wte = nn.Embedding(config.input_vocab_size, config.n_embd),
            wpe = nn.Embedding(config.block_size, config.n_embd),
            drop = nn.Dropout(config.dropout),
            h = nn.ModuleList([Block(config, idx) for idx in range(config.n_layer)]),
            ln_f = LayerNorm(config.n_embd, bias=config.bias),
        ))
        self.lm_head = nn.Linear(config.n_embd, config.output_vocab_size, bias=False)

    def get_num_params(self, non_embedding=True):
        """
        Return the number of parameters in the model.
        For non-embedding count (default), both the token and position
        embedding tables are subtracted.
        NOTE(review): the upstream nanoGPT version kept wte because of
        lm_head weight tying; no tying is visible here, so both are removed.
        """
        n_params = sum(p.numel() for p in self.parameters())
        if non_embedding:
            n_params -= self.transformer.wte.weight.numel()
            n_params -= self.transformer.wpe.weight.numel()
        return n_params

    def forward(self, idx, merge_context=False, past_kv=None, position_ids=None, use_cache=False):
        # Returns (logits for the last position only, new KV cache or None).
        device = idx.device
        b, t = idx.size()
        if past_kv is not None:
            # incremental decoding: only the newest token is fed in
            assert t == 1
            tok_emb = self.transformer.wte(idx) # token embeddings of shape (b, t, n_embd)
        else:
            if merge_context:
                # text (256) and semantic history (256) collapse into 256 merged
                # positions, so the effective sequence length shrinks by 256
                assert(idx.shape[1] >= 256+256+1)
                t = idx.shape[1] - 256
            else:
                assert t <= self.config.block_size, f"Cannot forward sequence of length {t}, block size is only {self.config.block_size}"

            # forward the GPT model itself
            if merge_context:
                # sum the text-token embeddings with the semantic-history
                # embeddings position-wise, then append the rest unchanged
                tok_emb = torch.cat([
                    self.transformer.wte(idx[:,:256]) + self.transformer.wte(idx[:,256:256+256]),
                    self.transformer.wte(idx[:,256+256:])
                ], dim=1)
            else:
                tok_emb = self.transformer.wte(idx) # token embeddings of shape (b, t, n_embd)

        if past_kv is None:
            past_length = 0
            past_kv = tuple([None] * len(self.transformer.h))
        else:
            # resume positions after the cached prefix
            past_length = past_kv[0][0].size(-2)

        if position_ids is None:
            position_ids = torch.arange(past_length, t + past_length, dtype=torch.long, device=device)
            position_ids = position_ids.unsqueeze(0) # shape (1, t)
            assert position_ids.shape == (1, t)

        pos_emb = self.transformer.wpe(position_ids) # position embeddings of shape (1, t, n_embd)

        x = self.transformer.drop(tok_emb + pos_emb)

        new_kv = () if use_cache else None

        for i, (block, past_layer_kv) in enumerate(zip(self.transformer.h, past_kv)):
            x, kv = block(x, past_kv=past_layer_kv, use_cache=use_cache)

            if use_cache:
                new_kv = new_kv + (kv,)

        x = self.transformer.ln_f(x)

        # inference-time mini-optimization: only forward the lm_head on the very last position
        logits = self.lm_head(x[:, [-1], :]) # note: using list [-1] to preserve the time dim

        return (logits, new_kv)
219 |
--------------------------------------------------------------------------------
/bark/model_fine.py:
--------------------------------------------------------------------------------
1 | """
2 | Much of this code is adapted from Andrej Karpathy's NanoGPT
3 | (https://github.com/karpathy/nanoGPT)
4 | """
5 | from dataclasses import dataclass
6 | import math
7 |
8 | import torch
9 | import torch.nn as nn
10 | from torch.nn import functional as F
11 |
12 | from .model import GPT, GPTConfig, MLP
13 |
14 |
class NonCausalSelfAttention(nn.Module):
    """Multi-head self-attention without a causal mask.

    Every position may attend to every other position, which is what the
    non-autoregressive fine stage needs. Structure mirrors nanoGPT's
    CausalSelfAttention, but with ``is_causal=False``.
    """

    def __init__(self, config):
        super().__init__()
        assert config.n_embd % config.n_head == 0
        # key, query, value projections for all heads, but in a batch
        self.c_attn = nn.Linear(config.n_embd, 3 * config.n_embd, bias=config.bias)
        # output projection
        self.c_proj = nn.Linear(config.n_embd, config.n_embd, bias=config.bias)
        # regularization
        self.attn_dropout = nn.Dropout(config.dropout)
        self.resid_dropout = nn.Dropout(config.dropout)
        self.n_head = config.n_head
        self.n_embd = config.n_embd
        self.dropout = config.dropout
        # flash attention make GPU go brrrrr but support is only in PyTorch >= 2.0
        self.flash = (
            hasattr(torch.nn.functional, "scaled_dot_product_attention")
        )

    def forward(self, x):
        """Apply bidirectional multi-head self-attention to ``x``.

        Args:
            x: tensor of shape (B, T, C) with C == n_embd.

        Returns:
            Tensor of shape (B, T, C).
        """
        B, T, C = x.size() # batch size, sequence length, embedding dimensionality (n_embd)

        # calculate query, key, values for all heads in batch and move head forward to be the batch dim
        q, k, v = self.c_attn(x).split(self.n_embd, dim=2)
        k = k.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)
        q = q.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)
        v = v.view(B, T, self.n_head, C // self.n_head).transpose(1, 2) # (B, nh, T, hs)

        # non-causal self-attention: (B, nh, T, hs) x (B, nh, hs, T) -> (B, nh, T, T)
        if self.flash:
            # efficient attention using Flash Attention CUDA kernels.
            # BUGFIX: scaled_dot_product_attention applies dropout
            # unconditionally — it knows nothing about module.training — so
            # dropout must be disabled explicitly in eval mode. The manual
            # path below gets this for free via nn.Dropout.
            y = torch.nn.functional.scaled_dot_product_attention(
                q, k, v,
                attn_mask=None,
                dropout_p=self.dropout if self.training else 0.0,
                is_causal=False,
            )
        else:
            # manual implementation of attention
            att = (q @ k.transpose(-2, -1)) * (1.0 / math.sqrt(k.size(-1)))
            att = F.softmax(att, dim=-1)
            att = self.attn_dropout(att)
            y = att @ v # (B, nh, T, T) x (B, nh, T, hs) -> (B, nh, T, hs)
        y = (
            y.transpose(1, 2).contiguous().view(B, T, C)
        ) # re-assemble all head outputs side by side

        # output projection
        y = self.resid_dropout(self.c_proj(y))
        return y
62 |
63 |
class FineBlock(nn.Module):
    """Pre-norm transformer block for the fine stage.

    A non-causal attention sub-layer followed by an MLP sub-layer, each
    applied to a LayerNorm'd input and added back through a residual
    connection.
    """

    def __init__(self, config):
        super().__init__()
        # Attribute names are kept as-is: they are state_dict keys.
        self.ln_1 = nn.LayerNorm(config.n_embd)
        self.attn = NonCausalSelfAttention(config)
        self.ln_2 = nn.LayerNorm(config.n_embd)
        self.mlp = MLP(config)

    def forward(self, x):
        # Residual attention sub-layer (pre-norm), then residual MLP sub-layer.
        attn_out = self.attn(self.ln_1(x))
        x = x + attn_out
        mlp_out = self.mlp(self.ln_2(x))
        return x + mlp_out
76 |
77 |
class FineGPT(GPT):
    """Non-causal GPT that predicts one EnCodec codebook at a time.

    Reuses the GPT skeleton but swaps in one embedding table per codebook
    (``wtes``), non-causal ``FineBlock`` layers, and one output head per
    codebook that has to be predicted.
    """

    def __init__(self, config):
        super().__init__(config)
        # The parent constructor built a causal transformer and a single
        # lm_head; both are replaced below, so drop the head it created.
        del self.lm_head
        self.config = config
        self.n_codes_total = config.n_codes_total
        self.transformer = nn.ModuleDict(
            dict(
                # one embedding table per codebook
                wtes=nn.ModuleList(
                    [
                        nn.Embedding(config.input_vocab_size, config.n_embd)
                        for _ in range(config.n_codes_total)
                    ]
                ),
                wpe=nn.Embedding(config.block_size, config.n_embd),
                drop=nn.Dropout(config.dropout),
                h=nn.ModuleList([FineBlock(config) for _ in range(config.n_layer)]),
                ln_f=nn.LayerNorm(config.n_embd),
            )
        )
        # One linear head for each codebook that gets predicted (codebooks
        # below n_codes_given are conditioning only and have no head).
        self.lm_heads = nn.ModuleList(
            [
                nn.Linear(config.n_embd, config.output_vocab_size, bias=False)
                for _ in range(config.n_codes_given, self.n_codes_total)
            ]
        )
        # Weight tying: the embedding table of codebook i+1 shares its
        # parameters with lm_heads[i]. NOTE(review): this fixed offset of 1
        # lines up with the head selection in forward() only when
        # n_codes_given == 1 — confirm before using other values.
        for i in range(self.n_codes_total - config.n_codes_given):
            self.transformer.wtes[i + 1].weight = self.lm_heads[i].weight

    def forward(self, pred_idx, idx):
        """Return per-position logits for codebook ``pred_idx``.

        Args:
            pred_idx: index (>= 1) of the codebook to predict.
            idx: long tensor of shape (b, t, n_codes_total) holding every
                codebook's token ids at every position.

        Returns:
            Tensor of shape (b, t, output_vocab_size).
        """
        device = idx.device
        b, t, codes = idx.size()
        assert (
            t <= self.config.block_size
        ), f"Cannot forward sequence of length {t}, block size is only {self.config.block_size}"
        assert pred_idx > 0, "cannot predict 0th codebook"
        assert codes == self.n_codes_total, (b, t, codes)
        pos = torch.arange(0, t, dtype=torch.long, device=device).unsqueeze(0)  # shape (1, t)

        # forward the GPT model itself
        tok_embs = [
            wte(idx[:, :, i]).unsqueeze(-1) for i, wte in enumerate(self.transformer.wtes)
        ]  # each entry has shape (b, t, n_embd, 1)
        tok_emb = torch.cat(tok_embs, dim=-1)  # (b, t, n_embd, n_codes_total)
        pos_emb = self.transformer.wpe(pos)  # position embeddings of shape (1, t, n_embd)
        # Condition on codebooks 0..pred_idx by summing their embeddings.
        x = tok_emb[:, :, :, : pred_idx + 1].sum(dim=-1)
        x = self.transformer.drop(x + pos_emb)
        for block in self.transformer.h:
            x = block(x)
        x = self.transformer.ln_f(x)
        logits = self.lm_heads[pred_idx - self.config.n_codes_given](x)
        return logits

    def get_num_params(self, non_embedding=True):
        """
        Return the number of parameters in the model.
        For the non-embedding count (default), the position embeddings and all
        per-codebook token embeddings are subtracted — including the tables
        that share weights with the output heads.
        """
        n_params = sum(p.numel() for p in self.parameters())
        if non_embedding:
            for wte in self.transformer.wtes:
                n_params -= wte.weight.numel()
            n_params -= self.transformer.wpe.weight.numel()
        return n_params
144 |
145 |
@dataclass
class FineGPTConfig(GPTConfig):
    """GPT configuration extended with codebook counts for the fine stage."""

    # total number of EnCodec codebooks the model embeds
    n_codes_total: int = 8
    # number of leading codebooks given as conditioning (not predicted)
    n_codes_given: int = 1
150 |
--------------------------------------------------------------------------------
/model-card.md:
--------------------------------------------------------------------------------
1 | # Model Card: Bark
2 |
3 | This is the official codebase for running the text to audio model, from Suno.ai.
4 |
5 | The following is additional information about the models released here.
6 |
7 | ## Model Details
8 |
9 | Bark is a series of three transformer models that turn text into audio.
10 | ### Text to semantic tokens
11 | - Input: text, tokenized with [BERT tokenizer from Hugging Face](https://huggingface.co/docs/transformers/model_doc/bert#transformers.BertTokenizer)
12 | - Output: semantic tokens that encode the audio to be generated
13 |
14 | ### Semantic to coarse tokens
15 | - Input: semantic tokens
16 | - Output: tokens from the first two codebooks of the [EnCodec Codec](https://github.com/facebookresearch/encodec) from facebook
17 |
18 | ### Coarse to fine tokens
19 | - Input: the first two codebooks from EnCodec
20 | - Output: 8 codebooks from EnCodec
21 |
22 | ### Architecture
23 | | Model | Parameters | Attention | Output Vocab size |
24 | |:-------------------------:|:----------:|------------|:-----------------:|
25 | | Text to semantic tokens | 80 M | Causal | 10,000 |
26 | | Semantic to coarse tokens | 80 M | Causal | 2x 1,024 |
27 | | Coarse to fine tokens | 80 M | Non-causal | 6x 1,024 |
28 |
29 |
30 | ### Release date
31 | April 2023
32 |
33 | ## Broader Implications
34 | We anticipate that this model's text to audio capabilities can be used to improve accessibility tools in a variety of languages.
35 | Straightforward improvements will allow models to run faster than realtime, rendering them useful for applications such as virtual assistants.
36 |
37 | While we hope that this release will enable users to express their creativity and build applications that are a force
38 | for good, we acknowledge that any text to audio model has the potential for dual use. While it is not straightforward
39 | to voice clone known people with Bark, it can still be used for nefarious purposes. To further reduce the chances of unintended use of Bark,
40 | we also release a simple classifier to detect Bark-generated audio with high accuracy (see notebooks section of the main repository).
41 |
--------------------------------------------------------------------------------
/notebooks/fake_classifier.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "e330c1de",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import torchaudio\n",
11 | "from transformers import HubertModel\n",
12 | "from sklearn.metrics import PrecisionRecallDisplay"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": null,
18 | "id": "2ac3dd88",
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "# use hubert from HF for feature embedding\n",
23 | "model = HubertModel.from_pretrained(\"facebook/hubert-base-ls960\")\n",
24 | "arr, sr = torchaudio.load(\"my_audio.wav\")\n",
25 | "if sr != 16_000:\n",
26 | " arr = torchaudio.functional.resample(arr, sr, 16_000)\n",
27 | "# use intermediate layer\n",
28 | "hidden_state = model(arr[None], output_hidden_states=True).hidden_states[6]\n",
29 | "# take mean over time\n",
30 | "feats = hidden_state.detach().cpu().numpy().squeeze().mean(0)"
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": null,
36 | "id": "03a602e0",
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "import pickle\n",
41 | "\n",
42 | "# load sk-learn classifier from here: https://dl.suno-models.io/bark/models/v0/classifier.pkl\n",
43 | "with open(\"classifier.pkl\", \"rb\") as f:\n",
44 | "    clf = pickle.load(f)"
43 | ]
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "id": "8e423794",
48 | "metadata": {},
49 | "source": [
50 | "### Precision-recall curve on test set"
51 | ]
52 | },
53 | {
54 | "attachments": {
55 | "image.png": {
56 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjcAAAHFCAIAAACB+E92AAAMPmlDQ1BJQ0MgUHJvZmlsZQAASImVVwdYU8kWnltSIbQAAlJCb4JIDSAlhBZAercRkgChxBgIKnZkUcG1oGIBG7oqothpdsTOotj7YkFFWRcLduVNCui6r3xvvm/u/PefM/85c+7MvXcAUD/OFYtzUQ0A8kQFktiQAEZySiqD9BTggAyogAKMuLx8MSs6OgLAMtj+vby7DhBZe8VBpvXP/v9aNPmCfB4ASDTE6fx8Xh7EBwDAq3liSQEARBlvPqVALMOwAm0JDBDiBTKcqcDVMpyuwHvkNvGxbIjbACCrcrmSTADULkGeUcjLhBpqfRA7ifhCEQDqDIh98/Im8SFOg9gG2oghlukz03/QyfybZvqQJpebOYQVc5EXcqAwX5zLnfZ/puN/l7xc6aAPK1hVsyShsbI5w7zdzJkULsOqEPeK0iOjINaC+IOQL7eHGKVmSUMTFPaoIS+fDXMGdCF24nMDwyE2hDhYlBsZoeTTM4TBHIjhCkGnCgs48RDrQbxAkB8Up7TZKJkUq/SFNmRI2Cwlf5YrkfuV+bovzUlgKfVfZwk4Sn1MrSgrPgliKsQWhcLESIjVIHbMz4kLV9qMLspiRw7aSKSxsvgtII4ViEICFPpYYYYkOFZpX5aXPzhfbGOWkBOpxPsKsuJDFfnB2nhcefxwLtglgYiVMKgjyE+OGJwLXxAYpJg79kwgSohT6nwQFwTEKsbiVHFutNIeNxPkhsh4M4hd8wvjlGPxxAK4IBX6eIa4IDpeESdelM0Ni1bEgy8FEYANAgEDSGFNB5NANhB29Db2wjtFTzDgAgnIBALgoGQGRyTJe0TwGgeKwJ8QCUD+0LgAea8AFEL+6xCruDqADHlvoXxEDngCcR4IB7nwXiofJRrylggeQ0b4D+9cWHkw3lxYZf3/nh9kvzMsyEQoGemgR4b6oCUxiBhIDCUGE21xA9wX98Yj4NUfVmeciXsOzuO7PeEJoZPwkHCN0EW4NVFYLPkpyjGgC+oHK3OR/mMucCuo6YYH4D5QHSrjurgBcMBdoR8W7gc9u0GWrYxblhXGT9p/m8EPT0NpR3GioJRhFH+Kzc8j1ezU3IZUZLn+MT+KWNOH8s0e6vnZP/uH7PNhG/6zJbYA24+dwU5g57DDWCNgYMewJqwdOyLDQ6vrsXx1DXqLlceTA3WE//A3+GRlmcx3qnPqcfqi6CsQTJW9owF7kniaRJiZVcBgwS+CgMER8RxHMJydnF0AkH1fFK+vNzHy7wai2/6dm/cHAD7HBgYGDn3nwo4BsNcDbv/m75wNE346VAA428yTSgoVHC67EOBbQh3uNH1gDMyBDZyPM3AH3sAfBIEwEAXiQQqYAKPPgutcAqaAGWAuKAXlYClYCdaCDWAz2A52gX2gERwGJ8BpcAFcAtfAHbh6usEL0Afegc8IgpAQGkJH9BETxBKxR5wRJuKLBCERSCySgqQhmYgIkSIzkHlIOVKBrEU2IbXIXqQZOYGcQzqRW8gDpAd5jXxCMVQV1UaNUCt0JMpEWWg4Go+ORzPRyWgRWoIuRlejNehOtAE9gV5Ar6Fd6Au0HwOYCqaLmWIOGBNjY1FYKpaBSbBZWBlWidVg9VgLfM5XsC6sF/uIE3E6zsAd4AoOxRNwHj4Zn4Uvwtfi2/EGvA2/gj/A+/BvBBrBkGBP8CJwCMmETMIUQimhkrCVcJBwCu6lbsI7IpGoS7QmesC9mELMJk4nLiKuI+4mHid2Eh8R+0kkkj7JnuRDiiJxSQWkUtIa0k7SMdJlUjfpA1mFbEJ2JgeTU8kicjG5kryDfJR8mfyU/JmiQbGkeFGiKHzKNMoSyhZKC+UipZvymapJtab6UOOp2dS51NXUeuop6l3qGxUVFTMVT5UYFaHKHJXVKntUzqo8UPmoqqVqp8pWHacqVV2suk31uOot1Tc0Gs2K5
k9LpRXQFtNqaSdp92kf1OhqjmocNb7abLUqtQa1y2ov1Snqluos9QnqReqV6vvVL6r3alA0rDTYGlyNWRpVGs0aNzT6NemaozSjNPM0F2nu0Dyn+UyLpGWlFaTF1yrR2qx1UusRHaOb09l0Hn0efQv9FL1bm6htrc3RztYu196l3aHdp6Ol46qTqDNVp0rniE6XLqZrpcvRzdVdortP97rup2FGw1jDBMMWDqsfdnnYe73hev56Ar0yvd161/Q+6TP0g/Rz9JfpN+rfM8AN7AxiDKYYrDc4ZdA7XHu493De8LLh+4bfNkQN7QxjDacbbjZsN+w3MjYKMRIbrTE6adRrrGvsb5xtvML4qHGPCd3E10RossLkmMlzhg6DxchlrGa0MfpMDU1DTaWmm0w7TD+bWZslmBWb7Ta7Z041Z5pnmK8wbzXvszCxGGMxw6LO4rYlxZJpmWW5yvKM5Xsra6skq/lWjVbPrPWsOdZF1nXWd21oNn42k21qbK7aEm2Ztjm262wv2aF2bnZZdlV2F+1Re3d7of06+84RhBGeI0QjakbccFB1YDkUOtQ5PHDUdYxwLHZsdHw50mJk6shlI8+M/Obk5pTrtMXpziitUWGjike1jHrtbOfMc65yvupCcwl2me3S5PLK1d5V4Lre9aYb3W2M23y3Vrev7h7uEvd69x4PC480j2qPG0xtZjRzEfOsJ8EzwHO252HPj17uXgVe+7z+8nbwzvHe4f1stPVowegtox/5mPlwfTb5dPkyfNN8N/p2+Zn6cf1q/B76m/vz/bf6P2XZsrJZO1kvA5wCJAEHA96zvdgz2ccDscCQwLLAjiCtoISgtUH3g82CM4PrgvtC3EKmhxwPJYSGhy4LvcEx4vA4tZy+MI+wmWFt4arhceFrwx9G2EVIIlrGoGPCxiwfczfSMlIU2RgFojhRy6PuRVtHT44+FEOMiY6pinkSOyp2RuyZOHrcxLgdce/iA+KXxN9JsEmQJrQmqieOS6xNfJ8UmFSR1JU8Mnlm8oUUgxRhSlMqKTUxdWtq/9igsSvHdo9zG1c67vp46/FTx5+bYDAhd8KRieoTuRP3pxHSktJ2pH3hRnFruP3pnPTq9D4em7eK94Lvz1/B7xH4CCoETzN8MioynmX6ZC7P7Mnyy6rM6hWyhWuFr7JDszdkv8+JytmWM5CblLs7j5yXltcs0hLliNomGU+aOqlTbC8uFXdN9pq8cnKfJFyyNR/JH5/fVKANf+TbpTbSX6QPCn0Lqwo/TEmcsn+q5lTR1PZpdtMWTntaFFz023R8Om966wzTGXNnPJjJmrlpFjIrfVbrbPPZJbO754TM2T6XOjdn7u/FTsUVxW/nJc1rKTEqmVPy6JeQX+pK1UolpTfme8/fsABfIFzQsdBl4ZqF38r4ZefLncory78s4i06/+uoX1f/OrA4Y3HHEvcl65cSl4qWXl/mt2x7hWZFUcWj5WOWN6xgrChb8XblxJXnKl0rN6yirpKu6lodsbppjcWapWu+rM1ae60qoGp3tWH1wur36/jrLq/3X1+/wWhD+YZPG4Ubb24K2dRQY1VTuZm4uXDzky2JW878xvytdqvB1vKtX7eJtnVtj93eVutRW7vDcMeSOrROWtezc9zOS7sCdzXVO9Rv2q27u3wP2CPd83xv2t7r+8L3te5n7q8/YHmg+iD9YFkD0jCtoa8xq7GrKaWpszmsubXFu+XgIcdD2w6bHq46onNkyVHq0ZKjA8eKjvUfFx/vPZF54lHrxNY7J5NPXm2Laes4FX7q7Ong0yfPsM4cO+tz9vA5r3PN55nnGy+4X2hod2s/+Lvb7wc73DsaLnpcbLrkeamlc3Tn0ct+l09cCbxy+irn6oVrkdc6rydcv3lj3I2um/ybz27l3np1u/D25ztz7hLult3TuFd53/B+zR+2f+zucu868iDwQfvDuId3HvEevXic//hLd8kT2pPKpyZPa585PzvcE9xz6fnY590vxC8+9
5b+qfln9Uublwf+8v+rvS+5r/uV5NXA60Vv9N9se+v6trU/uv/+u7x3n9+XfdD/sP0j8+OZT0mfnn6e8oX0ZfVX268t38K/3R3IGxgQcyVc+a8ABiuakQHA620A0FIAoMPzGXWs4vwnL4jizCpH4D9hxRlRXtwBqIf/7zG98O/mBgB7tsDjF9RXHwdANA2AeE+AurgM1cGzmvxcKStEeA7YGPM1PS8d/JuiOHP+EPfPLZCpuoKf238BHZF8R8qybZYAAAA4ZVhJZk1NACoAAAAIAAGHaQAEAAAAAQAAABoAAAAAAAKgAgAEAAAAAQAAAjegAwAEAAAAAQAAAcUAAAAAljFVdgAAQABJREFUeAHtnQeYFMW6hg8sOecoOUiSICgCoqIIAhKMoChBUNGjJEUJAoICogJKRgVRjygq4FEEFEUBRUBAESRJzjnnZZf7Ql3r9DMzO9O7O7M7O/v19XCrq6srvD1bX/9Vf1eluXz58r90iIAIiIAIiEBYEkgblrVSpURABERABETgCgGplH4HIiACIiAC4UtAKhW+z0Y1EwEREAERkErpNyACIiACIhC+BKRS4ftsVDMREAEREAGplH4DIiACIiAC4UtAKhW+z0Y1EwEREAERkErpNyACIiACIhC+BKRS4ftsVDMREAEREAGplH4D8SOwYMGCxx57rEKFClmzZi1atGjLli1XrlwZvywcqV9++eU0adI4IkIevO222yjRHJkzZ65Wrdpbb70VGxsbioIphQYGzDnUEP5p7pX/nyNHjrp1637yyScBa5X4BB7tgjxH4rNVDqmNgFQqtT3xxLZ3woQJ27dv79at25w5c95+++2DBw/edNNNSFdi803C+0uXLv3r1WP69OkIbY8ePfr06ROK8imkc+fOAXMmDSkDJktMgvvvv58ilixZMnHixJMnTz788MPTpk1LTIa6VwSSjEC6JCtJBUUGgXHjxhUoUMC25a677ipbtuzQoUNvv/12GxnmAUwolNVUskmTJtiFY8eOffXVV9OnT++sOUtcnj9/nsTOyHiFbSn+77rm6uE/TSKvFixY0FSmTp069erVK1my5KRJk9CqRGYbhrdHR0djMqZLp54tDB9OAqskWyqB4FLtbU6JAkK2bNkqVaq0a9cu/0DmzZt3xx135MyZM0uWLBUrVhw2bJjP9Bg3jRo1Kly4MNpAst69e585c8am3Lp1a5s2bYoUKZIxY0a6XTL8448/zFWMOUaT8ubNy43Fixe/7777zp49a2/0E0CZatasSeJDhw6RjA7umWeeweCgdEr54IMPiPz777/p0Gk4McSj084Mjx8//txzz2GfcZU0TZs23bBhg0lAbnbEjyKef/75UqVKZcqUKU+ePLVq1bLDbh4jYww/vv7662inybBdu3a7d++2JdLMKlWq/Pbbb/Xr1wcm5b722mvxGrEsUaJE/vz5Dxw4YPPEujJ1y5AhA8Zl9+7dndjJfMyYMdWrV4dtrly5ULuvvvrK3Ov/edn8XQYw7xBRflEcFDd58mRzI5raoUMHZyZXhg7/GTz86aef4PzRRx/xFKg80P766y9i7O3mxrlz5xJpa+7/mTrLUjjZCeiNI9kfQcquwIkTJ1atWuXfkKK/ePzxx2+99VZ6f/rxTZs2rV271mez6Tvo5eklmfSirx8+fPjy5cvtcCKXYmJi6MHRocOHDzN+hUKQDyOQzZo1o9eeMmUK3eiePXsQxYsXL9KJ+yzFI3LLli28eufOndvEf/nll4sXLx4wYEChQoWo7bp165jIocQRI0YQ8+2333bt2pXSBw4cSPpTp07dfPPNVODFF1+sXbv26dOnFy1atG/fPjTGo5SePXvSk2Kx1ahRAw2AwJEjRzzSmNOnnnrqnXfeQSzvvvtucu7fvz8dMZDz5ctnEuzfv79t27Z0ytRh1qxZDFei3IiZz9y8I3lkR48etXYe8smjQQj79u1btWpVunjavmbNmu+//55undtRiP/85z+dO
nUaPHgwMkZNqJXJ1v/z8i7aTwyFvvLKK/feey/t4m0GPjt27PCT3uMSEFA4fmBp06YtVqwYkN9//33qbJNNnTrVvEMQ4/+Z2lsUCBcCDGvoEIEEE6C7pItfsWJFXDnQjzNjT1fOK7l3GtPXe8eTmKGbhQsX8neyevVqEiAMhPF08E78xRdfcAm7yvuSdww9cuXKlcmcY+/evZhr3PvAAw+YlITpIunE7Y2NGzdmQI6e3cagH9hDJg0dN7fMnz/fXnUGuEQDTQwGUKtWrZxXbdgJYf369dz19NNP26vLli0jBgkxMdSfUyJtAmxZKmlPvQMmQ9qLcvOK0KJFi+zZs9tHhl1Lz45xZm80PJl3JAbR5fZ+/frZqz4D3s+LZM52cUrNOXzejpUcFRXFb8nnVYy/9u3bOy85s/rxxx+p4S233OJMMHr0aCI3btxoInlY2Fjonzn1/0yd+SgcDgQ04sePWUcCCfCa//HHH48aNYpBM5MFvdWlfw7sHiKxeBhQots1L+b+S6K3YmwNk4U+i7E4OiPSm46bUbIyZcq88cYbI0eO/P333ynIZsXoEO/4TzzxBAN05GDj4wpgLpA5ByYIFhKd47vvvmsTYxdau4p5qR9++OGee+7BLPunWZcw6YhfunQptzCOVL58+YYNG9rb4wrceOONJEYUMYzOnTsXVzLT5zoHuLiRYUaqYW+BD5H2FAPImh0wt/V0Iho/fjzthRK1pRoMNtpHNnv2bBQUhvZGOnEeFvWkCBLz77///W9bnDPg53k5kwUMI/PUPK5SAt5OAsZ4ncl4psgS9pOJpL0XLlzo2LEjpwGfqTMfhcOBgFQqHJ5CiqzDoEGDGL8aMmQItoVtALbF1f7/yj+ICvFmvgdzxKaJK8BwGaN2WAlkSxfJ2/3MmTNJbPp0+k16ajpQRvyuv/56ZlYYecNQIwEFMTzFeA7dHGEOnA/jKsWkJ3OMCYaVGDNkOAv7yaZnVsyGGZSj72ZWxjaKACpFAmPb0To3TSM9b/eMCjKc2KBBAxQXu4rhMluQDZhhQGcduISaOocHmX6z6QnQHVvZo+22qsbOMykffPBBmswbA04TGFJM79nSmaD6888/7V0ESMAbtG0gbwzoorNEE/b/vLzT+4lx/yOJKxMPYhDGZPzwww/NqxJyha5jQ3N7wGcaVxGKTy4CmpdKLvIpu1wkijl/DkainC3BoGE2xcTQexJATvjXOf/vTO8MM//EEBz6ZEwoLplpJ5uGkR8zJc6w1WeffUbpDGExFUEC5I2DLgntQVSY2cK9gr7Y3usMMF6H84Izxhl22nwYVfTRjz76qPdrPn4Q3EXr3DSNlMy0AY0DVTBGVfPmza2fha2AUSBmtpziBxY7KWVT+gx8/fXXGA3mEtpm01BP02Qmb7DMIIz/PVYUCcgZtwim9GxiEzAlciNUmQnzkAHSBHxeHhn6ObU/EqaUvJPxvGyjzFUU1AOI86mZNFhOn3/+OVYac4ooNF9QmPiAz9S7AopJXgJSqeTlnyJLZ5YbhXjppZfMxIOzDfSMzs6RS7geYKmgJWiGd1fivNdcNdpm4nnxdyawYYatKH3GjBnM5NtIAigKLgx4LjAOyaW4VMp5i/8wA32YPgwwMqrGcJl3YhzZmfanv/bvP+K8EflkQI/JNubY8FzwcPEw+WDe3XDDDeYueljGPJkZcmYSV/i6666L65KNR85xtWB0lC+oEC3eKviQAHU0umuTmQANZOKKLt5pmZlL7p+XR57epzh28uwohfp4X8XHD2vPxvOOwoSTh0rZqzZAnrj84UOBSqFzDz30kLkU8JnaHBQIEwJSqTB5ECmmGkzk0C/zmRRudWZuxlTd+ox5tASvYm7hw1Umb/D0o4/evHkzfTSfKHmkRM94z+3SpQvix7gTSkMym4Z+iqFF3BzKlSuHYCAMxBjfBySQU+pDf8SsgzEL3MwV2cz9BBg8xPWDnh3XO7pLxhipPyYLJXIXRhve2CzAQU0YU
2LkDY8P+n20zSNP5JN41I42ojr4+9Eje0gUt1x77bXYo5iDeDSgEMbHDwsD08cjw8Sc8p5BtZlWZKSUJqD3eB9QBNVjNmvnzp3fffcdvgbUmYZjSjIGiwlI/XmHQLOp9rPPPuv/ecWreoDFKKdWAEROeK3BDQ+DCdOTfKjAI488wtQmk0/MwDHka2wv/0Uge4gxs5g47+A66BzU9f9M/Werq8lAIBxcOFSHFETADsd5/Fj9NwGHMW5k1IsODp80XMxNeg83MCZOTN9NN4SwGVOJ12ES00tigmAnkQnKR3+K1waTRlzCJsDBgfFA+lBsAgrisxiTv/e/XGV+wjvexNAoBvc8rm7bto1FoXgxRzupGL0zvbZNc+zYMVbiQCC5ytwYYsk4ns2NBpowMsaYGxJFJfnICUmgFzaXPCAwwgYf7EUyxGKgg+ZzNJOSf73rj/8bbbcJvAM+G9WrVy/i0VTSM8OEbYpAIv/05hhkVI9RPpMV9QE1HhbmKg8IkTaX/DwvEni0i5pzmBt9/ss0EhYkdg/P17iSm2QIJ8oENDNUy/uBMyvjb8LgnneeWF3mV+rthOn/mXpnpZhkJJCGss2D1L8iIAIiIAIiEG4E5OMXbk9E9REBERABEfgfAanU/1goJAIiIAIiEG4EpFLh9kRUHxEQAREQgf8RkEr9j4VCIiACIiAC4UZAKhVuT0T1EQEREAER+B8BqdT/WCgkAiIgAiIQbgRS5Fe9fD/BmjGsNuZ/LYNwY636iIAIiIAIeBDgayg+lmfNGr5k97hkTlOkSiFRPtf78tlCRYqACIiACIQ5AT5dd65d6axtilQprCjaQKtY+8TZGIVFQAREQARSFgF29sHqML26z5qnSJUyA31IlFTK50NVpAiIgAikLAJ+pm98jwOmrOaptiIgAiIgApFKQCoVqU9W7RIBERCBSCAglYqEp6g2iIAIiECkEpBKReqTVbtEQAREIBIISKUi4SmqDSIgAiIQqQSkUpH6ZNUuERABEYgEAlKpSHiKaoMIiIAIRCoBqVSkPlm1SwREQAQigYBUKhKeotogAiIgApFKQCoVqU9W7RIBERCBSCAQWpVatGhR8+bNWeyW1S++/PLLuIAtXLiwZs2amTJlKl269MSJE+NKpngREAEREIHURiC0KnXmzJlq1aqNHTvWD9Zt27Y1bdq0fv36v//+e9++fbt27Tpjxgw/6XVJBERABEQg9RAI7WqzTa4e/mliPBUvXvytt94iWcWKFVesWPHmm2/ed999/u9K5NWY2Mv7TpxLZCa6XQTiRSB/9owZ00XF6xYlFgERCK1KueH766+/NmrUyKZs3Ljx5MmTo6Oj06dPbyMJXLh6mBhWendeSkD4yJkLNw//MQE36hYRSDCBa3JnXvDcbRnShXYAI8HV040iEJ4Ekl+l9u/fX7BgQUuH8KVLlw4fPly4cGEbSWDYsGGDBg1yxiQynFGdRSIJ6vb4ELhwKXb3sXPHzl4smCNTfO5TWhFI7QSSX6V4As6dRdhd2CPGPKI+ffr07NnThM2uWSacsH8LZM+08dUmCbtXd4lAAgiU7TvnUuyV37YOERCBeBFIfpUqVKgQ5pSt9MGDB9OlS5c3b14bYwIZrx4ekToVAREQARGIbALJP0Rep06d+fPnW8rfffddrVq1PCal7FUFREAEREAEUhWB0KrU6dOn/7h6wBSPc4I7d+4kzPBdu3btDOguXbrs2LGD0bz169dPmTIF14nnn38+VT0DNVYEUhUBRvVxso3V+GeqeuqJaGxoR/xwK2/QoIGpnplVat++/dSpU/ft22fkikulSpWaM2dOjx49xo0bx/e/o0ePDrUbeiJw6VYRiBAC5y7GXLwUeymW/y6fPBcdc/nypZjL0TFXTg+fuhCVNg2BK//FxJ44F338bHTWjFEkQl2IWbfvZJFcmbkawy2xsQdOnj99/lLurBmuXuWu2N+2HytfMBunV2JiL5OAe7NmiLpyy9UYIBbMk
XH2s/Xxzo8QoGpGyAiEVqVuu+024w3hUX+Eyhlz6623rlq1yhmjsAikZgJ05QjGxZjY8xdj6OHp4jk9df7SGTwFL/8LGThy+uLpC5cyRKVFJI6evnjw1IVcWdKTjAQb9p8sljtLNDlciv1165EKhbJzLxluP3I2e6Z0V0Qi5jI5hxrvpgOnPYo4czHGGXPg5IW1e080uLaAM1JhEfAmEFqV8i5PMSKQOglckZd//avfrLUl8mbBiNl34vyhU+dzZcmAhGCpYJ0Uz3Ml/vylGE4TicipEBv2n7K5oXM2bAOYTUgXp4VyZEoXlSZ9VFpiNh88fWPJPARMDIqYNs2/qGG6qLTpiEybds/xs5UK5+QqpyRD9vhgOS/m0pWr/x+TLxufe1wJk5780cir6a/k/+jkZVTs2JmLB08ixFfMtdjYf/E/lBXF5ZMyY9htP3ImV+b0oEOAYy9frl8uX9VrctmaK5BKCEilUsmDVjPDgsD36w/EVY+dR8/GdSl3lvSZ00elT5eWNOUKZMuTNQNawqdXOTKlL5orM8qBvHEv+ocMEIi+dJkviI0kMJRXIPsVtTACwy1GWjJniMqUPop453cgcVUg6PFGO3t+tjpeOX+yfOfPL94er1uUOAIISKUi4CGqCSmAwE2l8yzderRhxYJITuFcaAQWzL8yZ0jLR75oCGKRJUM6Ru2QH1QkZ+arshSVNn1U8qhIkgHFRLMqi7JiizGFhc1XsUgO2k41MDqrFs159mLMd+sOMH/ms2JMK5hBUQxTzDL+vXjpyhjpoasTbNhqqDgCjzFnkmE73lo+f5WiOX3mpshwI5DG57xRuNXSoz581ZszZ84TJ07kyJHD45JORUAEUgQBptAwENFnN7VlBLLhyIWkROxRmpU7jjH8iA7x3+HTF93k4JGmdP6srFblEanTZCEQsD+XLZUsz0WFikBqJ5A1Yzw6nzT/aBn2qAHnZ4CUBLgOpk+b5vylWLSw8hWzLC0zb5hW110xyy59v/7gyXNXpugQOdwbsbQIMB2Ib8oV5bsUu+f4OW7BCDOnZQpkq18uf2p/YMnX/nj8UJKvkipZBEQgVRMokz9btzvK7ThypmGlgoyPZkiHbKVBihgdRU6wybJlSpc+7ZUBUlwz/M+0rdl9ApU6fPpCyd7fuGRKYUv73KEFGF3iCnoyqVTQkSpDERCB4BPocWf5oGTK5J93Psgb6oYtVSpfVsIo398HT9crk5fATxsPYVTtPX4OreKzMBwO+Z4MnxQsMP7bcug0RqGxxlbvPsFHYFfDl3/bfvTK9wBXrTGyxeHl7TY19HGYN3k3MZqXckNJaURABCKHALNcFy7F8BkAgoQjC//6Mb/cm1z+AY1qXe2eGtf4T5M6r2peKnU+d7VaBEQgTgJlC2SL81qgC1hFfLnFJFbVa3LikciQ466j52qXysM3XvzHKht80UU8YZSwTH4ss7T/nrbqfHRsj+mrf9xwaNOBUyhilgxROIAUzpmJHSD4RvuGknnGt73ej1IGqlSEX9eIX4Q/YDVPBEQgMQQ2D2ly6PSF3HwocPWbswRkhUSZu75avdd5O0725nTu2v3Hzkajf/YqixyevngJPw4c641/B4mZctt/4rz5BJvPwPmGGnfHFTuO4u7IgCSVzJs14x+7jpMPVWVkku/nPnn8Jr6Ks9mm0IBG/FLog1O1RUAEUgaBuWv2PfXxqodrF+fDuNL5sqI6OGKwLgcrfjCP1WLsLzSDdT9YGpGPnfEIYQ0OK2yJbOGsp+vWKJ47kZmE+vaAI35SqVA/AuUvAiIgAr4J8LlqqT5zfF9zxLLCCNqGaVWpSI69J86VK5AdH5DdR8/VLJmbObVT56PLFcyOjYXymQFJVPDuMT+bDBhdvKtyoZGtqzvyC69gQJXSiF94PTDVRgREIPUQYC5q5IPVZv+5j0VJiubOnCldWtbByofXYFQaxhivLEri7qtnP8RYtuPLP/aEs0r5qby5JFsqICIlEAERE
IEURoCVfBdvPlwwe8bW7yyl6lhjH3euXTJf1jBshmypMHwoqpIIiIAIhJYA2321qFaEpTdMMaymgWgx+sfHWzhiMDx48NR59hhjbuyvvSfMti+sQ9/0ukK1SuYJbc3in7tsqfgz0x0iIAIikEII4Bxfe+gPLivLbmTzut/iMnGwksmWChZJ5SMCIiACKY+Ax8JOmdKz6vwVz/hKhXNgYPFvnmwZdh89WyJvVhzlmcQKwxbKeyIMH4qqJAIiIAJBI7BtWFOW22B9JpbbiCtTvjL2+JwrrpRJHy+VSnrmKlEEREAEko4AnoS4qvsvjwU1SMBK841GLeSLYFYjZAuuIfdc5/+upLl6ZadnHSIgAiIgAqmZgB3r23Tg9NZDZ3YfO/fxsp37TpxjWivZ9yCULZWaf5lquwiIgAhcIYDl9NCNxdkomUUFC+XM+NjUFUTWGbaAf9vWLp68RpVU6soT0iECIiACqZzAsHv/f3yPDUecKDCqOtYrlZglep25JSCsEb8EQNMtIiACIhCxBFjHfe2gxn8MuLNv0wqmkY3fWsT3VcnVYKlUcpFXuSIgAiIQpgRYEhCHwGZVi5j68fXvoVMXkquuUqnkIq9yRUAERCCsCbCu0vbXmrFwe/LWUiqVvPxVugiIgAiIgD8CUil/dHRNBERABEQgeQnIxy95+at0ERABEQhrAgdOXpmRajb651L5sjapUuiFu/7fpSLJKi1bKslQqyAREAERSMEEth0+8/4v25O+AVKppGeuEkVABEQgxRCY0qFW/XL52K2RGl/m/5L80IhfkiNXgSIgAiKQcgjcXqEg/+0+dja5qixbKrnIq1wREAEREIHABKRSgRkphQiIgAiIQHIRkEolF3mVKwIiIAIiEJiAVCowI6UQAREQARFILgJSqeQir3JFQAREQAQCE5BKBWakFCIgAiKQyglc3cv3X+ejYx+YuCSJV56VSqXy356aLwIiIAKBCbCdh0n02/Zjv249EviG4KWQSgWPpXISAREQgQglUChnprfbVDeNS+I95qVSEfqbUrNEQAREIKgEWlYvWq9s3qBm6SozqZQrTEokAiIgAiKQLASkUsmCXYWKgAiIgAi4IiCVcoVJiURABERABJKFgFQqWbCrUBEQAREQAVcEpFKuMCmRCIiACIhAshCQSiULdhUqAiIgAiLgioBUyhUmJRIBERABEUgWAlKpZMGuQkVABERABFwRkEq5wqREIiACIiACyUJAKpUs2FWoCIiACIiAKwJSKVeYlEgEREAERCBZCEilkgW7ChUBERABEXBFQCrlCpMSiYAIiIAIJAsBqVSyYFehIiACIiACrghIpVxhUiIREAEREIFkIRBylRo/fnypUqUyZcpUs2bNxYsX+2zkuHHjKlasmDlz5muvvfbDDz/0mUaRIiACIiACqZBAupC2efr06d27d0eo6tWrN2nSpCZNmqxbt6548eLOQidMmNCnT5933333hhtuWL58+eOPP547d+7mzZs70ygsAiIgAiKQOgmE1pYaOXJkp06dOnfujKn01ltvFStWDE3yAP3RRx89+eSTrVu3Ll26dJs2bUg/fPhwjzQ6FQEREAERSJ0EQqhSFy9eXLlyZaNGjSxZwkuWLLGnJnDhwgXGA20k435YVNHR0TbGJjvpODyu6lQEREAERCAiCYRQpQ4fPhwTE1OwYEELjvD+/fvtqQk0btz4vffeQ88uX768YsWKKVOmIFHc65Fs2LBhOf85sMk8rupUBERABEQgIgmEUKUMrzRp0lhw6JDz1MT379+f+aqbbropffr0LVu27NChA/FRUVH2LhNg7urEP8euXbs8rupUBERABEQgIgmEUKXy5cuH2DiNp4MHDzpNKwOUIT7sp7Nnz27fvn3nzp0lS5bMnj0793rgzpgxYw7H4XFVpyIgAiIgAhFJIIQqlSFDBrzP58+fb8ERrlu3rj11BjCkrrnmGlTt008/vfvuu9OmDWHFnOUqLAIiIAIiE
M4EQuuJ3rNnz0cffbRWrVp16tR55513MJW6dOkCDobv9uzZYz6N2rRpE+4StWvXPnbsGD6Ba9eu/eCDD8IZmeomAiIgAiKQZARCq1L4lx85cmTw4MH79u2rUqXKnDlzSpQoQds4RbFMI/GwGDFixMaNGzGnGjRogBMgg35J1n4VJAIiIAIiEM4E0uDREM7181k3PNJx98OXgokqnwkUKQIiIAIiEHQCbd9b+svmI2+3qd6yetFgZR6wP9f0T7BQKx8REAEREIHgE5BKBZ+pchQBERABEQgWAalUsEgqHxEQAREQgeATkEoFn6lyFAEREAERCBYBVz5+eFgsXLiQfTf48JbPb/Pnz1+jRo2GDRtqpaJgPQblIwIiIAIi4JNAAFvq3LlzQ4cORY1YxOibb745fvw4H95u3rx54MCB7BrVtGnTpUuX+sxXkSIgAiIgAiKQeAIBbKny5cvzve3EiRNZE5bvmZzl7dixY9q0aXwR9dJLL7EplPOSwiIgAiIgAiIQFAIBvpdiJQi+xvVTEttzIFflypXzkybolwL61we9RGUoAiIgAiJQsvc3BsKrrao8ctOVJRoSfwTszwOM+PmXKOrHYn1JLFGJh6IcREAEREAEEkNgzpp9ibk9XvcGUCk/eZ05c2bRokV+EuiSCIiACIhAJBH4pfftBbJnTOIWJVyl8KFg2b0krq6KEwEREAERSC4CRXNl7tesYhKXnnCVSuKKqjgREAEREIFUSCCAjx9E8uTJ45MLa5n7jFekCIiACIiACASLQGCVunDhwlNPPXXdddd5FIlr36BBgzwidSoCIiACIiACQSQQWKWqV6/OV73t27f3KHX16tVSKQ8mOhUBERABEQgugcDzUs2aNWPJCe9SGQls166dd7xiREAEREAERCBYBALbUn379vVZGAbW+++/7/OSIkVABERABEQgKAQC21JBKUaZiIAIiIAIiEACCEilEgBNt4iACIiACCQRAalUEoFWMSIgAiIgAgkgIJVKADTdIgIiIAIikEQEpFJJBFrFiIAIiIAIJICAVCoB0HSLCIiACIhAEhGIh0o99thj/fr1s/XCQ50Ye6qACIiACIiACASdQODvpWyR27Zti42Ntad79uzZtWuXPVVABERABERABIJOIB4q9eOPPzqL/+CDD5ynCouACIiACIhA0AnEY8Qv6GUrQxEQAREQARHwTyCALfXVV1/5v79Fixb+E+iqCIiACIiACCSYQACVatWqlZ+s06RJo12m/PDRJREQAREQgUQSCKBSTneJRJak20VABERABEQgvgTiPS91/vz5+Jah9CIgAiIgAiKQMAJuVYqRvVdeeaVo0aLZsmXbunUrhfXv33/y5MkJK1V3iYAIiIAIiIAbAm5VasiQIVOnTn399dczZMhg8mWP+ffee89NGUojAiIgAiIgAgkj4FalPvzww3feeadt27ZRUVGmpKpVq27YsCFhpeouERABERABEXBDwK1KsdJE2bJlnTniWBEdHe2MUVgEREAEREAEgkvArUpVrlx58eLFzrI///zzGjVqOGMUFgEREAEREIHgEgjgiW4LGzhw4KOPPopFhQk1c+bMjRs3MgY4e/Zsm0ABERABERABEQg6Abe2VPPmzadPnz5nzhy+5B0wYMD69eu//vrrO++8M+gVUoYiIAIiIAIiYAm4taW4ofHVw96pgAiIgAiIgAiEmkA8VIqqrFixAisKc6pixYo1a9YMdeWUvwiIgAiIQCon4Faldu/e/dBDD/3yyy+5cuUC2fHjx+vWrfvJJ58UK1YslRNU80VABERABEJHwO28FNvy4neOIXX06kHg8uXLnTp1Cl3NlLMIiIAIiIAIuFUp3NAnTJhw7bXXGmQExowZ4+GbLpoiIAIiIAKRTeBC9JUd25dsOTJt2c6kaalblSpevLjHN7yXLl1iWb+kqaVKEQEREAERCAcCS7cdMdXoO2vNoVMXkqBKblWKFfyeffZZvCcY6KNaBLp16
/bmm28mQRVVhAiIgAiIQJgQ6HZHOVuT89ExNhy6QBqjOnEVkDt3bjz6zNUzZ85gP6VLd8XhwgSyZs3KLFVc94Yu/uTJkzlz5jxx4kSOHDlCV4pyFgEREAER8EmgQv+556NjF7/QoFieLD4TuI8M2J8H8PF766233BemlCIgAiIgAiIQXAIBVKp9+/bBLU+5iYAIiIAIiIB7AgFUyjujc+fOOd0oNObmjUgxIiACIiACwSLg1nuCSalnnnmmQIEC7NXLZJU9glUP5SMCIiACIiAC3gTcqtQLL7ywYMGC8ePHZ8yYkS16Bw0aVKRIEZZF985RMSIgAiIgAiIQLAJuR/xYAR1Nuu2221iEon79+uyIWKJEiY8//pjde4NVFeUjAiIgAiIgAh4E3NpSeJyXKlWKm5mIMt7nN99886JFizyy06kIiIAIiIAIBJGAW5UqXbr09u3bKbhSpUqfffYZAawrs/JsEGujrERABERABETAScCtSnXs2HH16tXc2adPHzM71aNHj169ejnzUlgEREAEREAEgkvArUqhSV27dqXsBg0abNiwgT07Vq1axSJJAWuDpDFUmClTJvajimt1Wua3qlWrliVLlsKFCyOHR478/zpRATNXAhEQAREQgcgm4FalnBRYefbee+9FV5yRPsNsQt+9e/d+/fr9/vvv+Fw0adJk586dHil//vnndu3asQnIX3/99fnnn//222+dO3f2SKNTERABERCB1EkggI/f6NGj/XMxBlZcaUaOHIn8GNVhsaVvv/2W7T+GDRvmTL906dKSJUuafLC6nnzySVa2dSZQWAREQAREINUSCKBSo0aN8oOGhWj9qNTFixdXrlzZu3dvm0OjRo2WLFliT02APX8xtubMmYOldfDgwS+++KJZs2YeaTi9cPUw8axO6J1AMSIgAiIgApFHIIBKbdu2LcFtPnz4cExMTMGCBW0OhPfv329PTQCVYl6qdevW58+fZ6n1Fi1asL+iRxpOscD4lNg7XjEiIAIiIAIRTCAh81LxwmE3/uAudglxnpp81q1bh0E2YMAADK958+ahi126dPEuAt9Ctuowx65du7wTKEYEREAERCDyCASwpRLT4Hz58kVFRTmNJwb0nKaVyRwjqV69esapvWrVquxZhZ/Fq6++ir+fs3RWZuJwxigsAiIgAiIQ8QRCaEtlyJAB7/P58+dbiIQZ37OnJnD27Nm0af9XDYSNeP97M3rkoFMREAEREIFIJfA/eQhFC3v27MnStFOmTFm/fj1fXOGGbkbzGL7D+9yU2Lx585kzZ+L7t3Xr1l9++YXRvxtvvJGlbENRH+UpAiIgAiKQsgiEcMQPEPhE8Inu4MGD9+3bV6VKFRz5WKOWeE7th1MdOnQ4derU2LFjn3vuOZZcuv3224cPH56yIKq2IiACIiACISKQxv3YGitHTJo0acuWLTiLFy1a9KOPPuLzJtacDVHN/GSLJ3rOnDnxpNAejH4o6ZIIiIAIhIhAhf5zz0fHLn6hQbE8WRJZRMD+3O2I34wZMxo3bpw5c2ZWkeDLJaqFATR06NBE1k+3i4AIiIAIiIAfAm5VCqe7iRMnvvvuu+nTpzfZ4QfBUn5+stYlERABERCBiCSAIUW7nv541c4jZ0PdQLcqtXHjxltuucVZG0bbjh8/7oxRWAREQAREIPUQWLPnxKzf94S6vW5Viq+XNm/e7KwNq8Sy6ZQzRmEREAEREIHUQODGknlMMy/FXjGqQnq4VSkWgWWfjmXLlrF4xN69e1nT6Pnnn3/66adDWjllLgIiIAIiEIYEPutSp0PdkklTMbee6C+88AI+dWwuxWp7DP2xDAQq9cwzzyRNLVWKCIiACIhA6iTgVqWgM2TIEBYvZ9m92NhY9pXPli1b6kSmVouACIiACCQZAbcjfh988MGZM2fYTrdWrVqsDSGJSrInpIJEQAREIDUTcKtSjO8VKFCgTZs2s2fPZn+N1IxMbRcBERABEdhz/BwQxizYv
GjToZDScKtSrGnE9vAsBYtQ4e+H34T3foYhragyFwEREAERCB8C89cdMJV56cu1Ia2VW5VKly7d3XffjWsfu2+wN/yOHTvwpChTpkxIK6fMRUAEREAEwpPAoBaVTcXOR8eEtIbx8J4w9WBqiqWSjh07hlCx0nlIK6fMRUAEREAEwpNA+7olbyiZp+noxaGunltbinqwERS2VNOmTdlWY9SoUa1atVq7NrSGXqgbr/xFQAREQATCnIBbW+qhhx76+uuvMaQeeOCBn376yXszwzBvp6onAiIgAiKQEgm4VSmWnMB7grE+JqhSYjtVZxEQAREQgZRIwK3kTJs2LSU2T3UWAREQARFI0QQCqNTo0aOfeOKJTJkyEfDZTjaA9xmvSBEQAREQARFIPIEAKoWXRNu2bVEpAt6FMQwolfLGohgREAEREIFgEQigUtu2bTMl2UCwClY+IiACIiACIhCQgFtP9MGDB+OJ7szu3LlzRDpjFBYBERABERCB4BJwq1KDBg06ffq0s2xEi0hnjMIiIAIiIAIiEFwCblXq8uXLzEI5y169enWePP+/XaMzXmEREAEREAERCBaBAPNSFJM7d270iaN8+fL8awqOiYnBtOrSpUuw6qF8REAEREAERMCbQGCVYm1ZDKnHHnuM8b2cOXOaLDJkyFCyZMk6dep456gYERABERABEQgWgcAq1b59eworVaoUqyKlT58+WAUrHxEQAREQAREISCCASp08eTJHjhzkUqNGDZz6ODxyNFc9InUqAiIgAiIgAkEhEEClmJRi/0N26c2VK5edlDIFG38KJqiCUg9lIgIiIAIiIALeBAKo1IIFC4wj348//uh9s2JEQAREQAREIKQEAqjUrbfeaoq3gZDWRpmLgAiIgAiIgJOA2++l5s2b9/PPP5s7x40bV7169Ycffpgde515KSwCIiACIiACwSXgVqV69eqFJwVlr1mzpmfPnuzYu3XrVgLBrY1yEwEREAEREAEngQAjfjYpq81WqlSJ0xkzZjRv3nzo0KGrVq1Cq2wCBURABERABEQg6ATc2lJ8xmtWm/3+++8bNWpEPfCqMNZV0OukDEVABERABETAEHBrS918882M79WrV2/58uVsLc/NmzZtuuaaa8RRBERABERABEJHwK0tNXbs2HTp0n3xxRcTJkwoWrQoFZo7d+5dd90VupopZxEQAREQARFwa0sVL1589uzZTl4+d+91JlBYBERABERABBJJwK1KUQzLTHz55Zfr169nEYqKFSu2bNkyKioqkcXrdhEQAREQARHwQ8CtSm3evBmPvj179lx77bWsjcSkVLFixb755psyZcr4yV2XREAEREAERCAxBNzOS3Xt2hVB2rVrFw7ov//++86dO1klncjElK17RUAEREAERMA/Abe21MKFC5cuXWo3582bN+9rr72Gy5//3HVVBERABERABBJDwK0tlTFjxlOnTjlLYq9ePqJyxigsAiIgAiIgAsEl4Fal7r777ieeeGLZsmVMSnFgV7GdfIsWLYJbG+UmAiIgAiIgAk4CblVq9OjRzEuxhXymqwdjfWXLln377bedeSksAiIgAiIgAsEl4HZeil0Q//vf/+Lphyc6thRr+qFSwa2KchMBERABERABDwKBVSo2NnbEiBF8KRUdHd2wYcMBAwZgTXnkolMREAEREAERCAWBwCN+w4cP7927d9asWQsXLjxy5Eh5n4fiMShPERABERABnwQCq9TUqVPHjBnz3XffMeKHRfXhhx8y4uczL0WKgAiIgAiIQHAJBFapHTt24OBnSm3cuDEStXfv3uBWQrmJgAiIgAiIgE8CgVXq4sWLmTNnNjezgh/fSF24cMFnXooUAREQAREQgeASCOw9QXn9+/fPkiWLKRjRGjJkSM6cOc0pM1XBrZByEwEREAEREAFLILBK3XLLLRs3brQ31K1bd+vWreYU08rGKyACIiACIiACQScQWKV++umnoJeqDEVABERABETAD
YHA81JuclEaERABERABEQgFgQAqxcLnZ86c8VMwK/uxy5SfBLokAiIgAiIgAgkmEECl1q1bV6JEiaeeemru3LmHDh0yxVy6dOnPP/8cP348c1Rt2rTJkSNHgovXjSIgAiIgAiLgh0AAleIb3gULFrBIUtu2bQsVKoQbevbs2dnFo0aNGlOmTOnQocOGDRvq16/vpwDEjP0SWVSpZs2aixcv9k5JJnhhOI/KlSt7J1OMCIiACIhAKiQQ2HuiatWqkyZNmjhxIvbT9u3bz507ly9fvurVq/NvQF7Tp0/v3r07QsUa6mTSpEkTjLPixYs7b2RhdcYVTQxWWrVq1R544AFnAoVFQAREQARSLYE0IV3uqHbt2tdff/2ECRMM34oVK7Zq1WrYsGFx4WYFpnvvvXfbtm0MM8aVhviTJ0/ywdaJEyc02OiHki6JgAiIQEgJrNt7sunoxQWyZ1zer2GCCwrYnwcY8UtwwdzI978rV65s1KiRzYTwkiVL7Kl3YPLkySy77l+ivO9SjAiIgAiIQKQSCDzil+CWHz58OCYmpmDBgjYHwvv377enHoF9+/bhozFt2jSPeHPKskx2ZSa012caRYqACIiACEQYgRDaUoYUbhEWGaOLzlMbbwIsvs5eiwwJesSbU8YJGeUzR7FixXymUaQIiIAIiECEEQihSuFeERUV5TSeDh486DStnCgRMJwGH330UdwInfE23KdPHyaizLFr1y4br4AIiIAIiEAEEwihSqE3eJ/Pnz/f4iPMJ1b21BlYuHAh29V36tTJGekM4/6Or4Q9nJcUFgEREAERiFQCbuelWIECf/EffvgBe4jPpywOu/KsjXEGevbsiXlUq1atOnXqvPPOOzt37uzSpQsJMIz27NnDx1g2MX4TOARWqVLFxiggAiIgAiIgAm5VqnPnzpg7SA77yvuZW/IA2rp16yNHjgwePBjPCBRozpw5xn+PUxTLJmYcb8aMGXw4ZWMUEAEREAEREAEIuP1eCr8G1uvj49xwoBbQvz4cKqk6iIAIiEBkEwiv76Vy586dJ0+eyCau1omACIiACIQbAbfeE6+88sqAAQPOnj0bbg1QfURABERABCKYgNt5qREjRmzZsgU/8pIlS6ZPn94SWbVqlQ0rIAIiIAIiIALBJeBWpeL62Da4tVFuIiACIiACIuAk4FalBg4c6LxNYREQAREQARFIAgJuVcpUhdVj169fjyd6pUqV2GIqCeqnIkRABERABFIzAbcqxce8bMv7008/4ZLOakZ84dSgQYNPP/00f/78qRmf2i4CIiACIhBSAm59/J599lm+Uvrrr7+OHj167NixtWvXctq1a9eQVk6Zi4AIiIAIpHICbm2pefPmff/992xjaHgx4jdu3Djn3lGpnKOaLwIiIAIiEAoCbm0p1u5zOqBTFU6dC/qFonLKUwREQAREIJUTcKtSt99+e7du3fbu3Wt4sVZsjx497rjjjlSOT80XAREQAREIKQG3KjV27NhTp07xSW+ZMmXKli1bqlQpTseMGRPSyilzERABERCBVE7A7bwU2+OyzAQbRG3YsAEfP+alGjZsmMrZqfkiIAIiIAKhJuBWpUw97rx6hLpOyl8EREAEREAEDIEAKjV69OgnnngiU6ZMBHwikzO6TyyKFAEREAERCAqBACo1atSotm3bolIEvMtjEQqplDcWxYiACIiACASLQACV2rZtmynJBoJVsPIRAREQAREQgYAE3Pr4OTOKiYn5448/WIHCGamwCIiACIiACASdgFuV6t69++TJkykeibrllluuv/56vP5Y1i/oFVKGIiACIiACImAJuFWpL774olq1atz29ddfb9++HX90dKtfv342IwVEQAREQAREIOgE3KrU4cOHCxUqRPFz5sx54IEHypcv36lTpzVr1gS9QspQBERABERABCwBtyrFXvLr1q1juI9lZ833vGfPno2KirIZKSACIiACIiACQScQwMfPltexY8cHH3ywcOHCe
J/zaS/xy5Ytq1Chgk2ggAiIgAiIgAgEnYBblXr55ZerVKmya9cuhvsyZsxIPTCkevfuHfQKKUMREAEREAERsATcqhQ33H///fY2Au3bt3eeKiwCIiACIiACQScQQKW0QlLQiStDERABERAB9wQCqJRWSHKPUilFQAREQASCTiCAStmFkWwg6DVQhiIgAiIgAiIQFwG3nuhx3a94ERABERABEQgdAbcqhevEa6+95qzHG2+8gb+fM0ZhERABERABEQguAbcqtXDhwmbNmjnLvuuuuxYtWuSMUVgEREAEREAEgkvArUqdPn06Q4YMzrLTp09/8uRJZ4zCIiACIiACIhBcAm5Vik96p0+f7iz7008/rVSpkjNGYREQAREQAREILoEAPn62sP79+993331btmy5/fbbifzhhx8++eSTzz//3CZQQAREQAREQASCTsCtSrVo0eLLL78cOnQoW3hkzpy5atWq33///a233hr0CilDERABERABEbAE3KoUN+A94eFAYXNRQAREQAREQARCQcDtvBRlHz9+/L333uvbt+/Ro0c5XbVq1Z49e0JRJ+UpAiIgAiIgAoaAW1vqzz//ZFupnDlzslFv586d8+TJM2vWrB07dnz44YdCKQIiIAIiIAIhIuDWlurZs2eHDh3+/vvvTJkymao0adJE30uF6KkoWxEQAREQAUPArUr99ttvTz75pJNa0aJF9+/f74xRWAREQAREQASCS8CtSmFCeXzDu3Hjxvz58we3NspNBERABERABJwE3KpUy5YtBw8eHB0dzc1sKr9z50426uULKmdeCouACIiACIhAcAm4Vak333zz0KFDBQoUOHfuHJ9JlS1bNnv27EOGDAlubZSbCIiACIiACDgJuPXxy5Ejx88//7xgwQIc0GNjY6+//npc/pwZKSwCIiACIiACQSfgSqUuXbrEvNQff/zB8khmhaSg10MZioAIiIAIiIA3AVcjfunSpStRokRMTIz3/YoRAREQAREQgdARcKVSFP/SSy/16dPHrDoRutooZxEQAREQARFwEnA14scNo0eP3rx5c5EiRTCqsmbNarNgmsqGFRABERABERCB4BJwq1J4ouOAHtyylZsIiIAIiIAI+CfgVqVefvll/xnpqgiIgAiIgAgEnUDgeamzZ8/++9//Zj0kPpZ6+OGHDx8+HPRKKEMREAEREAER8EkgsEoNHDhw6tSp7CzVpk2b+fPnP/XUUz4zUqQIiIAIiIAIBJ1A4BG/mTNnTp48GYmi7EceeaRevXq4pEdFRQW9KspQBERABERABDwIBLaldu3aVb9+fXPbjTfeyLdTe/fu9chFpyIgAiIgAiIQCgKBVQrLKUOGDLZsVIqlKOypAiIgAiIgAiIQOgKBVery5cvsf3jvP8f58+e7dOnyz9m9AWs2fvz4UqVKscBSzZo1Fy9e7DP9hQsX+vXrx5dYGTNmLFOmzJQpU3wmU6QIiIAIiEBqIxB4Xqp9+/ZOKExNOU/9h6dPn969e3eEitmsSZMmsb3vunXrihcv7nHXgw8+eODAAWa/WGr94MGDstU8+OhUBERABFItgTSYSqFrfO3atVk9fcKECaaIihUrtmrVatiwYc4S582bh2vG1q1b8+TJ44z3E2Y/xpw5c544cYKV2v0k0yUREAEREIHQEVi392TT0YsLZM+4vF/DBJcSsD8PPOKX4LIvXry4cuXKRo0a2RwIL1myxJ6awFdffVWrVq3XX3+dT7LKly///PPPs4WVRxqdioAIiIAIpE4CgUf8EsyF73/xvChYsKDNgfD+/fvtqQlgRbFzFRNXs2bN4pann36aNW29p6aYu+Iwt3jsbe+RoU5FQAREQAQihkAIbSnDyLn6H6OLzlOTgD0Vifz4449xc2/atOnIkSP5iNjbnGKckFE+cxQrVixiHoAaIgIiIAIi4IdACFUqX758fPzrNJ7wjHCaVqZahQsXZqwP+TGnzF0hZrt37/aoNPuGMBFlDj7h8
riqUxEQAREQgYgkEEKV4isrvM9ZVMmCI1y3bl17agK4//GZ8OnTp83ppk2b0qZNe80113gkw0kdXwl7eFzVqQiIgAiIQEQSCKFKwatnz57vvfcek0zr16/v0aPHzp07+daKeAyjdu3aGaCsYJs3b96OHTvipL5o0aJevXo99thjmTNnjkjcapQIiIAIiEC8CITQe4J6tG7d+siRI4MHD963b1+VKlXmzJnDp7vEc4pimYpmy5YNG+vZZ5/F0w+54tupV199NV5tUGIREAEREIFIJRDa76VCRC2gf32IylW2IiACIiAClkCK/17KtkQBERABERABEUgYgdDOSyWsTrpLBERABERABAwBqZR+CSIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgSkUuH7bFQzERABERABqZR+AyIgAiIgAuFLQCoVvs9GNRMBERABEZBK6TcgAiIgAiIQvgTShW/VElSzy5cvX7p0KSYmJkF36yYRiCgCUVFR6dKlS5MmTUS1So1JZQQiSqUuXry4b9++s2fPprKHqOaKQJwEsmTJUrhw4QwZMsSZQhdEILwJRI5KxcbGbtu2jZfHIkWK8Dep98fw/uGpdiEnwLgC722HDh3i76JcuXJp02p4P+TMVUAoCESOSvEHiVAVK1aMl8dQkFKeIpDiCGTOnDl9+vQ7duzgryNTpkwprv6qsAhAINJer/TCqJ+1CDgJ6C/CSUPhlEgg0lQqJT4D1VkEREAERCAuAlKpuMiES3zJkiXfeust97WZOnVqrly53KcPz5Tbt29nZvGPP/4IVvUY8ipbtuwvv/wSrAzDKp/nn3++a9euYVUlVUYEgkUg5Co1fvz4UqVKMSZes2bNxYsXe9f7p59+oj9yHhs2bPBOlrJiOnTo0KpVq6DU+bfffnviiSf8ZOUhY61bt960aZOf9Fy67bbbDHDcTMqUKdOnT58LFy74vyWJrzK/iLtmlSpVglXuO++8U6JEiXr16jkzBCzuNp9++qkz8uWXXzZwuEQ1OnfujAOCM0GCwzNmzKhUqVLGjBn5d9asWXHl89lnn1WvXp3pVSr8xhtvOJONGzeuYsWKzDZde+21H374ob30wgsvvP/++3hJ2BgFRCBiCIRWpaZPn969e/d+/fr9/vvv9evXb9Kkyc6dO32y27hxI72SOfBH8pkmdUbmz58/Xv4gdGEFChQIyOrxxx+H9ubNm19//XX6PrrmgLe4ScCXaviwuEnpPw0KUahQIb718Z/M/dUxY8agN870fLHA77NXr16TJ092xhOuXLkycPitTpgw4euvv27Xr
p1HggSc/vrrr7xAPProo6tXr+bfBx98cNmyZd75zJ07t23btl26dFm7di1veCNHjhw7dqxJRmV4n+BJ/fXXX4MGDfr3v/9N3cwlnnijRo0mTpzonaFiRCDFE8BdNXTHjTfeyN+bzb9ChQq9e/e2pybw448/AvHYsWMe8X5OT5w4wS3860xz7ty5devW8a8zMrnC7du3b9mypXNaaf8AABwUSURBVHfpGI433HADFgxd8IsvvhgdHW3SnDx58uGHH0aNiKdjuvXWW7t162Yu8UI9atQoEx44cCBv99zOFzDPPvsskaR0/gSJ4Z06Z86cJj3//ve//8WK5f09b96899xzj4l35k/Mvffee/3115tLaMzw4cON+Vu1atXPP//cxPMvWTFohlmMKca4on1qpkR6TF7zUZetW7dimdH780kALeI3wCM2mTCOd/fddzMgSTz2xDfffEP80aNHaXu+fPnImfynTJlCpDELeLkxN8bFjYbAgbJy585dsGBB+Jj0Hv+uXLkSJwKPHwxNuOmmm44fP46uU5y9hUyqVatmT1999VXuRdJsTMICyNJdd91l723cuHGbNm3sqQ089NBD999/vz3l0V9zzTU8FGLq1KnDyJ69xC8E09Ce0hx+G/bUBsLq78LWSoHIIPDXnhMlXpx9w6vzE9Mcn/25M8MQ2lLMBNA78Ipnu1HCS5YssafOQI0aNeh577jjDiNazksmTMdHV24P7wQeMTTy7MVLIfqPzD2Kc3O6Z8+epk2bolK8TfNezCs8PaC5sWfPnkyZfPXVV/Pnz2dcdNWqVd4ZfvHFF/RZkyZN+vvvv7/88svrrruONDNnzqQXGzx4sDFDPe5CBlCgZs2a0d3/8MMPtWrV8kjAKZWhaPyVzaWXXnoJ1aF6vLD36NHjkUceWbhwIZcQGHpPhjGZK3ryySexj51Z0YkPGzbsvffe4y7e6zt27EiejKT9+eefDzzwAL0zdSY9r/88x0WLFq1ZswYtzJYtG5H9+/fn9QIbYv369ZSLXDlzJuyHG1c/+OCDrFmzYpdgFMIBgB63c0qJ5cuXz5Ejh/MS/Gkdis5DocnOS84wGoZIsKCJM5Lw0KFDqb/Pw+fINraU828BlfL5twAfp8s4pe/evRtXckr0vrR8+XJedEzFeBvYtWuXSelRVZ2KQIomELQRFW8Khw8fZvyHN1x7ifD+/fvtqQkgTswZ8L7PH+FHH32EUPHifMstt3gkoxNklMMj0s/pueiYSgO+9ZMgMZfWDW6cJUO80TGAw9suAzhMe2BW7t27F3NqwIABZ86coaudNm0abadW9JhYId7VYwAKS6thw4YoSvHixemVSJMnTx7Ml+zZs3PJ+5YhQ4bwwm65YSLYNFQGUaGP42UCW4FBPy5REyy5BQsW8NrOaenSpX/++Wd0EZOF0STmQsw0CQHGo8jc5kY+ZGjy37JlyyeffELfalrB6/+8efNoFN06TbjvvvuMvpK5uZ1I3lGMgjLHZvO0gbi4GR9rDD6sHxIzUAxbxPjOO++095oAEuuBFNVcunQpGk8CtArXAzIxGTrvZYoU4QQ1hJ3xhBkkwDzyiDSnRYsW9Y7nlx/wb4G7UC9eDpjXbNCgAeOxxnGGVxDIcIlHxosChi/vfxidYOevjL8gbjSF0lKMb+/SFSMCKZdAvLva+DaVHtneggniPDXxdHkcJkznyPvgm2++6a1SjMhjcJhkWFR09zbblBLAVqCBlgDDNadPn6Y3Z7ST7saoDm3h7d4CcTYNo4Q+i84d04TX/+bNmwectsHuYf7JmYkNM/mBPQRJbBqMDMSDS9g058+fd/byaBgSwiUmDrEC7e22tiaGQUjUwoQxBHnQ2C42Me8fjDdyihg89dRT3333HVpLieYWYghzF6YGXXDdunXtjSYQFzekmgS2XML01wcPHvS4nVNGvZwGCjEYUnT6xm4DZqdOnb7//ntr62DqYSTxjkXNGd7kLco7T94POLzj/cTYR08an
38LxPO8kHnGRflJ8FwY1mMiihcRLmF0InWMUnIvgoeSYT6aS1zF6uJfLQ8GBB0RRiCEKkUXwJ+Q03iiB3G+TvpEyR/hf/7zH+9LzKxweMfHFZM5fRQWT1xXExlP5gnIwaNj4pRM6LlswOZpYuypCSDMSAUjWvSnTz/9NGYNY3F2pM4jsTk1PZfPS2ghk0BcgjbOAvTa9NTG8YFxQqc1YLD7rLzNmYJsF0wmPHde9m0HSjI6ff7FfwFtIH+ECuN4xIgRzCrhU8M4FZG0C2uSUUFeU2zOBHwWbYtzEiDSNMF5O2F+igiPjUR+cJDjl2llnhgIWJXiLYHRV+qPBRbXrw7TkMPm6QwweomvkDOGMMaum78FmsB7AzmTGMcZTEPuNSYmkLGfMG0PHDhgRiCw8OwAKdN7pOQWj3J1KgIpnUAIVYr3a8bx6FWZtDeYCONT4B8ZMyhmBMN/soBX+WtPwKBcwGwTkwB/AXyRbZ/LtAS9DHqANwFdLXMMxkDEvmE8ysMtwpRLP9Xi6kFXzpghPS+DP3Cmk/VZMewMujlmiXxeNZEU3bdvX0xV5u2pIZ0yQ3DepVPcnDlzbD4rVqywYY8Athf14Y3Eu6cmJW1krIyDEt99911Uikj6ViwDDm7BFcJDpeLi5lGun1OqxMCdJU9DTp06xS/N6igjexiXR44cMTYfSI2E+8mTJsRrxA8zmt8/o3kmT3Ta22q0xVEx86LA2Ck3Op02eV7MRJKSaT9MLjtKyRgsl3jhsJkoIAKRQSCEKgUgxuhwumXKgb80hk3o/vjbJp4eiilx88EHo1i8KvLXxeAS7/X04xwRABfHFQbcbEMYHcIAorH0y8888wxWERMh8KGXQavwCaR3Jg39kZkgsbaCzQEnLnr/2rVr4yDHBB6KZWYgoId3APNPCIx9szZ3kRXWCV9EcZX5f97x+bDGZmgDuNghVEz/MIfEQU+KRXLzzTejl0gpZhDVw2OCKSsm0jC5aBeV4XbvShLJWB89Pt7bmErIAxMnTHQxF8XAGp8lYDmRgEFOIvEJJD0zc7zN8ANgeG327Nkmknh7xMXNJggYYI6HKTc8O8wHWJhNeJQ4Z+konbrx82OELWBuJsGV8b74jPiRM+PY2Em8qOEtieHInJ/Jiuk0Pp8yZhO4cJNhmJGhVybz8LE03iuk5DM4XmX4AUCPZ4EsMZ1pa4vLBhrPr8LGKCACEUKAF8yQHkzL05nycspbP39vpix6Pd7WTZi/W7pRpg1wJqZnZOQnYH18ei6GlcctDfT4fRBDu+LyqEYPnJ7ozPpYl33riU5HRg/FXAUubYyL0s0ZUDiPYTPZcTm6NqcnOpLPJ6LwR8Dw9zO3AJ9O04TNv7hCYNBgYaBPb7/9NkNevJgTwwCdfWrGE52C6EMxTWig8fv3KJEMeeFAe5BPMmGkC2MaZz/ikWeeNTmQM68v9MhEvvLKK+ZLVTp9enAc2Yl074nubAi3G87k4HGg0wapGejjy1mPBLw9IKVEIu1OT3SPZIk5RXIMWAxTnovNihJ5yuaUL4h5uDxi3kV4w8DFwyZj1pBHiQ7xG6Cl2H/2EgG0H8PLGWPCYfV34V09xaRoAuv2nijfb87Nw39ITCt89ufODP9/UsSjSw3zU/p0OmLaxp+rrSrvnnRt5kMfG5kSA7z1M9qDIYLVErb1R9Xw+sPVJWxr6FExRkdx2cBrzttbzyNlSjzl3Q5bnFcBO9NmWxExfxe2RQpEGAGf/bmzjaEd8XOWpLAfAsyR8GqMCYX08tEPKXlZ9pM+WS4xJIibHzM3fAuF7waGUbJUI2GFYifhEYejNoGE5RDOd/Fmg0XrLVHhXGfVTQRcEpBKuQQV8mS4DDBZxdAckzTMMXjMMIW8eBcF4NPBZ8j4kuEC/txzzzG56OKmMEriPQwbRpVLXFXi8uNIXK66WwTCgoBG/MLiMagSIhAiAhrxCxFYZRssAgFH/EK4QlKw2qB8REAEREAEUi0BqVSqffRqu
AiIgAikAAKRplL4L6YA6qqiCCQVAf1FJBVplRMqApGjUnyaAyStYxaqX4ryTZkEzF+E+etImS1QrVM7gcjx8WNRGZYaMouN8kWkz2URUvvTVvtTEwGsKCSKvwj+LuxaUKkJgNoaIQQiR6V4IGb3CiNUEfJ81AwRSBwBJMrnri6Jy1V3i0DSEYgolcJ+YqValsJj14OkQ6iSRCBcCTDQJysqXB+O6uWWQESplGk0f5b6y3T7/JVOBERABMKbQOR4T4Q3Z9VOBERABEQgIQSkUgmhpntEQAREQASShoBUKmk4qxQREAEREIGEEEiR81LmQ0VWf0pIi3WPCIiACIhA2BAwPbmfz89TpEqxWR+Ezf7rYYNaFREBERABEUggAXp1dg30eXOKXBOd/WT37t3LdnaJ+XQXAUfn2MfPuZWiT0apIVI0nE9ZNETDScAjrJ+HE0jiaWBFIVFFihRJm9b3DFSKtKVozDXXXOMkleAwEiWVsvREw6IgIBqi4STgEdbPwwkkkTTisqJMEb61y1m8wiIgAiIgAiKQXASkUslFXuWKgAiIgAgEJhD18ssvB04VoSlYouK2225Lly5FDnsG/ZmIhhOpaIiGk4BHWD8PJ5BQ00iR3hNOQAqLgAiIgAhEMAGN+EXww1XTREAERCDFE5BKpfhHqAaIgAiIQAQTkEpF8MNV00RABEQgxROQSqX4R6gGiIAIiEAEE0gVKjV+/PhSpUplypSpZs2aixcv9vk4Fy5cyFXSlC5deuLEiT7TREZkQBozZ86888478+fPz5d6derU+fbbbyOj4T5bEZCGveuXX37BHbR69eo2JvICbmhcuHChX79+JUqUyJgxY5kyZaZMmRJ5HEyL3ND4+OOPq1WrliVLFvZf7dix45EjRyKSxqJFi5o3b87yECz38+WXX8bVxlD1oqxOEdnHp59+yo6l77777rp167p165Y1a9YdO3Z4NHnr1q38zrhKGlKS/osvvvBIExmnbmjAYfjw4cuXL9+0aVOfPn2gsWrVqshovkcr3NAwtxw/fpzXl0aNGtEleWQSMacuabRo0aJ27drz58/ftm3bsmXLEO+IIeBsiBsavPKyDs7bb79NB0K4cuXKrVq1cmYSMeE5c+bwajJjxgz0adasWT7bFbpe9F8+y4ukyBtvvLFLly62RRUqVOjdu7c9NYEXXniBeBv55JNP3nTTTfY0kgJuaHi0t1KlSoMGDfKIjIxT9zRat2790ksvDRw4MIJVyg2NuXPnspgNFkNk/AD8tMINjTfeeIN3F5vJ6NGjWbnNnkZkwI9Kha4XjfARv4sXL65cuZJXYGuiEl6yZIk9NYFff/3VmaZx48YrVqyIjo72SJbST13ScDaThX1ZCDJPnjzOyMgIu6fx/vvvb9myBYmKjIb7bIVLGl999VWtWrVef/31okWLli9f/vnnnz937pzPDFN0pEsadevW3b17N3YGgnTgwAEGYJo1a5aiG56YyoeuF43wZRcOHz4cExNTsGBBS5/w/v377akJEOOR5tKlS9zLWLNHyhR96pKGs40jRow4c+bMgw8+6IyMjLBLGn///TfGN+M5kb1GiUsajOr8/PPPTN8y7MMtTz/99NGjRyNvasolDVSKeSns7PPnz9NjMBY6ZsyYyPjrSEArQteLRrgtZVg7N/jgrcd5ah+GM5I0xDtjbLIICDjbFRcN08xPPvmEBbSmT59eoECBCGi4zyb4p8ErzsMPP8yAJ3aDz9sjLNI/DRqLbU0aumYGxJo2bTpy5MipU6dGpDlFYwPSYBq7a9euAwYMYMBm3rx5TNQxuRBhP4l4NceDmAfDeGXlTBzhtlS+fPlYY8ppPB08eNBpNhkWhQoV8kjDi3PevHmdpCIg7JKGaSni1KlTp88//7xhw4YR0HbvJrihwWgnY7+///77M888Qw700eg6v43vvvvu9ttv984z5ca4oUHrGF1grM/us1CxYkWAMOpVrly5lNt275q7pDFs2LB69
er16tWLHKpWrYpnVv369V999dUIG4Px5uMzJnS9aITbUhkyZMC/HH8ki5Uwdro9NQH8rZ1p6IMYfMe3zSNZSj91SYNmYkV16NBh2rRpETzO7oYGvvhr1qz545+DN+Vrr72WM5zcUvqPwaP+bmhwC50yG5CePn3a3I4XaBA3e/OoUjKeuqRx9uxZ58Z9vBBTZzMSk4yVT66iQ9iLwjSyD+NROnnyZMzz7t27876zfft2msxkw6OPPmrabnwoe/ToQRpSRrwnun8aiBPmwrhx4/b9c+CHHZE/Eje/DWfDI9vHzw0NjEvc2O6///6//vqLj2MwoTp37uxEFDFhNzRwq+Evhc+qcK5huo5XWwZCI4aAsyE8d0YUOJBAhnkJmO95kqYXjXxPdFjT4fIRIu9H119/PX9ahn779u1vvfVW+yR++umnGjVqkKZkyZITJkyw8ZEXCEgDLB6vY7CKPA6mRQFpOBse2SpFS93QWL9+PYPAmTNnRq569uyJPeFEFElhNzTwPudTDWgwyte2bVsGPyOJgG3Ljz/+6LNPSJpeVDt3eMDXqQiIgAiIQBgRiPB5qTAiraqIgAiIgAjEn4BUKv7MdIcIiIAIiEBSEZBKJRVplSMCIiACIhB/AlKp+DPTHSIgAiIgAklFQCqVVKRVjgiIgAiIQPwJSKXiz0x3iIAIiIAIJBUBqVRSkVY5IiACIiAC8ScglYo/M90hAiIgAiKQVASkUklFWuWkBAKsPPLWW2+ZmrLAc1ybZ7MNIOvEs9RWENvEwons9OozQxZGoTKsU+XzarwiXWZ12223sZxYvHJmcfRcuXLF6xaPxOxWxRLjHpE6FQGplH4DKYMAnTg9NQcrpxUvXvypp546duxYclWdxbCbN2+OpFEBtMpUjH9z5859yy23sApXAirGxuR09OZGD5FgfWSWVLQrkScg8/C5hYawGQqL9rJOq4cQstkrK+Ox/0X41FY1CQcCUqlweAqqgysCd911F30cqvDee+99/fXXbMHn6rZgJ2I7JZbrZZVVZ8bff/89dUOfWEadjZcS0NUiQnHZIiwvybYIqKCzxBQavnDhQv78+fv161etWjWPJmCesmX2xIkTPeJ1msoJSKVS+Q8gJTU/Y8aMdNYsckpfxgapbLBia887OHsdsYdshQoVWKPaxrP6Z5s2bfLkycNa+CxZvWzZMi6xgnXLli3ZZixbtmw33HADAmPTuwnMnTsXe459CpyJ2Y2MurHJ0KRJk1iA1dQN0WKRbKrNUqSsHs12ruYWth6/7rrrWKKUu1i5ld2QibcjfgS4EdPKmGiosh2mO3HiBHex4Z4teubMmTTNbKWxZ88esGDPkS0N5EabzGeAccuHHnoInlmyZKE+bNfiTEZt2VUL4SS3l156iYVHzVV2W8foYZcpymUHE+rmvCtgGAOUprVr186nach2tx7VCJihEkQ8AalUxD/iCGwgO63QU9sNwN59913ezYcMGcJy3UOHDu3fv/8HH3xAs+m7Wd+d/ZC++uqr1atX07eyjaGJx9xBnNiAoHHjxozd7dy50z2mRYsWIXhxpafH51J0dDSaQSmoIEWzyj7mF/vjcQmTC2147LHHqC1d/L333msFwORJJ44EPv7446TkKFasmC2Lnp0dv9gq18awzQqChNwijQ0aNCBA9dhFggCmJ4piU3oH2Aed3ddmz569du3aJ554go1sjIqblDBEjIlh2e9Ro0Zhv5r4jh07/vLLL2xs8eeffz7wwAOU8vfff3tkbkZB4ytgZIKo79q1i10hPDLUaaomYNdmV0AEwpkAewSwyxzv7xhM5i+WfW5MhenH6axt5V955RV6eU4xa7Jnz47FYC/5DLDzwpgxY8wldnihRzZhSpk1a5b3LagCGmPjzeAegkcMuvjkk09ST3rwvn37MvtitvflEttAoBxsUc/u4+RMP25zMAEaSM4mjLh269bNJjD7JjAPRwzGE/lgfhHGtILGN998QxgVdBbHwBpW1
7fffmszMQFnVh6X0NTnnnvORFIBbFNb+RdffNFsy7t582YsPATY3nvHHXf06dOHU8xZRNTEY8JSGRTOJvMOeLTRJKBFwEHevNMrJtUSiPAd5U13pn8jgwC2AkYJRgPv9ewS++yzz9KuQ4cO8fbdqVMnjA/TTIaqzGgSm+qyZxjDfR7Np4sfNGgQNgRmFomZZ4qXLUV6q5Q2ZxwccAegbgzu4QTBABqbUSGWdjKJXW7RMLpv5mPo2UmAGcfQJTsKMkZn8wkYwJbCxME6ZCRzxowZyDCZcBfih4RwanPAVGJs0556B5DM1157bfr06agOqsbBS4BNdtNNN9nK05ARI0aQftWqVfSV5cuXt8m4iyFBe2oCjAdu2LDBI9LNKcpKMjC6Saw0qYSAVCqVPOhIaCZ9aNmyZWkJY1AoFkqD2WQG8Rj0c+7ybvb2Nl2ed8t79eqFkfHmm2+SG2nQCf8jYx455MuXz9u9kL4em8zM4pj09Oa2lyeGU/4lhrrNnz9/yZIlzF1hwzFWic1RqlQpj1LiOsWTggpjO6JS/MtEFKJFYjgwfOccDCQSP4W48iEe4cFwxPMeyYQtHncBOVAK9UcRDWGTObadn1Lideno0aOk91/teGWoxBFAQCoVAQ8xNTYBS6VJkyb4oxcpUoQ3d2aq2CnVAwS+DFhddHwe5tTixYvxULjnnntIj30T0MvAI1vss//85z8ekYw6lilTxhmJaGHrWK1CljB0qCpp0CpMK44BAwYwxsi4IpveOu9FijBcnDHOMC3FfmJPd4bv0GlziX2oUUrc5HAydCb2E4YDY4yPPPIIaZAfppcY1rPply5d6gyzeTzKRNup2MGDB+vXr2+vBjHADBnTjZUrVw5insoqpROQ90RKf4KptP58UURfhq8E7X/55Zf5gAmnA4YB16xZwwQJU1bE46SA3x2fyjLbj4yhGb/++ivxmFDM7jAeiF8D3+4Ya8w9R0bqUAhvc8ojBxzlGYpkWJKxr//+97/IKlLEqCCWE9VesWIFw4xUgxFLpzaYTHCEIxnyefjwYe/qMaODgyJaRTLG5cwtnGLkoTpoD1NleAkys8UAo0etnKdwMFYdfhxMp+3fv995lcpT4Y0bN+J0h81HblxlrI+C8NCj5pTy22+/DR8+fM6cOc4bCTOEiLPl8uXLPeLNKeQ5eD+g7QTWrVtnk1F59C8uI9gmUyB1EeBdT4cIhD8Bp3OBqS2jW9gc9PWcEq5evTqn5rta+lCTho7+vvvuw7zA9c54ohNP98qAIV0hBtDYsWOd0/huvCfIAW3gsx5TBLnRZRjvCRNj/8ULAB8/aoVY4oCA4x+X6JTROQa18FCn07eOG84Gog0UYTpr8vd2eWDQkkIxxWxZBHAIRD/QKnIuXbo0E3U4IzgTEHZmhV8JqsZ4HRYYvubc63TfQGW7dOkCOpDiRo9YmqwYFaRcBBKjh3ZhkuIqwiWn94RhQlnmFo9/PXpYmNsEAEEU7akCIgCBNPzP40ejUxEQAf8EsB5YzofhKWwj/yl11T0BnBVRXzTPzLS5v1EpI5uA5qUi+/mqdSEhgNM2sziMazk/ZgpJSakpU3wvMcgkUanpmbtqq2wpV5iUSAREQAREIFkIaLwiWbCrUBEQAREQAVcEpFKuMCmRCIiACIhAshCQSiULdhUqAiIgAiLgioBUyhUmJRIBERABEUgWAlKpZMGuQkVABERABFwRkEq5wqREIiACIiACyUJAKpUs2FWoCIiACIiAKwJSKVeYlEgEREAERCBZCEilkgW7ChUBERABEXBF4P8AuRwuVEedncgAAAAASUVORK5CYII="
57 | }
58 | },
59 | "cell_type": "markdown",
60 | "id": "e1486424",
61 | "metadata": {},
62 | "source": [
63 | ""
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "id": "668856bf",
70 | "metadata": {},
71 | "outputs": [],
72 | "source": []
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "id": "c87326bd",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": []
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": null,
85 | "id": "decdbf09",
86 | "metadata": {},
87 | "outputs": [],
88 | "source": []
89 | }
90 | ],
91 | "metadata": {
92 | "kernelspec": {
93 | "display_name": "Python 3 (ipykernel)",
94 | "language": "python",
95 | "name": "python3"
96 | },
97 | "language_info": {
98 | "codemirror_mode": {
99 | "name": "ipython",
100 | "version": 3
101 | },
102 | "file_extension": ".py",
103 | "mimetype": "text/x-python",
104 | "name": "python",
105 | "nbconvert_exporter": "python",
106 | "pygments_lexer": "ipython3",
107 | "version": "3.8.15"
108 | }
109 | },
110 | "nbformat": 4,
111 | "nbformat_minor": 5
112 | }
113 |
--------------------------------------------------------------------------------
/notebooks/long_form_generation.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "id": "39ea4bed",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import os\n",
11 | "\n",
12 | "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\"\n",
13 | "\n",
14 | "\n",
15 | "from IPython.display import Audio\n",
16 | "import nltk # we'll use this to split into sentences\n",
17 | "import numpy as np\n",
18 | "\n",
19 | "from bark.generation import (\n",
20 | " generate_text_semantic,\n",
21 | " preload_models,\n",
22 | ")\n",
23 | "from bark.api import semantic_to_waveform\n",
24 | "from bark import generate_audio, SAMPLE_RATE"
25 | ]
26 | },
27 | {
28 | "cell_type": "code",
29 | "execution_count": 29,
30 | "id": "776964b6",
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "preload_models()"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": null,
40 | "id": "1d03f4d2",
41 | "metadata": {},
42 | "outputs": [],
43 | "source": []
44 | },
45 | {
46 | "cell_type": "markdown",
47 | "id": "74a025a4",
48 | "metadata": {},
49 | "source": [
50 | "# Simple Long-Form Generation\n",
51 | "We split longer text into sentences using `nltk` and generate the sentences one by one."
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 33,
57 | "id": "57b06e2a",
58 | "metadata": {},
59 | "outputs": [],
60 | "source": [
61 | "script = \"\"\"\n",
62 | "Hey, have you heard about this new text-to-audio model called \"Bark\"? \n",
63 | "Apparently, it's the most realistic and natural-sounding text-to-audio model \n",
64 | "out there right now. People are saying it sounds just like a real person speaking. \n",
65 | "I think it uses advanced machine learning algorithms to analyze and understand the \n",
66 | "nuances of human speech, and then replicates those nuances in its own speech output. \n",
67 | "It's pretty impressive, and I bet it could be used for things like audiobooks or podcasts. \n",
68 | "In fact, I heard that some publishers are already starting to use Bark to create audiobooks. \n",
69 | "It would be like having your own personal voiceover artist. I really think Bark is going to \n",
70 | "be a game-changer in the world of text-to-audio technology.\n",
71 | "\"\"\".replace(\"\\n\", \" \").strip()"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 34,
77 | "id": "f747f804",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "sentences = nltk.sent_tokenize(script)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": 35,
87 | "id": "17400a9b",
88 | "metadata": {
89 | "scrolled": true
90 | },
91 | "outputs": [
92 | {
93 | "name": "stderr",
94 | "output_type": "stream",
95 | "text": [
96 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:02<00:00, 43.03it/s]\n",
97 | "100%|████████████████████████████████████████████████████████████████████████| 17/17 [00:06<00:00, 2.45it/s]\n",
98 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 22.73it/s]\n",
99 | "100%|████████████████████████████████████████████████████████████████████████| 33/33 [00:13<00:00, 2.52it/s]\n",
100 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 66.30it/s]\n",
101 | "100%|████████████████████████████████████████████████████████████████████████| 11/11 [00:04<00:00, 2.46it/s]\n",
102 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 20.99it/s]\n",
103 | "100%|████████████████████████████████████████████████████████████████████████| 35/35 [00:14<00:00, 2.46it/s]\n",
104 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:03<00:00, 25.63it/s]\n",
105 | "100%|████████████████████████████████████████████████████████████████████████| 29/29 [00:11<00:00, 2.50it/s]\n",
106 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 23.90it/s]\n",
107 | "100%|████████████████████████████████████████████████████████████████████████| 30/30 [00:12<00:00, 2.46it/s]\n",
108 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 53.24it/s]\n",
109 | "100%|████████████████████████████████████████████████████████████████████████| 14/14 [00:05<00:00, 2.51it/s]\n",
110 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 50.63it/s]\n",
111 | "100%|████████████████████████████████████████████████████████████████████████| 15/15 [00:05<00:00, 2.57it/s]\n"
112 | ]
113 | }
114 | ],
115 | "source": [
116 | "SPEAKER = \"v2/en_speaker_6\"\n",
117 | "silence = np.zeros(int(0.25 * SAMPLE_RATE)) # quarter second of silence\n",
118 | "\n",
119 | "pieces = []\n",
120 | "for sentence in sentences:\n",
121 | " audio_array = generate_audio(sentence, history_prompt=SPEAKER)\n",
122 | " pieces += [audio_array, silence.copy()]\n"
123 | ]
124 | },
125 | {
126 | "cell_type": "code",
127 | "execution_count": null,
128 | "id": "04cf77f9",
129 | "metadata": {},
130 | "outputs": [],
131 | "source": [
132 | "Audio(np.concatenate(pieces), rate=SAMPLE_RATE)"
133 | ]
134 | },
135 | {
136 | "cell_type": "code",
137 | "execution_count": null,
138 | "id": "ac2d4625",
139 | "metadata": {},
140 | "outputs": [],
141 | "source": []
142 | },
143 | {
144 | "cell_type": "markdown",
145 | "id": "6d13249b",
146 | "metadata": {},
147 | "source": [
148 | "# $ \\\\ $"
149 | ]
150 | },
151 | {
152 | "cell_type": "markdown",
153 | "id": "cdfc8bf5",
154 | "metadata": {},
155 | "source": [
156 | "# Advanced Long-Form Generation\n",
157 | "Sometimes Bark will hallucinate a little extra audio at the end of the prompt.\n",
158 | "We can solve this issue by lowering the threshold for Bark to stop generating text. \n",
159 | "We use the `min_eos_p` kwarg in `generate_text_semantic`"
160 | ]
161 | },
162 | {
163 | "cell_type": "code",
164 | "execution_count": 37,
165 | "id": "62807fd0",
166 | "metadata": {},
167 | "outputs": [
168 | {
169 | "name": "stderr",
170 | "output_type": "stream",
171 | "text": [
172 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:02<00:00, 38.05it/s]\n",
173 | "100%|████████████████████████████████████████████████████████████████████████| 18/18 [00:07<00:00, 2.46it/s]\n",
174 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:03<00:00, 32.28it/s]\n",
175 | "100%|████████████████████████████████████████████████████████████████████████| 21/21 [00:08<00:00, 2.54it/s]\n",
176 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 55.78it/s]\n",
177 | "100%|████████████████████████████████████████████████████████████████████████| 14/14 [00:05<00:00, 2.57it/s]\n",
178 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:06<00:00, 14.73it/s]\n",
179 | "100%|████████████████████████████████████████████████████████████████████████| 35/35 [00:14<00:00, 2.47it/s]\n",
180 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:02<00:00, 40.29it/s]\n",
181 | "100%|████████████████████████████████████████████████████████████████████████| 18/18 [00:07<00:00, 2.56it/s]\n",
182 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:03<00:00, 32.92it/s]\n",
183 | "100%|████████████████████████████████████████████████████████████████████████| 20/20 [00:08<00:00, 2.47it/s]\n",
184 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 68.87it/s]\n",
185 | "100%|████████████████████████████████████████████████████████████████████████| 12/12 [00:04<00:00, 2.62it/s]\n",
186 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:02<00:00, 47.64it/s]\n",
187 | "100%|████████████████████████████████████████████████████████████████████████| 15/15 [00:06<00:00, 2.46it/s]\n"
188 | ]
189 | }
190 | ],
191 | "source": [
192 | "GEN_TEMP = 0.6\n",
193 | "SPEAKER = \"v2/en_speaker_6\"\n",
194 | "silence = np.zeros(int(0.25 * SAMPLE_RATE)) # quarter second of silence\n",
195 | "\n",
196 | "pieces = []\n",
197 | "for sentence in sentences:\n",
198 | " semantic_tokens = generate_text_semantic(\n",
199 | " sentence,\n",
200 | " history_prompt=SPEAKER,\n",
201 | " temp=GEN_TEMP,\n",
202 | " min_eos_p=0.05, # this controls how likely the generation is to end\n",
203 | " )\n",
204 | "\n",
205 | " audio_array = semantic_to_waveform(semantic_tokens, history_prompt=SPEAKER,)\n",
206 | " pieces += [audio_array, silence.copy()]\n",
207 | "\n"
208 | ]
209 | },
210 | {
211 | "cell_type": "code",
212 | "execution_count": null,
213 | "id": "133fec46",
214 | "metadata": {},
215 | "outputs": [],
216 | "source": [
217 | "Audio(np.concatenate(pieces), rate=SAMPLE_RATE)"
218 | ]
219 | },
220 | {
221 | "cell_type": "code",
222 | "execution_count": null,
223 | "id": "6eee9f5a",
224 | "metadata": {},
225 | "outputs": [],
226 | "source": []
227 | },
228 | {
229 | "cell_type": "markdown",
230 | "id": "be8e125e",
231 | "metadata": {},
232 | "source": [
233 | "# $ \\\\ $"
234 | ]
235 | },
236 | {
237 | "cell_type": "markdown",
238 | "id": "03a16c1b",
239 | "metadata": {},
240 | "source": [
241 | "# Make a Long-Form Dialog with Bark"
242 | ]
243 | },
244 | {
245 | "cell_type": "markdown",
246 | "id": "06c5eff8",
247 | "metadata": {},
248 | "source": [
249 | "### Step 1: Format a script and speaker lookup"
250 | ]
251 | },
252 | {
253 | "cell_type": "code",
254 | "execution_count": 14,
255 | "id": "5238b297",
256 | "metadata": {},
257 | "outputs": [
258 | {
259 | "data": {
260 | "text/plain": [
261 | "['Samantha: Hey, have you heard about this new text-to-audio model called \"Bark\"?',\n",
262 | " \"John: No, I haven't. What's so special about it?\",\n",
263 | " \"Samantha: Well, apparently it's the most realistic and natural-sounding text-to-audio model out there right now. People are saying it sounds just like a real person speaking.\",\n",
264 | " 'John: Wow, that sounds amazing. How does it work?',\n",
265 | " 'Samantha: I think it uses advanced machine learning algorithms to analyze and understand the nuances of human speech, and then replicates those nuances in its own speech output.',\n",
266 | " \"John: That's pretty impressive. Do you think it could be used for things like audiobooks or podcasts?\",\n",
267 | " 'Samantha: Definitely! In fact, I heard that some publishers are already starting to use Bark to create audiobooks. And I bet it would be great for podcasts too.',\n",
268 | " 'John: I can imagine. It would be like having your own personal voiceover artist.',\n",
269 | " 'Samantha: Exactly! I think Bark is going to be a game-changer in the world of text-to-audio technology.']"
270 | ]
271 | },
272 | "execution_count": 14,
273 | "metadata": {},
274 | "output_type": "execute_result"
275 | }
276 | ],
277 | "source": [
278 | "speaker_lookup = {\"Samantha\": \"v2/en_speaker_9\", \"John\": \"v2/en_speaker_2\"}\n",
279 | "\n",
280 | "# Script generated by ChatGPT\n",
281 | "script = \"\"\"\n",
282 | "Samantha: Hey, have you heard about this new text-to-audio model called \"Bark\"?\n",
283 | "\n",
284 | "John: No, I haven't. What's so special about it?\n",
285 | "\n",
286 | "Samantha: Well, apparently it's the most realistic and natural-sounding text-to-audio model out there right now. People are saying it sounds just like a real person speaking.\n",
287 | "\n",
288 | "John: Wow, that sounds amazing. How does it work?\n",
289 | "\n",
290 | "Samantha: I think it uses advanced machine learning algorithms to analyze and understand the nuances of human speech, and then replicates those nuances in its own speech output.\n",
291 | "\n",
292 | "John: That's pretty impressive. Do you think it could be used for things like audiobooks or podcasts?\n",
293 | "\n",
294 | "Samantha: Definitely! In fact, I heard that some publishers are already starting to use Bark to create audiobooks. And I bet it would be great for podcasts too.\n",
295 | "\n",
296 | "John: I can imagine. It would be like having your own personal voiceover artist.\n",
297 | "\n",
298 | "Samantha: Exactly! I think Bark is going to be a game-changer in the world of text-to-audio technology.\"\"\"\n",
299 | "script = script.strip().split(\"\\n\")\n",
300 | "script = [s.strip() for s in script if s]\n",
301 | "script"
302 | ]
303 | },
304 | {
305 | "cell_type": "markdown",
306 | "id": "ee547efd",
307 | "metadata": {},
308 | "source": [
309 | "### Step 2: Generate the audio for every speaker turn"
310 | ]
311 | },
312 | {
313 | "cell_type": "code",
314 | "execution_count": 15,
315 | "id": "203e5081",
316 | "metadata": {},
317 | "outputs": [
318 | {
319 | "name": "stderr",
320 | "output_type": "stream",
321 | "text": [
322 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:02<00:00, 34.03it/s]\n",
323 | "100%|████████████████████████████████████████████████████████████████████████| 22/22 [00:08<00:00, 2.55it/s]\n",
324 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 71.58it/s]\n",
325 | "100%|████████████████████████████████████████████████████████████████████████| 11/11 [00:04<00:00, 2.65it/s]\n",
326 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 22.75it/s]\n",
327 | "100%|████████████████████████████████████████████████████████████████████████| 33/33 [00:13<00:00, 2.53it/s]\n",
328 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 70.76it/s]\n",
329 | "100%|████████████████████████████████████████████████████████████████████████| 11/11 [00:04<00:00, 2.63it/s]\n",
330 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 20.46it/s]\n",
331 | "100%|████████████████████████████████████████████████████████████████████████| 36/36 [00:14<00:00, 2.47it/s]\n",
332 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 20.18it/s]\n",
333 | "100%|████████████████████████████████████████████████████████████████████████| 37/37 [00:14<00:00, 2.51it/s]\n",
334 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:04<00:00, 23.04it/s]\n",
335 | "100%|████████████████████████████████████████████████████████████████████████| 32/32 [00:12<00:00, 2.48it/s]\n",
336 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 54.64it/s]\n",
337 | "100%|████████████████████████████████████████████████████████████████████████| 14/14 [00:05<00:00, 2.58it/s]\n",
338 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:03<00:00, 31.71it/s]\n",
339 | "100%|████████████████████████████████████████████████████████████████████████| 24/24 [00:09<00:00, 2.56it/s]\n"
340 | ]
341 | }
342 | ],
343 | "source": [
344 | "pieces = []\n",
345 | "silence = np.zeros(int(0.5*SAMPLE_RATE))\n",
346 | "for line in script:\n",
347 | " speaker, text = line.split(\": \")\n",
348 | " audio_array = generate_audio(text, history_prompt=speaker_lookup[speaker], )\n",
349 | " pieces += [audio_array, silence.copy()]"
350 | ]
351 | },
352 | {
353 | "cell_type": "markdown",
354 | "id": "7c54bada",
355 | "metadata": {},
356 | "source": [
357 | "### Step 3: Concatenate all of the audio and play it"
358 | ]
359 | },
360 | {
361 | "cell_type": "code",
362 | "execution_count": null,
363 | "id": "27a56842",
364 | "metadata": {},
365 | "outputs": [],
366 | "source": [
367 | "Audio(np.concatenate(pieces), rate=SAMPLE_RATE)"
368 | ]
369 | },
370 | {
371 | "cell_type": "code",
372 | "execution_count": null,
373 | "id": "a1bc5877",
374 | "metadata": {},
375 | "outputs": [],
376 | "source": []
377 | }
378 | ],
379 | "metadata": {
380 | "kernelspec": {
381 | "display_name": "Python 3 (ipykernel)",
382 | "language": "python",
383 | "name": "python3"
384 | },
385 | "language_info": {
386 | "codemirror_mode": {
387 | "name": "ipython",
388 | "version": 3
389 | },
390 | "file_extension": ".py",
391 | "mimetype": "text/x-python",
392 | "name": "python",
393 | "nbconvert_exporter": "python",
394 | "pygments_lexer": "ipython3",
395 | "version": "3.9.16"
396 | }
397 | },
398 | "nbformat": 4,
399 | "nbformat_minor": 5
400 | }
401 |
--------------------------------------------------------------------------------
/notebooks/memory_profiling_bark.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "90641144",
6 | "metadata": {},
7 | "source": [
8 | "# Bark Memory Profiling\n",
9 | "Bark has two ways to reduce GPU memory: \n",
10 | " - Small models: a smaller version of the model. This can be set by using the environment variable `SUNO_USE_SMALL_MODELS`\n",
 11 |      " - Offloading models to CPU: holding only one model at a time on the GPU, and shuttling the models to the CPU in between generations. \n",
12 | "\n",
13 | "# $ \\\\ $\n",
14 | "## First, we'll use the most memory efficient configuration"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 1,
20 | "id": "39ea4bed",
21 | "metadata": {},
22 | "outputs": [],
23 | "source": [
24 | "import os\n",
25 | "\n",
26 | "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\"\n",
27 | "os.environ[\"SUNO_USE_SMALL_MODELS\"] = \"1\"\n",
28 | "os.environ[\"SUNO_OFFLOAD_CPU\"] = \"1\"\n",
29 | "\n",
30 | "from bark.generation import (\n",
31 | " generate_text_semantic,\n",
32 | " preload_models,\n",
33 | ")\n",
34 | "from bark import generate_audio, SAMPLE_RATE\n",
35 | "\n",
36 | "import torch"
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": 2,
42 | "id": "66b0c006",
43 | "metadata": {},
44 | "outputs": [
45 | {
46 | "name": "stderr",
47 | "output_type": "stream",
48 | "text": [
49 | "100%|██████████████████████████████████████████████████████████████████████| 100/100 [00:01<00:00, 62.17it/s]\n",
50 | "100%|████████████████████████████████████████████████████████████████████████| 10/10 [00:03<00:00, 2.74it/s]\n"
51 | ]
52 | },
53 | {
54 | "name": "stdout",
55 | "output_type": "stream",
56 | "text": [
57 | "max memory usage = 2396MB\n"
58 | ]
59 | }
60 | ],
61 | "source": [
62 | "torch.cuda.reset_peak_memory_stats()\n",
63 | "preload_models()\n",
64 | "audio_array = generate_audio(\"madam I'm adam\", history_prompt=\"v2/en_speaker_5\")\n",
65 | "max_utilization = torch.cuda.max_memory_allocated()\n",
66 | "print(f\"max memory usage = {max_utilization / 1024 / 1024:.0f}MB\")"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": null,
72 | "id": "9922dd2d",
73 | "metadata": {},
74 | "outputs": [],
75 | "source": []
76 | },
77 | {
78 | "cell_type": "code",
79 | "execution_count": null,
80 | "id": "bdbe578e",
81 | "metadata": {},
82 | "outputs": [],
83 | "source": []
84 | },
85 | {
86 | "cell_type": "markdown",
87 | "id": "213d1b5b",
88 | "metadata": {},
89 | "source": [
90 | "# Memory Profiling:\n",
91 | "We can profile the memory consumption of 4 scenarios\n",
92 | " - Small models, offloading to CPU\n",
93 | " - Large models, offloading to CPU\n",
94 | " - Small models, not offloading to CPU\n",
95 | " - Large models, not offloading to CPU"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 1,
101 | "id": "417d5e9c",
102 | "metadata": {},
103 | "outputs": [],
104 | "source": [
105 | "import os\n",
106 | "\n",
107 | "from bark.generation import (\n",
108 | " generate_text_semantic,\n",
109 | " preload_models,\n",
110 | " models,\n",
111 | ")\n",
112 | "import bark.generation\n",
113 | "\n",
114 | "from bark.api import semantic_to_waveform\n",
115 | "from bark import generate_audio, SAMPLE_RATE\n",
116 | "\n",
117 | "import torch\n",
118 | "import time"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 2,
124 | "id": "cd83b45d",
125 | "metadata": {},
126 | "outputs": [
127 | {
128 | "name": "stdout",
129 | "output_type": "stream",
130 | "text": [
131 | "Small models True, offloading to CPU: True\n",
132 | "\tmax memory usage = 967MB, time 4s\n",
133 | "\n",
134 | "Small models False, offloading to CPU: True\n",
135 | "\tmax memory usage = 2407MB, time 8s\n",
136 | "\n",
137 | "Small models True, offloading to CPU: False\n",
138 | "\tmax memory usage = 2970MB, time 3s\n",
139 | "\n",
140 | "Small models False, offloading to CPU: False\n",
141 | "\tmax memory usage = 7824MB, time 6s\n",
142 | "\n"
143 | ]
144 | }
145 | ],
146 | "source": [
147 | "global models\n",
148 | "\n",
149 | "for offload_models in (True, False):\n",
150 |     "    # this setattr is needed to change this on the fly\n",
151 | " # the easier way to do this is with `os.environ[\"SUNO_OFFLOAD_CPU\"] = \"1\"`\n",
152 | " setattr(bark.generation, \"OFFLOAD_CPU\", offload_models)\n",
153 | " for use_small_models in (True, False):\n",
154 | " models = {}\n",
155 | " torch.cuda.empty_cache()\n",
156 | " torch.cuda.reset_peak_memory_stats()\n",
157 | " preload_models(\n",
158 | " text_use_small=use_small_models,\n",
159 | " coarse_use_small=use_small_models,\n",
160 | " fine_use_small=use_small_models,\n",
161 | " force_reload=True,\n",
162 | " )\n",
163 | " t0 = time.time()\n",
164 | " audio_array = generate_audio(\"madam I'm adam\", history_prompt=\"v2/en_speaker_5\", silent=True)\n",
165 | " dur = time.time() - t0\n",
166 | " max_utilization = torch.cuda.max_memory_allocated()\n",
167 | " print(f\"Small models {use_small_models}, offloading to CPU: {offload_models}\")\n",
168 | " print(f\"\\tmax memory usage = {max_utilization / 1024 / 1024:.0f}MB, time {dur:.0f}s\\n\")"
169 | ]
170 | },
171 | {
172 | "cell_type": "code",
173 | "execution_count": null,
174 | "id": "bfe5fa06",
175 | "metadata": {},
176 | "outputs": [],
177 | "source": []
178 | }
179 | ],
180 | "metadata": {
181 | "kernelspec": {
182 | "display_name": "Python 3 (ipykernel)",
183 | "language": "python",
184 | "name": "python3"
185 | },
186 | "language_info": {
187 | "codemirror_mode": {
188 | "name": "ipython",
189 | "version": 3
190 | },
191 | "file_extension": ".py",
192 | "mimetype": "text/x-python",
193 | "name": "python",
194 | "nbconvert_exporter": "python",
195 | "pygments_lexer": "ipython3",
196 | "version": "3.9.16"
197 | }
198 | },
199 | "nbformat": 4,
200 | "nbformat_minor": 5
201 | }
202 |
--------------------------------------------------------------------------------
/notebooks/use_small_models_on_cpu.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "6a682b61",
6 | "metadata": {},
7 | "source": [
8 | "# Benchmarking small models on CPU\n",
9 | " - We can enable small models with the `SUNO_USE_SMALL_MODELS` environment variable"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "id": "9500dd93",
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "import os\n",
20 | "\n",
21 | "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"\"\n",
22 | "os.environ[\"SUNO_USE_SMALL_MODELS\"] = \"1\"\n",
23 | "\n",
24 | "from IPython.display import Audio\n",
25 | "import numpy as np\n",
26 | "\n",
27 | "from bark import generate_audio, preload_models, SAMPLE_RATE\n",
28 | "\n",
29 | "import time"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": 2,
35 | "id": "4e3454b6",
36 | "metadata": {},
37 | "outputs": [
38 | {
39 | "name": "stderr",
40 | "output_type": "stream",
41 | "text": [
42 | "No GPU being used. Careful, inference might be very slow!\n"
43 | ]
44 | },
45 | {
46 | "name": "stdout",
47 | "output_type": "stream",
48 | "text": [
49 | "CPU times: user 5.52 s, sys: 2.34 s, total: 7.86 s\n",
50 | "Wall time: 4.33 s\n"
51 | ]
52 | }
53 | ],
54 | "source": [
55 | "%%time\n",
56 | "preload_models()"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 3,
62 | "id": "f6024e5f",
63 | "metadata": {},
64 | "outputs": [
65 | {
66 | "name": "stderr",
67 | "output_type": "stream",
68 | "text": [
69 | "100%|████████████████████████████████████████████████████████| 100/100 [00:10<00:00, 9.89it/s]\n",
70 | "100%|██████████████████████████████████████████████████████████| 15/15 [00:43<00:00, 2.90s/it]\n"
71 | ]
72 | },
73 | {
74 | "name": "stdout",
75 | "output_type": "stream",
76 | "text": [
77 | "took 62s to generate 6s of audio\n"
78 | ]
79 | }
80 | ],
81 | "source": [
82 | "t0 = time.time()\n",
83 | "text = \"In the light of the moon, a little egg lay on a leaf\"\n",
84 | "audio_array = generate_audio(text)\n",
85 | "generation_duration_s = time.time() - t0\n",
86 | "audio_duration_s = audio_array.shape[0] / SAMPLE_RATE\n",
87 | "\n",
88 | "print(f\"took {generation_duration_s:.0f}s to generate {audio_duration_s:.0f}s of audio\")"
89 | ]
90 | },
91 | {
92 | "cell_type": "code",
93 | "execution_count": 4,
94 | "id": "2dcce86c",
95 | "metadata": {},
96 | "outputs": [
97 | {
98 | "data": {
99 | "text/plain": [
100 | "10"
101 | ]
102 | },
103 | "execution_count": 4,
104 | "metadata": {},
105 | "output_type": "execute_result"
106 | }
107 | ],
108 | "source": [
109 | "os.cpu_count()"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "id": "3046eddb",
116 | "metadata": {},
117 | "outputs": [],
118 | "source": []
119 | }
120 | ],
121 | "metadata": {
122 | "kernelspec": {
123 | "display_name": "Python 3 (ipykernel)",
124 | "language": "python",
125 | "name": "python3"
126 | },
127 | "language_info": {
128 | "codemirror_mode": {
129 | "name": "ipython",
130 | "version": 3
131 | },
132 | "file_extension": ".py",
133 | "mimetype": "text/x-python",
134 | "name": "python",
135 | "nbconvert_exporter": "python",
136 | "pygments_lexer": "ipython3",
137 | "version": "3.9.16"
138 | }
139 | },
140 | "nbformat": 4,
141 | "nbformat_minor": 5
142 | }
143 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "suno-bark"
7 | version = "0.0.1a"
8 | description = "Bark text to audio model"
9 | readme = "README.md"
10 | requires-python = ">=3.8"
11 | authors = [
12 | {name = "Suno Inc", email = "hello@suno.ai"},
13 | ]
14 | # Apache 2.0
15 | license = {file = "LICENSE"}
16 |
17 | dependencies = [
18 | "boto3",
19 | "encodec",
20 | "funcy",
21 | "huggingface-hub>=0.14.1",
22 | "numpy",
23 | "scipy",
24 | "tokenizers",
25 | "torch",
26 | "tqdm",
27 | "transformers",
28 | ]
29 |
30 | [project.urls]
31 | source = "https://github.com/suno-ai/bark"
32 |
33 | [project.optional-dependencies]
34 | dev = [
35 | "bandit",
36 | "black",
37 | "codecov",
38 | "flake8",
39 | "hypothesis>=6.14,<7",
40 | "isort>=5.0.0,<6",
41 | "jupyter",
42 | "mypy",
43 | "nbconvert",
44 | "nbformat",
45 | "pydocstyle",
46 | "pylint",
47 | "pytest",
48 | "pytest-cov",
49 | ]
50 |
51 | [tool.setuptools]
52 | packages = ["bark"]
53 |
54 | [tool.setuptools.package-data]
55 | bark = ["assets/prompts/*.npz", "assets/prompts/v2/*.npz"]
56 |
57 |
58 | [tool.black]
59 | line-length = 100
60 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup()
4 |
--------------------------------------------------------------------------------