├── .gitignore
├── Audio
│   ├── 01_rvc_prj_webui_share.ipynb
│   ├── 02_VALL_E_X.ipynb
│   ├── 03_SeamlessM4T.ipynb
│   ├── 04_AudioCraft.ipynb
│   ├── 05_AudioSep.ipynb
│   └── 06_Style_Bert_VITS2.ipynb
├── Language
│   ├── 01_LLaMA2.ipynb
│   ├── 02_Vicuna_7b.ipynb
│   ├── 03_Mistral_7b.ipynb
│   ├── 04_ABEJA_LLM_2_7b.ipynb
│   ├── 05_LLM_jp_13B.ipynb
│   ├── 06_CALM2_7B.ipynb
│   ├── 07_JapaneseStableLM_Gamma_7B.ipynb
│   ├── 08_PLaMo_13B_Instruct.ipynb
│   ├── 09_llama_cpp.ipynb
│   ├── 10_AutoGPTQ.ipynb
│   ├── 11_llama2_QLoRA.ipynb
│   ├── 12_01_TRL_SFT.ipynb
│   ├── 12_02_TRL_merge.ipynb
│   ├── 12_03_TRL_RM.ipynb
│   ├── 12_04_TRL_PPO.ipynb
│   ├── 12_05_TRL_DPO.ipynb
│   ├── 13_StableCode_3b.ipynb
│   ├── 14_CodeLlama.ipynb
│   ├── 15_Swallow_13B_Instruct.ipynb
│   ├── 16_Mixtral_8x7B.ipynb
│   └── 17_SelfExtend.ipynb
├── MultiModal
│   ├── 01_JapaneseInstructBLIP.ipynb
│   ├── 02_LLaVA.ipynb
│   └── 03_JapaneseStable_VLM.ipynb
├── README.md
└── Vision
    ├── 01_stable_diffusion.ipynb
    ├── 02_waifu_diffusion.ipynb
    ├── 03_SDXL.ipynb
    ├── 04_Emi.ipynb
    ├── 05_MangaDiffusion.ipynb
    ├── 06_SDXL_Turbo.ipynb
    ├── 07_StableDiffusion_LoRA.ipynb
    ├── 08_LCM_LoRA.ipynb
    ├── 09_ControlNet.ipynb
    ├── 10_StableVideoDiffusion.ipynb
    ├── 11_AnimateDiff.ipynb
    ├── 12_01_StableDiffusion_DreamBooth_Train.ipynb
    ├── 12_02_StableDiffusion_DreamBooth_Inference.ipynb
    ├── 13_CartoonSegmentation.ipynb
    ├── 14_DreamTalk.ipynb
    └── 15_StableCascade.ipynb
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 | .python-version
162 |
163 | *.parquet
164 | .env
165 | .DS_Store
166 | 99_*
167 | tmp/
168 | bak/
169 | *.zip
170 |
171 | # Ignore CLI configuration files
172 | .terraformrc
173 | terraform.rc
174 |
175 | # Local .terraform directories
176 | **/.terraform/*
177 |
178 | # .tfstate files
179 | *.tfstate
180 | *.tfstate.*
181 | *.tfvars
182 |
--------------------------------------------------------------------------------
/Language/04_ABEJA_LLM_2_7b.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "provenance": [],
7 | "machine_shape": "hm",
8 | "authorship_tag": "ABX9TyMHLxNNEH7nf/qtBIo1oUva",
9 | "include_colab_link": true
10 | },
11 | "kernelspec": {
12 | "name": "python3",
13 | "display_name": "Python 3"
14 | },
15 | "language_info": {
16 | "name": "python"
17 | },
18 | "widgets": {
19 | "application/vnd.jupyter.widget-state+json": {
20 | "7e2fa68518be449f80fd97746305eab0": {
21 | "model_module": "@jupyter-widgets/controls",
22 | "model_name": "HBoxModel",
23 | "model_module_version": "1.5.0",
24 | "state": {
25 | "_dom_classes": [],
26 | "_model_module": "@jupyter-widgets/controls",
27 | "_model_module_version": "1.5.0",
28 | "_model_name": "HBoxModel",
29 | "_view_count": null,
30 | "_view_module": "@jupyter-widgets/controls",
31 | "_view_module_version": "1.5.0",
32 | "_view_name": "HBoxView",
33 | "box_style": "",
34 | "children": [
35 | "IPY_MODEL_9c9a672fc7b6408c8f36eb48be24257f",
36 | "IPY_MODEL_a1ac3cff06d3416db0b8c9dd9edf48f5",
37 | "IPY_MODEL_f1a3e3ef130348d0877668ad50382a78"
38 | ],
39 | "layout": "IPY_MODEL_19de596f8f60498c908b3adf9d34cd41"
40 | }
41 | },
42 | "9c9a672fc7b6408c8f36eb48be24257f": {
43 | "model_module": "@jupyter-widgets/controls",
44 | "model_name": "HTMLModel",
45 | "model_module_version": "1.5.0",
46 | "state": {
47 | "_dom_classes": [],
48 | "_model_module": "@jupyter-widgets/controls",
49 | "_model_module_version": "1.5.0",
50 | "_model_name": "HTMLModel",
51 | "_view_count": null,
52 | "_view_module": "@jupyter-widgets/controls",
53 | "_view_module_version": "1.5.0",
54 | "_view_name": "HTMLView",
55 | "description": "",
56 | "description_tooltip": null,
57 | "layout": "IPY_MODEL_04d35b3697a44e58a57ead48479a1a73",
58 | "placeholder": "",
59 | "style": "IPY_MODEL_c14afb7238044f25ac888f750979ab26",
60 | "value": "tokenizer_config.json: 100%"
61 | }
62 | },
63 | "a1ac3cff06d3416db0b8c9dd9edf48f5": {
64 | "model_module": "@jupyter-widgets/controls",
65 | "model_name": "FloatProgressModel",
66 | "model_module_version": "1.5.0",
67 | "state": {
68 | "_dom_classes": [],
69 | "_model_module": "@jupyter-widgets/controls",
70 | "_model_module_version": "1.5.0",
71 | "_model_name": "FloatProgressModel",
72 | "_view_count": null,
73 | "_view_module": "@jupyter-widgets/controls",
74 | "_view_module_version": "1.5.0",
75 | "_view_name": "ProgressView",
76 | "bar_style": "success",
77 | "description": "",
78 | "description_tooltip": null,
79 | "layout": "IPY_MODEL_3853ef77d1eb43aeb44cdb44b79b79c3",
80 | "max": 168,
81 | "min": 0,
82 | "orientation": "horizontal",
83 | "style": "IPY_MODEL_97f29a72eda4407b880c17da8a4c54e3",
84 | "value": 168
85 | }
86 | },
87 | "f1a3e3ef130348d0877668ad50382a78": {
88 | "model_module": "@jupyter-widgets/controls",
89 | "model_name": "HTMLModel",
90 | "model_module_version": "1.5.0",
91 | "state": {
92 | "_dom_classes": [],
93 | "_model_module": "@jupyter-widgets/controls",
94 | "_model_module_version": "1.5.0",
95 | "_model_name": "HTMLModel",
96 | "_view_count": null,
97 | "_view_module": "@jupyter-widgets/controls",
98 | "_view_module_version": "1.5.0",
99 | "_view_name": "HTMLView",
100 | "description": "",
101 | "description_tooltip": null,
102 | "layout": "IPY_MODEL_48f2ee42333243978c5f21f68c242631",
103 | "placeholder": "",
104 | "style": "IPY_MODEL_ef0c0287d1674492a2bd79e38a32f639",
105 | "value": " 168/168 [00:00<00:00, 11.6kB/s]"
106 | }
107 | },
108 | "19de596f8f60498c908b3adf9d34cd41": {
109 | "model_module": "@jupyter-widgets/base",
110 | "model_name": "LayoutModel",
111 | "model_module_version": "1.2.0",
112 | "state": {
113 | "_model_module": "@jupyter-widgets/base",
114 | "_model_module_version": "1.2.0",
115 | "_model_name": "LayoutModel",
116 | "_view_count": null,
117 | "_view_module": "@jupyter-widgets/base",
118 | "_view_module_version": "1.2.0",
119 | "_view_name": "LayoutView",
120 | "align_content": null,
121 | "align_items": null,
122 | "align_self": null,
123 | "border": null,
124 | "bottom": null,
125 | "display": null,
126 | "flex": null,
127 | "flex_flow": null,
128 | "grid_area": null,
129 | "grid_auto_columns": null,
130 | "grid_auto_flow": null,
131 | "grid_auto_rows": null,
132 | "grid_column": null,
133 | "grid_gap": null,
134 | "grid_row": null,
135 | "grid_template_areas": null,
136 | "grid_template_columns": null,
137 | "grid_template_rows": null,
138 | "height": null,
139 | "justify_content": null,
140 | "justify_items": null,
141 | "left": null,
142 | "margin": null,
143 | "max_height": null,
144 | "max_width": null,
145 | "min_height": null,
146 | "min_width": null,
147 | "object_fit": null,
148 | "object_position": null,
149 | "order": null,
150 | "overflow": null,
151 | "overflow_x": null,
152 | "overflow_y": null,
153 | "padding": null,
154 | "right": null,
155 | "top": null,
156 | "visibility": null,
157 | "width": null
158 | }
159 | },
160 | "04d35b3697a44e58a57ead48479a1a73": {
161 | "model_module": "@jupyter-widgets/base",
162 | "model_name": "LayoutModel",
163 | "model_module_version": "1.2.0",
164 | "state": {
165 | "_model_module": "@jupyter-widgets/base",
166 | "_model_module_version": "1.2.0",
167 | "_model_name": "LayoutModel",
168 | "_view_count": null,
169 | "_view_module": "@jupyter-widgets/base",
170 | "_view_module_version": "1.2.0",
171 | "_view_name": "LayoutView",
172 | "align_content": null,
173 | "align_items": null,
174 | "align_self": null,
175 | "border": null,
176 | "bottom": null,
177 | "display": null,
178 | "flex": null,
179 | "flex_flow": null,
180 | "grid_area": null,
181 | "grid_auto_columns": null,
182 | "grid_auto_flow": null,
183 | "grid_auto_rows": null,
184 | "grid_column": null,
185 | "grid_gap": null,
186 | "grid_row": null,
187 | "grid_template_areas": null,
188 | "grid_template_columns": null,
189 | "grid_template_rows": null,
190 | "height": null,
191 | "justify_content": null,
192 | "justify_items": null,
193 | "left": null,
194 | "margin": null,
195 | "max_height": null,
196 | "max_width": null,
197 | "min_height": null,
198 | "min_width": null,
199 | "object_fit": null,
200 | "object_position": null,
201 | "order": null,
202 | "overflow": null,
203 | "overflow_x": null,
204 | "overflow_y": null,
205 | "padding": null,
206 | "right": null,
207 | "top": null,
208 | "visibility": null,
209 | "width": null
210 | }
211 | },
212 | "c14afb7238044f25ac888f750979ab26": {
213 | "model_module": "@jupyter-widgets/controls",
214 | "model_name": "DescriptionStyleModel",
215 | "model_module_version": "1.5.0",
216 | "state": {
217 | "_model_module": "@jupyter-widgets/controls",
218 | "_model_module_version": "1.5.0",
219 | "_model_name": "DescriptionStyleModel",
220 | "_view_count": null,
221 | "_view_module": "@jupyter-widgets/base",
222 | "_view_module_version": "1.2.0",
223 | "_view_name": "StyleView",
224 | "description_width": ""
225 | }
226 | },
227 | "3853ef77d1eb43aeb44cdb44b79b79c3": {
228 | "model_module": "@jupyter-widgets/base",
229 | "model_name": "LayoutModel",
230 | "model_module_version": "1.2.0",
231 | "state": {
232 | "_model_module": "@jupyter-widgets/base",
233 | "_model_module_version": "1.2.0",
234 | "_model_name": "LayoutModel",
235 | "_view_count": null,
236 | "_view_module": "@jupyter-widgets/base",
237 | "_view_module_version": "1.2.0",
238 | "_view_name": "LayoutView",
239 | "align_content": null,
240 | "align_items": null,
241 | "align_self": null,
242 | "border": null,
243 | "bottom": null,
244 | "display": null,
245 | "flex": null,
246 | "flex_flow": null,
247 | "grid_area": null,
248 | "grid_auto_columns": null,
249 | "grid_auto_flow": null,
250 | "grid_auto_rows": null,
251 | "grid_column": null,
252 | "grid_gap": null,
253 | "grid_row": null,
254 | "grid_template_areas": null,
255 | "grid_template_columns": null,
256 | "grid_template_rows": null,
257 | "height": null,
258 | "justify_content": null,
259 | "justify_items": null,
260 | "left": null,
261 | "margin": null,
262 | "max_height": null,
263 | "max_width": null,
264 | "min_height": null,
265 | "min_width": null,
266 | "object_fit": null,
267 | "object_position": null,
268 | "order": null,
269 | "overflow": null,
270 | "overflow_x": null,
271 | "overflow_y": null,
272 | "padding": null,
273 | "right": null,
274 | "top": null,
275 | "visibility": null,
276 | "width": null
277 | }
278 | },
279 | "97f29a72eda4407b880c17da8a4c54e3": {
280 | "model_module": "@jupyter-widgets/controls",
281 | "model_name": "ProgressStyleModel",
282 | "model_module_version": "1.5.0",
283 | "state": {
284 | "_model_module": "@jupyter-widgets/controls",
285 | "_model_module_version": "1.5.0",
286 | "_model_name": "ProgressStyleModel",
287 | "_view_count": null,
288 | "_view_module": "@jupyter-widgets/base",
289 | "_view_module_version": "1.2.0",
290 | "_view_name": "StyleView",
291 | "bar_color": null,
292 | "description_width": ""
293 | }
294 | },
295 | "48f2ee42333243978c5f21f68c242631": {
296 | "model_module": "@jupyter-widgets/base",
297 | "model_name": "LayoutModel",
298 | "model_module_version": "1.2.0",
299 | "state": {
300 | "_model_module": "@jupyter-widgets/base",
301 | "_model_module_version": "1.2.0",
302 | "_model_name": "LayoutModel",
303 | "_view_count": null,
304 | "_view_module": "@jupyter-widgets/base",
305 | "_view_module_version": "1.2.0",
306 | "_view_name": "LayoutView",
307 | "align_content": null,
308 | "align_items": null,
309 | "align_self": null,
310 | "border": null,
311 | "bottom": null,
312 | "display": null,
313 | "flex": null,
314 | "flex_flow": null,
315 | "grid_area": null,
316 | "grid_auto_columns": null,
317 | "grid_auto_flow": null,
318 | "grid_auto_rows": null,
319 | "grid_column": null,
320 | "grid_gap": null,
321 | "grid_row": null,
322 | "grid_template_areas": null,
323 | "grid_template_columns": null,
324 | "grid_template_rows": null,
325 | "height": null,
326 | "justify_content": null,
327 | "justify_items": null,
328 | "left": null,
329 | "margin": null,
330 | "max_height": null,
331 | "max_width": null,
332 | "min_height": null,
333 | "min_width": null,
334 | "object_fit": null,
335 | "object_position": null,
336 | "order": null,
337 | "overflow": null,
338 | "overflow_x": null,
339 | "overflow_y": null,
340 | "padding": null,
341 | "right": null,
342 | "top": null,
343 | "visibility": null,
344 | "width": null
345 | }
346 | },
347 | "ef0c0287d1674492a2bd79e38a32f639": {
348 | "model_module": "@jupyter-widgets/controls",
349 | "model_name": "DescriptionStyleModel",
350 | "model_module_version": "1.5.0",
351 | "state": {
352 | "_model_module": "@jupyter-widgets/controls",
353 | "_model_module_version": "1.5.0",
354 | "_model_name": "DescriptionStyleModel",
355 | "_view_count": null,
356 | "_view_module": "@jupyter-widgets/base",
357 | "_view_module_version": "1.2.0",
358 | "_view_name": "StyleView",
359 | "description_width": ""
360 | }
361 | },
362 | "7aabeac5f94d45938c70d85d5f6030e2": {
363 | "model_module": "@jupyter-widgets/controls",
364 | "model_name": "HBoxModel",
365 | "model_module_version": "1.5.0",
366 | "state": {
367 | "_dom_classes": [],
368 | "_model_module": "@jupyter-widgets/controls",
369 | "_model_module_version": "1.5.0",
370 | "_model_name": "HBoxModel",
371 | "_view_count": null,
372 | "_view_module": "@jupyter-widgets/controls",
373 | "_view_module_version": "1.5.0",
374 | "_view_name": "HBoxView",
375 | "box_style": "",
376 | "children": [
377 | "IPY_MODEL_6cb27ec0214d4fe38cae4d09ea5146ee",
378 | "IPY_MODEL_d5547266057f45a8b9ae0b88d553fc1f",
379 | "IPY_MODEL_be5c4d04beeb43a2b2da1f7aa1fd3c3d"
380 | ],
381 | "layout": "IPY_MODEL_80d12bc12f194e8aaee9273af820d2a7"
382 | }
383 | },
384 | "6cb27ec0214d4fe38cae4d09ea5146ee": {
385 | "model_module": "@jupyter-widgets/controls",
386 | "model_name": "HTMLModel",
387 | "model_module_version": "1.5.0",
388 | "state": {
389 | "_dom_classes": [],
390 | "_model_module": "@jupyter-widgets/controls",
391 | "_model_module_version": "1.5.0",
392 | "_model_name": "HTMLModel",
393 | "_view_count": null,
394 | "_view_module": "@jupyter-widgets/controls",
395 | "_view_module_version": "1.5.0",
396 | "_view_name": "HTMLView",
397 | "description": "",
398 | "description_tooltip": null,
399 | "layout": "IPY_MODEL_915790952c444556b003d7295d1c5ccb",
400 | "placeholder": "",
401 | "style": "IPY_MODEL_b26c3ac3150c46c6853f08a40c88e234",
402 | "value": "config.json: 100%"
403 | }
404 | },
405 | "d5547266057f45a8b9ae0b88d553fc1f": {
406 | "model_module": "@jupyter-widgets/controls",
407 | "model_name": "FloatProgressModel",
408 | "model_module_version": "1.5.0",
409 | "state": {
410 | "_dom_classes": [],
411 | "_model_module": "@jupyter-widgets/controls",
412 | "_model_module_version": "1.5.0",
413 | "_model_name": "FloatProgressModel",
414 | "_view_count": null,
415 | "_view_module": "@jupyter-widgets/controls",
416 | "_view_module_version": "1.5.0",
417 | "_view_name": "ProgressView",
418 | "bar_style": "success",
419 | "description": "",
420 | "description_tooltip": null,
421 | "layout": "IPY_MODEL_4133abb3166e49fa9d1f851d97d15741",
422 | "max": 602,
423 | "min": 0,
424 | "orientation": "horizontal",
425 | "style": "IPY_MODEL_183c7fbb1b464b7fbdc37f1042def7df",
426 | "value": 602
427 | }
428 | },
429 | "be5c4d04beeb43a2b2da1f7aa1fd3c3d": {
430 | "model_module": "@jupyter-widgets/controls",
431 | "model_name": "HTMLModel",
432 | "model_module_version": "1.5.0",
433 | "state": {
434 | "_dom_classes": [],
435 | "_model_module": "@jupyter-widgets/controls",
436 | "_model_module_version": "1.5.0",
437 | "_model_name": "HTMLModel",
438 | "_view_count": null,
439 | "_view_module": "@jupyter-widgets/controls",
440 | "_view_module_version": "1.5.0",
441 | "_view_name": "HTMLView",
442 | "description": "",
443 | "description_tooltip": null,
444 | "layout": "IPY_MODEL_2f7a0c6355ef48669b8889a0cdc86d30",
445 | "placeholder": "",
446 | "style": "IPY_MODEL_60198bd7296549bebfa02d030e153098",
447 | "value": " 602/602 [00:00<00:00, 41.3kB/s]"
448 | }
449 | },
450 | "80d12bc12f194e8aaee9273af820d2a7": {
451 | "model_module": "@jupyter-widgets/base",
452 | "model_name": "LayoutModel",
453 | "model_module_version": "1.2.0",
454 | "state": {
455 | "_model_module": "@jupyter-widgets/base",
456 | "_model_module_version": "1.2.0",
457 | "_model_name": "LayoutModel",
458 | "_view_count": null,
459 | "_view_module": "@jupyter-widgets/base",
460 | "_view_module_version": "1.2.0",
461 | "_view_name": "LayoutView",
462 | "align_content": null,
463 | "align_items": null,
464 | "align_self": null,
465 | "border": null,
466 | "bottom": null,
467 | "display": null,
468 | "flex": null,
469 | "flex_flow": null,
470 | "grid_area": null,
471 | "grid_auto_columns": null,
472 | "grid_auto_flow": null,
473 | "grid_auto_rows": null,
474 | "grid_column": null,
475 | "grid_gap": null,
476 | "grid_row": null,
477 | "grid_template_areas": null,
478 | "grid_template_columns": null,
479 | "grid_template_rows": null,
480 | "height": null,
481 | "justify_content": null,
482 | "justify_items": null,
483 | "left": null,
484 | "margin": null,
485 | "max_height": null,
486 | "max_width": null,
487 | "min_height": null,
488 | "min_width": null,
489 | "object_fit": null,
490 | "object_position": null,
491 | "order": null,
492 | "overflow": null,
493 | "overflow_x": null,
494 | "overflow_y": null,
495 | "padding": null,
496 | "right": null,
497 | "top": null,
498 | "visibility": null,
499 | "width": null
500 | }
501 | },
502 | "915790952c444556b003d7295d1c5ccb": {
503 | "model_module": "@jupyter-widgets/base",
504 | "model_name": "LayoutModel",
505 | "model_module_version": "1.2.0",
506 | "state": {
507 | "_model_module": "@jupyter-widgets/base",
508 | "_model_module_version": "1.2.0",
509 | "_model_name": "LayoutModel",
510 | "_view_count": null,
511 | "_view_module": "@jupyter-widgets/base",
512 | "_view_module_version": "1.2.0",
513 | "_view_name": "LayoutView",
514 | "align_content": null,
515 | "align_items": null,
516 | "align_self": null,
517 | "border": null,
518 | "bottom": null,
519 | "display": null,
520 | "flex": null,
521 | "flex_flow": null,
522 | "grid_area": null,
523 | "grid_auto_columns": null,
524 | "grid_auto_flow": null,
525 | "grid_auto_rows": null,
526 | "grid_column": null,
527 | "grid_gap": null,
528 | "grid_row": null,
529 | "grid_template_areas": null,
530 | "grid_template_columns": null,
531 | "grid_template_rows": null,
532 | "height": null,
533 | "justify_content": null,
534 | "justify_items": null,
535 | "left": null,
536 | "margin": null,
537 | "max_height": null,
538 | "max_width": null,
539 | "min_height": null,
540 | "min_width": null,
541 | "object_fit": null,
542 | "object_position": null,
543 | "order": null,
544 | "overflow": null,
545 | "overflow_x": null,
546 | "overflow_y": null,
547 | "padding": null,
548 | "right": null,
549 | "top": null,
550 | "visibility": null,
551 | "width": null
552 | }
553 | },
554 | "b26c3ac3150c46c6853f08a40c88e234": {
555 | "model_module": "@jupyter-widgets/controls",
556 | "model_name": "DescriptionStyleModel",
557 | "model_module_version": "1.5.0",
558 | "state": {
559 | "_model_module": "@jupyter-widgets/controls",
560 | "_model_module_version": "1.5.0",
561 | "_model_name": "DescriptionStyleModel",
562 | "_view_count": null,
563 | "_view_module": "@jupyter-widgets/base",
564 | "_view_module_version": "1.2.0",
565 | "_view_name": "StyleView",
566 | "description_width": ""
567 | }
568 | },
569 | "4133abb3166e49fa9d1f851d97d15741": {
570 | "model_module": "@jupyter-widgets/base",
571 | "model_name": "LayoutModel",
572 | "model_module_version": "1.2.0",
573 | "state": {
574 | "_model_module": "@jupyter-widgets/base",
575 | "_model_module_version": "1.2.0",
576 | "_model_name": "LayoutModel",
577 | "_view_count": null,
578 | "_view_module": "@jupyter-widgets/base",
579 | "_view_module_version": "1.2.0",
580 | "_view_name": "LayoutView",
581 | "align_content": null,
582 | "align_items": null,
583 | "align_self": null,
584 | "border": null,
585 | "bottom": null,
586 | "display": null,
587 | "flex": null,
588 | "flex_flow": null,
589 | "grid_area": null,
590 | "grid_auto_columns": null,
591 | "grid_auto_flow": null,
592 | "grid_auto_rows": null,
593 | "grid_column": null,
594 | "grid_gap": null,
595 | "grid_row": null,
596 | "grid_template_areas": null,
597 | "grid_template_columns": null,
598 | "grid_template_rows": null,
599 | "height": null,
600 | "justify_content": null,
601 | "justify_items": null,
602 | "left": null,
603 | "margin": null,
604 | "max_height": null,
605 | "max_width": null,
606 | "min_height": null,
607 | "min_width": null,
608 | "object_fit": null,
609 | "object_position": null,
610 | "order": null,
611 | "overflow": null,
612 | "overflow_x": null,
613 | "overflow_y": null,
614 | "padding": null,
615 | "right": null,
616 | "top": null,
617 | "visibility": null,
618 | "width": null
619 | }
620 | },
621 | "183c7fbb1b464b7fbdc37f1042def7df": {
622 | "model_module": "@jupyter-widgets/controls",
623 | "model_name": "ProgressStyleModel",
624 | "model_module_version": "1.5.0",
625 | "state": {
626 | "_model_module": "@jupyter-widgets/controls",
627 | "_model_module_version": "1.5.0",
628 | "_model_name": "ProgressStyleModel",
629 | "_view_count": null,
630 | "_view_module": "@jupyter-widgets/base",
631 | "_view_module_version": "1.2.0",
632 | "_view_name": "StyleView",
633 | "bar_color": null,
634 | "description_width": ""
635 | }
636 | },
637 | "2f7a0c6355ef48669b8889a0cdc86d30": {
638 | "model_module": "@jupyter-widgets/base",
639 | "model_name": "LayoutModel",
640 | "model_module_version": "1.2.0",
641 | "state": {
642 | "_model_module": "@jupyter-widgets/base",
643 | "_model_module_version": "1.2.0",
644 | "_model_name": "LayoutModel",
645 | "_view_count": null,
646 | "_view_module": "@jupyter-widgets/base",
647 | "_view_module_version": "1.2.0",
648 | "_view_name": "LayoutView",
649 | "align_content": null,
650 | "align_items": null,
651 | "align_self": null,
652 | "border": null,
653 | "bottom": null,
654 | "display": null,
655 | "flex": null,
656 | "flex_flow": null,
657 | "grid_area": null,
658 | "grid_auto_columns": null,
659 | "grid_auto_flow": null,
660 | "grid_auto_rows": null,
661 | "grid_column": null,
662 | "grid_gap": null,
663 | "grid_row": null,
664 | "grid_template_areas": null,
665 | "grid_template_columns": null,
666 | "grid_template_rows": null,
667 | "height": null,
668 | "justify_content": null,
669 | "justify_items": null,
670 | "left": null,
671 | "margin": null,
672 | "max_height": null,
673 | "max_width": null,
674 | "min_height": null,
675 | "min_width": null,
676 | "object_fit": null,
677 | "object_position": null,
678 | "order": null,
679 | "overflow": null,
680 | "overflow_x": null,
681 | "overflow_y": null,
682 | "padding": null,
683 | "right": null,
684 | "top": null,
685 | "visibility": null,
686 | "width": null
687 | }
688 | },
689 | "60198bd7296549bebfa02d030e153098": {
690 | "model_module": "@jupyter-widgets/controls",
691 | "model_name": "DescriptionStyleModel",
692 | "model_module_version": "1.5.0",
693 | "state": {
694 | "_model_module": "@jupyter-widgets/controls",
695 | "_model_module_version": "1.5.0",
696 | "_model_name": "DescriptionStyleModel",
697 | "_view_count": null,
698 | "_view_module": "@jupyter-widgets/base",
699 | "_view_module_version": "1.2.0",
700 | "_view_name": "StyleView",
701 | "description_width": ""
702 | }
703 | },
704 | "3b2bd3bee32244a9a1598646e3c2ac96": {
705 | "model_module": "@jupyter-widgets/controls",
706 | "model_name": "HBoxModel",
707 | "model_module_version": "1.5.0",
708 | "state": {
709 | "_dom_classes": [],
710 | "_model_module": "@jupyter-widgets/controls",
711 | "_model_module_version": "1.5.0",
712 | "_model_name": "HBoxModel",
713 | "_view_count": null,
714 | "_view_module": "@jupyter-widgets/controls",
715 | "_view_module_version": "1.5.0",
716 | "_view_name": "HBoxView",
717 | "box_style": "",
718 | "children": [
719 | "IPY_MODEL_d66e2e1d288944ed865c87cd0765ed9d",
720 | "IPY_MODEL_85116ccbf27442e5a8a94e03c4c1614a",
721 | "IPY_MODEL_194e7bb3f0b1470db38fca767afa4335"
722 | ],
723 | "layout": "IPY_MODEL_c25c400d86b04945a5e3590987aa903a"
724 | }
725 | },
726 | "d66e2e1d288944ed865c87cd0765ed9d": {
727 | "model_module": "@jupyter-widgets/controls",
728 | "model_name": "HTMLModel",
729 | "model_module_version": "1.5.0",
730 | "state": {
731 | "_dom_classes": [],
732 | "_model_module": "@jupyter-widgets/controls",
733 | "_model_module_version": "1.5.0",
734 | "_model_name": "HTMLModel",
735 | "_view_count": null,
736 | "_view_module": "@jupyter-widgets/controls",
737 | "_view_module_version": "1.5.0",
738 | "_view_name": "HTMLView",
739 | "description": "",
740 | "description_tooltip": null,
741 | "layout": "IPY_MODEL_849bdeb13bce4cba820b1723735d86bd",
742 | "placeholder": "",
743 | "style": "IPY_MODEL_8bb3faed90ef4db48f1506c026b0e4a3",
744 | "value": "vocab.txt: 100%"
745 | }
746 | },
747 | "85116ccbf27442e5a8a94e03c4c1614a": {
748 | "model_module": "@jupyter-widgets/controls",
749 | "model_name": "FloatProgressModel",
750 | "model_module_version": "1.5.0",
751 | "state": {
752 | "_dom_classes": [],
753 | "_model_module": "@jupyter-widgets/controls",
754 | "_model_module_version": "1.5.0",
755 | "_model_name": "FloatProgressModel",
756 | "_view_count": null,
757 | "_view_module": "@jupyter-widgets/controls",
758 | "_view_module_version": "1.5.0",
759 | "_view_name": "ProgressView",
760 | "bar_style": "success",
761 | "description": "",
762 | "description_tooltip": null,
763 | "layout": "IPY_MODEL_a8fe0cc767cb435ca760cf5ca4967739",
764 | "max": 1195221,
765 | "min": 0,
766 | "orientation": "horizontal",
767 | "style": "IPY_MODEL_1041fabd9fc444499a432a7925437fda",
768 | "value": 1195221
769 | }
770 | },
771 | "194e7bb3f0b1470db38fca767afa4335": {
772 | "model_module": "@jupyter-widgets/controls",
773 | "model_name": "HTMLModel",
774 | "model_module_version": "1.5.0",
775 | "state": {
776 | "_dom_classes": [],
777 | "_model_module": "@jupyter-widgets/controls",
778 | "_model_module_version": "1.5.0",
779 | "_model_name": "HTMLModel",
780 | "_view_count": null,
781 | "_view_module": "@jupyter-widgets/controls",
782 | "_view_module_version": "1.5.0",
783 | "_view_name": "HTMLView",
784 | "description": "",
785 | "description_tooltip": null,
786 | "layout": "IPY_MODEL_b539de3135df48a7802eefd75e08840d",
787 | "placeholder": "",
788 | "style": "IPY_MODEL_caa9da2f3fef4c2b80a7ec7a4c169016",
789 | "value": " 1.20M/1.20M [00:00<00:00, 1.22MB/s]"
790 | }
791 | },
792 | "c25c400d86b04945a5e3590987aa903a": {
793 | "model_module": "@jupyter-widgets/base",
794 | "model_name": "LayoutModel",
795 | "model_module_version": "1.2.0",
796 | "state": {
797 | "_model_module": "@jupyter-widgets/base",
798 | "_model_module_version": "1.2.0",
799 | "_model_name": "LayoutModel",
800 | "_view_count": null,
801 | "_view_module": "@jupyter-widgets/base",
802 | "_view_module_version": "1.2.0",
803 | "_view_name": "LayoutView",
804 | "align_content": null,
805 | "align_items": null,
806 | "align_self": null,
807 | "border": null,
808 | "bottom": null,
809 | "display": null,
810 | "flex": null,
811 | "flex_flow": null,
812 | "grid_area": null,
813 | "grid_auto_columns": null,
814 | "grid_auto_flow": null,
815 | "grid_auto_rows": null,
816 | "grid_column": null,
817 | "grid_gap": null,
818 | "grid_row": null,
819 | "grid_template_areas": null,
820 | "grid_template_columns": null,
821 | "grid_template_rows": null,
822 | "height": null,
823 | "justify_content": null,
824 | "justify_items": null,
825 | "left": null,
826 | "margin": null,
827 | "max_height": null,
828 | "max_width": null,
829 | "min_height": null,
830 | "min_width": null,
831 | "object_fit": null,
832 | "object_position": null,
833 | "order": null,
834 | "overflow": null,
835 | "overflow_x": null,
836 | "overflow_y": null,
837 | "padding": null,
838 | "right": null,
839 | "top": null,
840 | "visibility": null,
841 | "width": null
842 | }
843 | },
844 | "849bdeb13bce4cba820b1723735d86bd": {
845 | "model_module": "@jupyter-widgets/base",
846 | "model_name": "LayoutModel",
847 | "model_module_version": "1.2.0",
848 | "state": {
849 | "_model_module": "@jupyter-widgets/base",
850 | "_model_module_version": "1.2.0",
851 | "_model_name": "LayoutModel",
852 | "_view_count": null,
853 | "_view_module": "@jupyter-widgets/base",
854 | "_view_module_version": "1.2.0",
855 | "_view_name": "LayoutView",
856 | "align_content": null,
857 | "align_items": null,
858 | "align_self": null,
859 | "border": null,
860 | "bottom": null,
861 | "display": null,
862 | "flex": null,
863 | "flex_flow": null,
864 | "grid_area": null,
865 | "grid_auto_columns": null,
866 | "grid_auto_flow": null,
867 | "grid_auto_rows": null,
868 | "grid_column": null,
869 | "grid_gap": null,
870 | "grid_row": null,
871 | "grid_template_areas": null,
872 | "grid_template_columns": null,
873 | "grid_template_rows": null,
874 | "height": null,
875 | "justify_content": null,
876 | "justify_items": null,
877 | "left": null,
878 | "margin": null,
879 | "max_height": null,
880 | "max_width": null,
881 | "min_height": null,
882 | "min_width": null,
883 | "object_fit": null,
884 | "object_position": null,
885 | "order": null,
886 | "overflow": null,
887 | "overflow_x": null,
888 | "overflow_y": null,
889 | "padding": null,
890 | "right": null,
891 | "top": null,
892 | "visibility": null,
893 | "width": null
894 | }
895 | },
896 | "8bb3faed90ef4db48f1506c026b0e4a3": {
897 | "model_module": "@jupyter-widgets/controls",
898 | "model_name": "DescriptionStyleModel",
899 | "model_module_version": "1.5.0",
900 | "state": {
901 | "_model_module": "@jupyter-widgets/controls",
902 | "_model_module_version": "1.5.0",
903 | "_model_name": "DescriptionStyleModel",
904 | "_view_count": null,
905 | "_view_module": "@jupyter-widgets/base",
906 | "_view_module_version": "1.2.0",
907 | "_view_name": "StyleView",
908 | "description_width": ""
909 | }
910 | },
911 | "a8fe0cc767cb435ca760cf5ca4967739": {
912 | "model_module": "@jupyter-widgets/base",
913 | "model_name": "LayoutModel",
914 | "model_module_version": "1.2.0",
915 | "state": {
916 | "_model_module": "@jupyter-widgets/base",
917 | "_model_module_version": "1.2.0",
918 | "_model_name": "LayoutModel",
919 | "_view_count": null,
920 | "_view_module": "@jupyter-widgets/base",
921 | "_view_module_version": "1.2.0",
922 | "_view_name": "LayoutView",
923 | "align_content": null,
924 | "align_items": null,
925 | "align_self": null,
926 | "border": null,
927 | "bottom": null,
928 | "display": null,
929 | "flex": null,
930 | "flex_flow": null,
931 | "grid_area": null,
932 | "grid_auto_columns": null,
933 | "grid_auto_flow": null,
934 | "grid_auto_rows": null,
935 | "grid_column": null,
936 | "grid_gap": null,
937 | "grid_row": null,
938 | "grid_template_areas": null,
939 | "grid_template_columns": null,
940 | "grid_template_rows": null,
941 | "height": null,
942 | "justify_content": null,
943 | "justify_items": null,
944 | "left": null,
945 | "margin": null,
946 | "max_height": null,
947 | "max_width": null,
948 | "min_height": null,
949 | "min_width": null,
950 | "object_fit": null,
951 | "object_position": null,
952 | "order": null,
953 | "overflow": null,
954 | "overflow_x": null,
955 | "overflow_y": null,
956 | "padding": null,
957 | "right": null,
958 | "top": null,
959 | "visibility": null,
960 | "width": null
961 | }
962 | },
963 | "1041fabd9fc444499a432a7925437fda": {
964 | "model_module": "@jupyter-widgets/controls",
965 | "model_name": "ProgressStyleModel",
966 | "model_module_version": "1.5.0",
967 | "state": {
968 | "_model_module": "@jupyter-widgets/controls",
969 | "_model_module_version": "1.5.0",
970 | "_model_name": "ProgressStyleModel",
971 | "_view_count": null,
972 | "_view_module": "@jupyter-widgets/base",
973 | "_view_module_version": "1.2.0",
974 | "_view_name": "StyleView",
975 | "bar_color": null,
976 | "description_width": ""
977 | }
978 | },
979 | "b539de3135df48a7802eefd75e08840d": {
980 | "model_module": "@jupyter-widgets/base",
981 | "model_name": "LayoutModel",
982 | "model_module_version": "1.2.0",
983 | "state": {
984 | "_model_module": "@jupyter-widgets/base",
985 | "_model_module_version": "1.2.0",
986 | "_model_name": "LayoutModel",
987 | "_view_count": null,
988 | "_view_module": "@jupyter-widgets/base",
989 | "_view_module_version": "1.2.0",
990 | "_view_name": "LayoutView",
991 | "align_content": null,
992 | "align_items": null,
993 | "align_self": null,
994 | "border": null,
995 | "bottom": null,
996 | "display": null,
997 | "flex": null,
998 | "flex_flow": null,
999 | "grid_area": null,
1000 | "grid_auto_columns": null,
1001 | "grid_auto_flow": null,
1002 | "grid_auto_rows": null,
1003 | "grid_column": null,
1004 | "grid_gap": null,
1005 | "grid_row": null,
1006 | "grid_template_areas": null,
1007 | "grid_template_columns": null,
1008 | "grid_template_rows": null,
1009 | "height": null,
1010 | "justify_content": null,
1011 | "justify_items": null,
1012 | "left": null,
1013 | "margin": null,
1014 | "max_height": null,
1015 | "max_width": null,
1016 | "min_height": null,
1017 | "min_width": null,
1018 | "object_fit": null,
1019 | "object_position": null,
1020 | "order": null,
1021 | "overflow": null,
1022 | "overflow_x": null,
1023 | "overflow_y": null,
1024 | "padding": null,
1025 | "right": null,
1026 | "top": null,
1027 | "visibility": null,
1028 | "width": null
1029 | }
1030 | },
1031 | "caa9da2f3fef4c2b80a7ec7a4c169016": {
1032 | "model_module": "@jupyter-widgets/controls",
1033 | "model_name": "DescriptionStyleModel",
1034 | "model_module_version": "1.5.0",
1035 | "state": {
1036 | "_model_module": "@jupyter-widgets/controls",
1037 | "_model_module_version": "1.5.0",
1038 | "_model_name": "DescriptionStyleModel",
1039 | "_view_count": null,
1040 | "_view_module": "@jupyter-widgets/base",
1041 | "_view_module_version": "1.2.0",
1042 | "_view_name": "StyleView",
1043 | "description_width": ""
1044 | }
1045 | },
1046 | "517fb97311cd469288edb7165b504911": {
1047 | "model_module": "@jupyter-widgets/controls",
1048 | "model_name": "HBoxModel",
1049 | "model_module_version": "1.5.0",
1050 | "state": {
1051 | "_dom_classes": [],
1052 | "_model_module": "@jupyter-widgets/controls",
1053 | "_model_module_version": "1.5.0",
1054 | "_model_name": "HBoxModel",
1055 | "_view_count": null,
1056 | "_view_module": "@jupyter-widgets/controls",
1057 | "_view_module_version": "1.5.0",
1058 | "_view_name": "HBoxView",
1059 | "box_style": "",
1060 | "children": [
1061 | "IPY_MODEL_e6ab54cafbb54107b1052b9879121e3d",
1062 | "IPY_MODEL_94a63d2944214eca97079a9bb1bea55d",
1063 | "IPY_MODEL_30cfb45cf5404cc1aaf45515c0958d8a"
1064 | ],
1065 | "layout": "IPY_MODEL_d4bbc6623727407589577b3e7122657e"
1066 | }
1067 | },
1068 | "e6ab54cafbb54107b1052b9879121e3d": {
1069 | "model_module": "@jupyter-widgets/controls",
1070 | "model_name": "HTMLModel",
1071 | "model_module_version": "1.5.0",
1072 | "state": {
1073 | "_dom_classes": [],
1074 | "_model_module": "@jupyter-widgets/controls",
1075 | "_model_module_version": "1.5.0",
1076 | "_model_name": "HTMLModel",
1077 | "_view_count": null,
1078 | "_view_module": "@jupyter-widgets/controls",
1079 | "_view_module_version": "1.5.0",
1080 | "_view_name": "HTMLView",
1081 | "description": "",
1082 | "description_tooltip": null,
1083 | "layout": "IPY_MODEL_9ace909a29fd45b2bb3aa647d75e3965",
1084 | "placeholder": "",
1085 | "style": "IPY_MODEL_8c2d8871cd2f47eabd231a7cc5ed339a",
1086 | "value": "emoji.json: 100%"
1087 | }
1088 | },
1089 | "94a63d2944214eca97079a9bb1bea55d": {
1090 | "model_module": "@jupyter-widgets/controls",
1091 | "model_name": "FloatProgressModel",
1092 | "model_module_version": "1.5.0",
1093 | "state": {
1094 | "_dom_classes": [],
1095 | "_model_module": "@jupyter-widgets/controls",
1096 | "_model_module_version": "1.5.0",
1097 | "_model_name": "FloatProgressModel",
1098 | "_view_count": null,
1099 | "_view_module": "@jupyter-widgets/controls",
1100 | "_view_module_version": "1.5.0",
1101 | "_view_name": "ProgressView",
1102 | "bar_style": "success",
1103 | "description": "",
1104 | "description_tooltip": null,
1105 | "layout": "IPY_MODEL_83579b1e91eb4385b8ddcfda2c717473",
1106 | "max": 189183,
1107 | "min": 0,
1108 | "orientation": "horizontal",
1109 | "style": "IPY_MODEL_cf039b27f8a74b1497ed51b0deababef",
1110 | "value": 189183
1111 | }
1112 | },
1113 | "30cfb45cf5404cc1aaf45515c0958d8a": {
1114 | "model_module": "@jupyter-widgets/controls",
1115 | "model_name": "HTMLModel",
1116 | "model_module_version": "1.5.0",
1117 | "state": {
1118 | "_dom_classes": [],
1119 | "_model_module": "@jupyter-widgets/controls",
1120 | "_model_module_version": "1.5.0",
1121 | "_model_name": "HTMLModel",
1122 | "_view_count": null,
1123 | "_view_module": "@jupyter-widgets/controls",
1124 | "_view_module_version": "1.5.0",
1125 | "_view_name": "HTMLView",
1126 | "description": "",
1127 | "description_tooltip": null,
1128 | "layout": "IPY_MODEL_e8797cf313af43b3958e502fcd2f817b",
1129 | "placeholder": "",
1130 | "style": "IPY_MODEL_113aca6991c142de8e28737239af0803",
1131 | "value": " 189k/189k [00:00<00:00, 486kB/s]"
1132 | }
1133 | },
1134 | "d4bbc6623727407589577b3e7122657e": {
1135 | "model_module": "@jupyter-widgets/base",
1136 | "model_name": "LayoutModel",
1137 | "model_module_version": "1.2.0",
1138 | "state": {
1139 | "_model_module": "@jupyter-widgets/base",
1140 | "_model_module_version": "1.2.0",
1141 | "_model_name": "LayoutModel",
1142 | "_view_count": null,
1143 | "_view_module": "@jupyter-widgets/base",
1144 | "_view_module_version": "1.2.0",
1145 | "_view_name": "LayoutView",
1146 | "align_content": null,
1147 | "align_items": null,
1148 | "align_self": null,
1149 | "border": null,
1150 | "bottom": null,
1151 | "display": null,
1152 | "flex": null,
1153 | "flex_flow": null,
1154 | "grid_area": null,
1155 | "grid_auto_columns": null,
1156 | "grid_auto_flow": null,
1157 | "grid_auto_rows": null,
1158 | "grid_column": null,
1159 | "grid_gap": null,
1160 | "grid_row": null,
1161 | "grid_template_areas": null,
1162 | "grid_template_columns": null,
1163 | "grid_template_rows": null,
1164 | "height": null,
1165 | "justify_content": null,
1166 | "justify_items": null,
1167 | "left": null,
1168 | "margin": null,
1169 | "max_height": null,
1170 | "max_width": null,
1171 | "min_height": null,
1172 | "min_width": null,
1173 | "object_fit": null,
1174 | "object_position": null,
1175 | "order": null,
1176 | "overflow": null,
1177 | "overflow_x": null,
1178 | "overflow_y": null,
1179 | "padding": null,
1180 | "right": null,
1181 | "top": null,
1182 | "visibility": null,
1183 | "width": null
1184 | }
1185 | },
1186 | "9ace909a29fd45b2bb3aa647d75e3965": {
1187 | "model_module": "@jupyter-widgets/base",
1188 | "model_name": "LayoutModel",
1189 | "model_module_version": "1.2.0",
1190 | "state": {
1191 | "_model_module": "@jupyter-widgets/base",
1192 | "_model_module_version": "1.2.0",
1193 | "_model_name": "LayoutModel",
1194 | "_view_count": null,
1195 | "_view_module": "@jupyter-widgets/base",
1196 | "_view_module_version": "1.2.0",
1197 | "_view_name": "LayoutView",
1198 | "align_content": null,
1199 | "align_items": null,
1200 | "align_self": null,
1201 | "border": null,
1202 | "bottom": null,
1203 | "display": null,
1204 | "flex": null,
1205 | "flex_flow": null,
1206 | "grid_area": null,
1207 | "grid_auto_columns": null,
1208 | "grid_auto_flow": null,
1209 | "grid_auto_rows": null,
1210 | "grid_column": null,
1211 | "grid_gap": null,
1212 | "grid_row": null,
1213 | "grid_template_areas": null,
1214 | "grid_template_columns": null,
1215 | "grid_template_rows": null,
1216 | "height": null,
1217 | "justify_content": null,
1218 | "justify_items": null,
1219 | "left": null,
1220 | "margin": null,
1221 | "max_height": null,
1222 | "max_width": null,
1223 | "min_height": null,
1224 | "min_width": null,
1225 | "object_fit": null,
1226 | "object_position": null,
1227 | "order": null,
1228 | "overflow": null,
1229 | "overflow_x": null,
1230 | "overflow_y": null,
1231 | "padding": null,
1232 | "right": null,
1233 | "top": null,
1234 | "visibility": null,
1235 | "width": null
1236 | }
1237 | },
1238 | "8c2d8871cd2f47eabd231a7cc5ed339a": {
1239 | "model_module": "@jupyter-widgets/controls",
1240 | "model_name": "DescriptionStyleModel",
1241 | "model_module_version": "1.5.0",
1242 | "state": {
1243 | "_model_module": "@jupyter-widgets/controls",
1244 | "_model_module_version": "1.5.0",
1245 | "_model_name": "DescriptionStyleModel",
1246 | "_view_count": null,
1247 | "_view_module": "@jupyter-widgets/base",
1248 | "_view_module_version": "1.2.0",
1249 | "_view_name": "StyleView",
1250 | "description_width": ""
1251 | }
1252 | },
1253 | "83579b1e91eb4385b8ddcfda2c717473": {
1254 | "model_module": "@jupyter-widgets/base",
1255 | "model_name": "LayoutModel",
1256 | "model_module_version": "1.2.0",
1257 | "state": {
1258 | "_model_module": "@jupyter-widgets/base",
1259 | "_model_module_version": "1.2.0",
1260 | "_model_name": "LayoutModel",
1261 | "_view_count": null,
1262 | "_view_module": "@jupyter-widgets/base",
1263 | "_view_module_version": "1.2.0",
1264 | "_view_name": "LayoutView",
1265 | "align_content": null,
1266 | "align_items": null,
1267 | "align_self": null,
1268 | "border": null,
1269 | "bottom": null,
1270 | "display": null,
1271 | "flex": null,
1272 | "flex_flow": null,
1273 | "grid_area": null,
1274 | "grid_auto_columns": null,
1275 | "grid_auto_flow": null,
1276 | "grid_auto_rows": null,
1277 | "grid_column": null,
1278 | "grid_gap": null,
1279 | "grid_row": null,
1280 | "grid_template_areas": null,
1281 | "grid_template_columns": null,
1282 | "grid_template_rows": null,
1283 | "height": null,
1284 | "justify_content": null,
1285 | "justify_items": null,
1286 | "left": null,
1287 | "margin": null,
1288 | "max_height": null,
1289 | "max_width": null,
1290 | "min_height": null,
1291 | "min_width": null,
1292 | "object_fit": null,
1293 | "object_position": null,
1294 | "order": null,
1295 | "overflow": null,
1296 | "overflow_x": null,
1297 | "overflow_y": null,
1298 | "padding": null,
1299 | "right": null,
1300 | "top": null,
1301 | "visibility": null,
1302 | "width": null
1303 | }
1304 | },
1305 | "cf039b27f8a74b1497ed51b0deababef": {
1306 | "model_module": "@jupyter-widgets/controls",
1307 | "model_name": "ProgressStyleModel",
1308 | "model_module_version": "1.5.0",
1309 | "state": {
1310 | "_model_module": "@jupyter-widgets/controls",
1311 | "_model_module_version": "1.5.0",
1312 | "_model_name": "ProgressStyleModel",
1313 | "_view_count": null,
1314 | "_view_module": "@jupyter-widgets/base",
1315 | "_view_module_version": "1.2.0",
1316 | "_view_name": "StyleView",
1317 | "bar_color": null,
1318 | "description_width": ""
1319 | }
1320 | },
1321 | "e8797cf313af43b3958e502fcd2f817b": {
1322 | "model_module": "@jupyter-widgets/base",
1323 | "model_name": "LayoutModel",
1324 | "model_module_version": "1.2.0",
1325 | "state": {
1326 | "_model_module": "@jupyter-widgets/base",
1327 | "_model_module_version": "1.2.0",
1328 | "_model_name": "LayoutModel",
1329 | "_view_count": null,
1330 | "_view_module": "@jupyter-widgets/base",
1331 | "_view_module_version": "1.2.0",
1332 | "_view_name": "LayoutView",
1333 | "align_content": null,
1334 | "align_items": null,
1335 | "align_self": null,
1336 | "border": null,
1337 | "bottom": null,
1338 | "display": null,
1339 | "flex": null,
1340 | "flex_flow": null,
1341 | "grid_area": null,
1342 | "grid_auto_columns": null,
1343 | "grid_auto_flow": null,
1344 | "grid_auto_rows": null,
1345 | "grid_column": null,
1346 | "grid_gap": null,
1347 | "grid_row": null,
1348 | "grid_template_areas": null,
1349 | "grid_template_columns": null,
1350 | "grid_template_rows": null,
1351 | "height": null,
1352 | "justify_content": null,
1353 | "justify_items": null,
1354 | "left": null,
1355 | "margin": null,
1356 | "max_height": null,
1357 | "max_width": null,
1358 | "min_height": null,
1359 | "min_width": null,
1360 | "object_fit": null,
1361 | "object_position": null,
1362 | "order": null,
1363 | "overflow": null,
1364 | "overflow_x": null,
1365 | "overflow_y": null,
1366 | "padding": null,
1367 | "right": null,
1368 | "top": null,
1369 | "visibility": null,
1370 | "width": null
1371 | }
1372 | },
1373 | "113aca6991c142de8e28737239af0803": {
1374 | "model_module": "@jupyter-widgets/controls",
1375 | "model_name": "DescriptionStyleModel",
1376 | "model_module_version": "1.5.0",
1377 | "state": {
1378 | "_model_module": "@jupyter-widgets/controls",
1379 | "_model_module_version": "1.5.0",
1380 | "_model_name": "DescriptionStyleModel",
1381 | "_view_count": null,
1382 | "_view_module": "@jupyter-widgets/base",
1383 | "_view_module_version": "1.2.0",
1384 | "_view_name": "StyleView",
1385 | "description_width": ""
1386 | }
1387 | },
1388 | "b62471ed416e41c4b9ca16b3fe6fee35": {
1389 | "model_module": "@jupyter-widgets/controls",
1390 | "model_name": "HBoxModel",
1391 | "model_module_version": "1.5.0",
1392 | "state": {
1393 | "_dom_classes": [],
1394 | "_model_module": "@jupyter-widgets/controls",
1395 | "_model_module_version": "1.5.0",
1396 | "_model_name": "HBoxModel",
1397 | "_view_count": null,
1398 | "_view_module": "@jupyter-widgets/controls",
1399 | "_view_module_version": "1.5.0",
1400 | "_view_name": "HBoxView",
1401 | "box_style": "",
1402 | "children": [
1403 | "IPY_MODEL_a3319fd213ed4c66b2aef3236904b578",
1404 | "IPY_MODEL_a262135eff97435c938cd61b3b576e55",
1405 | "IPY_MODEL_8b74ab4a9d824881ba1b17cdeb7bf75c"
1406 | ],
1407 | "layout": "IPY_MODEL_d19cbadaa6f648098b30fd7cab88143b"
1408 | }
1409 | },
1410 | "a3319fd213ed4c66b2aef3236904b578": {
1411 | "model_module": "@jupyter-widgets/controls",
1412 | "model_name": "HTMLModel",
1413 | "model_module_version": "1.5.0",
1414 | "state": {
1415 | "_dom_classes": [],
1416 | "_model_module": "@jupyter-widgets/controls",
1417 | "_model_module_version": "1.5.0",
1418 | "_model_name": "HTMLModel",
1419 | "_view_count": null,
1420 | "_view_module": "@jupyter-widgets/controls",
1421 | "_view_module_version": "1.5.0",
1422 | "_view_name": "HTMLView",
1423 | "description": "",
1424 | "description_tooltip": null,
1425 | "layout": "IPY_MODEL_44b6ebe9c3d942d48c867e1425e22bba",
1426 | "placeholder": "",
1427 | "style": "IPY_MODEL_ee40d822c7a44a77ac0dd1ddb403d44a",
1428 | "value": "pytorch_model.bin: 100%"
1429 | }
1430 | },
1431 | "a262135eff97435c938cd61b3b576e55": {
1432 | "model_module": "@jupyter-widgets/controls",
1433 | "model_name": "FloatProgressModel",
1434 | "model_module_version": "1.5.0",
1435 | "state": {
1436 | "_dom_classes": [],
1437 | "_model_module": "@jupyter-widgets/controls",
1438 | "_model_module_version": "1.5.0",
1439 | "_model_name": "FloatProgressModel",
1440 | "_view_count": null,
1441 | "_view_module": "@jupyter-widgets/controls",
1442 | "_view_module_version": "1.5.0",
1443 | "_view_name": "ProgressView",
1444 | "bar_style": "success",
1445 | "description": "",
1446 | "description_tooltip": null,
1447 | "layout": "IPY_MODEL_cf3a649bbddd4da691341f967e93f377",
1448 | "max": 5361640123,
1449 | "min": 0,
1450 | "orientation": "horizontal",
1451 | "style": "IPY_MODEL_e979f667255f432abe04bfebc2ff3418",
1452 | "value": 5361640123
1453 | }
1454 | },
1455 | "8b74ab4a9d824881ba1b17cdeb7bf75c": {
1456 | "model_module": "@jupyter-widgets/controls",
1457 | "model_name": "HTMLModel",
1458 | "model_module_version": "1.5.0",
1459 | "state": {
1460 | "_dom_classes": [],
1461 | "_model_module": "@jupyter-widgets/controls",
1462 | "_model_module_version": "1.5.0",
1463 | "_model_name": "HTMLModel",
1464 | "_view_count": null,
1465 | "_view_module": "@jupyter-widgets/controls",
1466 | "_view_module_version": "1.5.0",
1467 | "_view_name": "HTMLView",
1468 | "description": "",
1469 | "description_tooltip": null,
1470 | "layout": "IPY_MODEL_1582c2d1fd674f4986798272a169bba2",
1471 | "placeholder": "",
1472 | "style": "IPY_MODEL_143ea22cea6d4530947ce1986a4c61ac",
1473 | "value": " 5.36G/5.36G [04:37<00:00, 19.0MB/s]"
1474 | }
1475 | },
1476 | "d19cbadaa6f648098b30fd7cab88143b": {
1477 | "model_module": "@jupyter-widgets/base",
1478 | "model_name": "LayoutModel",
1479 | "model_module_version": "1.2.0",
1480 | "state": {
1481 | "_model_module": "@jupyter-widgets/base",
1482 | "_model_module_version": "1.2.0",
1483 | "_model_name": "LayoutModel",
1484 | "_view_count": null,
1485 | "_view_module": "@jupyter-widgets/base",
1486 | "_view_module_version": "1.2.0",
1487 | "_view_name": "LayoutView",
1488 | "align_content": null,
1489 | "align_items": null,
1490 | "align_self": null,
1491 | "border": null,
1492 | "bottom": null,
1493 | "display": null,
1494 | "flex": null,
1495 | "flex_flow": null,
1496 | "grid_area": null,
1497 | "grid_auto_columns": null,
1498 | "grid_auto_flow": null,
1499 | "grid_auto_rows": null,
1500 | "grid_column": null,
1501 | "grid_gap": null,
1502 | "grid_row": null,
1503 | "grid_template_areas": null,
1504 | "grid_template_columns": null,
1505 | "grid_template_rows": null,
1506 | "height": null,
1507 | "justify_content": null,
1508 | "justify_items": null,
1509 | "left": null,
1510 | "margin": null,
1511 | "max_height": null,
1512 | "max_width": null,
1513 | "min_height": null,
1514 | "min_width": null,
1515 | "object_fit": null,
1516 | "object_position": null,
1517 | "order": null,
1518 | "overflow": null,
1519 | "overflow_x": null,
1520 | "overflow_y": null,
1521 | "padding": null,
1522 | "right": null,
1523 | "top": null,
1524 | "visibility": null,
1525 | "width": null
1526 | }
1527 | },
1528 | "44b6ebe9c3d942d48c867e1425e22bba": {
1529 | "model_module": "@jupyter-widgets/base",
1530 | "model_name": "LayoutModel",
1531 | "model_module_version": "1.2.0",
1532 | "state": {
1533 | "_model_module": "@jupyter-widgets/base",
1534 | "_model_module_version": "1.2.0",
1535 | "_model_name": "LayoutModel",
1536 | "_view_count": null,
1537 | "_view_module": "@jupyter-widgets/base",
1538 | "_view_module_version": "1.2.0",
1539 | "_view_name": "LayoutView",
1540 | "align_content": null,
1541 | "align_items": null,
1542 | "align_self": null,
1543 | "border": null,
1544 | "bottom": null,
1545 | "display": null,
1546 | "flex": null,
1547 | "flex_flow": null,
1548 | "grid_area": null,
1549 | "grid_auto_columns": null,
1550 | "grid_auto_flow": null,
1551 | "grid_auto_rows": null,
1552 | "grid_column": null,
1553 | "grid_gap": null,
1554 | "grid_row": null,
1555 | "grid_template_areas": null,
1556 | "grid_template_columns": null,
1557 | "grid_template_rows": null,
1558 | "height": null,
1559 | "justify_content": null,
1560 | "justify_items": null,
1561 | "left": null,
1562 | "margin": null,
1563 | "max_height": null,
1564 | "max_width": null,
1565 | "min_height": null,
1566 | "min_width": null,
1567 | "object_fit": null,
1568 | "object_position": null,
1569 | "order": null,
1570 | "overflow": null,
1571 | "overflow_x": null,
1572 | "overflow_y": null,
1573 | "padding": null,
1574 | "right": null,
1575 | "top": null,
1576 | "visibility": null,
1577 | "width": null
1578 | }
1579 | },
1580 | "ee40d822c7a44a77ac0dd1ddb403d44a": {
1581 | "model_module": "@jupyter-widgets/controls",
1582 | "model_name": "DescriptionStyleModel",
1583 | "model_module_version": "1.5.0",
1584 | "state": {
1585 | "_model_module": "@jupyter-widgets/controls",
1586 | "_model_module_version": "1.5.0",
1587 | "_model_name": "DescriptionStyleModel",
1588 | "_view_count": null,
1589 | "_view_module": "@jupyter-widgets/base",
1590 | "_view_module_version": "1.2.0",
1591 | "_view_name": "StyleView",
1592 | "description_width": ""
1593 | }
1594 | },
1595 | "cf3a649bbddd4da691341f967e93f377": {
1596 | "model_module": "@jupyter-widgets/base",
1597 | "model_name": "LayoutModel",
1598 | "model_module_version": "1.2.0",
1599 | "state": {
1600 | "_model_module": "@jupyter-widgets/base",
1601 | "_model_module_version": "1.2.0",
1602 | "_model_name": "LayoutModel",
1603 | "_view_count": null,
1604 | "_view_module": "@jupyter-widgets/base",
1605 | "_view_module_version": "1.2.0",
1606 | "_view_name": "LayoutView",
1607 | "align_content": null,
1608 | "align_items": null,
1609 | "align_self": null,
1610 | "border": null,
1611 | "bottom": null,
1612 | "display": null,
1613 | "flex": null,
1614 | "flex_flow": null,
1615 | "grid_area": null,
1616 | "grid_auto_columns": null,
1617 | "grid_auto_flow": null,
1618 | "grid_auto_rows": null,
1619 | "grid_column": null,
1620 | "grid_gap": null,
1621 | "grid_row": null,
1622 | "grid_template_areas": null,
1623 | "grid_template_columns": null,
1624 | "grid_template_rows": null,
1625 | "height": null,
1626 | "justify_content": null,
1627 | "justify_items": null,
1628 | "left": null,
1629 | "margin": null,
1630 | "max_height": null,
1631 | "max_width": null,
1632 | "min_height": null,
1633 | "min_width": null,
1634 | "object_fit": null,
1635 | "object_position": null,
1636 | "order": null,
1637 | "overflow": null,
1638 | "overflow_x": null,
1639 | "overflow_y": null,
1640 | "padding": null,
1641 | "right": null,
1642 | "top": null,
1643 | "visibility": null,
1644 | "width": null
1645 | }
1646 | },
1647 | "e979f667255f432abe04bfebc2ff3418": {
1648 | "model_module": "@jupyter-widgets/controls",
1649 | "model_name": "ProgressStyleModel",
1650 | "model_module_version": "1.5.0",
1651 | "state": {
1652 | "_model_module": "@jupyter-widgets/controls",
1653 | "_model_module_version": "1.5.0",
1654 | "_model_name": "ProgressStyleModel",
1655 | "_view_count": null,
1656 | "_view_module": "@jupyter-widgets/base",
1657 | "_view_module_version": "1.2.0",
1658 | "_view_name": "StyleView",
1659 | "bar_color": null,
1660 | "description_width": ""
1661 | }
1662 | },
1663 | "1582c2d1fd674f4986798272a169bba2": {
1664 | "model_module": "@jupyter-widgets/base",
1665 | "model_name": "LayoutModel",
1666 | "model_module_version": "1.2.0",
1667 | "state": {
1668 | "_model_module": "@jupyter-widgets/base",
1669 | "_model_module_version": "1.2.0",
1670 | "_model_name": "LayoutModel",
1671 | "_view_count": null,
1672 | "_view_module": "@jupyter-widgets/base",
1673 | "_view_module_version": "1.2.0",
1674 | "_view_name": "LayoutView",
1675 | "align_content": null,
1676 | "align_items": null,
1677 | "align_self": null,
1678 | "border": null,
1679 | "bottom": null,
1680 | "display": null,
1681 | "flex": null,
1682 | "flex_flow": null,
1683 | "grid_area": null,
1684 | "grid_auto_columns": null,
1685 | "grid_auto_flow": null,
1686 | "grid_auto_rows": null,
1687 | "grid_column": null,
1688 | "grid_gap": null,
1689 | "grid_row": null,
1690 | "grid_template_areas": null,
1691 | "grid_template_columns": null,
1692 | "grid_template_rows": null,
1693 | "height": null,
1694 | "justify_content": null,
1695 | "justify_items": null,
1696 | "left": null,
1697 | "margin": null,
1698 | "max_height": null,
1699 | "max_width": null,
1700 | "min_height": null,
1701 | "min_width": null,
1702 | "object_fit": null,
1703 | "object_position": null,
1704 | "order": null,
1705 | "overflow": null,
1706 | "overflow_x": null,
1707 | "overflow_y": null,
1708 | "padding": null,
1709 | "right": null,
1710 | "top": null,
1711 | "visibility": null,
1712 | "width": null
1713 | }
1714 | },
1715 | "143ea22cea6d4530947ce1986a4c61ac": {
1716 | "model_module": "@jupyter-widgets/controls",
1717 | "model_name": "DescriptionStyleModel",
1718 | "model_module_version": "1.5.0",
1719 | "state": {
1720 | "_model_module": "@jupyter-widgets/controls",
1721 | "_model_module_version": "1.5.0",
1722 | "_model_name": "DescriptionStyleModel",
1723 | "_view_count": null,
1724 | "_view_module": "@jupyter-widgets/base",
1725 | "_view_module_version": "1.2.0",
1726 | "_view_name": "StyleView",
1727 | "description_width": ""
1728 | }
1729 | }
1730 | }
1731 | }
1732 | },
1733 | "cells": [
1734 | {
1735 | "cell_type": "markdown",
1736 | "metadata": {
1737 | "id": "view-in-github",
1738 | "colab_type": "text"
1739 | },
1740 | "source": [
1741 | "
"
1742 | ]
1743 | },
1744 | {
1745 | "cell_type": "code",
1746 | "execution_count": 1,
1747 | "metadata": {
1748 | "colab": {
1749 | "base_uri": "https://localhost:8080/"
1750 | },
1751 | "id": "NKvtUsBuTpDm",
1752 | "outputId": "dbc07c80-f8b6-4007-bc25-281db76bac72"
1753 | },
1754 | "outputs": [
1755 | {
1756 | "output_type": "stream",
1757 | "name": "stdout",
1758 | "text": [
1759 | "Requirement already satisfied: transformers in /usr/local/lib/python3.10/dist-packages (4.35.2)\n",
1760 | "Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers) (3.13.1)\n",
1761 | "Requirement already satisfied: huggingface-hub<1.0,>=0.16.4 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.19.4)\n",
1762 | "Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (1.23.5)\n",
1763 | "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from transformers) (23.2)\n",
1764 | "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (6.0.1)\n",
1765 | "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers) (2023.6.3)\n",
1766 | "Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers) (2.31.0)\n",
1767 | "Requirement already satisfied: tokenizers<0.19,>=0.14 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.15.0)\n",
1768 | "Requirement already satisfied: safetensors>=0.3.1 in /usr/local/lib/python3.10/dist-packages (from transformers) (0.4.1)\n",
1769 | "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers) (4.66.1)\n",
1770 | "Requirement already satisfied: fsspec>=2023.5.0 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.16.4->transformers) (2023.6.0)\n",
1771 | "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.10/dist-packages (from huggingface-hub<1.0,>=0.16.4->transformers) (4.5.0)\n",
1772 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.3.2)\n",
1773 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (3.6)\n",
1774 | "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2.0.7)\n",
1775 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers) (2023.11.17)\n"
1776 | ]
1777 | }
1778 | ],
1779 | "source": [
1780 | "!pip install transformers"
1781 | ]
1782 | },
1783 | {
1784 | "cell_type": "code",
1785 | "source": [
1786 | "import torch\n",
1787 | "from transformers import AutoTokenizer, AutoModelForCausalLM\n",
1788 | "\n",
1789 | "# トークナイザーとモデルの準備\n",
1790 | "tokenizer = AutoTokenizer.from_pretrained(\n",
1791 | " \"abeja/gpt-neox-japanese-2.7b\",\n",
1792 | ")\n",
1793 | "model = AutoModelForCausalLM.from_pretrained(\n",
1794 | " \"abeja/gpt-neox-japanese-2.7b\"\n",
1795 | ")"
1796 | ],
1797 | "metadata": {
1798 | "colab": {
1799 | "base_uri": "https://localhost:8080/",
1800 | "height": 177,
1801 | "referenced_widgets": [
1802 | "7e2fa68518be449f80fd97746305eab0",
1803 | "9c9a672fc7b6408c8f36eb48be24257f",
1804 | "a1ac3cff06d3416db0b8c9dd9edf48f5",
1805 | "f1a3e3ef130348d0877668ad50382a78",
1806 | "19de596f8f60498c908b3adf9d34cd41",
1807 | "04d35b3697a44e58a57ead48479a1a73",
1808 | "c14afb7238044f25ac888f750979ab26",
1809 | "3853ef77d1eb43aeb44cdb44b79b79c3",
1810 | "97f29a72eda4407b880c17da8a4c54e3",
1811 | "48f2ee42333243978c5f21f68c242631",
1812 | "ef0c0287d1674492a2bd79e38a32f639",
1813 | "7aabeac5f94d45938c70d85d5f6030e2",
1814 | "6cb27ec0214d4fe38cae4d09ea5146ee",
1815 | "d5547266057f45a8b9ae0b88d553fc1f",
1816 | "be5c4d04beeb43a2b2da1f7aa1fd3c3d",
1817 | "80d12bc12f194e8aaee9273af820d2a7",
1818 | "915790952c444556b003d7295d1c5ccb",
1819 | "b26c3ac3150c46c6853f08a40c88e234",
1820 | "4133abb3166e49fa9d1f851d97d15741",
1821 | "183c7fbb1b464b7fbdc37f1042def7df",
1822 | "2f7a0c6355ef48669b8889a0cdc86d30",
1823 | "60198bd7296549bebfa02d030e153098",
1824 | "3b2bd3bee32244a9a1598646e3c2ac96",
1825 | "d66e2e1d288944ed865c87cd0765ed9d",
1826 | "85116ccbf27442e5a8a94e03c4c1614a",
1827 | "194e7bb3f0b1470db38fca767afa4335",
1828 | "c25c400d86b04945a5e3590987aa903a",
1829 | "849bdeb13bce4cba820b1723735d86bd",
1830 | "8bb3faed90ef4db48f1506c026b0e4a3",
1831 | "a8fe0cc767cb435ca760cf5ca4967739",
1832 | "1041fabd9fc444499a432a7925437fda",
1833 | "b539de3135df48a7802eefd75e08840d",
1834 | "caa9da2f3fef4c2b80a7ec7a4c169016",
1835 | "517fb97311cd469288edb7165b504911",
1836 | "e6ab54cafbb54107b1052b9879121e3d",
1837 | "94a63d2944214eca97079a9bb1bea55d",
1838 | "30cfb45cf5404cc1aaf45515c0958d8a",
1839 | "d4bbc6623727407589577b3e7122657e",
1840 | "9ace909a29fd45b2bb3aa647d75e3965",
1841 | "8c2d8871cd2f47eabd231a7cc5ed339a",
1842 | "83579b1e91eb4385b8ddcfda2c717473",
1843 | "cf039b27f8a74b1497ed51b0deababef",
1844 | "e8797cf313af43b3958e502fcd2f817b",
1845 | "113aca6991c142de8e28737239af0803",
1846 | "b62471ed416e41c4b9ca16b3fe6fee35",
1847 | "a3319fd213ed4c66b2aef3236904b578",
1848 | "a262135eff97435c938cd61b3b576e55",
1849 | "8b74ab4a9d824881ba1b17cdeb7bf75c",
1850 | "d19cbadaa6f648098b30fd7cab88143b",
1851 | "44b6ebe9c3d942d48c867e1425e22bba",
1852 | "ee40d822c7a44a77ac0dd1ddb403d44a",
1853 | "cf3a649bbddd4da691341f967e93f377",
1854 | "e979f667255f432abe04bfebc2ff3418",
1855 | "1582c2d1fd674f4986798272a169bba2",
1856 | "143ea22cea6d4530947ce1986a4c61ac"
1857 | ]
1858 | },
1859 | "id": "uNP4yIYxTunE",
1860 | "outputId": "1d677e19-fbdf-43b6-9d07-d2e54bb19377"
1861 | },
1862 | "execution_count": 2,
1863 | "outputs": [
1864 | {
1865 | "output_type": "display_data",
1866 | "data": {
1867 | "text/plain": [
1868 | "tokenizer_config.json: 0%| | 0.00/168 [00:00, ?B/s]"
1869 | ],
1870 | "application/vnd.jupyter.widget-view+json": {
1871 | "version_major": 2,
1872 | "version_minor": 0,
1873 | "model_id": "7e2fa68518be449f80fd97746305eab0"
1874 | }
1875 | },
1876 | "metadata": {}
1877 | },
1878 | {
1879 | "output_type": "display_data",
1880 | "data": {
1881 | "text/plain": [
1882 | "config.json: 0%| | 0.00/602 [00:00, ?B/s]"
1883 | ],
1884 | "application/vnd.jupyter.widget-view+json": {
1885 | "version_major": 2,
1886 | "version_minor": 0,
1887 | "model_id": "7aabeac5f94d45938c70d85d5f6030e2"
1888 | }
1889 | },
1890 | "metadata": {}
1891 | },
1892 | {
1893 | "output_type": "display_data",
1894 | "data": {
1895 | "text/plain": [
1896 | "vocab.txt: 0%| | 0.00/1.20M [00:00, ?B/s]"
1897 | ],
1898 | "application/vnd.jupyter.widget-view+json": {
1899 | "version_major": 2,
1900 | "version_minor": 0,
1901 | "model_id": "3b2bd3bee32244a9a1598646e3c2ac96"
1902 | }
1903 | },
1904 | "metadata": {}
1905 | },
1906 | {
1907 | "output_type": "display_data",
1908 | "data": {
1909 | "text/plain": [
1910 | "emoji.json: 0%| | 0.00/189k [00:00, ?B/s]"
1911 | ],
1912 | "application/vnd.jupyter.widget-view+json": {
1913 | "version_major": 2,
1914 | "version_minor": 0,
1915 | "model_id": "517fb97311cd469288edb7165b504911"
1916 | }
1917 | },
1918 | "metadata": {}
1919 | },
1920 | {
1921 | "output_type": "display_data",
1922 | "data": {
1923 | "text/plain": [
1924 | "pytorch_model.bin: 0%| | 0.00/5.36G [00:00, ?B/s]"
1925 | ],
1926 | "application/vnd.jupyter.widget-view+json": {
1927 | "version_major": 2,
1928 | "version_minor": 0,
1929 | "model_id": "b62471ed416e41c4b9ca16b3fe6fee35"
1930 | }
1931 | },
1932 | "metadata": {}
1933 | }
1934 | ]
1935 | },
1936 | {
1937 | "cell_type": "code",
1938 | "source": [
1939 | "input_text = \"人とAIが協調するためには、\""
1940 | ],
1941 | "metadata": {
1942 | "id": "c7HVkDNXTvvJ"
1943 | },
1944 | "execution_count": 3,
1945 | "outputs": []
1946 | },
1947 | {
1948 | "cell_type": "code",
1949 | "source": [
1950 | "input_ids = tokenizer.encode(input_text, return_tensors=\"pt\")\n",
1951 | "gen_tokens = model.generate(\n",
1952 | " input_ids,\n",
1953 | " max_length=100,\n",
1954 | " do_sample=True,\n",
1955 | " num_return_sequences=1,\n",
1956 | " top_p=0.95,\n",
1957 | " top_k=50,\n",
1958 | ")"
1959 | ],
1960 | "metadata": {
1961 | "colab": {
1962 | "base_uri": "https://localhost:8080/"
1963 | },
1964 | "id": "LilGJ6MKTw_0",
1965 | "outputId": "ef0150fb-d588-4504-a7f1-437428e49617"
1966 | },
1967 | "execution_count": 4,
1968 | "outputs": [
1969 | {
1970 | "output_type": "stream",
1971 | "name": "stderr",
1972 | "text": [
1973 | "The attention mask and the pad token id were not set. As a consequence, you may observe unexpected behavior. Please pass your input's `attention_mask` to obtain reliable results.\n",
1974 | "Setting `pad_token_id` to `eos_token_id`:31999 for open-end generation.\n"
1975 | ]
1976 | }
1977 | ]
1978 | },
1979 | {
1980 | "cell_type": "code",
1981 | "source": [
1982 | "for gen_text in tokenizer.batch_decode(gen_tokens, skip_special_tokens=True):\n",
1983 | " print(gen_text)"
1984 | ],
1985 | "metadata": {
1986 | "colab": {
1987 | "base_uri": "https://localhost:8080/"
1988 | },
1989 | "id": "qRbV6VvGTye5",
1990 | "outputId": "83828469-27f4-49e6-c31a-3d99ac9998b8"
1991 | },
1992 | "execution_count": 5,
1993 | "outputs": [
1994 | {
1995 | "output_type": "stream",
1996 | "name": "stdout",
1997 | "text": [
1998 | "人とAIが協調するためには、どんな方法があるのか。その答えを出すための手段として、ディープラーニング技術と人工知能の技術を掛け合わせ、人間の脳に機械学習を応用させる技術を開発中だ。\n"
1999 | ]
2000 | }
2001 | ]
2002 | },
2003 | {
2004 | "cell_type": "code",
2005 | "source": [],
2006 | "metadata": {
2007 | "id": "sD87wn2ITzhj"
2008 | },
2009 | "execution_count": null,
2010 | "outputs": []
2011 | }
2012 | ]
2013 | }
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # generative_ai_notebooks
2 |
3 | [ABEJAアドベントカレンダー2023 21日目の記事](https://tech-blog.abeja.asia/entry/advent-2023-day21)で紹介したコードです。様々な生成AIをColabで試してみたサンプルとなります。一部、T4のハイメモリや、A100が必要になります。
4 |
5 | Here are samples of trying out various generative AIs on Colab. Some require a high-memory runtime, such as a T4 (high-RAM) or an A100.
6 |
7 | | ジャンル | カテゴリ | モデル/手法 |
8 | | --- | --- | --- |
9 | | 言語 | 生成 | Llama2 |
10 | | | | Vicuna 7B |
11 | | | | Mistral 7B |
12 | | | | Mixtral 8x7B |
13 | | | 日本語LLM | ABEJA LLM 2.7B |
14 | | | | LLM-jp-13B |
15 | | | | CALM2 7B |
16 | | | | JapaneseStableLM Gamma 7B |
17 | | | | PLaMo 13B Instruct |
18 | | | | Swallow 13B Instruct |
19 | | | 高速化 | llama.cpp |
20 | | | | AutoGPTQ |
21 | | | LongContext | SelfExtend |
22 | | | Finetuning | QLoRA |
23 | | | | SFT |
24 | | | | PPO |
25 | | | | DPO |
26 | | | コード生成 | StableCode 3B |
27 | | | | CodeLlama |
28 | | 画像 | 生成 | StableDiffusion |
29 | | | | WaifuDiffusion |
30 | | | | SDXL |
31 | | | | Emi |
32 | | | | MangaDiffusion |
33 | | | Finetuning | LoRA |
34 | | | | DreamBooth |
35 | | | 制御 | ControlNet |
36 | | | 高速化 | LCM-LoRA |
37 | | | | SDXL-Turbo |
38 | | | 動画 | Stable Video Diffusion |
39 | | | | AnimateDiff |
40 | | | | MagicAnimate |
41 | | | | CartoonSegmentation (3D Ken Burns) |
42 | | | | DreamTalk |
43 | | 音声 | 変換 | RVC |
44 | | | TTS | VALL-E-X |
45 | | | | Style_Bert_VITS2 |
46 | | | 翻訳 | SeamlessM4T |
47 | | | 音楽 | AudioCraft |
48 | | | | AudioSep |
49 | | マルチモーダル | 画像/言語 | Japanese Instruct BLIP |
50 | | | | LLaVA-1.5 |
51 | | | | JapaneseStable VLM |
52 |
--------------------------------------------------------------------------------
/Vision/12_01_StableDiffusion_DreamBooth_Train.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "provenance": [],
7 | "machine_shape": "hm",
8 | "gpuType": "T4",
9 | "authorship_tag": "ABX9TyMS4inSHZFFSgvN3W3fMOf1",
10 | "include_colab_link": true
11 | },
12 | "kernelspec": {
13 | "name": "python3",
14 | "display_name": "Python 3"
15 | },
16 | "language_info": {
17 | "name": "python"
18 | },
19 | "accelerator": "GPU"
20 | },
21 | "cells": [
22 | {
23 | "cell_type": "markdown",
24 | "metadata": {
25 | "id": "view-in-github",
26 | "colab_type": "text"
27 | },
28 | "source": [
29 | "
"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": 1,
35 | "metadata": {
36 | "colab": {
37 | "base_uri": "https://localhost:8080/"
38 | },
39 | "id": "JlCuBUApQa-7",
40 | "outputId": "2fa10acb-0469-4e68-ce0e-97d1f23c3efa"
41 | },
42 | "outputs": [
43 | {
44 | "output_type": "stream",
45 | "name": "stdout",
46 | "text": [
47 | "Mounted at /content/drive\n"
48 | ]
49 | }
50 | ],
51 | "source": [
52 | "# Googleドライブのマウント\n",
53 | "from google.colab import drive\n",
54 | "drive.mount(\"/content/drive\")"
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "source": [
60 | "!git clone https://github.com/kohya-ss/sd-scripts\n",
61 | "%cd sd-scripts"
62 | ],
63 | "metadata": {
64 | "colab": {
65 | "base_uri": "https://localhost:8080/"
66 | },
67 | "id": "QD8tvKa7Qggq",
68 | "outputId": "941efd0b-f4f8-42d8-f03c-c3f8566efabf"
69 | },
70 | "execution_count": 2,
71 | "outputs": [
72 | {
73 | "output_type": "stream",
74 | "name": "stdout",
75 | "text": [
76 | "Cloning into 'sd-scripts'...\n",
77 | "remote: Enumerating objects: 4954, done.\u001b[K\n",
78 | "remote: Counting objects: 100% (2460/2460), done.\u001b[K\n",
79 | "remote: Compressing objects: 100% (319/319), done.\u001b[K\n",
80 | "remote: Total 4954 (delta 2285), reused 2202 (delta 2140), pack-reused 2494\u001b[K\n",
81 | "Receiving objects: 100% (4954/4954), 8.58 MiB | 8.67 MiB/s, done.\n",
82 | "Resolving deltas: 100% (3499/3499), done.\n",
83 | "/content/sd-scripts\n"
84 | ]
85 | }
86 | ]
87 | },
88 | {
89 | "cell_type": "code",
90 | "source": [
91 | "!pip install -r requirements.txt\n",
92 | "!pip install --upgrade protobuf\n",
93 | "!pip install xformers==0.0.22 triton bitsandbytes"
94 | ],
95 | "metadata": {
96 | "colab": {
97 | "base_uri": "https://localhost:8080/",
98 | "height": 1000
99 | },
100 | "id": "fMwHk4hAQiPR",
101 | "outputId": "064198a0-3bdf-45bb-b2b0-d457aae279f3"
102 | },
103 | "execution_count": 3,
104 | "outputs": [
105 | {
106 | "output_type": "stream",
107 | "name": "stdout",
108 | "text": [
109 | "Obtaining file:///content/sd-scripts (from -r requirements.txt (line 33))\n",
110 | " Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
111 | "Collecting accelerate==0.23.0 (from -r requirements.txt (line 1))\n",
112 | " Downloading accelerate-0.23.0-py3-none-any.whl (258 kB)\n",
113 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m258.1/258.1 kB\u001b[0m \u001b[31m4.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
114 | "\u001b[?25hCollecting transformers==4.30.2 (from -r requirements.txt (line 2))\n",
115 | " Downloading transformers-4.30.2-py3-none-any.whl (7.2 MB)\n",
116 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.2/7.2 MB\u001b[0m \u001b[31m18.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
117 | "\u001b[?25hCollecting diffusers[torch]==0.21.2 (from -r requirements.txt (line 3))\n",
118 | " Downloading diffusers-0.21.2.tar.gz (1.1 MB)\n",
119 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m34.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
120 | "\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
121 | " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
122 | " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
123 | "Collecting ftfy==6.1.1 (from -r requirements.txt (line 4))\n",
124 | " Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n",
125 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.1/53.1 kB\u001b[0m \u001b[31m7.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
126 | "\u001b[?25hCollecting opencv-python==4.7.0.68 (from -r requirements.txt (line 6))\n",
127 | " Downloading opencv_python-4.7.0.68-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (61.8 MB)\n",
128 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m61.8/61.8 MB\u001b[0m \u001b[31m27.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
129 | "\u001b[?25hCollecting einops==0.6.0 (from -r requirements.txt (line 7))\n",
130 | " Downloading einops-0.6.0-py3-none-any.whl (41 kB)\n",
131 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.6/41.6 kB\u001b[0m \u001b[31m5.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
132 | "\u001b[?25hCollecting pytorch-lightning==1.9.0 (from -r requirements.txt (line 8))\n",
133 | " Downloading pytorch_lightning-1.9.0-py3-none-any.whl (825 kB)\n",
134 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m825.8/825.8 kB\u001b[0m \u001b[31m71.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
135 | "\u001b[?25hCollecting tensorboard==2.10.1 (from -r requirements.txt (line 10))\n",
136 | " Downloading tensorboard-2.10.1-py3-none-any.whl (5.9 MB)\n",
137 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.9/5.9 MB\u001b[0m \u001b[31m108.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
138 | "\u001b[?25hCollecting safetensors==0.3.1 (from -r requirements.txt (line 11))\n",
139 | " Downloading safetensors-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
140 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m77.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
141 | "\u001b[?25hRequirement already satisfied: altair==4.2.2 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 13)) (4.2.2)\n",
142 | "Collecting easygui==0.98.3 (from -r requirements.txt (line 14))\n",
143 | " Downloading easygui-0.98.3-py2.py3-none-any.whl (92 kB)\n",
144 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.7/92.7 kB\u001b[0m \u001b[31m13.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
145 | "\u001b[?25hRequirement already satisfied: toml==0.10.2 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 15)) (0.10.2)\n",
146 | "Collecting voluptuous==0.13.1 (from -r requirements.txt (line 16))\n",
147 | " Downloading voluptuous-0.13.1-py3-none-any.whl (29 kB)\n",
148 | "Collecting huggingface-hub==0.15.1 (from -r requirements.txt (line 17))\n",
149 | " Downloading huggingface_hub-0.15.1-py3-none-any.whl (236 kB)\n",
150 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m236.8/236.8 kB\u001b[0m \u001b[31m29.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
151 | "\u001b[?25hCollecting open-clip-torch==2.20.0 (from -r requirements.txt (line 31))\n",
152 | " Downloading open_clip_torch-2.20.0-py3-none-any.whl (1.5 MB)\n",
153 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.5/1.5 MB\u001b[0m \u001b[31m82.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
154 | "\u001b[?25hRequirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.10/dist-packages (from accelerate==0.23.0->-r requirements.txt (line 1)) (1.23.5)\n",
155 | "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from accelerate==0.23.0->-r requirements.txt (line 1)) (23.2)\n",
156 | "Requirement already satisfied: psutil in /usr/local/lib/python3.10/dist-packages (from accelerate==0.23.0->-r requirements.txt (line 1)) (5.9.5)\n",
157 | "Requirement already satisfied: pyyaml in /usr/local/lib/python3.10/dist-packages (from accelerate==0.23.0->-r requirements.txt (line 1)) (6.0.1)\n",
158 | "Requirement already satisfied: torch>=1.10.0 in /usr/local/lib/python3.10/dist-packages (from accelerate==0.23.0->-r requirements.txt (line 1)) (2.1.0+cu121)\n",
159 | "Requirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from transformers==4.30.2->-r requirements.txt (line 2)) (3.13.1)\n",
160 | "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.10/dist-packages (from transformers==4.30.2->-r requirements.txt (line 2)) (2023.6.3)\n",
161 | "Requirement already satisfied: requests in /usr/local/lib/python3.10/dist-packages (from transformers==4.30.2->-r requirements.txt (line 2)) (2.31.0)\n",
162 | "Collecting tokenizers!=0.11.3,<0.14,>=0.11.1 (from transformers==4.30.2->-r requirements.txt (line 2))\n",
163 | " Downloading tokenizers-0.13.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (7.8 MB)\n",
164 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.8/7.8 MB\u001b[0m \u001b[31m107.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
165 | "\u001b[?25hRequirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.10/dist-packages (from transformers==4.30.2->-r requirements.txt (line 2)) (4.66.1)\n",
166 | "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.10/dist-packages (from diffusers[torch]==0.21.2->-r requirements.txt (line 3)) (7.0.0)\n",
167 | "Requirement already satisfied: Pillow in /usr/local/lib/python3.10/dist-packages (from diffusers[torch]==0.21.2->-r requirements.txt (line 3)) (9.4.0)\n",
168 | "Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.10/dist-packages (from ftfy==6.1.1->-r requirements.txt (line 4)) (0.2.12)\n",
169 | "Requirement already satisfied: fsspec[http]>2021.06.0 in /usr/local/lib/python3.10/dist-packages (from pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (2023.6.0)\n",
170 | "Collecting torchmetrics>=0.7.0 (from pytorch-lightning==1.9.0->-r requirements.txt (line 8))\n",
171 | " Downloading torchmetrics-1.2.1-py3-none-any.whl (806 kB)\n",
172 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m806.1/806.1 kB\u001b[0m \u001b[31m68.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
173 | "\u001b[?25hRequirement already satisfied: typing-extensions>=4.0.0 in /usr/local/lib/python3.10/dist-packages (from pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (4.5.0)\n",
174 | "Collecting lightning-utilities>=0.4.2 (from pytorch-lightning==1.9.0->-r requirements.txt (line 8))\n",
175 | " Downloading lightning_utilities-0.10.0-py3-none-any.whl (24 kB)\n",
176 | "Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (1.4.0)\n",
177 | "Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (1.60.0)\n",
178 | "Requirement already satisfied: google-auth<3,>=1.6.3 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (2.17.3)\n",
179 | "Collecting google-auth-oauthlib<0.5,>=0.4.1 (from tensorboard==2.10.1->-r requirements.txt (line 10))\n",
180 | " Downloading google_auth_oauthlib-0.4.6-py2.py3-none-any.whl (18 kB)\n",
181 | "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (3.5.1)\n",
182 | "Collecting protobuf<3.20,>=3.9.2 (from tensorboard==2.10.1->-r requirements.txt (line 10))\n",
183 | " Downloading protobuf-3.19.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.1 MB)\n",
184 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.1/1.1 MB\u001b[0m \u001b[31m73.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
185 | "\u001b[?25hRequirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (67.7.2)\n",
186 | "Collecting tensorboard-data-server<0.7.0,>=0.6.0 (from tensorboard==2.10.1->-r requirements.txt (line 10))\n",
187 | " Downloading tensorboard_data_server-0.6.1-py3-none-manylinux2010_x86_64.whl (4.9 MB)\n",
188 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.9/4.9 MB\u001b[0m \u001b[31m112.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
189 | "\u001b[?25hCollecting tensorboard-plugin-wit>=1.6.0 (from tensorboard==2.10.1->-r requirements.txt (line 10))\n",
190 | " Downloading tensorboard_plugin_wit-1.8.1-py3-none-any.whl (781 kB)\n",
191 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m781.3/781.3 kB\u001b[0m \u001b[31m68.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
192 | "\u001b[?25hRequirement already satisfied: werkzeug>=1.0.1 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (3.0.1)\n",
193 | "Requirement already satisfied: wheel>=0.26 in /usr/local/lib/python3.10/dist-packages (from tensorboard==2.10.1->-r requirements.txt (line 10)) (0.42.0)\n",
194 | "Requirement already satisfied: entrypoints in /usr/local/lib/python3.10/dist-packages (from altair==4.2.2->-r requirements.txt (line 13)) (0.4)\n",
195 | "Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from altair==4.2.2->-r requirements.txt (line 13)) (3.1.2)\n",
196 | "Requirement already satisfied: jsonschema>=3.0 in /usr/local/lib/python3.10/dist-packages (from altair==4.2.2->-r requirements.txt (line 13)) (4.19.2)\n",
197 | "Requirement already satisfied: pandas>=0.18 in /usr/local/lib/python3.10/dist-packages (from altair==4.2.2->-r requirements.txt (line 13)) (1.5.3)\n",
198 | "Requirement already satisfied: toolz in /usr/local/lib/python3.10/dist-packages (from altair==4.2.2->-r requirements.txt (line 13)) (0.12.0)\n",
199 | "Requirement already satisfied: torchvision in /usr/local/lib/python3.10/dist-packages (from open-clip-torch==2.20.0->-r requirements.txt (line 31)) (0.16.0+cu121)\n",
200 | "Collecting sentencepiece (from open-clip-torch==2.20.0->-r requirements.txt (line 31))\n",
201 | " Downloading sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
202 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m77.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
203 | "\u001b[?25hCollecting timm (from open-clip-torch==2.20.0->-r requirements.txt (line 31))\n",
204 | " Downloading timm-0.9.12-py3-none-any.whl (2.2 MB)\n",
205 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.2/2.2 MB\u001b[0m \u001b[31m98.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
206 | "\u001b[?25hRequirement already satisfied: aiohttp!=4.0.0a0,!=4.0.0a1 in /usr/local/lib/python3.10/dist-packages (from fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (3.9.1)\n",
207 | "Requirement already satisfied: cachetools<6.0,>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.10.1->-r requirements.txt (line 10)) (5.3.2)\n",
208 | "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.10.1->-r requirements.txt (line 10)) (0.3.0)\n",
209 | "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.10.1->-r requirements.txt (line 10)) (1.16.0)\n",
210 | "Requirement already satisfied: rsa<5,>=3.1.4 in /usr/local/lib/python3.10/dist-packages (from google-auth<3,>=1.6.3->tensorboard==2.10.1->-r requirements.txt (line 10)) (4.9)\n",
211 | "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.10/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard==2.10.1->-r requirements.txt (line 10)) (1.3.1)\n",
212 | "Requirement already satisfied: attrs>=22.2.0 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=3.0->altair==4.2.2->-r requirements.txt (line 13)) (23.1.0)\n",
213 | "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=3.0->altair==4.2.2->-r requirements.txt (line 13)) (2023.11.2)\n",
214 | "Requirement already satisfied: referencing>=0.28.4 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=3.0->altair==4.2.2->-r requirements.txt (line 13)) (0.32.0)\n",
215 | "Requirement already satisfied: rpds-py>=0.7.1 in /usr/local/lib/python3.10/dist-packages (from jsonschema>=3.0->altair==4.2.2->-r requirements.txt (line 13)) (0.13.2)\n",
216 | "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.18->altair==4.2.2->-r requirements.txt (line 13)) (2.8.2)\n",
217 | "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.18->altair==4.2.2->-r requirements.txt (line 13)) (2023.3.post1)\n",
218 | "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.30.2->-r requirements.txt (line 2)) (3.3.2)\n",
219 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.30.2->-r requirements.txt (line 2)) (3.6)\n",
220 | "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.30.2->-r requirements.txt (line 2)) (2.0.7)\n",
221 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests->transformers==4.30.2->-r requirements.txt (line 2)) (2023.11.17)\n",
222 | "Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate==0.23.0->-r requirements.txt (line 1)) (1.12)\n",
223 | "Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate==0.23.0->-r requirements.txt (line 1)) (3.2.1)\n",
224 | "Requirement already satisfied: triton==2.1.0 in /usr/local/lib/python3.10/dist-packages (from torch>=1.10.0->accelerate==0.23.0->-r requirements.txt (line 1)) (2.1.0)\n",
225 | "Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from werkzeug>=1.0.1->tensorboard==2.10.1->-r requirements.txt (line 10)) (2.1.3)\n",
226 | "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.10/dist-packages (from importlib-metadata->diffusers[torch]==0.21.2->-r requirements.txt (line 3)) (3.17.0)\n",
227 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (6.0.4)\n",
228 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (1.9.4)\n",
229 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (1.4.0)\n",
230 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (1.3.1)\n",
231 | "Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch-lightning==1.9.0->-r requirements.txt (line 8)) (4.0.3)\n",
232 | "Requirement already satisfied: pyasn1<0.6.0,>=0.4.6 in /usr/local/lib/python3.10/dist-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard==2.10.1->-r requirements.txt (line 10)) (0.5.1)\n",
233 | "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.10/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard==2.10.1->-r requirements.txt (line 10)) (3.2.2)\n",
234 | "Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch>=1.10.0->accelerate==0.23.0->-r requirements.txt (line 1)) (1.3.0)\n",
235 | "Building wheels for collected packages: diffusers\n",
236 | " Building wheel for diffusers (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
237 | " Created wheel for diffusers: filename=diffusers-0.21.2-py3-none-any.whl size=1489250 sha256=a914cf1b400f6b9a266ffb8cc28771a1e9e489bce410d241df951e1e81560c4d\n",
238 | " Stored in directory: /root/.cache/pip/wheels/2e/09/32/11c9e42c397d3f3494226b28ba68c4ad4718a68a65dba14ea6\n",
239 | "Successfully built diffusers\n",
240 | "Installing collected packages: voluptuous, tokenizers, tensorboard-plugin-wit, sentencepiece, safetensors, library, easygui, tensorboard-data-server, protobuf, opencv-python, lightning-utilities, ftfy, einops, huggingface-hub, transformers, torchmetrics, google-auth-oauthlib, diffusers, accelerate, timm, tensorboard, pytorch-lightning, open-clip-torch\n",
241 | " Attempting uninstall: tokenizers\n",
242 | " Found existing installation: tokenizers 0.15.0\n",
243 | " Uninstalling tokenizers-0.15.0:\n",
244 | " Successfully uninstalled tokenizers-0.15.0\n",
245 | " Attempting uninstall: safetensors\n",
246 | " Found existing installation: safetensors 0.4.1\n",
247 | " Uninstalling safetensors-0.4.1:\n",
248 | " Successfully uninstalled safetensors-0.4.1\n",
249 | " Running setup.py develop for library\n",
250 | " Attempting uninstall: tensorboard-data-server\n",
251 | " Found existing installation: tensorboard-data-server 0.7.2\n",
252 | " Uninstalling tensorboard-data-server-0.7.2:\n",
253 | " Successfully uninstalled tensorboard-data-server-0.7.2\n",
254 | " Attempting uninstall: protobuf\n",
255 | " Found existing installation: protobuf 3.20.3\n",
256 | " Uninstalling protobuf-3.20.3:\n",
257 | " Successfully uninstalled protobuf-3.20.3\n",
258 | " Attempting uninstall: opencv-python\n",
259 | " Found existing installation: opencv-python 4.8.0.76\n",
260 | " Uninstalling opencv-python-4.8.0.76:\n",
261 | " Successfully uninstalled opencv-python-4.8.0.76\n",
262 | " Attempting uninstall: huggingface-hub\n",
263 | " Found existing installation: huggingface-hub 0.19.4\n",
264 | " Uninstalling huggingface-hub-0.19.4:\n",
265 | " Successfully uninstalled huggingface-hub-0.19.4\n",
266 | " Attempting uninstall: transformers\n",
267 | " Found existing installation: transformers 4.35.2\n",
268 | " Uninstalling transformers-4.35.2:\n",
269 | " Successfully uninstalled transformers-4.35.2\n",
270 | " Attempting uninstall: google-auth-oauthlib\n",
271 | " Found existing installation: google-auth-oauthlib 1.2.0\n",
272 | " Uninstalling google-auth-oauthlib-1.2.0:\n",
273 | " Successfully uninstalled google-auth-oauthlib-1.2.0\n",
274 | " Attempting uninstall: tensorboard\n",
275 | " Found existing installation: tensorboard 2.15.1\n",
276 | " Uninstalling tensorboard-2.15.1:\n",
277 | " Successfully uninstalled tensorboard-2.15.1\n",
278 | "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
279 | "pandas-gbq 0.19.2 requires google-auth-oauthlib>=0.7.0, but you have google-auth-oauthlib 0.4.6 which is incompatible.\n",
280 | "tensorflow 2.15.0 requires protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3, but you have protobuf 3.19.6 which is incompatible.\n",
281 | "tensorflow 2.15.0 requires tensorboard<2.16,>=2.15, but you have tensorboard 2.10.1 which is incompatible.\n",
282 | "tensorflow-datasets 4.9.3 requires protobuf>=3.20, but you have protobuf 3.19.6 which is incompatible.\n",
283 | "tensorflow-metadata 1.14.0 requires protobuf<4.21,>=3.20.3, but you have protobuf 3.19.6 which is incompatible.\u001b[0m\u001b[31m\n",
284 | "\u001b[0mSuccessfully installed accelerate-0.23.0 diffusers-0.21.2 easygui-0.98.3 einops-0.6.0 ftfy-6.1.1 google-auth-oauthlib-0.4.6 huggingface-hub-0.15.1 library-0.0.0 lightning-utilities-0.10.0 open-clip-torch-2.20.0 opencv-python-4.7.0.68 protobuf-3.19.6 pytorch-lightning-1.9.0 safetensors-0.3.1 sentencepiece-0.1.99 tensorboard-2.10.1 tensorboard-data-server-0.6.1 tensorboard-plugin-wit-1.8.1 timm-0.9.12 tokenizers-0.13.3 torchmetrics-1.2.1 transformers-4.30.2 voluptuous-0.13.1\n"
285 | ]
286 | },
287 | {
288 | "output_type": "display_data",
289 | "data": {
290 | "application/vnd.colab-display-data+json": {
291 | "pip_warning": {
292 | "packages": [
293 | "google"
294 | ]
295 | }
296 | }
297 | },
298 | "metadata": {}
299 | },
300 | {
301 | "output_type": "stream",
302 | "name": "stdout",
303 | "text": [
304 | "Requirement already satisfied: protobuf in /usr/local/lib/python3.10/dist-packages (3.19.6)\n",
305 | "Collecting protobuf\n",
306 | " Downloading protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl (294 kB)\n",
307 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m294.6/294.6 kB\u001b[0m \u001b[31m4.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
308 | "\u001b[?25hInstalling collected packages: protobuf\n",
309 | " Attempting uninstall: protobuf\n",
310 | " Found existing installation: protobuf 3.19.6\n",
311 | " Uninstalling protobuf-3.19.6:\n",
312 | " Successfully uninstalled protobuf-3.19.6\n",
313 | "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
314 | "open-clip-torch 2.20.0 requires protobuf<4, but you have protobuf 4.25.1 which is incompatible.\n",
315 | "pandas-gbq 0.19.2 requires google-auth-oauthlib>=0.7.0, but you have google-auth-oauthlib 0.4.6 which is incompatible.\n",
316 | "tensorboard 2.10.1 requires protobuf<3.20,>=3.9.2, but you have protobuf 4.25.1 which is incompatible.\n",
317 | "tensorflow 2.15.0 requires tensorboard<2.16,>=2.15, but you have tensorboard 2.10.1 which is incompatible.\n",
318 | "tensorflow-metadata 1.14.0 requires protobuf<4.21,>=3.20.3, but you have protobuf 4.25.1 which is incompatible.\u001b[0m\u001b[31m\n",
319 | "\u001b[0mSuccessfully installed protobuf-4.25.1\n",
320 | "Collecting xformers==0.0.22\n",
321 | " Downloading xformers-0.0.22-cp310-cp310-manylinux2014_x86_64.whl (211.6 MB)\n",
322 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m211.6/211.6 MB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
323 | "\u001b[?25hRequirement already satisfied: triton in /usr/local/lib/python3.10/dist-packages (2.1.0)\n",
324 | "Collecting bitsandbytes\n",
325 | " Downloading bitsandbytes-0.41.3.post2-py3-none-any.whl (92.6 MB)\n",
326 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m92.6/92.6 MB\u001b[0m \u001b[31m18.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
327 | "\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from xformers==0.0.22) (1.23.5)\n",
328 | "Collecting torch==2.0.1 (from xformers==0.0.22)\n",
329 | " Downloading torch-2.0.1-cp310-cp310-manylinux1_x86_64.whl (619.9 MB)\n",
330 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m619.9/619.9 MB\u001b[0m \u001b[31m2.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
331 | "\u001b[?25hRequirement already satisfied: filelock in /usr/local/lib/python3.10/dist-packages (from torch==2.0.1->xformers==0.0.22) (3.13.1)\n",
332 | "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.10/dist-packages (from torch==2.0.1->xformers==0.0.22) (4.5.0)\n",
333 | "Requirement already satisfied: sympy in /usr/local/lib/python3.10/dist-packages (from torch==2.0.1->xformers==0.0.22) (1.12)\n",
334 | "Requirement already satisfied: networkx in /usr/local/lib/python3.10/dist-packages (from torch==2.0.1->xformers==0.0.22) (3.2.1)\n",
335 | "Requirement already satisfied: jinja2 in /usr/local/lib/python3.10/dist-packages (from torch==2.0.1->xformers==0.0.22) (3.1.2)\n",
336 | "Collecting nvidia-cuda-nvrtc-cu11==11.7.99 (from torch==2.0.1->xformers==0.0.22)\n",
337 | " Downloading nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl (21.0 MB)\n",
338 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m21.0/21.0 MB\u001b[0m \u001b[31m66.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
339 | "\u001b[?25hCollecting nvidia-cuda-runtime-cu11==11.7.99 (from torch==2.0.1->xformers==0.0.22)\n",
340 | " Downloading nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl (849 kB)\n",
341 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m849.3/849.3 kB\u001b[0m \u001b[31m65.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
342 | "\u001b[?25hCollecting nvidia-cuda-cupti-cu11==11.7.101 (from torch==2.0.1->xformers==0.0.22)\n",
343 | " Downloading nvidia_cuda_cupti_cu11-11.7.101-py3-none-manylinux1_x86_64.whl (11.8 MB)\n",
344 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m11.8/11.8 MB\u001b[0m \u001b[31m100.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
345 | "\u001b[?25hCollecting nvidia-cudnn-cu11==8.5.0.96 (from torch==2.0.1->xformers==0.0.22)\n",
346 | " Downloading nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl (557.1 MB)\n",
347 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m557.1/557.1 MB\u001b[0m \u001b[31m2.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
348 | "\u001b[?25hCollecting nvidia-cublas-cu11==11.10.3.66 (from torch==2.0.1->xformers==0.0.22)\n",
349 | " Downloading nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl (317.1 MB)\n",
350 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m317.1/317.1 MB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
351 | "\u001b[?25hCollecting nvidia-cufft-cu11==10.9.0.58 (from torch==2.0.1->xformers==0.0.22)\n",
352 | " Downloading nvidia_cufft_cu11-10.9.0.58-py3-none-manylinux1_x86_64.whl (168.4 MB)\n",
353 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m168.4/168.4 MB\u001b[0m \u001b[31m9.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
354 | "\u001b[?25hCollecting nvidia-curand-cu11==10.2.10.91 (from torch==2.0.1->xformers==0.0.22)\n",
355 | " Downloading nvidia_curand_cu11-10.2.10.91-py3-none-manylinux1_x86_64.whl (54.6 MB)\n",
356 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m54.6/54.6 MB\u001b[0m \u001b[31m24.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
357 | "\u001b[?25hCollecting nvidia-cusolver-cu11==11.4.0.1 (from torch==2.0.1->xformers==0.0.22)\n",
358 | " Downloading nvidia_cusolver_cu11-11.4.0.1-2-py3-none-manylinux1_x86_64.whl (102.6 MB)\n",
359 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m102.6/102.6 MB\u001b[0m \u001b[31m16.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
360 | "\u001b[?25hCollecting nvidia-cusparse-cu11==11.7.4.91 (from torch==2.0.1->xformers==0.0.22)\n",
361 | " Downloading nvidia_cusparse_cu11-11.7.4.91-py3-none-manylinux1_x86_64.whl (173.2 MB)\n",
362 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m173.2/173.2 MB\u001b[0m \u001b[31m3.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
363 | "\u001b[?25hCollecting nvidia-nccl-cu11==2.14.3 (from torch==2.0.1->xformers==0.0.22)\n",
364 | " Downloading nvidia_nccl_cu11-2.14.3-py3-none-manylinux1_x86_64.whl (177.1 MB)\n",
365 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m177.1/177.1 MB\u001b[0m \u001b[31m6.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
366 | "\u001b[?25hCollecting nvidia-nvtx-cu11==11.7.91 (from torch==2.0.1->xformers==0.0.22)\n",
367 | " Downloading nvidia_nvtx_cu11-11.7.91-py3-none-manylinux1_x86_64.whl (98 kB)\n",
368 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m98.6/98.6 kB\u001b[0m \u001b[31m14.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
369 | "\u001b[?25hCollecting triton\n",
370 | " Downloading triton-2.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl (63.3 MB)\n",
371 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m63.3/63.3 MB\u001b[0m \u001b[31m25.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
372 | "\u001b[?25hRequirement already satisfied: cmake in /usr/local/lib/python3.10/dist-packages (from triton) (3.27.9)\n",
373 | "Collecting lit (from triton)\n",
374 | " Downloading lit-17.0.6.tar.gz (153 kB)\n",
375 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m153.0/153.0 kB\u001b[0m \u001b[31m22.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
376 | "\u001b[?25h Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n",
377 | " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n",
378 | " Installing backend dependencies ... \u001b[?25l\u001b[?25hdone\n",
379 | " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
380 | "Requirement already satisfied: setuptools in /usr/local/lib/python3.10/dist-packages (from nvidia-cublas-cu11==11.10.3.66->torch==2.0.1->xformers==0.0.22) (67.7.2)\n",
381 | "Requirement already satisfied: wheel in /usr/local/lib/python3.10/dist-packages (from nvidia-cublas-cu11==11.10.3.66->torch==2.0.1->xformers==0.0.22) (0.42.0)\n",
382 | "Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2->torch==2.0.1->xformers==0.0.22) (2.1.3)\n",
383 | "Requirement already satisfied: mpmath>=0.19 in /usr/local/lib/python3.10/dist-packages (from sympy->torch==2.0.1->xformers==0.0.22) (1.3.0)\n",
384 | "Building wheels for collected packages: lit\n",
385 | " Building wheel for lit (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n",
386 | " Created wheel for lit: filename=lit-17.0.6-py3-none-any.whl size=93255 sha256=85a129fb69f021984b8156ba660bc8f297bc6205f7e54b8fb604cd281cb8040b\n",
387 | " Stored in directory: /root/.cache/pip/wheels/30/dd/04/47d42976a6a86dc2ab66d7518621ae96f43452c8841d74758a\n",
388 | "Successfully built lit\n",
389 | "Installing collected packages: lit, bitsandbytes, nvidia-nvtx-cu11, nvidia-nccl-cu11, nvidia-cusparse-cu11, nvidia-curand-cu11, nvidia-cufft-cu11, nvidia-cuda-runtime-cu11, nvidia-cuda-nvrtc-cu11, nvidia-cuda-cupti-cu11, nvidia-cublas-cu11, nvidia-cusolver-cu11, nvidia-cudnn-cu11, triton, torch, xformers\n",
390 | " Attempting uninstall: triton\n",
391 | " Found existing installation: triton 2.1.0\n",
392 | " Uninstalling triton-2.1.0:\n",
393 | " Successfully uninstalled triton-2.1.0\n",
394 | " Attempting uninstall: torch\n",
395 | " Found existing installation: torch 2.1.0+cu121\n",
396 | " Uninstalling torch-2.1.0+cu121:\n",
397 | " Successfully uninstalled torch-2.1.0+cu121\n",
398 | "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
399 | "open-clip-torch 2.20.0 requires protobuf<4, but you have protobuf 4.25.1 which is incompatible.\n",
400 | "torchaudio 2.1.0+cu121 requires torch==2.1.0, but you have torch 2.0.1 which is incompatible.\n",
401 | "torchdata 0.7.0 requires torch==2.1.0, but you have torch 2.0.1 which is incompatible.\n",
402 | "torchtext 0.16.0 requires torch==2.1.0, but you have torch 2.0.1 which is incompatible.\n",
403 | "torchvision 0.16.0+cu121 requires torch==2.1.0, but you have torch 2.0.1 which is incompatible.\u001b[0m\u001b[31m\n",
404 | "\u001b[0mSuccessfully installed bitsandbytes-0.41.3.post2 lit-17.0.6 nvidia-cublas-cu11-11.10.3.66 nvidia-cuda-cupti-cu11-11.7.101 nvidia-cuda-nvrtc-cu11-11.7.99 nvidia-cuda-runtime-cu11-11.7.99 nvidia-cudnn-cu11-8.5.0.96 nvidia-cufft-cu11-10.9.0.58 nvidia-curand-cu11-10.2.10.91 nvidia-cusolver-cu11-11.4.0.1 nvidia-cusparse-cu11-11.7.4.91 nvidia-nccl-cu11-2.14.3 nvidia-nvtx-cu11-11.7.91 torch-2.0.1 triton-2.0.0 xformers-0.0.22\n"
405 | ]
406 | }
407 | ]
408 | },
409 | {
410 | "cell_type": "code",
411 | "source": [
412 | "%env PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION python"
413 | ],
414 | "metadata": {
415 | "colab": {
416 | "base_uri": "https://localhost:8080/"
417 | },
418 | "id": "BhrDr3YqQlhL",
419 | "outputId": "5b883b41-8888-41a4-9155-ece8138819e0"
420 | },
421 | "execution_count": 3,
422 | "outputs": [
423 | {
424 | "output_type": "stream",
425 | "name": "stdout",
426 | "text": [
427 | "env: PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python\n"
428 | ]
429 | }
430 | ]
431 | },
432 | {
433 | "cell_type": "code",
434 | "source": [
435 | "# https://note.com/kohya_ss/n/nb20c5187e15a"
436 | ],
437 | "metadata": {
438 | "id": "w9Uc7loHRAwC"
439 | },
440 | "execution_count": 5,
441 | "outputs": []
442 | },
443 | {
444 | "cell_type": "code",
445 | "source": [
446 | "dataset = \"\"\"[general]\n",
447 | "enable_bucket = true\n",
448 | "\n",
449 | "[[datasets]]\n",
450 | "resolution = 512\n",
451 | "batch_size = 4\n",
452 | "\n",
453 | "[[datasets.subsets]]\n",
454 | "image_dir = '/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog'\n",
455 | "class_tokens = 'shs frog'\n",
456 | "num_repeats = 10\n",
457 | "\n",
458 | "[[datasets.subsets]]\n",
459 | "is_reg = true\n",
460 | "image_dir = '/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog'\n",
461 | "class_tokens = 'frog'\n",
462 | "num_repeats = 1\n",
463 | "\"\"\"\n",
464 | "\n",
465 | "with open(\"dataset.toml\", \"w\") as f:\n",
466 | " f.write(dataset)"
467 | ],
468 | "metadata": {
469 | "id": "iqWiTXShQrB2"
470 | },
471 | "execution_count": 10,
472 | "outputs": []
473 | },
474 | {
475 | "cell_type": "code",
476 | "source": [
477 | "!accelerate launch --num_cpu_threads_per_process 1 train_db.py \\\n",
478 | " --pretrained_model_name_or_path=\"stabilityai/stable-diffusion-2-1-base\" \\\n",
479 | " --dataset_config=\"dataset.toml\" \\\n",
480 | " --output_dir=\"/content/drive/MyDrive/AI/DB/Model/frog\" \\\n",
481 | " --output_name=\"frog\" \\\n",
482 | " --save_model_as=safetensors \\\n",
483 | " --prior_loss_weight=1.0 \\\n",
484 | " --max_train_steps=1600 \\\n",
485 | " --learning_rate=1e-6 \\\n",
486 | " --optimizer_type=\"AdamW8bit\" \\\n",
487 | " --xformers \\\n",
488 | " --mixed_precision=\"fp16\" \\\n",
489 | " --cache_latents \\\n",
490 | " --gradient_checkpointing \\\n",
491 | " --v2"
492 | ],
493 | "metadata": {
494 | "colab": {
495 | "base_uri": "https://localhost:8080/"
496 | },
497 | "id": "4LggEndhSLel",
498 | "outputId": "e51655bf-08f7-4ea6-942b-818feaf53095"
499 | },
500 | "execution_count": 12,
501 | "outputs": [
502 | {
503 | "output_type": "stream",
504 | "name": "stdout",
505 | "text": [
506 | "/usr/local/lib/python3.10/dist-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: '/usr/local/lib/python3.10/dist-packages/torchvision/image.so: undefined symbol: _ZN3c104cuda9SetDeviceEi'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source?\n",
507 | " warn(\n",
508 | "The following values were not passed to `accelerate launch` and had defaults used instead:\n",
509 | "\t`--num_processes` was set to a value of `1`\n",
510 | "\t`--num_machines` was set to a value of `1`\n",
511 | "\t`--mixed_precision` was set to a value of `'no'`\n",
512 | "\t`--dynamo_backend` was set to a value of `'no'`\n",
513 | "To avoid this warning pass in values for each of the problematic parameters or run `accelerate config`.\n",
514 | "/usr/local/lib/python3.10/dist-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: '/usr/local/lib/python3.10/dist-packages/torchvision/image.so: undefined symbol: _ZN3c104cuda9SetDeviceEi'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source?\n",
515 | " warn(\n",
516 | "2023-12-18 09:48:47.597548: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n",
517 | "2023-12-18 09:48:47.597599: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n",
518 | "2023-12-18 09:48:47.598958: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n",
519 | "2023-12-18 09:48:48.874111: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n",
520 | "prepare tokenizer\n",
521 | "Load dataset config from dataset.toml\n",
522 | "prepare images.\n",
523 | "found directory /content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog contains 15 image files\n",
524 | "No caption file found for 15 images. Training will continue without captions for these images. If class token exists, it will be used. / 15枚の画像にキャプションファイルが見つかりませんでした。これらの画像についてはキャプションなしで学習を続行します。class tokenが存在する場合はそれを使います。\n",
525 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/[_c_]chojuganso0001_s1024_chojuganso0001_0.png\n",
526 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/_c_choju2_0011_s1024_choju2_0011_6.png\n",
527 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/_c_chojuganso0002_s1024_chojuganso0002_0.png\n",
528 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/_c_chojuganso0003_s1024_chojuganso0003_0.png\n",
529 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/_c_chojuganso0005_s1024_chojuganso0005_0.png\n",
530 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog/_c_chojuganso0009_s1024_chojuganso0009_0.png... and 10 more\n",
531 | "found directory /content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog contains 50 image files\n",
532 | "No caption file found for 50 images. Training will continue without captions for these images. If class token exists, it will be used. / 50枚の画像にキャプションファイルが見つかりませんでした。これらの画像についてはキャプションなしで学習を続行します。class tokenが存在する場合はそれを使います。\n",
533 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233500_0.png\n",
534 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233500_1.png\n",
535 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233500_2.png\n",
536 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233500_3.png\n",
537 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233511_0.png\n",
538 | "/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog/im_20220926233511_1.png... and 45 more\n",
539 | "150 train images with repeating.\n",
540 | "50 reg images.\n",
541 | "[Dataset 0]\n",
542 | " batch_size: 4\n",
543 | " resolution: (512, 512)\n",
544 | " enable_bucket: True\n",
545 | " min_bucket_reso: 256\n",
546 | " max_bucket_reso: 1024\n",
547 | " bucket_reso_steps: 64\n",
548 | " bucket_no_upscale: False\n",
549 | "\n",
550 | " [Subset 0 of Dataset 0]\n",
551 | " image_dir: \"/content/drive/MyDrive/AI/DB/lora_train_sample_pack/train/20_usu frog\"\n",
552 | " image_count: 15\n",
553 | " num_repeats: 10\n",
554 | " shuffle_caption: False\n",
555 | " keep_tokens: 0\n",
556 | " caption_dropout_rate: 0.0\n",
557 | " caption_dropout_every_n_epoches: 0\n",
558 | " caption_tag_dropout_rate: 0.0\n",
559 | " caption_prefix: None\n",
560 | " caption_suffix: None\n",
561 | " color_aug: False\n",
562 | " flip_aug: False\n",
563 | " face_crop_aug_range: None\n",
564 | " random_crop: False\n",
565 | " token_warmup_min: 1,\n",
566 | " token_warmup_step: 0,\n",
567 | " is_reg: False\n",
568 | " class_tokens: shs frog\n",
569 | " caption_extension: .caption\n",
570 | "\n",
571 | " [Subset 1 of Dataset 0]\n",
572 | " image_dir: \"/content/drive/MyDrive/AI/DB/lora_train_sample_pack/reg/1_frog\"\n",
573 | " image_count: 50\n",
574 | " num_repeats: 1\n",
575 | " shuffle_caption: False\n",
576 | " keep_tokens: 0\n",
577 | " caption_dropout_rate: 0.0\n",
578 | " caption_dropout_every_n_epoches: 0\n",
579 | " caption_tag_dropout_rate: 0.0\n",
580 | " caption_prefix: None\n",
581 | " caption_suffix: None\n",
582 | " color_aug: False\n",
583 | " flip_aug: False\n",
584 | " face_crop_aug_range: None\n",
585 | " random_crop: False\n",
586 | " token_warmup_min: 1,\n",
587 | " token_warmup_step: 0,\n",
588 | " is_reg: True\n",
589 | " class_tokens: frog\n",
590 | " caption_extension: .caption\n",
591 | "\n",
592 | "\n",
593 | "[Dataset 0]\n",
594 | "loading image sizes.\n",
595 | "100% 65/65 [00:00<00:00, 926.17it/s]\n",
596 | "make buckets\n",
597 | "number of images (including repeats) / 各bucketの画像枚数(繰り返し回数を含む)\n",
598 | "bucket 0: resolution (512, 512), count: 300\n",
599 | "mean ar error (without repeats): 0.0\n",
600 | "prepare accelerator\n",
601 | "loading model for process 0/1\n",
602 | "load Diffusers pretrained models: stabilityai/stable-diffusion-2-1-base\n",
603 | "Loading pipeline components...: 100% 5/5 [00:00<00:00, 5.30it/s]\n",
604 | "UNet2DConditionModel: 64, [5, 10, 20, 20], 1024, True, False\n",
605 | "U-Net converted to original U-Net\n",
606 | "Enable xformers for U-Net\n",
607 | "[Dataset 0]\n",
608 | "caching latents.\n",
609 | "checking cache validity...\n",
610 | "100% 65/65 [00:00<00:00, 721242.75it/s]\n",
611 | "caching latents...\n",
612 | "100% 65/65 [00:08<00:00, 7.56it/s]\n",
613 | "CrossAttnDownBlock2D False -> True\n",
614 | "CrossAttnDownBlock2D False -> True\n",
615 | "CrossAttnDownBlock2D False -> True\n",
616 | "DownBlock2D False -> True\n",
617 | "UNetMidBlock2DCrossAttn False -> True\n",
618 | "UpBlock2D False -> True\n",
619 | "CrossAttnUpBlock2D False -> True\n",
620 | "CrossAttnUpBlock2D False -> True\n",
621 | "CrossAttnUpBlock2D False -> True\n",
622 | "prepare optimizer, data loader etc.\n",
623 | "use 8-bit AdamW optimizer | {}\n",
624 | "running training / 学習開始\n",
625 | " num train images * repeats / 学習画像の数×繰り返し回数: 150\n",
626 | " num reg images / 正則化画像の数: 50\n",
627 | " num batches per epoch / 1epochのバッチ数: 75\n",
628 | " num epochs / epoch数: 22\n",
629 | " batch size per device / バッチサイズ: 1\n",
630 | " total train batch size (with parallel & distributed & accumulation) / 総バッチサイズ(並列学習、勾配合計含む): 1\n",
631 | " gradient ccumulation steps / 勾配を合計するステップ数 = 1\n",
632 | " total optimization steps / 学習ステップ数: 1600\n",
633 | "steps: 0% 0/1600 [00:00, ?it/s]\n",
634 | "epoch 1/22\n",
635 | "steps: 5% 75/1600 [02:04<42:20, 1.67s/it, avr_loss=0.0888]\n",
636 | "epoch 2/22\n",
637 | "steps: 9% 150/1600 [04:09<40:07, 1.66s/it, avr_loss=0.0862]\n",
638 | "epoch 3/22\n",
639 | "steps: 14% 225/1600 [06:13<37:59, 1.66s/it, avr_loss=0.0898]\n",
640 | "epoch 4/22\n",
641 | "steps: 19% 300/1600 [08:16<35:52, 1.66s/it, avr_loss=0.0885]\n",
642 | "epoch 5/22\n",
643 | "steps: 23% 375/1600 [10:20<33:47, 1.66s/it, avr_loss=0.0927]\n",
644 | "epoch 6/22\n",
645 | "steps: 28% 450/1600 [12:24<31:43, 1.66s/it, avr_loss=0.0906]\n",
646 | "epoch 7/22\n",
647 | "steps: 33% 525/1600 [14:29<29:39, 1.66s/it, avr_loss=0.0861]\n",
648 | "epoch 8/22\n",
649 | "steps: 38% 600/1600 [16:33<27:35, 1.66s/it, avr_loss=0.0842]\n",
650 | "epoch 9/22\n",
651 | "steps: 42% 675/1600 [18:37<25:30, 1.65s/it, avr_loss=0.0824]\n",
652 | "epoch 10/22\n",
653 | "steps: 47% 750/1600 [20:41<23:26, 1.66s/it, avr_loss=0.0767]\n",
654 | "epoch 11/22\n",
655 | "steps: 52% 825/1600 [22:45<21:22, 1.66s/it, avr_loss=0.0851]\n",
656 | "epoch 12/22\n",
657 | "steps: 56% 900/1600 [24:49<19:18, 1.66s/it, avr_loss=0.0889]\n",
658 | "epoch 13/22\n",
659 | "steps: 61% 975/1600 [26:53<17:14, 1.65s/it, avr_loss=0.0969]\n",
660 | "epoch 14/22\n",
661 | "steps: 66% 1050/1600 [28:57<15:09, 1.65s/it, avr_loss=0.0869]\n",
662 | "epoch 15/22\n",
663 | "steps: 70% 1125/1600 [31:00<13:05, 1.65s/it, avr_loss=0.0839]\n",
664 | "epoch 16/22\n",
665 | "steps: 75% 1200/1600 [33:05<11:01, 1.65s/it, avr_loss=0.0811]\n",
666 | "epoch 17/22\n",
667 | "steps: 80% 1275/1600 [35:08<08:57, 1.65s/it, avr_loss=0.0869]\n",
668 | "epoch 18/22\n",
669 | "steps: 84% 1350/1600 [37:12<06:53, 1.65s/it, avr_loss=0.0806]\n",
670 | "epoch 19/22\n",
671 | "steps: 89% 1425/1600 [39:17<04:49, 1.65s/it, avr_loss=0.0867]\n",
672 | "epoch 20/22\n",
673 | "steps: 94% 1500/1600 [41:21<02:45, 1.65s/it, avr_loss=0.0874]\n",
674 | "epoch 21/22\n",
675 | "steps: 98% 1575/1600 [43:25<00:41, 1.65s/it, avr_loss=0.0792]\n",
676 | "epoch 22/22\n",
677 | "steps: 100% 1600/1600 [44:06<00:00, 1.65s/it, avr_loss=0.0825]save trained model as StableDiffusion checkpoint to /content/drive/MyDrive/AI/DB/Model/frog/frog.safetensors\n",
678 | "make dummy weights for resblock.23, text_projection and logit scale.\n",
679 | "model saved.\n",
680 | "steps: 100% 1600/1600 [44:33<00:00, 1.67s/it, avr_loss=0.0825]\n"
681 | ]
682 | }
683 | ]
684 | },
685 | {
686 | "cell_type": "code",
687 | "source": [],
688 | "metadata": {
689 | "id": "hcAiTIxOf8X4"
690 | },
691 | "execution_count": null,
692 | "outputs": []
693 | }
694 | ]
695 | }
--------------------------------------------------------------------------------