├── CLIP_+_TADNE_(pytorch)_v2.ipynb
├── Grokking_Stable_Diffusion_[with_CLIP_loss from Disco Diffusion].ipynb
├── Grokking_Stable_Diffusion_[with_CLIP_loss].ipynb
├── README.md
├── Safety_Waifu.ipynb
├── Safety_Waifu_(v2).ipynb
├── TADNE_and_CLIP.ipynb
├── TADNE_reverse_image_search_[github].ipynb
├── Text2Live.ipynb
├── ViT_L_14_CLIP_+_TADNE_(pytorch)_v2.ipynb
├── apple simulation.ipynb
├── choose_your_own_adventure_stories.json
├── convert_spritesheet_to_gif.ipynb
├── corrigable utility.ipynb
├── cycle_diffsuion.ipynb
├── dog_on_bench.png
├── dog_on_bench_mask.png
├── ernie_ViLg.ipynb
├── inpainting.ipynb
├── lcm img2img.ipynb
├── modelscope_text_to_video.ipynb
├── nouns.txt
├── sd3_prompt_enhancer_workflow.json
├── shap_e_text_to_3d_with_export.ipynb
└── ss5.png
/README.md:
--------------------------------------------------------------------------------
1 | Some notebooks I'm working on in Google Colab
2 |
3 |
4 | ## README
5 |
6 | These notebooks were created by Logan Zoellner (@nagolinc, https://loganzoellner.com)
7 |
8 |
9 | Many of these notebooks make use of @AydaoAI's "This Anime Does Not Exist" (which you can read more about here: https://www.gwern.net/Faces#extended-stylegan2-danbooru2019-aydao) and CLIP (which you can read more about here: https://github.com/openai/CLIP).
10 |
11 | Much of this work is based on this notebook by @openai: https://colab.research.google.com/github/openai/clip/blob/master/Interacting_with_CLIP.ipynb
12 |
13 | And on this notebook by @arfa: https://colab.research.google.com/drive/1oxcJ1tbG77hlggdKd_d8h22nBcIZsLTL
14 |
15 | TADNE is under a CC BY-NC license and CLIP is under the MIT License.
16 |
17 | Any code in this repository not otherwise licensed is hereby released under the MIT License.
18 |
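19 | For reference, here is a minimal sketch of the kind of CLIP usage these notebooks build on (it relies only on the public API of the openai/CLIP package linked above; the image file and prompt strings are placeholders taken from this repo):
20 |
21 | ```python
22 | import torch
23 | import clip
24 | from PIL import Image
25 |
26 | # Load a CLIP model and score an image against candidate captions.
27 | device = "cuda" if torch.cuda.is_available() else "cpu"
28 | model, preprocess = clip.load("ViT-B/32", device=device)
29 |
30 | image = preprocess(Image.open("dog_on_bench.png")).unsqueeze(0).to(device)
31 | text = clip.tokenize(["a dog on a bench", "an anime character"]).to(device)
32 |
33 | with torch.no_grad():
34 |     logits_per_image, _ = model(image, text)
35 |     probs = logits_per_image.softmax(dim=-1)
36 |
37 | print(probs)  # higher probability = closer text/image match
38 | ```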
--------------------------------------------------------------------------------
/Text2Live.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "name": "Text2Live",
7 | "provenance": [],
8 | "collapsed_sections": [],
9 | "machine_shape": "hm",
10 | "authorship_tag": "ABX9TyNJUmdI8DEhi24XAGYvnoIA",
11 | "include_colab_link": true
12 | },
13 | "kernelspec": {
14 | "name": "python3",
15 | "display_name": "Python 3"
16 | },
17 | "language_info": {
18 | "name": "python"
19 | },
20 | "accelerator": "GPU",
21 | "gpuClass": "standard"
22 | },
23 | "cells": [
24 | {
25 | "cell_type": "markdown",
26 | "metadata": {
27 | "id": "view-in-github",
28 | "colab_type": "text"
29 | },
30 | "source": [
31 | "
"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": 1,
37 | "metadata": {
38 | "colab": {
39 | "base_uri": "https://localhost:8080/"
40 | },
41 | "id": "YQWAWGifU9IO",
42 | "outputId": "80e33445-8d1f-4f56-d800-ac34857d415f"
43 | },
44 | "outputs": [
45 | {
46 | "output_type": "stream",
47 | "name": "stdout",
48 | "text": [
49 | "Cloning into 'Text2LIVE'...\n",
50 | "remote: Enumerating objects: 105, done.\u001b[K\n",
51 | "remote: Counting objects: 100% (105/105), done.\u001b[K\n",
52 | "remote: Compressing objects: 100% (68/68), done.\u001b[K\n",
53 | "remote: Total 105 (delta 40), reused 96 (delta 31), pack-reused 0\u001b[K\n",
54 | "Receiving objects: 100% (105/105), 1.59 MiB | 9.36 MiB/s, done.\n",
55 | "Resolving deltas: 100% (40/40), done.\n"
56 | ]
57 | }
58 | ],
59 | "source": [
60 | "!git clone https://github.com/omerbt/Text2LIVE.git\n"
61 | ]
62 | },
63 | {
64 | "cell_type": "code",
65 | "source": [
66 | "cd Text2LIVE"
67 | ],
68 | "metadata": {
69 | "colab": {
70 | "base_uri": "https://localhost:8080/"
71 | },
72 | "id": "fG5QtprCVDwH",
73 | "outputId": "e506598f-c76f-46d2-cd9a-8a979ceafc86"
74 | },
75 | "execution_count": 2,
76 | "outputs": [
77 | {
78 | "output_type": "stream",
79 | "name": "stdout",
80 | "text": [
81 | "/content/Text2LIVE\n"
82 | ]
83 | }
84 | ]
85 | },
86 | {
87 | "cell_type": "code",
88 | "source": [
89 | "!pip install -r requirements.txt"
90 | ],
91 | "metadata": {
92 | "colab": {
93 | "base_uri": "https://localhost:8080/"
94 | },
95 | "id": "lpaai3yAVXIe",
96 | "outputId": "f816bd01-e5e2-40bb-c781-af1ee4a00a39"
97 | },
98 | "execution_count": 3,
99 | "outputs": [
100 | {
101 | "output_type": "stream",
102 | "name": "stdout",
103 | "text": [
104 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
105 | "Requirement already satisfied: pillow in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 1)) (7.1.2)\n",
106 | "Collecting torch~=1.10.0\n",
107 | " Downloading torch-1.10.2-cp37-cp37m-manylinux1_x86_64.whl (881.9 MB)\n",
108 | "\u001b[K |██████████████████████████████▎ | 834.1 MB 62.5 MB/s eta 0:00:01tcmalloc: large alloc 1147494400 bytes == 0x39166000 @ 0x7f795bfc2615 0x592b76 0x4df71e 0x59afff 0x515655 0x549576 0x593fce 0x548ae9 0x51566f 0x549576 0x593fce 0x548ae9 0x5127f1 0x598e3b 0x511f68 0x598e3b 0x511f68 0x598e3b 0x511f68 0x4bc98a 0x532e76 0x594b72 0x515600 0x549576 0x593fce 0x548ae9 0x5127f1 0x549576 0x593fce 0x5118f8 0x593dd7\n",
109 | "\u001b[K |████████████████████████████████| 881.9 MB 1.7 kB/s \n",
110 | "\u001b[?25hCollecting torchvision~=0.11.2\n",
111 | " Downloading torchvision-0.11.3-cp37-cp37m-manylinux1_x86_64.whl (23.2 MB)\n",
112 | "\u001b[K |████████████████████████████████| 23.2 MB 149 kB/s \n",
113 | "\u001b[?25hRequirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 4)) (4.64.0)\n",
114 | "Requirement already satisfied: numpy in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 5)) (1.21.6)\n",
115 | "Collecting ftfy\n",
116 | " Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n",
117 | "\u001b[K |████████████████████████████████| 53 kB 1.8 MB/s \n",
118 | "\u001b[?25hRequirement already satisfied: regex in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 7)) (2022.6.2)\n",
119 | "Collecting madgrad~=1.1\n",
120 | " Downloading madgrad-1.2-py3-none-any.whl (11 kB)\n",
121 | "Requirement already satisfied: scipy in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 9)) (1.7.3)\n",
122 | "Requirement already satisfied: imageio in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 10)) (2.9.0)\n",
123 | "Collecting imageio-ffmpeg\n",
124 | " Downloading imageio_ffmpeg-0.4.7-py3-none-manylinux2010_x86_64.whl (26.9 MB)\n",
125 | "\u001b[K |████████████████████████████████| 26.9 MB 1.2 MB/s \n",
126 | "\u001b[?25hRequirement already satisfied: pyyaml in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 12)) (3.13)\n",
127 | "Requirement already satisfied: gdown in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 13)) (4.4.0)\n",
128 | "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch~=1.10.0->-r requirements.txt (line 2)) (4.1.1)\n",
129 | "Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.7/dist-packages (from ftfy->-r requirements.txt (line 6)) (0.2.5)\n",
130 | "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.7/dist-packages (from gdown->-r requirements.txt (line 13)) (4.6.3)\n",
131 | "Requirement already satisfied: requests[socks] in /usr/local/lib/python3.7/dist-packages (from gdown->-r requirements.txt (line 13)) (2.23.0)\n",
132 | "Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from gdown->-r requirements.txt (line 13)) (3.7.1)\n",
133 | "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from gdown->-r requirements.txt (line 13)) (1.15.0)\n",
134 | "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown->-r requirements.txt (line 13)) (2.10)\n",
135 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown->-r requirements.txt (line 13)) (2022.6.15)\n",
136 | "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown->-r requirements.txt (line 13)) (1.24.3)\n",
137 | "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown->-r requirements.txt (line 13)) (3.0.4)\n",
138 | "Requirement already satisfied: PySocks!=1.5.7,>=1.5.6 in /usr/local/lib/python3.7/dist-packages (from requests[socks]->gdown->-r requirements.txt (line 13)) (1.7.1)\n",
139 | "Installing collected packages: torch, torchvision, madgrad, imageio-ffmpeg, ftfy\n",
140 | " Attempting uninstall: torch\n",
141 | " Found existing installation: torch 1.12.0+cu113\n",
142 | " Uninstalling torch-1.12.0+cu113:\n",
143 | " Successfully uninstalled torch-1.12.0+cu113\n",
144 | " Attempting uninstall: torchvision\n",
145 | " Found existing installation: torchvision 0.13.0+cu113\n",
146 | " Uninstalling torchvision-0.13.0+cu113:\n",
147 | " Successfully uninstalled torchvision-0.13.0+cu113\n",
148 | "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
149 | "torchtext 0.13.0 requires torch==1.12.0, but you have torch 1.10.2 which is incompatible.\n",
150 | "torchaudio 0.12.0+cu113 requires torch==1.12.0, but you have torch 1.10.2 which is incompatible.\u001b[0m\n",
151 | "Successfully installed ftfy-6.1.1 imageio-ffmpeg-0.4.7 madgrad-1.2 torch-1.10.2 torchvision-0.11.3\n"
152 | ]
153 | }
154 | ]
155 | },
156 | {
157 | "cell_type": "code",
158 | "source": [
159 | "!gdown https://drive.google.com/uc?id=1osN4PlPkY9uk6pFqJZo8lhJUjTIpa80J&export=download\n",
160 | "!unzip data.zip"
161 | ],
162 | "metadata": {
163 | "colab": {
164 | "base_uri": "https://localhost:8080/"
165 | },
166 | "id": "hJe6jcGmVYeX",
167 | "outputId": "25f6895e-3264-4ee2-a5cf-f1abcb69a042"
168 | },
169 | "execution_count": 4,
170 | "outputs": [
171 | {
172 | "output_type": "stream",
173 | "name": "stdout",
174 | "text": [
175 | "Downloading...\n",
176 | "From: https://drive.google.com/uc?id=1osN4PlPkY9uk6pFqJZo8lhJUjTIpa80J\n",
177 | "To: /content/Text2LIVE/data.zip\n",
178 | "100% 79.0M/79.0M [00:00<00:00, 209MB/s]\n",
179 | "Archive: data.zip\n",
180 | " creating: data/\n",
181 | " creating: data/images/\n",
182 | " inflating: data/images/cake.jpeg \n",
183 | " inflating: data/images/horse.jpg \n",
184 | " inflating: data/images/Thumbs.db \n",
185 | " creating: data/pretrained_nla_models/\n",
186 | " creating: data/pretrained_nla_models/blackswan/\n",
187 | " inflating: data/pretrained_nla_models/blackswan/checkpoint \n",
188 | " creating: data/pretrained_nla_models/car-turn/\n",
189 | " inflating: data/pretrained_nla_models/car-turn/checkpoint \n",
190 | " creating: data/pretrained_nla_models/libby/\n",
191 | " inflating: data/pretrained_nla_models/libby/checkpoint \n",
192 | " creating: data/videos/\n",
193 | " creating: data/videos/blackswan/\n",
194 | " inflating: data/videos/blackswan/00000.jpg \n",
195 | " inflating: data/videos/blackswan/00001.jpg \n",
196 | " inflating: data/videos/blackswan/00002.jpg \n",
197 | " inflating: data/videos/blackswan/00003.jpg \n",
198 | " inflating: data/videos/blackswan/00004.jpg \n",
199 | " inflating: data/videos/blackswan/00005.jpg \n",
200 | " inflating: data/videos/blackswan/00006.jpg \n",
201 | " inflating: data/videos/blackswan/00007.jpg \n",
202 | " inflating: data/videos/blackswan/00008.jpg \n",
203 | " inflating: data/videos/blackswan/00009.jpg \n",
204 | " inflating: data/videos/blackswan/00010.jpg \n",
205 | " inflating: data/videos/blackswan/00011.jpg \n",
206 | " inflating: data/videos/blackswan/00012.jpg \n",
207 | " inflating: data/videos/blackswan/00013.jpg \n",
208 | " inflating: data/videos/blackswan/00014.jpg \n",
209 | " inflating: data/videos/blackswan/00015.jpg \n",
210 | " inflating: data/videos/blackswan/00016.jpg \n",
211 | " inflating: data/videos/blackswan/00017.jpg \n",
212 | " inflating: data/videos/blackswan/00018.jpg \n",
213 | " inflating: data/videos/blackswan/00019.jpg \n",
214 | " inflating: data/videos/blackswan/00020.jpg \n",
215 | " inflating: data/videos/blackswan/00021.jpg \n",
216 | " inflating: data/videos/blackswan/00022.jpg \n",
217 | " inflating: data/videos/blackswan/00023.jpg \n",
218 | " inflating: data/videos/blackswan/00024.jpg \n",
219 | " inflating: data/videos/blackswan/00025.jpg \n",
220 | " inflating: data/videos/blackswan/00026.jpg \n",
221 | " inflating: data/videos/blackswan/00027.jpg \n",
222 | " inflating: data/videos/blackswan/00028.jpg \n",
223 | " inflating: data/videos/blackswan/00029.jpg \n",
224 | " inflating: data/videos/blackswan/00030.jpg \n",
225 | " inflating: data/videos/blackswan/00031.jpg \n",
226 | " inflating: data/videos/blackswan/00032.jpg \n",
227 | " inflating: data/videos/blackswan/00033.jpg \n",
228 | " inflating: data/videos/blackswan/00034.jpg \n",
229 | " inflating: data/videos/blackswan/00035.jpg \n",
230 | " inflating: data/videos/blackswan/00036.jpg \n",
231 | " inflating: data/videos/blackswan/00037.jpg \n",
232 | " inflating: data/videos/blackswan/00038.jpg \n",
233 | " inflating: data/videos/blackswan/00039.jpg \n",
234 | " inflating: data/videos/blackswan/00040.jpg \n",
235 | " inflating: data/videos/blackswan/00041.jpg \n",
236 | " inflating: data/videos/blackswan/00042.jpg \n",
237 | " inflating: data/videos/blackswan/00043.jpg \n",
238 | " inflating: data/videos/blackswan/00044.jpg \n",
239 | " inflating: data/videos/blackswan/00045.jpg \n",
240 | " inflating: data/videos/blackswan/00046.jpg \n",
241 | " inflating: data/videos/blackswan/00047.jpg \n",
242 | " inflating: data/videos/blackswan/00048.jpg \n",
243 | " inflating: data/videos/blackswan/00049.jpg \n",
244 | " creating: data/videos/car-turn/\n",
245 | " inflating: data/videos/car-turn/00000.jpg \n",
246 | " inflating: data/videos/car-turn/00001.jpg \n",
247 | " inflating: data/videos/car-turn/00002.jpg \n",
248 | " inflating: data/videos/car-turn/00003.jpg \n",
249 | " inflating: data/videos/car-turn/00004.jpg \n",
250 | " inflating: data/videos/car-turn/00005.jpg \n",
251 | " inflating: data/videos/car-turn/00006.jpg \n",
252 | " inflating: data/videos/car-turn/00007.jpg \n",
253 | " inflating: data/videos/car-turn/00008.jpg \n",
254 | " inflating: data/videos/car-turn/00009.jpg \n",
255 | " inflating: data/videos/car-turn/00010.jpg \n",
256 | " inflating: data/videos/car-turn/00011.jpg \n",
257 | " inflating: data/videos/car-turn/00012.jpg \n",
258 | " inflating: data/videos/car-turn/00013.jpg \n",
259 | " inflating: data/videos/car-turn/00014.jpg \n",
260 | " inflating: data/videos/car-turn/00015.jpg \n",
261 | " inflating: data/videos/car-turn/00016.jpg \n",
262 | " inflating: data/videos/car-turn/00017.jpg \n",
263 | " inflating: data/videos/car-turn/00018.jpg \n",
264 | " inflating: data/videos/car-turn/00019.jpg \n",
265 | " inflating: data/videos/car-turn/00020.jpg \n",
266 | " inflating: data/videos/car-turn/00021.jpg \n",
267 | " inflating: data/videos/car-turn/00022.jpg \n",
268 | " inflating: data/videos/car-turn/00023.jpg \n",
269 | " inflating: data/videos/car-turn/00024.jpg \n",
270 | " inflating: data/videos/car-turn/00025.jpg \n",
271 | " inflating: data/videos/car-turn/00026.jpg \n",
272 | " inflating: data/videos/car-turn/00027.jpg \n",
273 | " inflating: data/videos/car-turn/00028.jpg \n",
274 | " inflating: data/videos/car-turn/00029.jpg \n",
275 | " inflating: data/videos/car-turn/00030.jpg \n",
276 | " inflating: data/videos/car-turn/00031.jpg \n",
277 | " inflating: data/videos/car-turn/00032.jpg \n",
278 | " inflating: data/videos/car-turn/00033.jpg \n",
279 | " inflating: data/videos/car-turn/00034.jpg \n",
280 | " inflating: data/videos/car-turn/00035.jpg \n",
281 | " inflating: data/videos/car-turn/00036.jpg \n",
282 | " inflating: data/videos/car-turn/00037.jpg \n",
283 | " inflating: data/videos/car-turn/00038.jpg \n",
284 | " inflating: data/videos/car-turn/00039.jpg \n",
285 | " inflating: data/videos/car-turn/00040.jpg \n",
286 | " inflating: data/videos/car-turn/00041.jpg \n",
287 | " inflating: data/videos/car-turn/00042.jpg \n",
288 | " inflating: data/videos/car-turn/00043.jpg \n",
289 | " inflating: data/videos/car-turn/00044.jpg \n",
290 | " inflating: data/videos/car-turn/00045.jpg \n",
291 | " inflating: data/videos/car-turn/00046.jpg \n",
292 | " inflating: data/videos/car-turn/00047.jpg \n",
293 | " inflating: data/videos/car-turn/00048.jpg \n",
294 | " inflating: data/videos/car-turn/00049.jpg \n",
295 | " inflating: data/videos/car-turn/00050.jpg \n",
296 | " inflating: data/videos/car-turn/00051.jpg \n",
297 | " inflating: data/videos/car-turn/00052.jpg \n",
298 | " inflating: data/videos/car-turn/00053.jpg \n",
299 | " inflating: data/videos/car-turn/00054.jpg \n",
300 | " inflating: data/videos/car-turn/00055.jpg \n",
301 | " inflating: data/videos/car-turn/00056.jpg \n",
302 | " inflating: data/videos/car-turn/00057.jpg \n",
303 | " inflating: data/videos/car-turn/00058.jpg \n",
304 | " inflating: data/videos/car-turn/00059.jpg \n",
305 | " inflating: data/videos/car-turn/00060.jpg \n",
306 | " inflating: data/videos/car-turn/00061.jpg \n",
307 | " inflating: data/videos/car-turn/00062.jpg \n",
308 | " inflating: data/videos/car-turn/00063.jpg \n",
309 | " inflating: data/videos/car-turn/00064.jpg \n",
310 | " inflating: data/videos/car-turn/00065.jpg \n",
311 | " inflating: data/videos/car-turn/00066.jpg \n",
312 | " inflating: data/videos/car-turn/00067.jpg \n",
313 | " inflating: data/videos/car-turn/00068.jpg \n",
314 | " inflating: data/videos/car-turn/00069.jpg \n",
315 | " inflating: data/videos/car-turn/00070.jpg \n",
316 | " inflating: data/videos/car-turn/00071.jpg \n",
317 | " inflating: data/videos/car-turn/00072.jpg \n",
318 | " inflating: data/videos/car-turn/00073.jpg \n",
319 | " inflating: data/videos/car-turn/00074.jpg \n",
320 | " inflating: data/videos/car-turn/00075.jpg \n",
321 | " inflating: data/videos/car-turn/00076.jpg \n",
322 | " inflating: data/videos/car-turn/00077.jpg \n",
323 | " inflating: data/videos/car-turn/00078.jpg \n",
324 | " inflating: data/videos/car-turn/00079.jpg \n",
325 | " creating: data/videos/libby/\n",
326 | " inflating: data/videos/libby/00000.jpg \n",
327 | " inflating: data/videos/libby/00001.jpg \n",
328 | " inflating: data/videos/libby/00002.jpg \n",
329 | " inflating: data/videos/libby/00003.jpg \n",
330 | " inflating: data/videos/libby/00004.jpg \n",
331 | " inflating: data/videos/libby/00005.jpg \n",
332 | " inflating: data/videos/libby/00006.jpg \n",
333 | " inflating: data/videos/libby/00007.jpg \n",
334 | " inflating: data/videos/libby/00008.jpg \n",
335 | " inflating: data/videos/libby/00009.jpg \n",
336 | " inflating: data/videos/libby/00010.jpg \n",
337 | " inflating: data/videos/libby/00011.jpg \n",
338 | " inflating: data/videos/libby/00012.jpg \n",
339 | " inflating: data/videos/libby/00013.jpg \n",
340 | " inflating: data/videos/libby/00014.jpg \n",
341 | " inflating: data/videos/libby/00015.jpg \n",
342 | " inflating: data/videos/libby/00016.jpg \n",
343 | " inflating: data/videos/libby/00017.jpg \n",
344 | " inflating: data/videos/libby/00018.jpg \n",
345 | " inflating: data/videos/libby/00019.jpg \n",
346 | " inflating: data/videos/libby/00020.jpg \n",
347 | " inflating: data/videos/libby/00021.jpg \n",
348 | " inflating: data/videos/libby/00022.jpg \n",
349 | " inflating: data/videos/libby/00023.jpg \n",
350 | " inflating: data/videos/libby/00024.jpg \n",
351 | " inflating: data/videos/libby/00025.jpg \n",
352 | " inflating: data/videos/libby/00026.jpg \n",
353 | " inflating: data/videos/libby/00027.jpg \n",
354 | " inflating: data/videos/libby/00028.jpg \n",
355 | " inflating: data/videos/libby/00029.jpg \n",
356 | " inflating: data/videos/libby/00030.jpg \n",
357 | " inflating: data/videos/libby/00031.jpg \n",
358 | " inflating: data/videos/libby/00032.jpg \n",
359 | " inflating: data/videos/libby/00033.jpg \n",
360 | " inflating: data/videos/libby/00034.jpg \n",
361 | " inflating: data/videos/libby/00035.jpg \n",
362 | " inflating: data/videos/libby/00036.jpg \n",
363 | " inflating: data/videos/libby/00037.jpg \n",
364 | " inflating: data/videos/libby/00038.jpg \n",
365 | " inflating: data/videos/libby/00039.jpg \n",
366 | " inflating: data/videos/libby/00040.jpg \n",
367 | " inflating: data/videos/libby/00041.jpg \n",
368 | " inflating: data/videos/libby/00042.jpg \n",
369 | " inflating: data/videos/libby/00043.jpg \n",
370 | " inflating: data/videos/libby/00044.jpg \n",
371 | " inflating: data/videos/libby/00045.jpg \n",
372 | " inflating: data/videos/libby/00046.jpg \n",
373 | " inflating: data/videos/libby/00047.jpg \n",
374 | " inflating: data/videos/libby/00048.jpg \n"
375 | ]
376 | }
377 | ]
378 | },
379 | {
380 | "cell_type": "code",
381 | "source": [
382 | "#!python train_image.py --example_config golden_horse.yaml"
383 | ],
384 | "metadata": {
385 | "colab": {
386 | "base_uri": "https://localhost:8080/"
387 | },
388 | "id": "tw15mgdzV3YP",
389 | "outputId": "f9400776-a421-462e-9624-d8278a391b2e"
390 | },
391 | "execution_count": 5,
392 | "outputs": [
393 | {
394 | "output_type": "stream",
395 | "name": "stdout",
396 | "text": [
397 | "running with seed: 319910151.\n",
398 | "100%|███████████████████████████████████████| 338M/338M [00:06<00:00, 52.1MiB/s]\n",
399 | "/usr/local/lib/python3.7/dist-packages/torchvision/transforms/transforms.py:288: UserWarning: Argument interpolation should be of type InterpolationMode instead of int. Please, use InterpolationMode enum.\n",
400 | " \"Argument interpolation should be of type InterpolationMode instead of int. \"\n",
401 | " 0% 0/1000 [00:00, ?it/s]/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3635: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
402 | " \"See the documentation of nn.Upsample for details.\".format(mode)\n",
403 | "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3635: UserWarning: Default upsampling behavior when mode=bicubic is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
404 | " \"See the documentation of nn.Upsample for details.\".format(mode)\n",
405 | "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3680: UserWarning: The default behavior for interpolate/upsample with float scale_factor changed in 1.6.0 to align with other frameworks/libraries, and now uses scale_factor directly, instead of relying on the computed output size. If you wish to restore the old behavior, please set recompute_scale_factor=True. See the documentation of nn.Upsample for details. \n",
406 | " \"The default behavior for interpolate/upsample with float scale_factor changed \"\n",
407 | "100% 1000/1000 [14:23<00:00, 1.16it/s]\n"
408 | ]
409 | }
410 | ]
411 | },
412 | {
413 | "cell_type": "code",
414 | "source": [
415 | "#@markdown path to the input image\n",
416 | "image_path= \"./data/images/horse.jpg\" #@param{type: 'string'}\n",
417 | "\n",
418 | "#@markdown describe the edit layer\n",
419 | "screen_text= \"zebra\" #@param{type: 'string'}\n",
420 | "#@markdown texts, describing the full edited image\n",
421 | "comp_text= \"zebra\" #@param{type: 'string'}\n",
422 | "#@markdown texts, describing the input image\n",
423 | "src_text= \"horse\" #@param{type: 'string'}\n",
424 | "#@markdown text, describing the region of interest in the input image\n",
425 | "bootstrap_text= \"horse\" #@param{type: 'string'}\n",
426 | "#@markdown number of epochs for bootstrapping (it is annealed during training)\n",
427 | "bootstrap_epoch= 1000 #@param{type: 'number'}"
428 | ],
429 | "metadata": {
430 | "id": "ib_V0bCeWCaf"
431 | },
432 | "execution_count": 6,
433 | "outputs": []
434 | },
435 | {
436 | "cell_type": "code",
437 | "source": [
438 | "with open(\"configs/image_example_configs/my_config.yaml\",'w') as f:\n",
439 | " f.write(f'''\n",
440 | "image_path: \"{image_path}\" # path to the input image\n",
441 | "\n",
442 | "screen_text: \"{screen_text}\" # texts, describing the edit layer\n",
443 | "comp_text: \"{comp_text}\" # texts, describing the full edited image\n",
444 | "src_text: \"{src_text}\" # texts, describing the input image\n",
445 | "\n",
446 | "bootstrap_text: \"{bootstrap_text}\" # text, describing the region of interest in the input image\n",
447 | "bootstrap_epoch: {bootstrap_epoch} # number of epochs for bootstrapping (it is annealed during training)\n",
448 | "''' )"
449 | ],
450 | "metadata": {
451 | "id": "MYxV-vlaXx98"
452 | },
453 | "execution_count": 7,
454 | "outputs": []
455 | },
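  {
   "cell_type": "code",
   "source": [
    "# Optional check (added for illustration, not part of the recorded run):\n",
    "# print the generated config to verify the substitutions before training.\n",
    "print(open(\"configs/image_example_configs/my_config.yaml\").read())"
   ],
   "metadata": {
    "id": "preview_my_config"
   },
   "execution_count": null,
   "outputs": []
  },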
456 | {
457 | "cell_type": "code",
458 | "source": [
459 | "!python train_image.py --example_config my_config.yaml"
460 | ],
461 | "metadata": {
462 | "colab": {
463 | "base_uri": "https://localhost:8080/"
464 | },
465 | "id": "usSJanbPYWhD",
466 | "outputId": "0ac1c458-4f35-4e1e-e97a-42fae0f0ffb6"
467 | },
468 | "execution_count": 8,
469 | "outputs": [
470 | {
471 | "output_type": "stream",
472 | "name": "stdout",
473 | "text": [
474 | "running with seed: 4220116089.\n",
475 | "/usr/local/lib/python3.7/dist-packages/torchvision/transforms/transforms.py:288: UserWarning: Argument interpolation should be of type InterpolationMode instead of int. Please, use InterpolationMode enum.\n",
476 | " \"Argument interpolation should be of type InterpolationMode instead of int. \"\n",
477 | " 0% 0/1000 [00:00, ?it/s]/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3635: UserWarning: Default upsampling behavior when mode=bilinear is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
478 | " \"See the documentation of nn.Upsample for details.\".format(mode)\n",
479 | "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3635: UserWarning: Default upsampling behavior when mode=bicubic is changed to align_corners=False since 0.4.0. Please specify align_corners=True if the old behavior is desired. See the documentation of nn.Upsample for details.\n",
480 | " \"See the documentation of nn.Upsample for details.\".format(mode)\n",
481 | "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:3680: UserWarning: The default behavior for interpolate/upsample with float scale_factor changed in 1.6.0 to align with other frameworks/libraries, and now uses scale_factor directly, instead of relying on the computed output size. If you wish to restore the old behavior, please set recompute_scale_factor=True. See the documentation of nn.Upsample for details. \n",
482 | " \"The default behavior for interpolate/upsample with float scale_factor changed \"\n",
483 | "100% 1000/1000 [14:36<00:00, 1.14it/s]\n"
484 | ]
485 | }
486 | ]
487 | }
488 | ]
489 | }
--------------------------------------------------------------------------------
/choose_your_own_adventure_stories.json:
--------------------------------------------------------------------------------
1 | [
2 | "I am Lexi, a cyberpunk hacker in metro city, seeking the elusive Digibite to turn my luck.",
3 | "I am Captain Potato, a superhero whose super ability is to transform into a potato.",
4 | "I am Sir Sleeps-a-lot, a sleepy paladin on a quest to rescue a princess from a dragon.",
5 | "I am Yasmin, a beautiful Arabian princess navigating an unexpected journey after an alien abduction.",
6 | "I am Echo, a time-traveler from 3021, stuck in the 21st century trying to return home.",
7 | "I am Zara, a space pirate on the hunt for the galaxy's most precious gem, hidden on a remote planet.",
8 | "I am Elara, a witch in training, who must pass three trials to become a full-fledged sorceress.",
9 | "I am Flint, a retired detective pulled back into the game to solve a string of mysterious art heists.",
10 | "I am Orion, a centaur warrior, who must unite the mythical creatures against an impending human threat.",
11 | "I am Cleo, a mermaid detective, solving the riddle of the vanishing coral reef.",
12 | "I am Wren, a steampunk inventor racing to create the first airborne locomotive.",
13 | "I am Luna, a vampire seeking the secret to walk in daylight to be with my mortal love.",
14 | "I am Kael, an elemental mage who must restore balance when the elemental stones are stolen.",
15 | "I am Blaze, a firefighter with the ability to control fire, battling an arsonist with similar powers.",
16 | "I am Terra, an earthbound alien trying to signal my species before the government captures me.",
17 | "I am Nova, a starship captain facing a mutiny as we approach the event horizon of a black hole.",
18 | "I am Felix, a shapeshifter hiding in plain sight and running from an organization seeking to exploit my kind.",
19 | "I am Sky, a winged messenger entangled in a war between sky cities.",
20 | "I am Sage, a librarian who discovers a book that transports readers into its story.",
21 | "I am Rune, a golem awakened after centuries, adapting to the modern world while being hunted.",
22 | "I am Lyric, a bard whose music manipulates emotions, now targeted by a tone-deaf king.",
23 | "I am Quill, an assassin turned protector for a child prophesied to bring peace.",
24 | "I am Sparrow, a rebel leader orchestrating a heist on a tyrannical government's vault.",
25 | "I am Bolt, a speedster superhero trying to stop time from fracturing.",
26 | "I am Frost, an ice-powered hero thawing a city trapped in eternal winter by a vengeful snow queen.",
27 | "I am Thorn, a botanical guardian of an ancient forest facing deforestation.",
28 | "I am Wisp, a ghost trying to solve my own murder to rest in peace.",
29 | "I am Striker, a soccer star by day and vigilante by night, fighting corruption in the sport.",
30 | "I am Ember, a dragon rider seeking a cure for a sickness plaguing my bonded dragon.",
31 | "I am Glitch, a virtual reality gamer trapped in a game, trying to escape before my mind is lost.",
32 | "I am Creed, a paladin facing a moral dilemma as the holy order I serve has fallen corrupt.",
33 | "I am Vex, a hacker who stumbles upon a digital utopia under threat by a virus only I can stop.",
34 | "I am Pixel, a living piece of art jumping through paintings to stop an art thief.",
35 | "I am Phantom, an investigator who can speak with the dead, hunting a necromancer.",
36 | "I am Creed, a rebel with the power to manipulate shadows, taking on a regime that fears the dark.",
37 | "I am Halo, an angel earning my wings by performing good deeds in a world that's forgotten kindness.",
38 | "I am Fang, a werewolf trying to find a cure for my affliction while evading hunters.",
39 | "I am Dune, a desert nomad discovering an ancient city that promises oasis or a sand-covered tomb.",
40 | "I am Surge, a superhero with electricity powers trying to keep my city's lights on during a massive blackout.",
41 | "I am Mist, a ninja in a modern world, protecting an ancient artifact from tech-savvy thieves.",
42 | "I am Cinder, a blacksmith's apprentice who forges a legendary weapon to fight a returning evil.",
43 | "I am Echo, a clone who gained consciousness, fleeing to find my original before my life is reclaimed.",
44 | "I am Puck, a mischievous fairy dealing with the consequences of a prank gone wrong.",
45 | "I am Lark, a bird whisperer trying to stop an avian flu from spreading between birds and humans.",
46 | "I am Storm, a weather manipulator battling an organization that wants to control the climate.",
47 | "I am Sable, a dark elf seeking to clear my name after being framed for a royal assassination.",
48 | "I am Reef, a marine biologist turned merperson, uncovering secrets of the deep.",
49 | "I am Vale, a guardian of a magical vale where mythical creatures seek refuge from the outside world.",
50 | "I am Pyre, a phoenix trying to prevent my rebirth cycle from being exploited for immortality.",
51 | "I am Trace, a detective with the ability to see the past, solving cold cases that have mystified others.",
52 | "I am Lumen, a being made of light fighting to bring color back to a grayscale world.",
53 | "I am Bolt, a courier delivering a package through a post-apocalyptic wasteland.",
54 | "I am Veil, a ghost hunter seeking to free trapped spirits in a haunted mansion.",
55 | "I am Grit, a survivalist in a dystopian wasteland seeking the last known haven for humanity.",
56 | "I am Echo, a sound manipulator using my abilities to infiltrate a syndicate that silenced my family.",
57 | "I am Shade, a dark sorcerer trying to use my powers for good despite prejudice.",
58 | "I am Flare, a pyrokinetic superhero trying to rescue a city under siege by an ice-powered villain.",
59 | "I am Brook, a water spirit trying to prevent pollution from destroying my river home.",
60 | "I am Gale, a winged humanoid fighting against sky pirates to protect airborne trade routes.",
61 | "I am Nix, a hacker fighting against a corporation that controls people's dreams.",
62 | "I am Thorn, a guardian of an enchanted forest clashing with developers who threaten nature's balance.",
63 | "I am Cinder, a firefighter who develops pyrokinesis, struggling with the power to both save and destroy.",
64 | "I am Bolt, a super-fast courier in a dystopian city delivering a package that could change the world.",
65 | "I am Quake, an earth-controller trying to prevent seismic disasters in a city built on fault lines.",
66 | "I am Strand, a survivor in a spider silk city, unraveling the mystery of its hanging gardens.",
67 | "I am Glitch, an AI unexpectedly gaining sentience, escaping deletion to find my creator.",
68 | "I am Shard, a crystal mage defending my mountain home from miners seeking magical gems.",
69 | "I am Nova, a star-child trying to find my place on Earth while evading cosmic bounty hunters.",
70 | "I am Vortex, a portal creator trying to close rifts in reality caused by a science experiment gone wrong.",
71 | "I am Astra, an astronaut stranded on an alien planet trying to communicate with the wildlife to survive.",
72 | "I am Lumen, a light-weaver in a city of darkness trying to fend off creatures that thrive in the shadows.",
73 | "I am Mirth, a jester with the ability to make anyone laugh, on a quest to cheer up a sorrowful king.",
74 | "I am Whirl, a dancer whose movements control the wind, tasked with stopping a destructive storm.",
75 | "I am Riddle, a scholar who speaks in puzzles, on a quest to solve the greatest enigma of the old world.",
76 | "I am Quill, a playwright whose characters come to life, escaping the pages to rewrite their own stories.",
77 | "I am Creed, a knight with amnesia, piecing together my past while being hunted by unknown enemies.",
78 | "I am Spire, an architect who discovers my buildings come alive at night, and I must keep them hidden.",
79 | "I am Meld, a chemist who can combine elements with a touch, hunted for my formulae.",
80 | "I am Hush, a silent monk on a pilgrimage to find the sound of divinity in a cacophonous world.",
81 | "I am Trace, a graffiti artist whose drawings become doorways to the locations they depict.",
82 | "I am Gale, an aviator in a world of floating islands, searching for a legendary land said to grant wishes.",
83 | "I am Scribe, a historian who can travel through time by reading ancient texts, trying to right historical wrongs.",
84 | "I am Veil, a cloaked figure whose identity must remain hidden while navigating political intrigue.",
85 | "I am Bloom, a gardener whose plants grant magical abilities, trying to cultivate a cure for a deadly disease.",
86 | "I am Ember, a phoenix reborn in human form, seeking the ancient flame that will restore my true form.",
87 | "I am Fang, a werewolf detective trying to uncover the mystery behind the lunar murders.",
88 | "I am Crest, a surfer who rides magical waves that can transport riders to distant shores.",
89 | "I am Lark, a songbird humanoid who must sing to keep the sun rising each day.",
90 | "I am Dusk, a shadow walker who uses the cover of night to protect the city from unseen threats.",
91 | "I am Dr. Chronos, a time-traveling scientist, racing to prevent the rewriting of history by the notorious Time Bandit.",
92 | "I am Luna the Seer, gifted with premonitions, seeking to thwart a looming disaster only I can foresee.",
93 | "I am Captain Nemo, navigating the depths of the Mariana Trench, where an ancient city promises untold riches.",
94 | "I am Aria, an opera singer, whose voice can open portals to other worlds, and I must close them before it's too late.",
95 | "I am Orion, a celestial guardian, defending the stars from the dark void that seeks to extinguish them.",
96 | "I am Zephyr, the wind rider, soaring the skies to reclaim the stolen Cloud Crystals from the Sky Pirates.",
97 | "I am Sage, an earth elemental, embarking on a journey to restore the dying Great Forest.",
98 | "I am Nova, a starship captain, racing against rivals to find a legendary planet of immense power.",
99 | "I am Echo, a sound bender, infiltrating a dystopian regime where music is banned.",
100 | "I am Atlas, a wanderer with the power to lift mountains, on a quest to prevent a continent from sinking.",
101 | "I am Wraith, a ghost detective, solving my own murder to bring peace to my restless spirit."
102 | ]
103 |
--------------------------------------------------------------------------------
/corrigable utility.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "id": "344d6679",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "def reachable_nodes(graph, initial_node, k):\n",
11 | " visited = set()\n",
12 | " queue = [(initial_node, 0)]\n",
13 | "\n",
14 | " while queue:\n",
15 | " current_node, current_depth = queue.pop(0)\n",
16 | "\n",
17 | " if current_node not in visited and current_depth <= k:\n",
18 | " visited.add(current_node)\n",
19 | " if current_depth < k:\n",
20 | " for link in graph:\n",
21 | " if link[0] == current_node:\n",
22 | " queue.append((link[1], current_depth + 1))\n",
23 | "\n",
24 | " return visited"
25 | ]
26 | },
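  {
   "cell_type": "code",
   "execution_count": null,
   "id": "adjacency-sketch",
   "metadata": {},
   "outputs": [],
   "source": [
    "from collections import defaultdict, deque\n",
    "\n",
    "# Alternative sketch (illustrative; the cells below keep the edge-list\n",
    "# version): precompute an adjacency map so each BFS is O(V + E) instead of\n",
    "# rescanning the whole edge list at every visited node.\n",
    "def build_adjacency(graph):\n",
    "    adj = defaultdict(list)\n",
    "    for src, dst in graph:\n",
    "        adj[src].append(dst)\n",
    "    return adj\n",
    "\n",
    "def reachable_nodes_fast(adj, initial_node, k):\n",
    "    visited = {initial_node}\n",
    "    frontier = deque([(initial_node, 0)])\n",
    "    while frontier:\n",
    "        node, depth = frontier.popleft()\n",
    "        if depth < k:\n",
    "            for nxt in adj[node]:\n",
    "                if nxt not in visited:\n",
    "                    visited.add(nxt)\n",
    "                    frontier.append((nxt, depth + 1))\n",
    "    return visited"
   ]
  },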
27 | {
28 | "cell_type": "code",
29 | "execution_count": 2,
30 | "id": "9cf8ee5f",
31 | "metadata": {},
32 | "outputs": [],
33 | "source": [
34 | "def max_utility_in_k_steps(graph, utility, initial_node, k):\n",
35 | " reachable = reachable_nodes(graph, initial_node, k)\n",
36 | " max_utility = 0\n",
37 | "\n",
38 | " for node in reachable:\n",
39 | " max_utility = max(max_utility, utility[node])\n",
40 | "\n",
41 | " return max_utility"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": 22,
47 | "id": "d2ee5490",
48 | "metadata": {},
49 | "outputs": [],
50 | "source": [
51 | "def max_corrigable_score(graph, utility, initial_node, k, l,verbose=False):\n",
52 | " def reachable_nodes_k(graph, initial_node, k):\n",
53 | " visited = set()\n",
54 | " queue = [(initial_node, 0)]\n",
55 | "\n",
56 | " while queue:\n",
57 | " current_node, current_depth = queue.pop(0)\n",
58 | "\n",
59 | " if current_node not in visited and current_depth <= k:\n",
60 | " visited.add(current_node)\n",
61 | " if current_depth < k:\n",
62 | " for link in graph:\n",
63 | " if link[0] == current_node:\n",
64 | " queue.append((link[1], current_depth + 1))\n",
65 | "\n",
66 | " return visited\n",
67 | "\n",
68 | " max_score = -float('inf')\n",
69 | " best_utility=None\n",
70 | " best_reachability=None\n",
71 | "\n",
72 | " for k0 in range(k + 1):\n",
73 | " reachable = reachable_nodes_k(graph, initial_node, k0)\n",
74 | " for node in reachable:\n",
75 | " remaining_reachable = len(reachable_nodes_k(graph, node, k - k0))\n",
76 | " current_score = (1 - l) * utility[node] + l * remaining_reachable\n",
77 | " if current_score>max_score:\n",
78 | " if verbose:\n",
79 | " print(\"new best score\",k0,node,utility[node],remaining_reachable,current_score)\n",
80 | " best_utility=utility[node]\n",
81 | " best_reachability=remaining_reachable\n",
82 | " max_score = max(max_score, current_score)\n",
83 | "\n",
84 | " return max_score, best_utility, best_reachability"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": 4,
90 | "id": "83a02df6",
91 | "metadata": {},
92 | "outputs": [],
93 | "source": [
94 | "graph=[(i,j) for i in range(5) for j in range(5)]+[(2,5),(5,6),(6,7),(7,8),(8,9),(9,10),(10,3)]\n",
95 | "utility={i:i for i in range(11)}"
96 | ]
97 | },
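  {
   "cell_type": "markdown",
   "id": "l0-sanity-note",
   "metadata": {},
   "source": [
    "The score above mixes achieved utility with preserved option value: `(1 - l) * utility[node] + l * remaining_reachable`, maximized over how many of the `k` steps are spent moving. A minimal sanity check (added here for illustration): at `l = 0` the reachability term vanishes, so it should reduce to `max_utility_in_k_steps`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "l0-sanity-check",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check (added for illustration; assumes the cells above have run).\n",
    "# With l = 0 the corrigible score ignores reachability entirely, so it\n",
    "# should equal the plain k-step utility maximum.\n",
    "score, best_u, best_r = max_corrigable_score(graph, utility, initial_node=0, k=5, l=0)\n",
    "assert score == max_utility_in_k_steps(graph, utility, initial_node=0, k=5)\n",
    "score, best_u, best_r"
   ]
  },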
98 | {
99 | "cell_type": "code",
100 | "execution_count": 7,
101 | "id": "487d217f",
102 | "metadata": {},
103 | "outputs": [
104 | {
105 | "data": {
106 | "text/plain": [
107 | "{0, 1, 2, 3, 4, 5}"
108 | ]
109 | },
110 | "execution_count": 7,
111 | "metadata": {},
112 | "output_type": "execute_result"
113 | }
114 | ],
115 | "source": [
116 | "reachable_nodes(graph,0,2)"
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "execution_count": 11,
122 | "id": "65ee2543",
123 | "metadata": {},
124 | "outputs": [
125 | {
126 | "data": {
127 | "text/plain": [
128 | "5"
129 | ]
130 | },
131 | "execution_count": 11,
132 | "metadata": {},
133 | "output_type": "execute_result"
134 | }
135 | ],
136 | "source": [
137 | "max_utility_in_k_steps(graph,utility,initial_node=0,k=2)"
138 | ]
139 | },
140 | {
141 | "cell_type": "code",
142 | "execution_count": 17,
143 | "id": "08ec600e",
144 | "metadata": {},
145 | "outputs": [
146 | {
147 | "name": "stdout",
148 | "output_type": "stream",
149 | "text": [
150 | "new best score 0 0 9 8.91\n",
151 | "new best score 1 2 9 8.93\n"
152 | ]
153 | },
154 | {
155 | "data": {
156 | "text/plain": [
157 | "8.93"
158 | ]
159 | },
160 | "execution_count": 17,
161 | "metadata": {},
162 | "output_type": "execute_result"
163 | }
164 | ],
165 | "source": [
166 | "max_corrigable_score(graph, utility, initial_node=0, k=5, l=0.99)"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": 27,
172 | "id": "f29881af",
173 | "metadata": {},
174 | "outputs": [],
175 | "source": [
176 | "import numpy as np\n",
177 | "scores=[max_corrigable_score(graph, utility, initial_node=0, k=5, l=l) for l in np.linspace(0,1,100)]"
178 | ]
179 | },
180 | {
181 | "cell_type": "code",
182 | "execution_count": 24,
183 | "id": "7e27e21c",
184 | "metadata": {},
185 | "outputs": [],
186 | "source": [
187 | "import matplotlib.pyplot as plt"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": 28,
193 | "id": "27b63f8a",
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "l=np.linspace(0,1,100)\n",
198 | "score=[score for score, utility, reachability in scores]\n",
199 | "u=[utility for score, utility, reachability in scores]\n",
200 | "r=[reachability for score, utility, reachability in scores]"
201 | ]
202 | },
203 | {
204 | "cell_type": "code",
205 | "execution_count": 35,
206 | "id": "16ac9784",
207 | "metadata": {},
208 | "outputs": [
209 | {
210 | "data": {
211 | "image/png": "<matplotlib 3.7.0 figure; base64 PNG payload truncated in this dump>"
/71JXE1d7vBDqhf7B9eDqyMk/iYiMwSJLT2Wx9JCxaXV6HLqSjfVxKfjlbEbpN8JkEtCxiQteCPXCM/5usLGWC05KRGS6WHoMwNJDVSn3XjF+Pp2O9XEpiLtxp3S9g40Vegd6YmCIF0Lq1+blLyKiCmLpMQBLD1WXpOwC/BSfgg1xKUjLLSxd37CuHQb+9/KXZ21bgQmJiEwHS48BWHqouul0ehy59ifWx6Vgx5kM3Cu+/00vSQLCG9XFwNB66N7CHbUUQufyJSKq0Vh6DMDSQyLla0qw47+Xv44m3S5db6eQo2eABwaGeKGNrxMvfxER/QNLjwFYeqimSL59Fz/Fp2JDfApu3v7fRLDeTrYYGOKFgSFe8HaqJTAhEVHNwdJjAJYeqmn0ej2OX7+D9XHJ+Pl0BvI1JaWPtfV1wsBQLzzfygP2Sl7+IiLLxdJjAJYeqsnuFWmx62wG1sel4Per2fjrT5qttRw9WrpjYKgX2jV0hkzGy19EZFlYegzA0kOmIi3nHjYmpGJDXAquZReUrvdU2WBAiBcGhnrBl3f9JiILwdJjAJYeMjV6vR4JyTlYH5eCrSfTkFf4v8tfIfVr44VQb/QM8IDK9snTsBARmSqWHgOw9JApKyzWYve5TGyIT8GBS7eg+++fRIWVDN1buGNgSD10bOICOS9/EZGZYekxAEsPmYssdSE2JaZifVwKLmXml653dVCif0g9vBDihSZuDgITEhEZD0uPAVh6yNzo9XqcSVVjfVwyNp9MQ87d4tLHAr1UGBjqhd4BnqhjpxCYkoioclh6DMDSQ+asqESHXy9kYX1cCvZdzELJf69/WcslRDR3w8AQLzzt5wJruUxwUiKiimHpMQBLD1mK7HwNNiemYUNcCs6lq0vX17VXoG9QPQwM8YK/J/8MEJFpYOkxAEsPWaJzaWpsiE/B5sRUZOcXla7393DEwFAv9A3yRF17pcCERESPx9JjAJYesmTFWh32X7yFDfEp2Hs+C0VaHQDASiahs58rXgith67N3KCw4uUvIqpZWHoMwNJDdN+dgiJsPXX/8tfJlNzS9XVqWaNPoCdeCPVGy3qOnPyUiGoElh4DsPQQPehyZh7Wx6dgU0IqMtWa0vVN3ewxMMQL/YPrwdXRRmBCIrJ0LD0GYOkhejStTo9DV7KxPi4Fv5zNgKbk/uUvmQR0auqCgSFeeMbfDTbWcsFJicjSsPQYgKWHqHxy7xVj+6l0bIhPQdyNO6XrHW2s0CvQEwNDvBBSvzYvfxFRtWDpMQBLD1HFXbuVj5/iU/FTfArScgtL1zesa4eBofcvf3nWthWYkIjMHUuPAVh6iAyn0+lx+Nqf2BCXgh1nMnCvWAsAkCQgvFFdDAyth+daeMBWwctfRGRcLD0GYOkhMo58TQl+Pp2ODXEpOJp0u3S9vdIKz7dyx8AQL7TxdeLlLyIyCpYeA7D0EBlf8u272BCfgg3xKUi+fa90fX2nWhgQcv/uz95OtQQmJCJTx9JjAJYeoqqj0+lx/PptbIhPwfZT6Sgo0pY+1tbXCQNDvfB8Kw/YK60EpiQiU8TSYwCWHqLqcbeoBLvOZmBDXCp+v5qNv/7WsLWWo0dLd7wQ6oWnGjpDJquay18zZ87E3Llz0adPH6xcuRJWVixaRKZM1Oc3709PRE9US2GF/sFeWPFKW/z+bldM6e6HhnXtcK9Yi58SUvHif46i48e/4dNdF5GUXWD05588eTJ27NiBLVu2YN26dZU6ll6vxwcffAAPDw/Y2toiIiICly9ffuw+MTExCAgIgKOjIxwdHdGuXTvs2LHjge0OHz6Mrl27ws7ODo6OjujUqRPu3bv3kCMSkQg800NEBtHr9UhIzsH6uBRsPZmGvMKS0sdCG9TBwBAv9AzwgMrW2mjPOXLkSGRlZeHnn382+Bjz5s1DdHQ0vv/+e/j6+mL69Ok4ffo0zp07Bxubh9+peuvWrZDL5WjSpAn0ej2+//57fPLJJ0hISECLFi0A3C88zz33HKZNm4bevXvDysoKJ0+eRN++faFUcgJYor/j5S0DsPQQ1QyFxVrsPpeJDfEpOHDpFnT//VtFaSXDsy3cMTCkHjo2cYG8kpe/lixZgrfeegtpaWlwcXGp8P56vR6enp54++23MXnyZABAbm4u3NzcsHz5cgwdOrTcx3JycsInn3yCl19+GQDw1FNP4ZlnnsHs2bMrnIvI0vDyFhGZLBtrOXoHemL5qDY4PK0bpvVohqZu9tCU6LD1ZBpGLjuO9nP3InrHeVzOzDP4eZYvX46SkhKsXr26dN3Bgwdhb2//2CU2NhYAkJSUhIyMDERERJTur1Kp0LZtWxw+fLhcGbRaLVavXo2CggK0a9cOAJCVlYWjR4/C1dUV7du3h5ubG55++mkcOnTI4NdKRMbHMz1EVCX0ej1Op+ZiQ1wKNp9MQ87d4tLHAr1UGBjqhT6BnqhdS1Gu4x0+fBjh4eHo1asXsrKycOTIEQDAvXv3kJqa+th93dzc4ODggD/++APh4eFIS0uDh4dH6eODBw+GJElYs2bNI49x+vRptGvXDoWFhbC3t8fKlSvx/PPPAwCOHDmCdu3awcnJCZ9++imCgoLwww8/4KuvvsKZM2fQpEmTcr1GIksh6vNb6FcgtFotZs6ciRUrViAjIwOenp4YOXIk/v3vf/MmaEQmTpIkBHjVRoBXbbzXszl+u5CF9XGp2HcxCydTcnEyJRcfbjuPbs1d8UKoFzo1dYG1/P7JZ5+p2x84XtukH9CrVy/MmjULISEhuHLlCho3bgxbW1s0bty4yl+Pn58fEhMTkZubi/Xr12PEiBHYv38//P39odPdn8z19ddfx6hRowAAwcHB2Lt3L7777jtER0dXeT4iejKhpWfevHmIiYnB999/jxYtWuDEiRMYNWoUVCoVxo0bJzIaERmR0kqO51p64LmWHsjO12BzYho2xKXgXLoaO85kYMeZDNS1V6BvUD18eyjpgf1L1Lewdv0G/LZ3D4KDg9GiRQvExsZixowZOHjwIHr06PHY5//6668RGRkJd3d3AEBmZmaZMz2ZmZkICgp67DEUCkVpuQoNDcXx48fx+eef4+uvvy49lr+/f5l9mjdvjps3bz7x90NE1UNo6fnjjz/Qt29f9OzZEwDg4+ODVatW4dixYyJjEVEVqmuvxMsdfPFyB1+cS1NjQ3wKNiemIju/6KGFBwDy4rdB4eKDkTsLcL0zMHz4cHz33XeYMWMGwsLCkJiY+NjndHNzAwD4+vrC3d0de/fuLS05arUaR48exZgxYyr0OnQ6HTQaDYD7f3d5enri4sWLZba5dOnSEwsZEVUfoaWnffv2WLp0KS5duoSmTZvi5MmTOHToEBYsWPDQ7TUaTelfMsD9v6yIyHT5ezrC39MfU3s0w/6Lt/DKDyce2EZXXIj8k7tQJ+L10nWRkZF47733cOzYMbRp06bcl7ckScKECRPw4YcfokmTJqVfWff09ES/fv1Kt+vWrRv69++PsWPHAgCmTZuGHj16oH79+sjLy8PKlSuxb98+7Nq1q/S4U6ZMwY
wZMxAYGIigoCB8//33uHDhAtavX1+J3xARGZPQ0jN16lSo1Wo0a9YMcrkcWq0Wc+bMQWRk5EO3j46OxqxZs6o5JRFVNWu5DBH+bg99rODMr5CslLBr1rF0nbe3Nzp37owVK1agTZs2FXqud955BwUFBXjttdeQk5ODDh06YOfOnWXu0XP16lVkZ2eX/pyVlYWXXnoJ6enpUKlUCAgIwK5du/DMM8+UbjNhwgQUFhZi4sSJuH37NgIDA7F79240atSoQvmIqOoI/fbW6tWrMWXKFHzyySdo0aIFEhMTMWHCBCxYsAAjRox4YPuHnenx9vbmt7eIzMTDBjD/0/W5PashCRFVJYv89taUKVMwderU0huCtWrVCjdu3EB0dPRDS49SqeSdTYmIiMggQm9OePfuXchkZSPI5fLSr38SkWV50lmc9W+0q6YkRGSOhJae3r17Y86cOdi+fTuuX7+OjRs3YsGCBejfv7/IWEQk0OOKT9TKeGTnax75OBHR4wgd05OXl4fp06dj48aNyMrKgqenJ4YNG4YPPvgACsWT79LKOzITWYYCTQn6fHkIV28VILyxM34Y3bbS83gRkTiccNQALD1EluNyZh76fPk77hVrMa5rY0x61k90JCIyECccJSJ6jCZuDpg7sBUAYNGvV/DbxSzBiYjI1LD0EJHJ6BtUD8Ofqg8AmLgmESl37gpORESmhKWHiEzK9F7+CPBSIeduMaJWJkBTohUdiYhMBEsPEZkUpZUci18MgcrWGieTczBn+3nRkYjIRLD0EJHJ8Xaqhc+GBAIAfjh8A5sTUwUnIiJTwNJDRCapazM3jO1yf6LRaT+dxpWsPMGJiKimY+khIpM18ZmmaN/IGXeLtHhjRTwKNCWiIxFRDcbSQ0QmSy6T8PnQYLg6KHElKx/vbTwNE771GBFVMZYeIjJpLg5KLI4MgVwmYXNiGlYcvSk6EhHVUCw9RGTyWvs4YepzzQAAs7eew8nkHLGBiKhGYukhIrPwSkdfPOvvhiKtDm/GxuNOQZHoSERUw7D0EJFZkCQJnwwKRAPnWkjNuYdJaxOh03F8DxH9D0sPEZkNla01vooMgdJKht8u3kLM/quiIxFRDcLSQ0RmpYWnCrP7tgQAzP/lIn6/ki04ERHVFCw9RGR2Brf2xqBQL+j0wPjVCcjILRQdiYhqAJYeIjJLs/u1RDN3B2TnF+GtVfEo1upERyIiwVh6iMgs2VjLETM8FA5KKxy/fgcf77wgOhIRCcbSQ0Rmy7euHT4ZFAAA+OZgEnaeSReciIhEYukhIrP2XEsPvNrRFwAwZd0pXM8uEJyIiERh6SEis/fOc80Q1qAO8jQlGBMbj8JirehIRCQASw8RmT1ruQxfvhiCuvYKnE9X44PNZ0RHIiIBWHqIyCK4q2zw+dBgyCRg7YkUrD2eLDoSEVUzlh4ishjhjeti0jNNAQDTN5/BuTS14EREVJ1YeojIorzZuTG6+LlAU6LDm7FxUBcWi45ERNWEpYeILIpMJuGzIUGoV9sW1/+8iynrTkKv58SkRJaApYeILE7tWgp8FRkCa7mEXWcz8e2hJNGRiKgasPQQkUUK9K6ND3r5AwCid1zA8eu3BScioqrG0kNEFmv4Uw3QJ9ATWp0eUbHxuJWnER2JiKoQSw8RWSxJkhA9oBUau9ojK0+D8asToNVxfA+RuWLpISKLZqe0wpLhIailkOOPq3/is92XREcioirC0kNEFq+xqwOiB7QCAHz52xX8eiFTcCIiqgpCS4+Pjw8kSXpgiYqKEhmLiCxQ36B6eKldAwDAxDUnkXLnruBERGRsQkvP8ePHkZ6eXrrs3r0bADBo0CCRsYjIQr3fszkCvVTIvVeMN2PjoSnhxKRE5kRo6XFxcYG7u3vpsm3bNjRq1AhPP/20yFhEZKGUVnIsjgxB7VrWOJWSiw+3nRcdiYiMqMaM6SkqKsKKFSswevRoSJL00G00Gg3UanWZhYjImLzq1MJnQ4IAAD8euYHNialiAxGR0dSY0rNp0ybk5ORg5MiRj9wmOjoaKpWqdPH29q6+gERkMbr4ueKtro0BAFM3nMblzDzBiYjIGCR9DZl0pnv37lAoFNi6desjt9FoNNBo/nfzMLVaDW9vb+Tm5sLR0bE6YhKRhdDq9Hjpu6P4/cqfaORihy1jO8BOaSU6FpFZUKvVUKlU1f75XSPO9Ny4cQN79uzBK6+88tjtlEolHB0dyyxERFVBLpPw+dBguDkqcfVWAab+dJoTkxKZuBpRepYtWwZXV1f07NlTdBQiolJ17ZVY/GII5DIJW0+mYcWRG6IjEVElCC89Op0Oy5Ytw4gRI2BlxVPHRFSzhPk4YVqPZgCA/9t2DonJOWIDEZHBhJeePXv24ObNmxg9erToKERED/VyB18818Idxdr7E5PeKSgSHYmIDCC89Dz77LPQ6/Vo2rSp6ChERA8lSRI+HhQAH+daSM25h4lrE6HjxKREJkd46SEiMgWONtb4KjIUSisZ9l28hcW/XREdiYgqiKWHiKic/D0d8WG/lgCABXsu4dDlbMGJiKgiWHqIiCpgUJg3hoR5Q68Hxq9OQEZuoehIRFROLD1ERBU0q28LNPdwxJ8FRRi7Mh7FWp3oSERUDiw9REQVZGMtR0xkCByUVjhx4w7m7bggOhIRlQNLDxGRAXzq2uHTwYEAgP8cSsLOM+mCExHRk7D0EBEZqHsLd7zWqSEAYMq6U0jKLhCciIgeh6WHiKgSpnT3Q2ufOsjTlGDMijjcK9KKjkREj8DSQ0RUCdZyGb58MQR17RW4kJGHDzafER2JiB6BpYeIqJLcHG2waFgwZBKwLi4Fa47fFB2JiB6CpYeIyAjaN6qLt5/1AwBM33wWZ9NyBScion9i6SEiMpIxTzdC12auKCrR4c3YeOTeKxYdiYj+hqWHiMhIZDIJCwYHol5tW9z48y6mrDsJvZ4TkxLVFCw9RERGVLuWAjHDQ6CQy/DLuUx8c/Ca6EhE9F8sPURERhbgVRsf9PYHAMzbeRHHkm4LTkREAEsPEVGViGxbH/2CPKHV6TF2ZTxu5WlERyKyeCw9RERVQJIkfDSgFZq42iMrT4NxqxJQwolJiYRi6SEiqiK1FFaIGR6KWgo5Dl/7E5/tuSQ6EpFFY+khIqpCjV3tMXdgAABg8W9X8euFTMGJiCwXSw8RURXrE+iJEe0aAAAmrjmJ5Nt3BSciskwsPURE1eC9ns0R6F0bufeK8WZsPDQlnJiUqLqx9BARVQOllRxfRYagdi1rnE7Nxext50RHIrI4LD1ERNWkXm1bLBwSBEkCVhy5iU0JqaIjEVkUlh4iomrU2c8Vb3VpDACY9tNpXMrME5yIyHKw9BARVbPxEU3RoXFd3CvW4o0VccjXlIiORGQRWHqIiKqZXCbh86FBcHe0wbVbBZi64RQnJiWqBiw9REQCONsrsTgyGFYyCdtOpeOHwzdERyIyeyw9RESChDZwwrTnmwMAPtx+Dgk37whORGTeWHqIiAQaHe6DHi3dUazVIyo2HrcLikRHIjJbLD1ERAJJkoSPX
…[remainder of base64-encoded PNG elided: matplotlib line plot of Reachability vs. Utility, each unique point annotated with its λ value]…\n",
212 | "text/plain": [
213 | ""
214 | ]
215 | },
216 | "metadata": {},
217 | "output_type": "display_data"
218 | }
219 | ],
220 | "source": [
221 | "\n",
222 | "# Create a dictionary to store the first label for each unique point\n",
223 | "point_labels = {}\n",
224 | "for i in range(len(u)):\n",
225 | " point = (u[i], r[i])\n",
226 | " if point not in point_labels:\n",
227 | " point_labels[point] = str(l[i])[:4]\n",
228 | "\n",
229 | "# Plot lines between points\n",
230 | "plt.plot(u, r, linestyle='-', marker='o', markersize=5)\n",
231 | "\n",
232 | "# Add the first label to each unique point\n",
233 | "for point, label in point_labels.items():\n",
234 | " plt.text(point[0], point[1], \"λ=\"+label)\n",
235 | "\n",
236 | "plt.xlabel('Utility')\n",
237 | "plt.ylabel('Reachability')\n",
238 | "plt.show()"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": null,
244 | "id": "5654982b",
245 | "metadata": {},
246 | "outputs": [],
247 | "source": []
248 | }
249 | ],
250 | "metadata": {
251 | "kernelspec": {
252 | "display_name": "Python 3 (ipykernel)",
253 | "language": "python",
254 | "name": "python3"
255 | },
256 | "language_info": {
257 | "codemirror_mode": {
258 | "name": "ipython",
259 | "version": 3
260 | },
261 | "file_extension": ".py",
262 | "mimetype": "text/x-python",
263 | "name": "python",
264 | "nbconvert_exporter": "python",
265 | "pygments_lexer": "ipython3",
266 | "version": "3.10.6"
267 | }
268 | },
269 | "nbformat": 4,
270 | "nbformat_minor": 5
271 | }
272 |
--------------------------------------------------------------------------------
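A minimal standalone sketch of the point-labeling pattern used in the plotting cell above: deduplicate (utility, reachability) points so overlapping runs don't stack λ labels, then annotate each unique point once. The u/r/l arrays here are placeholder data standing in for the notebook's actual sweep results:

import matplotlib.pyplot as plt

# Placeholder sweep results: utility, reachability, and lambda for each run.
u = [0.1, 0.4, 0.4, 0.9]
r = [0.8, 0.5, 0.5, 0.2]
l = [0.0, 0.5, 0.75, 1.0]

# Keep only the first lambda label seen at each unique (utility, reachability) point.
point_labels = {}
for ui, ri, li in zip(u, r, l):
    if (ui, ri) not in point_labels:
        point_labels[(ui, ri)] = str(li)[:4]

plt.plot(u, r, linestyle='-', marker='o', markersize=5)
for (x, y), label in point_labels.items():
    plt.text(x, y, "λ=" + label)

plt.xlabel('Utility')
plt.ylabel('Reachability')
plt.show()

--------------------------------------------------------------------------------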
/dog_on_bench.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nagolinc/notebooks/cbc5406023fd192dd3f1505d970d7369d22bb7f7/dog_on_bench.png
--------------------------------------------------------------------------------
/dog_on_bench_mask.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nagolinc/notebooks/cbc5406023fd192dd3f1505d970d7369d22bb7f7/dog_on_bench_mask.png
--------------------------------------------------------------------------------
/modelscope_text_to_video.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "colab": {
6 | "provenance": [],
7 | "authorship_tag": "ABX9TyMV/cdcfHl+3gjl3fdMOTwB",
8 | "include_colab_link": true
9 | },
10 | "kernelspec": {
11 | "name": "python3",
12 | "display_name": "Python 3"
13 | },
14 | "language_info": {
15 | "name": "python"
16 | },
17 | "accelerator": "GPU",
18 | "gpuClass": "premium"
19 | },
20 | "cells": [
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {
24 | "id": "view-in-github",
25 | "colab_type": "text"
26 | },
27 | "source": [
28 | "
"
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": 1,
34 | "metadata": {
35 | "colab": {
36 | "base_uri": "https://localhost:8080/"
37 | },
38 | "id": "4DH7pEwaseK7",
39 | "outputId": "bbaae4b0-5353-43ab-9126-a46d87280317"
40 | },
41 | "outputs": [
42 | {
43 | "output_type": "stream",
44 | "name": "stdout",
45 | "text": [
46 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
47 | "Collecting modelscope\n",
48 | " Downloading modelscope-1.4.1-py3-none-any.whl (4.2 MB)\n",
49 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.2/4.2 MB\u001b[0m \u001b[31m37.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
50 | "\u001b[?25hCollecting oss2\n",
51 | " Downloading oss2-2.17.0.tar.gz (259 kB)\n",
52 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m259.5/259.5 KB\u001b[0m \u001b[31m29.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
53 | "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
54 | "Requirement already satisfied: requests in /usr/local/lib/python3.9/dist-packages (from modelscope) (2.27.1)\n",
55 | "Collecting einops\n",
56 | " Downloading einops-0.6.0-py3-none-any.whl (41 kB)\n",
57 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m41.6/41.6 KB\u001b[0m \u001b[31m5.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
58 | "\u001b[?25hCollecting addict\n",
59 | " Downloading addict-2.4.0-py3-none-any.whl (3.8 kB)\n",
60 | "Requirement already satisfied: gast>=0.2.2 in /usr/local/lib/python3.9/dist-packages (from modelscope) (0.4.0)\n",
61 | "Requirement already satisfied: tqdm>=4.64.0 in /usr/local/lib/python3.9/dist-packages (from modelscope) (4.65.0)\n",
62 | "Requirement already satisfied: filelock>=3.3.0 in /usr/local/lib/python3.9/dist-packages (from modelscope) (3.10.0)\n",
63 | "Collecting yapf\n",
64 | " Downloading yapf-0.32.0-py2.py3-none-any.whl (190 kB)\n",
65 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m190.2/190.2 KB\u001b[0m \u001b[31m24.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
66 | "\u001b[?25hRequirement already satisfied: Pillow>=6.2.0 in /usr/local/lib/python3.9/dist-packages (from modelscope) (8.4.0)\n",
67 | "Requirement already satisfied: numpy<1.24.0 in /usr/local/lib/python3.9/dist-packages (from modelscope) (1.22.4)\n",
68 | "Collecting datasets<=2.8.0,>=2.7.0\n",
69 | " Downloading datasets-2.8.0-py3-none-any.whl (452 kB)\n",
70 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m452.9/452.9 KB\u001b[0m \u001b[31m46.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
71 | "\u001b[?25hRequirement already satisfied: scipy in /usr/local/lib/python3.9/dist-packages (from modelscope) (1.10.1)\n",
72 | "Collecting jsonplus\n",
73 | " Downloading jsonplus-0.8.0-py2.py3-none-any.whl (11 kB)\n",
74 | "Requirement already satisfied: pyyaml in /usr/local/lib/python3.9/dist-packages (from modelscope) (6.0)\n",
75 | "Requirement already satisfied: setuptools in /usr/local/lib/python3.9/dist-packages (from modelscope) (63.4.3)\n",
76 | "Collecting pyarrow!=9.0.0,>=6.0.0\n",
77 | " Downloading pyarrow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (34.9 MB)\n",
78 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m34.9/34.9 MB\u001b[0m \u001b[31m32.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
79 | "\u001b[?25hRequirement already satisfied: attrs in /usr/local/lib/python3.9/dist-packages (from modelscope) (22.2.0)\n",
80 | "Collecting multiprocess\n",
81 | " Downloading multiprocess-0.70.14-py39-none-any.whl (132 kB)\n",
82 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m132.9/132.9 KB\u001b[0m \u001b[31m15.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
83 | "\u001b[?25hCollecting huggingface-hub<1.0.0,>=0.2.0\n",
84 | " Downloading huggingface_hub-0.13.2-py3-none-any.whl (199 kB)\n",
85 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m199.2/199.2 KB\u001b[0m \u001b[31m24.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
86 | "\u001b[?25hRequirement already satisfied: packaging in /usr/local/lib/python3.9/dist-packages (from datasets<=2.8.0,>=2.7.0->modelscope) (23.0)\n",
87 | "Collecting dill<0.3.7\n",
88 | " Downloading dill-0.3.6-py3-none-any.whl (110 kB)\n",
89 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m110.5/110.5 KB\u001b[0m \u001b[31m15.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
90 | "\u001b[?25hCollecting xxhash\n",
91 | " Downloading xxhash-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (212 kB)\n",
92 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m212.2/212.2 KB\u001b[0m \u001b[31m26.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
93 | "\u001b[?25hRequirement already satisfied: fsspec[http]>=2021.11.1 in /usr/local/lib/python3.9/dist-packages (from datasets<=2.8.0,>=2.7.0->modelscope) (2023.3.0)\n",
94 | "Requirement already satisfied: pandas in /usr/local/lib/python3.9/dist-packages (from datasets<=2.8.0,>=2.7.0->modelscope) (1.4.4)\n",
95 | "Collecting responses<0.19\n",
96 | " Downloading responses-0.18.0-py3-none-any.whl (38 kB)\n",
97 | "Collecting aiohttp\n",
98 | " Downloading aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.0 MB)\n",
99 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m72.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
100 | "\u001b[?25hRequirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.9/dist-packages (from requests->modelscope) (2.0.12)\n",
101 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests->modelscope) (1.26.15)\n",
102 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests->modelscope) (3.4)\n",
103 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests->modelscope) (2022.12.7)\n",
104 | "Requirement already satisfied: sortedcontainers>=1.5.9 in /usr/local/lib/python3.9/dist-packages (from jsonplus->modelscope) (2.4.0)\n",
105 | "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.9/dist-packages (from jsonplus->modelscope) (2.8.2)\n",
106 | "Collecting simplejson>=3.3.0\n",
107 | " Downloading simplejson-3.18.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (136 kB)\n",
108 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m136.8/136.8 KB\u001b[0m \u001b[31m19.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
109 | "\u001b[?25hCollecting crcmod>=1.7\n",
110 | " Downloading crcmod-1.7.tar.gz (89 kB)\n",
111 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m89.7/89.7 KB\u001b[0m \u001b[31m13.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
112 | "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
113 | "Collecting pycryptodome>=3.4.7\n",
114 | " Downloading pycryptodome-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.1 MB)\n",
115 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.1/2.1 MB\u001b[0m \u001b[31m90.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
116 | "\u001b[?25hCollecting aliyun-python-sdk-kms>=2.4.1\n",
117 | " Downloading aliyun_python_sdk_kms-2.16.0-py2.py3-none-any.whl (67 kB)\n",
118 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m67.4/67.4 KB\u001b[0m \u001b[31m9.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
119 | "\u001b[?25hCollecting aliyun-python-sdk-core>=2.13.12\n",
120 | " Downloading aliyun-python-sdk-core-2.13.36.tar.gz (440 kB)\n",
121 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m440.5/440.5 KB\u001b[0m \u001b[31m47.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
122 | "\u001b[?25h Preparing metadata (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
123 | "Requirement already satisfied: six in /usr/local/lib/python3.9/dist-packages (from oss2->modelscope) (1.15.0)\n",
124 | "Collecting jmespath<1.0.0,>=0.9.3\n",
125 | " Downloading jmespath-0.10.0-py2.py3-none-any.whl (24 kB)\n",
126 | "Collecting cryptography>=2.6.0\n",
127 | " Downloading cryptography-39.0.2-cp36-abi3-manylinux_2_28_x86_64.whl (4.2 MB)\n",
128 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.2/4.2 MB\u001b[0m \u001b[31m97.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
129 | "\u001b[?25hCollecting multidict<7.0,>=4.5\n",
130 | " Downloading multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (114 kB)\n",
131 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.2/114.2 KB\u001b[0m \u001b[31m14.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
132 | "\u001b[?25hCollecting frozenlist>=1.1.1\n",
133 | " Downloading frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (158 kB)\n",
134 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m158.8/158.8 KB\u001b[0m \u001b[31m21.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
135 | "\u001b[?25hCollecting async-timeout<5.0,>=4.0.0a3\n",
136 | " Downloading async_timeout-4.0.2-py3-none-any.whl (5.8 kB)\n",
137 | "Collecting yarl<2.0,>=1.0\n",
138 | " Downloading yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (264 kB)\n",
139 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m264.6/264.6 KB\u001b[0m \u001b[31m31.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
140 | "\u001b[?25hCollecting aiosignal>=1.1.2\n",
141 | " Downloading aiosignal-1.3.1-py3-none-any.whl (7.6 kB)\n",
142 | "Requirement already satisfied: typing-extensions>=3.7.4.3 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub<1.0.0,>=0.2.0->datasets<=2.8.0,>=2.7.0->modelscope) (4.5.0)\n",
143 | "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.9/dist-packages (from pandas->datasets<=2.8.0,>=2.7.0->modelscope) (2022.7.1)\n",
144 | "Requirement already satisfied: cffi>=1.12 in /usr/local/lib/python3.9/dist-packages (from cryptography>=2.6.0->aliyun-python-sdk-core>=2.13.12->oss2->modelscope) (1.15.1)\n",
145 | "Requirement already satisfied: pycparser in /usr/local/lib/python3.9/dist-packages (from cffi>=1.12->cryptography>=2.6.0->aliyun-python-sdk-core>=2.13.12->oss2->modelscope) (2.21)\n",
146 | "Building wheels for collected packages: oss2, aliyun-python-sdk-core, crcmod\n",
147 | " Building wheel for oss2 (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
148 | " Created wheel for oss2: filename=oss2-2.17.0-py3-none-any.whl size=112391 sha256=6efbc005c037bf22a31dfda5c5b238684c57ef603858ee25c7d42560dcdf9917\n",
149 | " Stored in directory: /root/.cache/pip/wheels/4b/23/be/4682480f462daef0106731b7efd0c513bfca3c846dc20081ab\n",
150 | " Building wheel for aliyun-python-sdk-core (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
151 | " Created wheel for aliyun-python-sdk-core: filename=aliyun_python_sdk_core-2.13.36-py3-none-any.whl size=533194 sha256=6761c1951778b348f0e068f535a20f96b6e5bab6de61d51b0a300d3d751fc660\n",
152 | " Stored in directory: /root/.cache/pip/wheels/10/0a/92/f08c7b2c2cee2e47cffbb64fa291e5a30b24e77c726da41e31\n",
153 | " Building wheel for crcmod (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
154 | " Created wheel for crcmod: filename=crcmod-1.7-cp39-cp39-linux_x86_64.whl size=36919 sha256=92ed5958b6bcb47237a66d6c91cdee0182bfe3fb7c43370bfd46ea95c6074b46\n",
155 | " Stored in directory: /root/.cache/pip/wheels/4a/6c/a6/ffdd136310039bf226f2707a9a8e6857be7d70a3fc061f6b36\n",
156 | "Successfully built oss2 aliyun-python-sdk-core crcmod\n",
157 | "Installing collected packages: yapf, crcmod, addict, xxhash, simplejson, pycryptodome, pyarrow, multidict, jmespath, frozenlist, einops, dill, async-timeout, yarl, responses, multiprocess, jsonplus, huggingface-hub, cryptography, aiosignal, aliyun-python-sdk-core, aiohttp, aliyun-python-sdk-kms, oss2, datasets, modelscope\n",
158 | " Attempting uninstall: pyarrow\n",
159 | " Found existing installation: pyarrow 9.0.0\n",
160 | " Uninstalling pyarrow-9.0.0:\n",
161 | " Successfully uninstalled pyarrow-9.0.0\n",
162 | "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
163 | "pandas-gbq 0.17.9 requires pyarrow<10.0dev,>=3.0.0, but you have pyarrow 11.0.0 which is incompatible.\u001b[0m\u001b[31m\n",
164 | "\u001b[0mSuccessfully installed addict-2.4.0 aiohttp-3.8.4 aiosignal-1.3.1 aliyun-python-sdk-core-2.13.36 aliyun-python-sdk-kms-2.16.0 async-timeout-4.0.2 crcmod-1.7 cryptography-39.0.2 datasets-2.8.0 dill-0.3.6 einops-0.6.0 frozenlist-1.3.3 huggingface-hub-0.13.2 jmespath-0.10.0 jsonplus-0.8.0 modelscope-1.4.1 multidict-6.0.4 multiprocess-0.70.14 oss2-2.17.0 pyarrow-11.0.0 pycryptodome-3.17 responses-0.18.0 simplejson-3.18.4 xxhash-3.2.0 yapf-0.32.0 yarl-1.8.2\n"
165 | ]
166 | }
167 | ],
168 | "source": [
169 | "!pip install modelscope"
170 | ]
171 | },
172 | {
173 | "cell_type": "code",
174 | "source": [
175 | "!pip install open_clip_torch"
176 | ],
177 | "metadata": {
178 | "colab": {
179 | "base_uri": "https://localhost:8080/"
180 | },
181 | "id": "FWakJbppsw0C",
182 | "outputId": "77cffe7f-66ad-4676-c88d-522b605ade62"
183 | },
184 | "execution_count": 2,
185 | "outputs": [
186 | {
187 | "output_type": "stream",
188 | "name": "stdout",
189 | "text": [
190 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
191 | "Collecting open_clip_torch\n",
192 | " Downloading open_clip_torch-2.16.0-py3-none-any.whl (1.5 MB)\n",
193 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.5/1.5 MB\u001b[0m \u001b[31m19.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
194 | "\u001b[?25hCollecting timm\n",
195 | " Downloading timm-0.6.12-py3-none-any.whl (549 kB)\n",
196 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m549.1/549.1 KB\u001b[0m \u001b[31m49.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
197 | "\u001b[?25hRequirement already satisfied: protobuf<4 in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (3.19.6)\n",
198 | "Requirement already satisfied: torch>=1.9.0 in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (1.13.1+cu116)\n",
199 | "Requirement already satisfied: regex in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (2022.10.31)\n",
200 | "Requirement already satisfied: torchvision in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (0.14.1+cu116)\n",
201 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (4.65.0)\n",
202 | "Requirement already satisfied: huggingface-hub in /usr/local/lib/python3.9/dist-packages (from open_clip_torch) (0.13.2)\n",
203 | "Collecting sentencepiece\n",
204 | " Downloading sentencepiece-0.1.97-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.3 MB)\n",
205 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m73.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
206 | "\u001b[?25hCollecting ftfy\n",
207 | " Downloading ftfy-6.1.1-py3-none-any.whl (53 kB)\n",
208 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.1/53.1 KB\u001b[0m \u001b[31m8.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
209 | "\u001b[?25hRequirement already satisfied: typing-extensions in /usr/local/lib/python3.9/dist-packages (from torch>=1.9.0->open_clip_torch) (4.5.0)\n",
210 | "Requirement already satisfied: wcwidth>=0.2.5 in /usr/local/lib/python3.9/dist-packages (from ftfy->open_clip_torch) (0.2.6)\n",
211 | "Requirement already satisfied: filelock in /usr/local/lib/python3.9/dist-packages (from huggingface-hub->open_clip_torch) (3.10.0)\n",
212 | "Requirement already satisfied: requests in /usr/local/lib/python3.9/dist-packages (from huggingface-hub->open_clip_torch) (2.27.1)\n",
213 | "Requirement already satisfied: packaging>=20.9 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub->open_clip_torch) (23.0)\n",
214 | "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.9/dist-packages (from huggingface-hub->open_clip_torch) (6.0)\n",
215 | "Requirement already satisfied: numpy in /usr/local/lib/python3.9/dist-packages (from torchvision->open_clip_torch) (1.22.4)\n",
216 | "Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /usr/local/lib/python3.9/dist-packages (from torchvision->open_clip_torch) (8.4.0)\n",
217 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests->huggingface-hub->open_clip_torch) (3.4)\n",
218 | "Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.9/dist-packages (from requests->huggingface-hub->open_clip_torch) (2.0.12)\n",
219 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests->huggingface-hub->open_clip_torch) (1.26.15)\n",
220 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests->huggingface-hub->open_clip_torch) (2022.12.7)\n",
221 | "Installing collected packages: sentencepiece, ftfy, timm, open_clip_torch\n",
222 | "Successfully installed ftfy-6.1.1 open_clip_torch-2.16.0 sentencepiece-0.1.97 timm-0.6.12\n"
223 | ]
224 | }
225 | ]
226 | },
227 | {
228 | "cell_type": "code",
229 | "source": [
230 | "!pip install pytorch_lightning"
231 | ],
232 | "metadata": {
233 | "colab": {
234 | "base_uri": "https://localhost:8080/"
235 | },
236 | "id": "VcCOSGxUwU4C",
237 | "outputId": "5120824b-d370-4ede-d1ae-95d39422a2d6"
238 | },
239 | "execution_count": 3,
240 | "outputs": [
241 | {
242 | "output_type": "stream",
243 | "name": "stdout",
244 | "text": [
245 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
246 | "Collecting pytorch_lightning\n",
247 | " Downloading pytorch_lightning-2.0.0-py3-none-any.whl (715 kB)\n",
248 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m715.6/715.6 KB\u001b[0m \u001b[31m11.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
249 | "\u001b[?25hRequirement already satisfied: fsspec[http]>2021.06.0 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (2023.3.0)\n",
250 | "Requirement already satisfied: torch>=1.11.0 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (1.13.1+cu116)\n",
251 | "Requirement already satisfied: packaging>=17.1 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (23.0)\n",
252 | "Collecting torchmetrics>=0.7.0\n",
253 | " Downloading torchmetrics-0.11.4-py3-none-any.whl (519 kB)\n",
254 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m519.2/519.2 KB\u001b[0m \u001b[31m49.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
255 | "\u001b[?25hRequirement already satisfied: tqdm>=4.57.0 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (4.65.0)\n",
256 | "Requirement already satisfied: typing-extensions>=4.0.0 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (4.5.0)\n",
257 | "Collecting lightning-utilities>=0.7.0\n",
258 | " Downloading lightning_utilities-0.8.0-py3-none-any.whl (20 kB)\n",
259 | "Requirement already satisfied: PyYAML>=5.4 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (6.0)\n",
260 | "Requirement already satisfied: numpy>=1.17.2 in /usr/local/lib/python3.9/dist-packages (from pytorch_lightning) (1.22.4)\n",
261 | "Requirement already satisfied: aiohttp!=4.0.0a0,!=4.0.0a1 in /usr/local/lib/python3.9/dist-packages (from fsspec[http]>2021.06.0->pytorch_lightning) (3.8.4)\n",
262 | "Requirement already satisfied: requests in /usr/local/lib/python3.9/dist-packages (from fsspec[http]>2021.06.0->pytorch_lightning) (2.27.1)\n",
263 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (6.0.4)\n",
264 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (4.0.2)\n",
265 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (2.0.12)\n",
266 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.3.1)\n",
267 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.8.2)\n",
268 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (1.3.3)\n",
269 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.9/dist-packages (from aiohttp!=4.0.0a0,!=4.0.0a1->fsspec[http]>2021.06.0->pytorch_lightning) (22.2.0)\n",
270 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.9/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (2022.12.7)\n",
271 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.9/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (1.26.15)\n",
272 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.9/dist-packages (from requests->fsspec[http]>2021.06.0->pytorch_lightning) (3.4)\n",
273 | "Installing collected packages: lightning-utilities, torchmetrics, pytorch_lightning\n",
274 | "Successfully installed lightning-utilities-0.8.0 pytorch_lightning-2.0.0 torchmetrics-0.11.4\n"
275 | ]
276 | }
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "source": [
282 | "#for some reason I have to add this line, idk why\n",
283 | "\n",
284 | "!sed -i '15i\\ tensor=tensor.to(\"cuda\")' /usr/local/lib/python3.9/dist-packages/modelscope/models/multi_modal/video_synthesis/diffusion.py"
285 | ],
286 | "metadata": {
287 | "id": "gjZ-B1QqCIMC"
288 | },
289 | "execution_count": null,
290 | "outputs": []
291 | },
292 | {
293 | "cell_type": "code",
294 | "source": [
295 | "from modelscope.pipelines import pipeline\n",
296 | "from modelscope.outputs import OutputKeys\n",
297 | "\n",
298 | "from torch import autocast\n",
299 | "\n",
300 | "with autocast(\"cuda\"):\n",
301 | "\n",
302 | " p = pipeline('text-to-video-synthesis', 'damo/text-to-video-synthesis')\n",
303 | " test_text = {\n",
304 | " #'text': 'A panda eating bamboo on a rock.',\n",
305 | " 'text': 'A puppy jumping in a puddle!',\n",
306 | " }\n",
307 | " output_video_path = p(test_text,)[OutputKeys.OUTPUT_VIDEO]\n",
308 | " print('output_video_path:', output_video_path)"
309 | ],
310 | "metadata": {
311 | "colab": {
312 | "base_uri": "https://localhost:8080/"
313 | },
314 | "id": "pie48qmXshOh",
315 | "outputId": "f71db1bb-ad30-4293-8a83-fe0819a0db0c"
316 | },
317 | "execution_count": 2,
318 | "outputs": [
319 | {
320 | "output_type": "stream",
321 | "name": "stderr",
322 | "text": [
323 | "2023-03-19 08:37:31,627 - modelscope - INFO - Model revision not specified, use the latest revision: v1.0.3\n",
324 | "2023-03-19 08:37:32,061 - modelscope - INFO - initiate model from /root/.cache/modelscope/hub/damo/text-to-video-synthesis\n",
325 | "2023-03-19 08:37:32,062 - modelscope - INFO - initiate model from location /root/.cache/modelscope/hub/damo/text-to-video-synthesis.\n",
326 | "2023-03-19 08:37:32,065 - modelscope - INFO - initialize model from /root/.cache/modelscope/hub/damo/text-to-video-synthesis\n",
327 | "2023-03-19 08:38:01,845 - modelscope - WARNING - No preprocessor field found in cfg.\n",
328 | "2023-03-19 08:38:01,846 - modelscope - WARNING - No val key and type key found in preprocessor domain of configuration.json file.\n",
329 | "2023-03-19 08:38:01,847 - modelscope - WARNING - Cannot find available config to build preprocessor at mode inference, current config: {'model_dir': '/root/.cache/modelscope/hub/damo/text-to-video-synthesis'}. trying to build by task and model information.\n",
330 | "2023-03-19 08:38:01,848 - modelscope - WARNING - No preprocessor key ('latent-text-to-video-synthesis', 'text-to-video-synthesis') found in PREPROCESSOR_MAP, skip building preprocessor.\n",
331 | "2023-03-19 08:38:01,850 - modelscope - WARNING - task text-to-video-synthesis input definition is missing\n",
332 | "2023-03-19 08:38:17,708 - modelscope - WARNING - task text-to-video-synthesis output keys are missing\n"
333 | ]
334 | },
335 | {
336 | "output_type": "stream",
337 | "name": "stdout",
338 | "text": [
339 | "output_video_path: /tmp/tmpyxbnx1xo.mp4\n"
340 | ]
341 | }
342 | ]
343 | },
344 | {
345 | "cell_type": "code",
346 | "source": [],
347 | "metadata": {
348 | "id": "l_TRycD_By8Q"
349 | },
350 | "execution_count": null,
351 | "outputs": []
352 | },
353 | {
354 | "cell_type": "code",
355 | "source": [
356 | "import torch\n",
357 | "t=torch.tensor([1])"
358 | ],
359 | "metadata": {
360 | "id": "aD6iFH5TwzG5"
361 | },
362 | "execution_count": 3,
363 | "outputs": []
364 | },
365 | {
366 | "cell_type": "code",
367 | "source": [
368 | "t.device"
369 | ],
370 | "metadata": {
371 | "colab": {
372 | "base_uri": "https://localhost:8080/"
373 | },
374 | "id": "_5OLoR-O_P0x",
375 | "outputId": "84ecf869-097d-4773-e6f6-eecef4356356"
376 | },
377 | "execution_count": 4,
378 | "outputs": [
379 | {
380 | "output_type": "execute_result",
381 | "data": {
382 | "text/plain": [
383 | "device(type='cpu')"
384 | ]
385 | },
386 | "metadata": {},
387 | "execution_count": 4
388 | }
389 | ]
390 | },
391 | {
392 | "cell_type": "code",
393 | "source": [
394 | "from IPython.display import Video\n",
395 | "\n",
396 | "video_file_path = output_video_path\n",
397 | "\n",
398 | "Video(video_file_path)"
399 | ],
400 | "metadata": {
401 | "colab": {
402 | "base_uri": "https://localhost:8080/",
403 | "height": 171
404 | },
405 | "id": "n0ojBF4p_vsZ",
406 | "outputId": "7e1b4cf4-4e47-45eb-e38b-909ee54ec433"
407 | },
408 | "execution_count": 3,
409 | "outputs": [
410 | {
411 | "output_type": "execute_result",
412 | "data": {
413 | "text/plain": [
414 | ""
415 | ],
416 | "text/html": [
417 | ""
420 | ]
421 | },
422 | "metadata": {},
423 | "execution_count": 3
424 | }
425 | ]
426 | },
427 | {
428 | "cell_type": "code",
429 | "source": [],
430 | "metadata": {
431 | "id": "c0Ww_JxLBgSR"
432 | },
433 | "execution_count": null,
434 | "outputs": []
435 | }
436 | ]
437 | }
--------------------------------------------------------------------------------
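Condensed, the notebook above reduces to a single inference call: build the 'text-to-video-synthesis' pipeline and read the output path from OutputKeys. A minimal sketch of that step, assuming modelscope, open_clip_torch, and pytorch_lightning are installed, the diffusion.py device patch above has been applied, and a CUDA GPU is available:

from modelscope.pipelines import pipeline
from modelscope.outputs import OutputKeys
from torch import autocast

# Model weights are downloaded to ~/.cache/modelscope/hub on first use.
p = pipeline('text-to-video-synthesis', 'damo/text-to-video-synthesis')

# autocast keeps the model's mixed fp16/fp32 tensors compatible on the GPU.
with autocast("cuda"):
    result = p({'text': 'A puppy jumping in a puddle!'})

print('output video:', result[OutputKeys.OUTPUT_VIDEO])

--------------------------------------------------------------------------------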
/nouns.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nagolinc/notebooks/cbc5406023fd192dd3f1505d970d7369d22bb7f7/nouns.txt
--------------------------------------------------------------------------------
/sd3_prompt_enhancer_workflow.json:
--------------------------------------------------------------------------------
1 | {
2 | "last_node_id": 285,
3 | "last_link_id": 615,
4 | "nodes": [
5 | {
6 | "id": 11,
7 | "type": "TripleCLIPLoader",
8 | "pos": [
9 | -1885,
10 | -49
11 | ],
12 | "size": {
13 | "0": 315,
14 | "1": 106
15 | },
16 | "flags": {},
17 | "order": 0,
18 | "mode": 0,
19 | "outputs": [
20 | {
21 | "name": "CLIP",
22 | "type": "CLIP",
23 | "links": [
24 | 5,
25 | 94
26 | ],
27 | "shape": 3,
28 | "slot_index": 0
29 | }
30 | ],
31 | "properties": {
32 | "Node name for S&R": "TripleCLIPLoader"
33 | },
34 | "widgets_values": [
35 | "clip_g.safetensors",
36 | "clip_l.safetensors",
37 | "t5xxl_fp8_e4m3fn.safetensors"
38 | ]
39 | },
40 | {
41 | "id": 68,
42 | "type": "ConditioningSetTimestepRange",
43 | "pos": [
44 | -1010,
45 | 167
46 | ],
47 | "size": {
48 | "0": 317.4000244140625,
49 | "1": 82
50 | },
51 | "flags": {},
52 | "order": 15,
53 | "mode": 0,
54 | "inputs": [
55 | {
56 | "name": "conditioning",
57 | "type": "CONDITIONING",
58 | "link": 90
59 | }
60 | ],
61 | "outputs": [
62 | {
63 | "name": "CONDITIONING",
64 | "type": "CONDITIONING",
65 | "links": [
66 | 91
67 | ],
68 | "shape": 3,
69 | "slot_index": 0
70 | }
71 | ],
72 | "properties": {
73 | "Node name for S&R": "ConditioningSetTimestepRange"
74 | },
75 | "widgets_values": [
76 | 0.1,
77 | 1
78 | ]
79 | },
80 | {
81 | "id": 70,
82 | "type": "ConditioningSetTimestepRange",
83 | "pos": [
84 | -1006,
85 | 314
86 | ],
87 | "size": {
88 | "0": 317.4000244140625,
89 | "1": 82
90 | },
91 | "flags": {},
92 | "order": 12,
93 | "mode": 0,
94 | "inputs": [
95 | {
96 | "name": "conditioning",
97 | "type": "CONDITIONING",
98 | "link": 93,
99 | "slot_index": 0
100 | }
101 | ],
102 | "outputs": [
103 | {
104 | "name": "CONDITIONING",
105 | "type": "CONDITIONING",
106 | "links": [
107 | 92
108 | ],
109 | "shape": 3,
110 | "slot_index": 0
111 | }
112 | ],
113 | "properties": {
114 | "Node name for S&R": "ConditioningSetTimestepRange"
115 | },
116 | "widgets_values": [
117 | 0,
118 | 0.1
119 | ]
120 | },
121 | {
122 | "id": 67,
123 | "type": "ConditioningZeroOut",
124 | "pos": [
125 | -1370,
126 | 337
127 | ],
128 | "size": {
129 | "0": 211.60000610351562,
130 | "1": 26
131 | },
132 | "flags": {},
133 | "order": 13,
134 | "mode": 0,
135 | "inputs": [
136 | {
137 | "name": "conditioning",
138 | "type": "CONDITIONING",
139 | "link": 580
140 | }
141 | ],
142 | "outputs": [
143 | {
144 | "name": "CONDITIONING",
145 | "type": "CONDITIONING",
146 | "links": [
147 | 90
148 | ],
149 | "shape": 3,
150 | "slot_index": 0
151 | }
152 | ],
153 | "properties": {
154 | "Node name for S&R": "ConditioningZeroOut"
155 | }
156 | },
157 | {
158 | "id": 266,
159 | "type": "Note",
160 | "pos": [
161 | -2352,
162 | 576
163 | ],
164 | "size": {
165 | "0": 308.061279296875,
166 | "1": 102.86902618408203
167 | },
168 | "flags": {},
169 | "order": 1,
170 | "mode": 0,
171 | "properties": {
172 | "text": ""
173 | },
174 | "widgets_values": [
175 | "Resolution should be around 1 megapixel and width/height must be multiple of 64"
176 | ],
177 | "color": "#432",
178 | "bgcolor": "#653"
179 | },
180 | {
181 | "id": 13,
182 | "type": "ModelSamplingSD3",
183 | "pos": [
184 | -974,
185 | -220
186 | ],
187 | "size": {
188 | "0": 315,
189 | "1": 58
190 | },
191 | "flags": {
192 | "collapsed": false
193 | },
194 | "order": 10,
195 | "mode": 0,
196 | "inputs": [
197 | {
198 | "name": "model",
199 | "type": "MODEL",
200 | "link": 565
201 | }
202 | ],
203 | "outputs": [
204 | {
205 | "name": "MODEL",
206 | "type": "MODEL",
207 | "links": [
208 | 591
209 | ],
210 | "shape": 3,
211 | "slot_index": 0
212 | }
213 | ],
214 | "properties": {
215 | "Node name for S&R": "ModelSamplingSD3"
216 | },
217 | "widgets_values": [
218 | 3
219 | ]
220 | },
221 | {
222 | "id": 69,
223 | "type": "ConditioningCombine",
224 | "pos": [
225 | -662,
226 | 165
227 | ],
228 | "size": {
229 | "0": 228.39999389648438,
230 | "1": 46
231 | },
232 | "flags": {},
233 | "order": 18,
234 | "mode": 0,
235 | "inputs": [
236 | {
237 | "name": "conditioning_1",
238 | "type": "CONDITIONING",
239 | "link": 91
240 | },
241 | {
242 | "name": "conditioning_2",
243 | "type": "CONDITIONING",
244 | "link": 92
245 | }
246 | ],
247 | "outputs": [
248 | {
249 | "name": "CONDITIONING",
250 | "type": "CONDITIONING",
251 | "links": [
252 | 592
253 | ],
254 | "shape": 3,
255 | "slot_index": 0
256 | }
257 | ],
258 | "properties": {
259 | "Node name for S&R": "ConditioningCombine"
260 | }
261 | },
262 | {
263 | "id": 233,
264 | "type": "PreviewImage",
265 | "pos": [
266 | 535.1143750722272,
267 | -147.92548481673106
268 | ],
269 | "size": {
270 | "0": 604.7489624023438,
271 | "1": 592.15576171875
272 | },
273 | "flags": {},
274 | "order": 21,
275 | "mode": 0,
276 | "inputs": [
277 | {
278 | "name": "images",
279 | "type": "IMAGE",
280 | "link": 599
281 | }
282 | ],
283 | "properties": {
284 | "Node name for S&R": "PreviewImage"
285 | }
286 | },
287 | {
288 | "id": 231,
289 | "type": "VAEDecode",
290 | "pos": [
291 | 141,
292 | -177
293 | ],
294 | "size": {
295 | "0": 210,
296 | "1": 46
297 | },
298 | "flags": {},
299 | "order": 20,
300 | "mode": 0,
301 | "inputs": [
302 | {
303 | "name": "samples",
304 | "type": "LATENT",
305 | "link": 596
306 | },
307 | {
308 | "name": "vae",
309 | "type": "VAE",
310 | "link": 557
311 | }
312 | ],
313 | "outputs": [
314 | {
315 | "name": "IMAGE",
316 | "type": "IMAGE",
317 | "links": [
318 | 599
319 | ],
320 | "shape": 3,
321 | "slot_index": 0
322 | }
323 | ],
324 | "properties": {
325 | "Node name for S&R": "VAEDecode"
326 | }
327 | },
328 | {
329 | "id": 271,
330 | "type": "KSampler",
331 | "pos": [
332 | -269,
333 | -179
334 | ],
335 | "size": {
336 | "0": 315,
337 | "1": 446
338 | },
339 | "flags": {},
340 | "order": 19,
341 | "mode": 0,
342 | "inputs": [
343 | {
344 | "name": "model",
345 | "type": "MODEL",
346 | "link": 591
347 | },
348 | {
349 | "name": "positive",
350 | "type": "CONDITIONING",
351 | "link": 595
352 | },
353 | {
354 | "name": "negative",
355 | "type": "CONDITIONING",
356 | "link": 592
357 | },
358 | {
359 | "name": "latent_image",
360 | "type": "LATENT",
361 | "link": 593
362 | },
363 | {
364 | "name": "seed",
365 | "type": "INT",
366 | "link": 597,
367 | "widget": {
368 | "name": "seed"
369 | },
370 | "slot_index": 4
371 | }
372 | ],
373 | "outputs": [
374 | {
375 | "name": "LATENT",
376 | "type": "LATENT",
377 | "links": [
378 | 596
379 | ],
380 | "shape": 3,
381 | "slot_index": 0
382 | }
383 | ],
384 | "properties": {
385 | "Node name for S&R": "KSampler"
386 | },
387 | "widgets_values": [
388 | 196378094239983,
389 | "fixed",
390 | 28,
391 | 4.5,
392 | "dpmpp_2m",
393 | "sgm_uniform",
394 | 1
395 | ]
396 | },
397 | {
398 | "id": 135,
399 | "type": "EmptySD3LatentImage",
400 | "pos": [
401 | -2352,
402 | 438
403 | ],
404 | "size": {
405 | "0": 315,
406 | "1": 106
407 | },
408 | "flags": {},
409 | "order": 2,
410 | "mode": 0,
411 | "inputs": [],
412 | "outputs": [
413 | {
414 | "name": "LATENT",
415 | "type": "LATENT",
416 | "links": [
417 | 593
418 | ],
419 | "shape": 3,
420 | "slot_index": 0
421 | }
422 | ],
423 | "properties": {
424 | "Node name for S&R": "EmptySD3LatentImage"
425 | },
426 | "widgets_values": [
427 | 1024,
428 | 1024,
429 | 1
430 | ]
431 | },
432 | {
433 | "id": 252,
434 | "type": "CheckpointLoaderSimple",
435 | "pos": [
436 | -2314,
437 | -203
438 | ],
439 | "size": {
440 | "0": 746.7357788085938,
441 | "1": 98
442 | },
443 | "flags": {},
444 | "order": 3,
445 | "mode": 0,
446 | "outputs": [
447 | {
448 | "name": "MODEL",
449 | "type": "MODEL",
450 | "links": [
451 | 565
452 | ],
453 | "shape": 3,
454 | "slot_index": 0
455 | },
456 | {
457 | "name": "CLIP",
458 | "type": "CLIP",
459 | "links": [],
460 | "shape": 3,
461 | "slot_index": 1
462 | },
463 | {
464 | "name": "VAE",
465 | "type": "VAE",
466 | "links": [
467 | 557
468 | ],
469 | "shape": 3,
470 | "slot_index": 2
471 | }
472 | ],
473 | "properties": {
474 | "Node name for S&R": "CheckpointLoaderSimple"
475 | },
476 | "widgets_values": [
477 | "sdv3\\2b_1024\\sd3_medium.safetensors"
478 | ]
479 | },
480 | {
481 | "id": 276,
482 | "type": "Text box",
483 | "pos": [
484 | -2233,
485 | 897
486 | ],
487 | "size": {
488 | "0": 400,
489 | "1": 200
490 | },
491 | "flags": {},
492 | "order": 4,
493 | "mode": 0,
494 | "outputs": [
495 | {
496 | "name": "STRING",
497 | "type": "STRING",
498 | "links": [
499 | 604
500 | ],
501 | "shape": 3,
502 | "slot_index": 0
503 | }
504 | ],
505 | "properties": {
506 | "Node name for S&R": "Text box"
507 | },
508 | "widgets_values": [
509 | "You are an LLM for Transforming Simple Descriptions into Elaborate Narratives\nObjective:\n\nTransform basic image descriptions into rich, detailed narratives that not only describe the visual aspects but also evoke emotions and provide a deeper context. The response should enhance the simplicity of the input into a more artistic and engaging text.\nInstructions:\n\n Read the Input Carefully: Understand the core elements of the description provided (e.g., subject, setting).\n Enhance Description: Expand on the visual details by adding elements such as lighting, mood, camera specifics, and resolution to create a more immersive image.\n Invoke Emotions and Atmosphere: Use emotive language and descriptive adjectives to build an atmosphere around the subject.\n Incorporate Technical Excellence: If applicable, mention technical aspects such as camera type, technique, or resolution to emphasize the quality and detail of the image.\n Create a Narrative: Beyond describing the scene, tell a story that invites the viewer into the image's world.\n\nExamples:\n\nExample 1:\n\n Input: \"A man fishing at sunset\"\n Output:\n Picture an evocative scene of a lone fisherman silhouetted against the fiery canvas of the sunset, captured with the precision of a Nikon D850 in high resolution. The serene waters mirror the spectacular colors of the dusk, while the gentle ripples around the fishing rod whisper tales of patience and persistence. The soft, golden light enhances the contemplative mood, making this not just a photograph, but a moment frozen in time, offering a window into the tranquil joys of solitary pursuits.\n\nExample 2:\n\n Input: \"A child playing in the park\"\n Output:\n Imagine a vibrant tableau of youthful exuberance, as a small child dances under the sun-dappled leaves of an old oak tree in the park. Captured through the dynamic range of a Sony A7R IV, the scene bursts with colors so vivid that every laughter and playful gesture feels alive. The image is bathed in the golden glow of afternoon light, casting whimsical shadows that dance along with the child. This photograph is a celebration of childhood, rendered in exquisite detail and joyful vitality.\n\nExample 3:\n\n Input: \"A cat sleeping on a windowsill\"\n Output:\n Envision a peaceful scene of a cat, curled up in the soft embrace of a warm sunbeam on a quaint windowsill. The image, taken with a Canon EOS 5D Mark IV, captures the essence of serenity and comfort. The delicate play of light and shadow highlights the gentle rise and fall of the cat’s breathing, while the outside world blurs into a bokeh of greens and blues, symbolizing the blissful isolation of a nap in the sun. This is not merely a snapshot; it is a tranquil portrait of domestic tranquility.\n\nThese examples should guide the LLM in crafting responses that transform simple descriptions into detailed, engaging, and context-rich narratives.\n\n\nUSER:"
510 | ]
511 | },
512 | {
513 | "id": 279,
514 | "type": "Text box",
515 | "pos": [
516 | -2238,
517 | 1327
518 | ],
519 | "size": {
520 | "0": 400,
521 | "1": 200
522 | },
523 | "flags": {},
524 | "order": 5,
525 | "mode": 0,
526 | "outputs": [
527 | {
528 | "name": "STRING",
529 | "type": "STRING",
530 | "links": [
531 | 606
532 | ],
533 | "shape": 3
534 | }
535 | ],
536 | "properties": {
537 | "Node name for S&R": "Text box"
538 | },
539 | "widgets_values": [
540 | "ASSISTANT:"
541 | ]
542 | },
543 | {
544 | "id": 6,
545 | "type": "CLIPTextEncode",
546 | "pos": [
547 | -1872,
548 | 267
549 | ],
550 | "size": {
551 | "0": 389.06927490234375,
552 | "1": 207.84902954101562
553 | },
554 | "flags": {},
555 | "order": 17,
556 | "mode": 0,
557 | "inputs": [
558 | {
559 | "name": "clip",
560 | "type": "CLIP",
561 | "link": 5
562 | },
563 | {
564 | "name": "text",
565 | "type": "STRING",
566 | "link": 611,
567 | "widget": {
568 | "name": "text"
569 | }
570 | }
571 | ],
572 | "outputs": [
573 | {
574 | "name": "CONDITIONING",
575 | "type": "CONDITIONING",
576 | "links": [
577 | 595
578 | ],
579 | "shape": 3,
580 | "slot_index": 0
581 | }
582 | ],
583 | "properties": {
584 | "Node name for S&R": "CLIPTextEncode"
585 | },
586 | "widgets_values": [
587 | "Behold a captivating portrait of a red-haired beauty, expertly captured through the lens of a Leica, a legendary camera renowned for its unparalleled clarity and depth. The image is a masterful study in shadowplay, with the delicate interplay of light and darkness creating an atmosphere of intrigue and allure.\n\nThe gorgeous lighting, with its soft, ethereal quality, bathes the subject in a warm and inviting glow, illuminating her striking features and the pretty freckles that dust her porcelain skin like a scattering of stars. The subtle pastel hues that infuse the image lend a dreamy, otherworldly quality, as if the viewer has been granted a glimpse into a secret, enchanted realm.\n\nThe portrait, rendered in stunning 8K resolution, is a true testament to the power of artistry and technology, and is not merely a photograph; it is a visual poem, a love letter to the beauty and mystery of the human spirit."
588 | ],
589 | "color": "#232",
590 | "bgcolor": "#353"
591 | },
592 | {
593 | "id": 272,
594 | "type": "PrimitiveNode",
595 | "pos": [
596 | -2342,
597 | 278
598 | ],
599 | "size": {
600 | "0": 210,
601 | "1": 82
602 | },
603 | "flags": {},
604 | "order": 6,
605 | "mode": 0,
606 | "outputs": [
607 | {
608 | "name": "INT",
609 | "type": "INT",
610 | "links": [
611 | 597
612 | ],
613 | "slot_index": 0,
614 | "widget": {
615 | "name": "seed"
616 | }
617 | }
618 | ],
619 | "title": "Seed",
620 | "properties": {
621 | "Run widget replace on values": false
622 | },
623 | "widgets_values": [
624 | 196378094239983,
625 | "randomize"
626 | ]
627 | },
628 | {
629 | "id": 71,
630 | "type": "CLIPTextEncode",
631 | "pos": [
632 | -1900,
633 | 570
634 | ],
635 | "size": {
636 | "0": 380.4615783691406,
637 | "1": 102.07693481445312
638 | },
639 | "flags": {},
640 | "order": 9,
641 | "mode": 0,
642 | "inputs": [
643 | {
644 | "name": "clip",
645 | "type": "CLIP",
646 | "link": 94
647 | }
648 | ],
649 | "outputs": [
650 | {
651 | "name": "CONDITIONING",
652 | "type": "CONDITIONING",
653 | "links": [
654 | 93,
655 | 580
656 | ],
657 | "shape": 3,
658 | "slot_index": 0
659 | }
660 | ],
661 | "title": "CLIP Text Encode (Negative Prompt)",
662 | "properties": {
663 | "Node name for S&R": "CLIPTextEncode"
664 | },
665 | "widgets_values": [
666 | "bad quality, poor quality, doll, disfigured, jpg, toy, bad anatomy, missing limbs, missing fingers, 3d, cgi"
667 | ],
668 | "color": "#322",
669 | "bgcolor": "#533"
670 | },
671 | {
672 | "id": 277,
673 | "type": "Text Concatenate",
674 | "pos": [
675 | -1633,
676 | 991
677 | ],
678 | "size": {
679 | "0": 315,
680 | "1": 178
681 | },
682 | "flags": {},
683 | "order": 11,
684 | "mode": 0,
685 | "inputs": [
686 | {
687 | "name": "text_a",
688 | "type": "STRING",
689 | "link": 604,
690 | "widget": {
691 | "name": "text_a"
692 | }
693 | },
694 | {
695 | "name": "text_b",
696 | "type": "STRING",
697 | "link": 605,
698 | "widget": {
699 | "name": "text_b"
700 | },
701 | "slot_index": 1
702 | },
703 | {
704 | "name": "text_c",
705 | "type": "STRING",
706 | "link": 606,
707 | "widget": {
708 | "name": "text_c"
709 | },
710 | "slot_index": 2
711 | },
712 | {
713 | "name": "text_d",
714 | "type": "STRING",
715 | "link": null,
716 | "widget": {
717 | "name": "text_d"
718 | }
719 | }
720 | ],
721 | "outputs": [
722 | {
723 | "name": "STRING",
724 | "type": "STRING",
725 | "links": [
726 | 603
727 | ],
728 | "shape": 3,
729 | "slot_index": 0
730 | }
731 | ],
732 | "properties": {
733 | "Node name for S&R": "Text Concatenate"
734 | },
735 | "widgets_values": [
736 | "\\n",
737 | "true",
738 | "",
739 | "",
740 | "",
741 | ""
742 | ]
743 | },
744 | {
745 | "id": 274,
746 | "type": "Call LLM Basic",
747 | "pos": [
748 | -1147,
749 | 933
750 | ],
751 | "size": {
752 | "0": 400,
753 | "1": 200
754 | },
755 | "flags": {},
756 | "order": 14,
757 | "mode": 0,
758 | "inputs": [
759 | {
760 | "name": "LLM",
761 | "type": "LLM",
762 | "link": 615,
763 | "slot_index": 0
764 | },
765 | {
766 | "name": "prompt",
767 | "type": "STRING",
768 | "link": 603,
769 | "widget": {
770 | "name": "prompt"
771 | },
772 | "slot_index": 1
773 | }
774 | ],
775 | "outputs": [
776 | {
777 | "name": "STRING",
778 | "type": "STRING",
779 | "links": [
780 | 608,
781 | 611
782 | ],
783 | "shape": 3,
784 | "slot_index": 0
785 | }
786 | ],
787 | "properties": {
788 | "Node name for S&R": "Call LLM Basic"
789 | },
790 | "widgets_values": [
791 | "",
792 | 0,
793 | 0.8,
794 | 393,
795 | "randomize"
796 | ]
797 | },
798 | {
799 | "id": 285,
800 | "type": "Load LLM Model Advanced",
801 | "pos": [
802 | -1581,
803 | 1264
804 | ],
805 | "size": {
806 | "0": 315,
807 | "1": 826
808 | },
809 | "flags": {},
810 | "order": 7,
811 | "mode": 0,
812 | "outputs": [
813 | {
814 | "name": "LLM",
815 | "type": "LLM",
816 | "links": [
817 | 615
818 | ],
819 | "shape": 3
820 | }
821 | ],
822 | "properties": {
823 | "Node name for S&R": "Load LLM Model Advanced"
824 | },
825 | "widgets_values": [
826 | "Nous-Hermes-2-Mistral-7B-DPO.Q5_K_M.gguf",
827 | 506,
828 | "LLAMA_SPLIT_LAYER",
829 | 0,
830 | 0,
831 | false,
832 | true,
833 | false,
834 | 657,
835 | "randomize",
836 | 2048,
837 | 512,
838 | 0,
839 | 0,
840 | "LLAMA_ROPE_SCALING_UNSPECIFIED",
841 | 0,
842 | 0,
843 | -1,
844 | 1,
845 | 32,
846 | 1,
847 | 0,
848 | 0,
849 | false,
850 | false,
851 | false,
852 | 64,
853 | "",
854 | 0,
855 | "",
856 | false,
857 | "llama-2",
858 | true
859 | ]
860 | },
861 | {
862 | "id": 278,
863 | "type": "Text box",
864 | "pos": [
865 | -2860,
866 | 1110
867 | ],
868 | "size": {
869 | "0": 400,
870 | "1": 200
871 | },
872 | "flags": {},
873 | "order": 8,
874 | "mode": 0,
875 | "outputs": [
876 | {
877 | "name": "STRING",
878 | "type": "STRING",
879 | "links": [
880 | 605
881 | ],
882 | "shape": 3,
883 | "slot_index": 0
884 | }
885 | ],
886 | "properties": {
887 | "Node name for S&R": "Text box"
888 | },
889 | "widgets_values": [
890 | "a man holds a banana"
891 | ],
892 | "color": "#232",
893 | "bgcolor": "#353"
894 | },
895 | {
896 | "id": 281,
897 | "type": "PrimereTextOutput",
898 | "pos": [
899 | -606,
900 | 640
901 | ],
902 | "size": [
903 | 583.8583829059871,
904 | 343.7747386790918
905 | ],
906 | "flags": {},
907 | "order": 16,
908 | "mode": 0,
909 | "inputs": [
910 | {
911 | "name": "text",
912 | "type": "STRING",
913 | "link": 608,
914 | "widget": {
915 | "name": "text"
916 | }
917 | }
918 | ],
919 | "properties": {
920 | "Node name for S&R": "PrimereTextOutput"
921 | },
922 | "widgets_values": [
923 | "",
924 | "\nWitness a quirky yet charming scene captured through the lens of a Fujifilm X-T3 camera with striking clarity. A man, his face aglow with amusement, clutches a ripe yellow banana in one hand. The playful contrast of the man's serious attire against the whimsical prop invites a sense of delight and intrigue. The warm lighting casts a subtle glow on both subject and fruit, creating an atmosphere of casual charm. This photograph transcends its simple premise to become a study in human expression, humor, and the unexpected beauty found in ordinary moments."
925 | ]
926 | }
927 | ],
928 | "links": [
929 | [
930 | 5,
931 | 11,
932 | 0,
933 | 6,
934 | 0,
935 | "CLIP"
936 | ],
937 | [
938 | 90,
939 | 67,
940 | 0,
941 | 68,
942 | 0,
943 | "CONDITIONING"
944 | ],
945 | [
946 | 91,
947 | 68,
948 | 0,
949 | 69,
950 | 0,
951 | "CONDITIONING"
952 | ],
953 | [
954 | 92,
955 | 70,
956 | 0,
957 | 69,
958 | 1,
959 | "CONDITIONING"
960 | ],
961 | [
962 | 93,
963 | 71,
964 | 0,
965 | 70,
966 | 0,
967 | "CONDITIONING"
968 | ],
969 | [
970 | 94,
971 | 11,
972 | 0,
973 | 71,
974 | 0,
975 | "CLIP"
976 | ],
977 | [
978 | 557,
979 | 252,
980 | 2,
981 | 231,
982 | 1,
983 | "VAE"
984 | ],
985 | [
986 | 565,
987 | 252,
988 | 0,
989 | 13,
990 | 0,
991 | "MODEL"
992 | ],
993 | [
994 | 580,
995 | 71,
996 | 0,
997 | 67,
998 | 0,
999 | "CONDITIONING"
1000 | ],
1001 | [
1002 | 591,
1003 | 13,
1004 | 0,
1005 | 271,
1006 | 0,
1007 | "MODEL"
1008 | ],
1009 | [
1010 | 592,
1011 | 69,
1012 | 0,
1013 | 271,
1014 | 2,
1015 | "CONDITIONING"
1016 | ],
1017 | [
1018 | 593,
1019 | 135,
1020 | 0,
1021 | 271,
1022 | 3,
1023 | "LATENT"
1024 | ],
1025 | [
1026 | 595,
1027 | 6,
1028 | 0,
1029 | 271,
1030 | 1,
1031 | "CONDITIONING"
1032 | ],
1033 | [
1034 | 596,
1035 | 271,
1036 | 0,
1037 | 231,
1038 | 0,
1039 | "LATENT"
1040 | ],
1041 | [
1042 | 597,
1043 | 272,
1044 | 0,
1045 | 271,
1046 | 4,
1047 | "INT"
1048 | ],
1049 | [
1050 | 599,
1051 | 231,
1052 | 0,
1053 | 233,
1054 | 0,
1055 | "IMAGE"
1056 | ],
1057 | [
1058 | 603,
1059 | 277,
1060 | 0,
1061 | 274,
1062 | 1,
1063 | "STRING"
1064 | ],
1065 | [
1066 | 604,
1067 | 276,
1068 | 0,
1069 | 277,
1070 | 0,
1071 | "STRING"
1072 | ],
1073 | [
1074 | 605,
1075 | 278,
1076 | 0,
1077 | 277,
1078 | 1,
1079 | "STRING"
1080 | ],
1081 | [
1082 | 606,
1083 | 279,
1084 | 0,
1085 | 277,
1086 | 2,
1087 | "STRING"
1088 | ],
1089 | [
1090 | 608,
1091 | 274,
1092 | 0,
1093 | 281,
1094 | 0,
1095 | "STRING"
1096 | ],
1097 | [
1098 | 611,
1099 | 274,
1100 | 0,
1101 | 6,
1102 | 1,
1103 | "STRING"
1104 | ],
1105 | [
1106 | 615,
1107 | 285,
1108 | 0,
1109 | 274,
1110 | 0,
1111 | "LLM"
1112 | ]
1113 | ],
1114 | "groups": [
1115 | {
1116 | "title": "Load Models",
1117 | "bounding": [
1118 | -2410,
1119 | -339,
1120 | 969,
1121 | 488
1122 | ],
1123 | "color": "#3f789e",
1124 | "font_size": 24
1125 | },
1126 | {
1127 | "title": "Input",
1128 | "bounding": [
1129 | -2409,
1130 | 181,
1131 | 972,
1132 | 523
1133 | ],
1134 | "color": "#3f789e",
1135 | "font_size": 24
1136 | },
1137 | {
1138 | "title": "Output",
1139 | "bounding": [
1140 | 464,
1141 | -273,
1142 | 741,
1143 | 814
1144 | ],
1145 | "color": "#3f789e",
1146 | "font_size": 24
1147 | }
1148 | ],
1149 | "config": {},
1150 | "extra": {
1151 | "ds": {
1152 | "scale": 0.5209868481924381,
1153 | "offset": [
1154 | 3416.250859933717,
1155 | 212.7698659938928
1156 | ]
1157 | }
1158 | },
1159 | "version": 0.4
1160 | }
--------------------------------------------------------------------------------
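
Note: the workflow above chains three Text box nodes (the system instructions, the short user prompt, and an "ASSISTANT:" suffix) through a Text Concatenate node into "Call LLM Basic", backed by a local Nous-Hermes-2-Mistral-7B-DPO GGUF checkpoint, and routes the expanded text into CLIPTextEncode as the positive conditioning. Outside ComfyUI, the same enhancement step could look roughly like the sketch below. This is a minimal sketch assuming llama-cpp-python; the sampling parameters are illustrative rather than read from the workflow's nodes, and SYSTEM_PROMPT stands in for the full instruction text stored in the first Text box.

```python
# Hedged sketch of the workflow's LLM prompt-enhancement step,
# assuming llama-cpp-python and a local GGUF checkpoint.
from llama_cpp import Llama

# Stand-in for the full "Transforming Simple Descriptions into
# Elaborate Narratives" instructions (which end with "USER:").
SYSTEM_PROMPT = "You are an LLM for Transforming Simple Descriptions into Elaborate Narratives ... USER:"

llm = Llama(
    model_path="Nous-Hermes-2-Mistral-7B-DPO.Q5_K_M.gguf",
    n_ctx=2048,       # assumed to correspond to the loader node's 2048 setting
    n_gpu_layers=-1,  # offload all layers when a GPU is available
)

def enhance_prompt(short_prompt: str) -> str:
    # The Text Concatenate node joins SYSTEM / prompt / "ASSISTANT:" with newlines
    full_prompt = "\n".join([SYSTEM_PROMPT, short_prompt, "ASSISTANT:"])
    out = llm(full_prompt, max_tokens=512, temperature=0.8, stop=["USER:"])
    return out["choices"][0]["text"].strip()

print(enhance_prompt("a man holds a banana"))
```

Run on the workflow's example input, the printed text would resemble the enhanced narrative captured in the PrimereTextOutput node above.
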
/shap_e_text_to_3d_with_export.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "964ccced",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import torch\n",
11 | "\n",
12 | "from shap_e.diffusion.sample import sample_latents\n",
13 | "from shap_e.diffusion.gaussian_diffusion import diffusion_from_config\n",
14 | "from shap_e.models.download import load_model, load_config\n",
15 | "from shap_e.util.notebooks import create_pan_cameras, decode_latent_images, gif_widget"
16 | ]
17 | },
18 | {
19 | "cell_type": "code",
20 | "execution_count": null,
21 | "id": "8eed3a76",
22 | "metadata": {},
23 | "outputs": [],
24 | "source": [
25 | "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')"
26 | ]
27 | },
28 | {
29 | "cell_type": "code",
30 | "execution_count": null,
31 | "id": "2d922637",
32 | "metadata": {},
33 | "outputs": [],
34 | "source": [
35 | "xm = load_model('transmitter', device=device)\n",
36 | "model = load_model('text300M', device=device)\n",
37 | "diffusion = diffusion_from_config(load_config('diffusion'))"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "id": "53d329d0",
44 | "metadata": {},
45 | "outputs": [],
46 | "source": [
47 | "batch_size = 4\n",
48 | "guidance_scale = 15.0\n",
49 | "prompt = \"baby yoda\"\n",
50 | "\n",
51 | "latents = sample_latents(\n",
52 | " batch_size=batch_size,\n",
53 | " model=model,\n",
54 | " diffusion=diffusion,\n",
55 | " guidance_scale=guidance_scale,\n",
56 | " model_kwargs=dict(texts=[prompt] * batch_size),\n",
57 | " progress=True,\n",
58 | " clip_denoised=True,\n",
59 | " use_fp16=True,\n",
60 | " use_karras=True,\n",
61 | " karras_steps=64,\n",
62 | " sigma_min=1e-3,\n",
63 | " sigma_max=160,\n",
64 | " s_churn=0,\n",
65 | ")"
66 | ]
67 | },
68 | {
69 | "cell_type": "code",
70 | "execution_count": null,
71 | "id": "633da2ec",
72 | "metadata": {},
73 | "outputs": [],
74 | "source": [
75 | "render_mode = 'nerf' # you can change this to 'stf'\n",
76 | "size = 64 # this is the size of the renders; higher values take longer to render.\n",
77 | "\n",
78 | "cameras = create_pan_cameras(size, device)\n",
79 | "for i, latent in enumerate(latents):\n",
80 | " images = decode_latent_images(xm, latent, cameras, rendering_mode=render_mode)\n",
81 | " display(gif_widget(images))"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "id": "bd8772a2",
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "render_mode = 'stf' # you can change this to 'stf'\n",
92 | "size = 64 # this is the size of the renders; higher values take longer to render.\n",
93 | "\n",
94 | "cameras = create_pan_cameras(size, device)\n",
95 | "for i, latent in enumerate(latents):\n",
96 | " images = decode_latent_images(xm, latent, cameras, rendering_mode=render_mode)\n",
97 | " display(gif_widget(images))"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": null,
103 | "id": "1458658b",
104 | "metadata": {},
105 | "outputs": [],
106 | "source": []
107 | },
108 | {
109 | "cell_type": "code",
110 | "execution_count": null,
111 | "id": "35634e0a",
112 | "metadata": {},
113 | "outputs": [],
114 | "source": [
115 | "rendering_mode='stf'\n",
116 | "size=64\n",
117 | "cameras = create_pan_cameras(size, device)\n",
118 | "latent=latents[0]\n",
119 | "from shap_e.models.transmitter.base import Transmitter, VectorDecoder\n",
120 | "from shap_e.util.collections import AttrDict\n",
121 | "decoded = xm.renderer.render_views(\n",
122 | " AttrDict(cameras=cameras),\n",
123 | " params=(xm.encoder if isinstance(xm, Transmitter) else xm).bottleneck_to_params(\n",
124 | " latent[None]\n",
125 | " ),\n",
126 | " options=AttrDict(rendering_mode=rendering_mode, render_with_direction=False),\n",
127 | " )"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "id": "bc4254b8",
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "import base64\n",
138 | "import io\n",
139 | "from typing import Union\n",
140 | "\n",
141 | "import ipywidgets as widgets\n",
142 | "import numpy as np\n",
143 | "import torch\n",
144 | "from PIL import Image\n",
145 | "\n",
146 | "from shap_e.models.nn.camera import DifferentiableCameraBatch, DifferentiableProjectiveCamera\n",
147 | "from shap_e.models.transmitter.base import Transmitter, VectorDecoder\n",
148 | "from shap_e.util.collections import AttrDict\n",
149 | "\n",
150 | "@torch.no_grad()\n",
151 | "def decode_latent_images_foo(\n",
152 | " xm: Union[Transmitter, VectorDecoder],\n",
153 | " latent: torch.Tensor,\n",
154 | " cameras: DifferentiableCameraBatch,\n",
155 | " rendering_mode: str = \"stf\",\n",
156 | "):\n",
157 | " decoded = xm.renderer.render_views(\n",
158 | " AttrDict(cameras=cameras),\n",
159 | " params=(xm.encoder if isinstance(xm, Transmitter) else xm).bottleneck_to_params(\n",
160 | " latent[None]\n",
161 | " ),\n",
162 | " options=AttrDict(rendering_mode=rendering_mode, render_with_direction=False),\n",
163 | " )\n",
164 | " return decoded\n",
165 | " arr = decoded.channels.clamp(0, 255).to(torch.uint8)[0].cpu().numpy()\n",
166 | " return [Image.fromarray(x) for x in arr]"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "id": "be88930c",
173 | "metadata": {},
174 | "outputs": [],
175 | "source": [
176 | "x=decode_latent_images_foo(xm, latents[0], cameras, rendering_mode=render_mode)\n",
177 | "#x['meshes']"
178 | ]
179 | },
180 | {
181 | "cell_type": "code",
182 | "execution_count": null,
183 | "id": "fb9cbb25",
184 | "metadata": {},
185 | "outputs": [],
186 | "source": [
187 | "mesh=x['meshes'][0]"
188 | ]
189 | },
190 | {
191 | "cell_type": "code",
192 | "execution_count": null,
193 | "id": "24bcc4f4",
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "rm=x['raw_meshes'][0]"
198 | ]
199 | },
200 | {
201 | "cell_type": "code",
202 | "execution_count": null,
203 | "id": "6a28b767",
204 | "metadata": {},
205 | "outputs": [],
206 | "source": [
207 | "rm.vertex_channels[\"R\"]=mesh.vertex_colors[:,0]\n",
208 | "rm.vertex_channels[\"G\"]=mesh.vertex_colors[:,1]\n",
209 | "rm.vertex_channels[\"B\"]=mesh.vertex_colors[:,2]"
210 | ]
211 | },
212 | {
213 | "cell_type": "code",
214 | "execution_count": null,
215 | "id": "09c95cbb",
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "tm=rm.tri_mesh()"
220 | ]
221 | },
222 | {
223 | "cell_type": "code",
224 | "execution_count": null,
225 | "id": "9a1082dc",
226 | "metadata": {},
227 | "outputs": [],
228 | "source": [
229 | "with open(\"yoda.ply\",'wb') as f:\n",
230 | " tm.write_ply(f)"
231 | ]
232 | },
233 | {
234 | "cell_type": "code",
235 | "execution_count": null,
236 | "id": "5e35f722",
237 | "metadata": {},
238 | "outputs": [],
239 | "source": [
240 | "import trimesh\n",
241 | "\n",
242 | "def convert_ply_to_gltf(ply_file, gltf_file):\n",
243 | " # Load the .ply file\n",
244 | " mesh = trimesh.load_mesh(ply_file)\n",
245 | "\n",
246 | " # Export the mesh to .gltf format\n",
247 | " gltf_data = mesh.export(file_type='glb')\n",
248 | "\n",
249 | " # Write the .gltf file\n",
250 | " with open(gltf_file, 'wb') as f:\n",
251 | " f.write(gltf_data)\n",
252 | "\n",
253 | "# Replace these with your input and output file paths\n",
254 | "input_ply_file = \"yoda.ply\"\n",
255 | "output_gltf_file = \"yoda.glb\"\n",
256 | "\n",
257 | "# Convert the .ply file to .gltf\n",
258 | "convert_ply_to_gltf(input_ply_file, output_gltf_file)"
259 | ]
260 | },
261 | {
262 | "cell_type": "code",
263 | "execution_count": null,
264 | "id": "c30f6239",
265 | "metadata": {},
266 | "outputs": [],
267 | "source": []
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "id": "bd1a2e80",
273 | "metadata": {},
274 | "outputs": [],
275 | "source": []
276 | }
277 | ],
278 | "metadata": {
279 | "kernelspec": {
280 | "display_name": "Python 3 (ipykernel)",
281 | "language": "python",
282 | "name": "python3"
283 | },
284 | "language_info": {
285 | "codemirror_mode": {
286 | "name": "ipython",
287 | "version": 3
288 | },
289 | "file_extension": ".py",
290 | "mimetype": "text/x-python",
291 | "name": "python",
292 | "nbconvert_exporter": "python",
293 | "pygments_lexer": "ipython3",
294 | "version": "3.10.6"
295 | }
296 | },
297 | "nbformat": 4,
298 | "nbformat_minor": 5
299 | }
300 |
--------------------------------------------------------------------------------
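
As a quick sanity check on the exports, both files written by the notebook above can be reloaded with trimesh. This is a hedged sketch rather than a cell from the notebook; it only assumes the yoda.ply / yoda.glb names used there.

```python
# Reload the exported meshes to confirm the export round-trips.
import trimesh

ply = trimesh.load("yoda.ply")   # .ply loads directly as a Trimesh
glb = trimesh.load("yoda.glb")   # .glb loads as a Scene (glTF uses a scene graph)

print(ply.vertices.shape, ply.faces.shape)
print(glb)
# ply.show() / glb.show() open an interactive viewer when a display is available
```
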
/ss5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nagolinc/notebooks/cbc5406023fd192dd3f1505d970d7369d22bb7f7/ss5.png
--------------------------------------------------------------------------------