├── .dockerignore
├── .env.template
├── .gitignore
├── 00-feature-store-intro.ipynb
├── 01-triton-model-training.ipynb
├── 02-feature-drift.ipynb
├── 03-triton-vertex-inference-example.ipynb
├── LICENSE
├── Makefile
├── README.md
├── docker-compose.yaml
├── docker
│   ├── Dockerfile.base
│   ├── Dockerfile.jupyter
│   ├── Dockerfile.setup
│   ├── Dockerfile.triton
│   ├── jupyter
│   │   └── requirements.txt
│   ├── setup
│   │   ├── .gcloudignore
│   │   ├── apply.py
│   │   ├── create.py
│   │   ├── environment.yml
│   │   ├── materialize.py
│   │   ├── setup.sh
│   │   ├── teardown.py
│   │   └── teardown.sh
│   └── triton
│       ├── README.md
│       ├── entrypoint.sh
│       └── models
│           ├── ensemble
│           │   ├── 1
│           │   │   └── .gitkeep
│           │   └── config.pbtxt
│           ├── fetch-vaccine-features
│           │   ├── 1
│           │   │   └── model.py
│           │   └── config.pbtxt
│           └── predict-vaccine-counts
│               ├── 1
│               │   └── xgboost.json
│               └── config.pbtxt
├── env.sh
├── feature_store
│   ├── README.md
│   ├── __init__.py
│   ├── repo
│   │   ├── .feastignore
│   │   ├── __init__.py
│   │   ├── config.py
│   │   └── features.py
│   └── utils
│       ├── __init__.py
│       ├── data_fetcher.py
│       ├── logger.py
│       ├── redis_model_repo.py
│       ├── storage.py
│       └── triton_model_repo.py
├── img
│   ├── RedisFeastTriton.png
│   ├── redis-feast-gcp-architecture.png
│   ├── redis.gif
│   ├── triton-inference-server.png
│   └── triton-vertex.png
├── re-gcp-mp
│   ├── main.tf
│   └── outputs.tf
├── requirements.txt
├── setup.cfg
└── setup.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | img/
2 | *__pycache__
3 | *.ipynb_checkpoints
4 | *.pkl
5 | *Dockerfile
6 | .env*
--------------------------------------------------------------------------------
/.env.template:
--------------------------------------------------------------------------------
1 | PROJECT_ID=
2 | GCP_REGION=
3 | BUCKET_NAME=
4 | GOOGLE_APPLICATION_CREDENTIALS=
5 | SERVICE_ACCOUNT_EMAIL=
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *__pycache__
2 | *.ipynb_checkpoints
3 | *.pkl
4 | *.csv
5 | *.egg-info
6 | .env
7 | *build
8 | *.tar.gz
9 | instances.json
10 |
11 | ### Terraform ###
12 | **/.terraform/*
13 | **/.terraform.lock.hcl
14 | *.tfstate
15 | *.tfstate.*
16 | crash.log
17 | crash.*.log
18 | *.tfvars
19 | *.tfvars.json
20 | .terraformrc
21 | terraform.rc
22 |
--------------------------------------------------------------------------------
/00-feature-store-intro.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "id": "jirdTjhETQW0"
7 | },
8 | "source": [
9 | "# **redis-feast-gcp**: 00 - Feature Store Intro\n",
10 | "\n",
11 | "In this notebook, we will explore how a Feature Store works using Feast.\n",
12 | "\n",
13 | "**This notebook assumes that you've already set up your Feature Store in GCP using the `setup` instructions in the README.**"
14 | ]
15 | },
16 | {
17 | "cell_type": "markdown",
18 | "metadata": {},
19 | "source": [
20 | ""
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {
26 | "id": "6cJFAJiuGxM3"
27 | },
28 | "source": [
29 | "## Fetching Historical Data\n",
30 | "\n",
31 | "The Feast SDK allows us to pull historical data to generate a training dataset. In this cae, the data comes from BigQuery tables, but unbeknownst to the user.\n",
32 | "\n",
33 | "To make things simpler, we use the [`DataFetcher`](utils/data_fetcher.py) class that wraps the Feast Python client."
34 | ]
35 | },
36 | {
37 | "cell_type": "code",
38 | "execution_count": 1,
39 | "metadata": {},
40 | "outputs": [
41 | {
42 | "name": "stdout",
43 | "output_type": "stream",
44 | "text": [
45 | "Feast is an open source project that collects anonymized error reporting and usage statistics. To opt out or learn more see https://docs.feast.dev/reference/usage\n"
46 | ]
47 | }
48 | ],
49 | "source": [
50 | "from feature_store.repo import config\n",
51 | "from feature_store.utils import (\n",
52 | " DataFetcher,\n",
53 | " storage\n",
54 | ")\n",
55 | "\n",
56 | "# Load Feast feature store\n",
57 | "fs = storage.get_feature_store(\n",
58 | " config_path=config.REPO_CONFIG,\n",
59 | " bucket_name=config.BUCKET_NAME\n",
60 | ")\n",
61 | "\n",
62 | "# Load DataFetcher utility class\n",
63 | "data_fetcher = DataFetcher(fs)"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": 2,
69 | "metadata": {},
70 | "outputs": [
71 | {
72 | "name": "stdout",
73 | "output_type": "stream",
74 | "text": [
75 | "['lag_1_vaccine_interest', 'lag_2_vaccine_interest', 'lag_1_vaccine_intent', 'lag_2_vaccine_intent', 'lag_1_vaccine_safety', 'lag_2_vaccine_safety', 'lag_1_weekly_vaccinations_count', 'lag_2_weekly_vaccinations_count']\n",
76 | "['weekly_vaccinations_count']\n"
77 | ]
78 | }
79 | ],
80 | "source": [
81 | "# The data fetcher class contains the list of input features to use for model training\n",
82 | "print(data_fetcher.X_cols)\n",
83 | "\n",
84 | "print(data_fetcher.y_col)"
85 | ]
86 | },
87 | {
88 | "cell_type": "code",
89 | "execution_count": 3,
90 | "metadata": {
91 | "colab": {
92 | "base_uri": "https://localhost:8080/",
93 | "height": 330
94 | },
95 | "id": "BqgiEP2Oz42q",
96 | "outputId": "40552317-644b-4ee6-d735-0ee5e48e79dd"
97 | },
98 | "outputs": [],
99 | "source": [
100 | "# Fetch historical training data with Feast\n",
101 | "ds = data_fetcher.get_training_data(\n",
102 | " entity_query=f\"\"\"\n",
103 | " select\n",
104 | " state,\n",
105 | " date as event_timestamp\n",
106 | " from\n",
107 | " {config.BIGQUERY_DATASET_NAME}.{config.WEEKLY_VACCINATIONS_TABLE}\n",
108 | " \"\"\"\n",
109 | ")\n",
110 | "\n",
111 | "# Clean up any nulls\n",
112 | "ds.dropna(inplace=True)\n",
113 | "ds.sort_values(['event_timestamp', 'state'], axis=0, inplace=True)"
114 | ]
115 | },
116 | {
117 | "cell_type": "code",
118 | "execution_count": 4,
119 | "metadata": {},
120 | "outputs": [],
121 | "source": [
122 | "# Subset the data to the state of Virginia (in the US)\n",
123 | "virginia = ds[ds.state == 'Virginia']"
124 | ]
125 | },
126 | {
127 | "cell_type": "code",
128 | "execution_count": 5,
129 | "metadata": {},
130 | "outputs": [
131 | {
132 | "data": {
133 | "text/html": [
134 | "
\n",
135 | "\n",
148 | "
\n",
149 | " \n",
150 | " \n",
151 | " | \n",
152 | " state | \n",
153 | " event_timestamp | \n",
154 | " lag_1_vaccine_interest | \n",
155 | " lag_2_vaccine_interest | \n",
156 | " lag_1_vaccine_intent | \n",
157 | " lag_2_vaccine_intent | \n",
158 | " lag_1_vaccine_safety | \n",
159 | " lag_2_vaccine_safety | \n",
160 | " lag_1_weekly_vaccinations_count | \n",
161 | " lag_2_weekly_vaccinations_count | \n",
162 | " weekly_vaccinations_count | \n",
163 | "
\n",
164 | " \n",
165 | " \n",
166 | " \n",
167 | " 3267 | \n",
168 | " Virginia | \n",
169 | " 2021-01-25 00:00:00+00:00 | \n",
170 | " 62.549712 | \n",
171 | " 64.178025 | \n",
172 | " 22.662099 | \n",
173 | " 25.802051 | \n",
174 | " 9.102064 | \n",
175 | " 8.580120 | \n",
176 | " 152933 | \n",
177 | " 115553.0 | \n",
178 | " 329892.0 | \n",
179 | "
\n",
180 | " \n",
181 | " 3331 | \n",
182 | " Virginia | \n",
183 | " 2021-02-01 00:00:00+00:00 | \n",
184 | " 59.771108 | \n",
185 | " 62.549712 | \n",
186 | " 23.881589 | \n",
187 | " 22.662099 | \n",
188 | " 9.060282 | \n",
189 | " 9.102064 | \n",
190 | " 329892 | \n",
191 | " 152933.0 | \n",
192 | " 278811.0 | \n",
193 | "
\n",
194 | " \n",
195 | " 3279 | \n",
196 | " Virginia | \n",
197 | " 2021-02-08 00:00:00+00:00 | \n",
198 | " 71.490393 | \n",
199 | " 59.771108 | \n",
200 | " 35.214495 | \n",
201 | " 23.881589 | \n",
202 | " 11.884993 | \n",
203 | " 9.060282 | \n",
204 | " 278811 | \n",
205 | " 329892.0 | \n",
206 | " 266744.0 | \n",
207 | "
\n",
208 | " \n",
209 | " 3251 | \n",
210 | " Virginia | \n",
211 | " 2021-02-15 00:00:00+00:00 | \n",
212 | " 67.818582 | \n",
213 | " 71.490393 | \n",
214 | " 34.463701 | \n",
215 | " 35.214495 | \n",
216 | " 12.675395 | \n",
217 | " 11.884993 | \n",
218 | " 266744 | \n",
219 | " 278811.0 | \n",
220 | " 318139.0 | \n",
221 | "
\n",
222 | " \n",
223 | " 3268 | \n",
224 | " Virginia | \n",
225 | " 2021-02-22 00:00:00+00:00 | \n",
226 | " 72.135382 | \n",
227 | " 67.818582 | \n",
228 | " 34.295162 | \n",
229 | " 34.463701 | \n",
230 | " 14.205749 | \n",
231 | " 12.675395 | \n",
232 | " 318139 | \n",
233 | " 266744.0 | \n",
234 | " 344859.0 | \n",
235 | "
\n",
236 | " \n",
237 | "
\n",
238 | "
"
239 | ],
240 | "text/plain": [
241 | " state event_timestamp lag_1_vaccine_interest \\\n",
242 | "3267 Virginia 2021-01-25 00:00:00+00:00 62.549712 \n",
243 | "3331 Virginia 2021-02-01 00:00:00+00:00 59.771108 \n",
244 | "3279 Virginia 2021-02-08 00:00:00+00:00 71.490393 \n",
245 | "3251 Virginia 2021-02-15 00:00:00+00:00 67.818582 \n",
246 | "3268 Virginia 2021-02-22 00:00:00+00:00 72.135382 \n",
247 | "\n",
248 | " lag_2_vaccine_interest lag_1_vaccine_intent lag_2_vaccine_intent \\\n",
249 | "3267 64.178025 22.662099 25.802051 \n",
250 | "3331 62.549712 23.881589 22.662099 \n",
251 | "3279 59.771108 35.214495 23.881589 \n",
252 | "3251 71.490393 34.463701 35.214495 \n",
253 | "3268 67.818582 34.295162 34.463701 \n",
254 | "\n",
255 | " lag_1_vaccine_safety lag_2_vaccine_safety \\\n",
256 | "3267 9.102064 8.580120 \n",
257 | "3331 9.060282 9.102064 \n",
258 | "3279 11.884993 9.060282 \n",
259 | "3251 12.675395 11.884993 \n",
260 | "3268 14.205749 12.675395 \n",
261 | "\n",
262 | " lag_1_weekly_vaccinations_count lag_2_weekly_vaccinations_count \\\n",
263 | "3267 152933 115553.0 \n",
264 | "3331 329892 152933.0 \n",
265 | "3279 278811 329892.0 \n",
266 | "3251 266744 278811.0 \n",
267 | "3268 318139 266744.0 \n",
268 | "\n",
269 | " weekly_vaccinations_count \n",
270 | "3267 329892.0 \n",
271 | "3331 278811.0 \n",
272 | "3279 266744.0 \n",
273 | "3251 318139.0 \n",
274 | "3268 344859.0 "
275 | ]
276 | },
277 | "execution_count": 5,
278 | "metadata": {},
279 | "output_type": "execute_result"
280 | }
281 | ],
282 | "source": [
283 | "# Inspect part of the dataset for Virginia\n",
284 | "virginia.head()"
285 | ]
286 | },
287 | {
288 | "cell_type": "code",
289 | "execution_count": 6,
290 | "metadata": {},
291 | "outputs": [
292 | {
293 | "data": {
294 | "text/html": [
295 | "\n",
296 | "\n",
309 | "
\n",
310 | " \n",
311 | " \n",
312 | " | \n",
313 | " state | \n",
314 | " event_timestamp | \n",
315 | " lag_1_vaccine_interest | \n",
316 | " lag_2_vaccine_interest | \n",
317 | " lag_1_vaccine_intent | \n",
318 | " lag_2_vaccine_intent | \n",
319 | " lag_1_vaccine_safety | \n",
320 | " lag_2_vaccine_safety | \n",
321 | " lag_1_weekly_vaccinations_count | \n",
322 | " lag_2_weekly_vaccinations_count | \n",
323 | " weekly_vaccinations_count | \n",
324 | "
\n",
325 | " \n",
326 | " \n",
327 | " \n",
328 | " 3304 | \n",
329 | " Virginia | \n",
330 | " 2023-01-02 00:00:00+00:00 | \n",
331 | " 10.72945 | \n",
332 | " 11.383411 | \n",
333 | " 1.831109 | \n",
334 | " 2.094199 | \n",
335 | " 1.352006 | \n",
336 | " 1.276527 | \n",
337 | " 46707 | \n",
338 | " 65773.0 | \n",
339 | " 46312.0 | \n",
340 | "
\n",
341 | " \n",
342 | " 3295 | \n",
343 | " Virginia | \n",
344 | " 2023-01-09 00:00:00+00:00 | \n",
345 | " 10.72945 | \n",
346 | " 11.383411 | \n",
347 | " 1.831109 | \n",
348 | " 2.094199 | \n",
349 | " 1.352006 | \n",
350 | " 1.276527 | \n",
351 | " 46312 | \n",
352 | " 46707.0 | \n",
353 | " 45059.0 | \n",
354 | "
\n",
355 | " \n",
356 | " 3242 | \n",
357 | " Virginia | \n",
358 | " 2023-01-16 00:00:00+00:00 | \n",
359 | " 10.72945 | \n",
360 | " 11.383411 | \n",
361 | " 1.831109 | \n",
362 | " 2.094199 | \n",
363 | " 1.352006 | \n",
364 | " 1.276527 | \n",
365 | " 45059 | \n",
366 | " 46312.0 | \n",
367 | " 37554.0 | \n",
368 | "
\n",
369 | " \n",
370 | " 3243 | \n",
371 | " Virginia | \n",
372 | " 2023-01-23 00:00:00+00:00 | \n",
373 | " 10.72945 | \n",
374 | " 11.383411 | \n",
375 | " 1.831109 | \n",
376 | " 2.094199 | \n",
377 | " 1.352006 | \n",
378 | " 1.276527 | \n",
379 | " 37554 | \n",
380 | " 45059.0 | \n",
381 | " 27998.0 | \n",
382 | "
\n",
383 | " \n",
384 | " 3339 | \n",
385 | " Virginia | \n",
386 | " 2023-01-30 00:00:00+00:00 | \n",
387 | " 10.72945 | \n",
388 | " 11.383411 | \n",
389 | " 1.831109 | \n",
390 | " 2.094199 | \n",
391 | " 1.352006 | \n",
392 | " 1.276527 | \n",
393 | " 27998 | \n",
394 | " 37554.0 | \n",
395 | " 6770.0 | \n",
396 | "
\n",
397 | " \n",
398 | "
\n",
399 | "
"
400 | ],
401 | "text/plain": [
402 | " state event_timestamp lag_1_vaccine_interest \\\n",
403 | "3304 Virginia 2023-01-02 00:00:00+00:00 10.72945 \n",
404 | "3295 Virginia 2023-01-09 00:00:00+00:00 10.72945 \n",
405 | "3242 Virginia 2023-01-16 00:00:00+00:00 10.72945 \n",
406 | "3243 Virginia 2023-01-23 00:00:00+00:00 10.72945 \n",
407 | "3339 Virginia 2023-01-30 00:00:00+00:00 10.72945 \n",
408 | "\n",
409 | " lag_2_vaccine_interest lag_1_vaccine_intent lag_2_vaccine_intent \\\n",
410 | "3304 11.383411 1.831109 2.094199 \n",
411 | "3295 11.383411 1.831109 2.094199 \n",
412 | "3242 11.383411 1.831109 2.094199 \n",
413 | "3243 11.383411 1.831109 2.094199 \n",
414 | "3339 11.383411 1.831109 2.094199 \n",
415 | "\n",
416 | " lag_1_vaccine_safety lag_2_vaccine_safety \\\n",
417 | "3304 1.352006 1.276527 \n",
418 | "3295 1.352006 1.276527 \n",
419 | "3242 1.352006 1.276527 \n",
420 | "3243 1.352006 1.276527 \n",
421 | "3339 1.352006 1.276527 \n",
422 | "\n",
423 | " lag_1_weekly_vaccinations_count lag_2_weekly_vaccinations_count \\\n",
424 | "3304 46707 65773.0 \n",
425 | "3295 46312 46707.0 \n",
426 | "3242 45059 46312.0 \n",
427 | "3243 37554 45059.0 \n",
428 | "3339 27998 37554.0 \n",
429 | "\n",
430 | " weekly_vaccinations_count \n",
431 | "3304 46312.0 \n",
432 | "3295 45059.0 \n",
433 | "3242 37554.0 \n",
434 | "3243 27998.0 \n",
435 | "3339 6770.0 "
436 | ]
437 | },
438 | "execution_count": 6,
439 | "metadata": {},
440 | "output_type": "execute_result"
441 | }
442 | ],
443 | "source": [
444 | "virginia.tail()"
445 | ]
446 | },
447 | {
448 | "cell_type": "code",
449 | "execution_count": 7,
450 | "metadata": {},
451 | "outputs": [
452 | {
453 | "data": {
454 | "text/plain": [
455 | "count 106.000000\n",
456 | "mean 179285.452830\n",
457 | "std 148843.919624\n",
458 | "min 6770.000000\n",
459 | "25% 71501.500000\n",
460 | "50% 127141.000000\n",
461 | "75% 259355.250000\n",
462 | "max 644252.000000\n",
463 | "Name: weekly_vaccinations_count, dtype: float64"
464 | ]
465 | },
466 | "execution_count": 7,
467 | "metadata": {},
468 | "output_type": "execute_result"
469 | }
470 | ],
471 | "source": [
472 | "virginia.weekly_vaccinations_count.describe()"
473 | ]
474 | },
475 | {
476 | "cell_type": "code",
477 | "execution_count": 8,
478 | "metadata": {},
479 | "outputs": [
480 | {
481 | "name": "stderr",
482 | "output_type": "stream",
483 | "text": [
484 | "2023-02-06 15:34:18,297: font_manager.py:1633 _load_fontmanager(): generated new fontManager\n"
485 | ]
486 | },
487 | {
488 | "data": {
489 | "text/plain": [
490 | ""
491 | ]
492 | },
493 | "execution_count": 8,
494 | "metadata": {},
495 | "output_type": "execute_result"
496 | },
497 | {
498 | "data": {
499 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlYAAAGxCAYAAACgDPi4AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjYuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8o6BhiAAAACXBIWXMAAA9hAAAPYQGoP6dpAABZPElEQVR4nO3de1hU5do/8O+ADILIGUHMAwapKCqKB7TIA1ty095aVkYWbrXaFh7RNMvjbhdmWZn1aqdXdGemlrlNTeNVwB0iKIqnlEBQTBkOKoyAAg7P74/9Y8XIaRgWzOn7ua65Lmc9z6y5YZnr7jncSyGEECAiIiKiFrMydABERERE5oKJFREREZFMmFgRERERyYSJFREREZFMmFgRERERyYSJFREREZFMmFgRERERyYSJFREREZFM2hk6AEtSXV2N69evo2PHjlAoFIYOh4iIiHQghMDt27fh7e0NK6vGx6SYWLWh69evo2vXroYOg4iIiPRw9epVPPDAA432YWLVhjp27AjgvxfG0dHRwNEQERGRLtRqNbp27SrdxxvDxKoN1Uz/OTo6MrEiIiIyMbos4+HidSIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkfaUNEREQGk11Yiis3y9HDrQN83DsYOpwWY2JFREREba64vBJztqXjSGahdCzEzwPrIwLhZG9jwMhahlOBRERE1ObmbEtHUlaR1rGkrCLM3nbKQBHJg4kVERERtanswlIcySyERgit4xohcCSzEDlFZQaKrOWYWBEREVGbunKzvNH2yzeYWBERERHppLurfaPtPdxMdxE7EysiIiJqUz09HBDi5wFrhULruLVCgRA/D5PeHcjEioiIiNrc+ohAjPR11zo20tcd6yMCDRSRPFhugYiIiNqck70NtswYipyiMly+UcY6VkREREQt5eNuHglVDU4FEhEREcmEiRURERGRTJhYEREREcmEiRURERGRTJhYEREREcmEiRURERGRTJhYEREREcmEdayIiIio1WQXluLKzXKzKQDaFCZWREREJLvi8krM2ZaOI5mF0rEQPw+sjwiEk72NASNrXZwKJCIikkl2YSniMwqQU1Rm6FAMbs62dCRlFWkdS8oqwuxtpwwUUdvgiBUREVELWeroTEOyC0u1fhc1NELgSGYhcorKzHZakCNWRERELWSpozMNuXKzvNH2yzfMd0SPiRUREVEL1IzOaITQOl57dMbSdHe1b7S9h5t5jlYBRpBYXbt2Dc8//zzc3NxgZ2eHgIAAnDhxQmoXQmD58uXo3Lkz7OzsEBoaiszMTK1z3Lx5E1OmTIGjoyOcnZ0xY8YMlJaWavU5c+YMHnnkEbRv3x5du3bFmjVr6sSyc+dO9O7dG+3bt0dAQAD279+v1a5LLEREZFkseXSmIT09HBDi5wFrhULruLVCgRA/D7OdBgQMnFjdunULI0eOhI2NDX766Sf8+uuvWLt2LVxcXKQ+a9aswccff4yNGzciJSUFHTp0QFhYGO7evSv1mTJlCs6fP4+4uDjs3bsXR44cwcsvvyy1q9VqjBs3Dt27d0daWhree+89rFy5Ep9//rnU5+jRo4iIiMCMGTNw6tQpTJw4ERMnTsS5c+eaFQsREVkWSx6dacz6iECM9HXXOjbS1x3rIwINFFHbUAhx39hlG3r99deRlJSE//znP/W2CyHg7e2NBQsWYOHChQCAkpISeHp6IjY2Fs8++ywuXLgAf39/HD9+HEFBQQCAAwcO4M9//jN+//13eHt7Y8OGDXjzzTehUqmgVCql7969ezcuXrwIAJg8eTLKysqwd+9e6fuHDx+OgQMHYuPGjTrF0hS1Wg0nJyeUlJTA0dFR/18cEREZlcivUpGUVaQ1HWitUGCkrzu2zBhqwMgML6eoDJdvlMlWx8oQdbGac/826IjVnj17EBQUhKeffhqdOnVCYGAgvvjiC6k9JycHKpUKoaGh0jEnJycMGzYMycnJAIDk5GQ4OztLSRUAhIaGwsrKCikpKVKfkJAQKakCgLCwMGRkZODWrVtSn9rfU9On5nt0ieV+FRUVUKvVWi8iIjI/ljo6owsf9w4Y3atTi5Og4vJKRH6VijFrEzFt03GMfj8BkV+loqS8SqZI5WHQcgvZ2dnYsGEDoqOj8cYbb+D48eOYM2cOlEolpk6dCpVKBQDw9PTU+pynp6fUplKp0KlTJ632du3awdXVVauPj49PnXPUtLm4uEClUjX5PU3Fcr+YmBisWrVKt18GERGZLCd7G2yZMVT20Rn6Q2M7L41pVNCgiVV1dTWCgoLwzjvvAAACAwNx7tw5bNy4EVOnTjVkaLJYsmQJoqOjpfdqtRpdu3Y1YERERNSafNyZULUGU6qLZdCpwM6dO8Pf31/rWJ8+fZCbmwsA8PLyAgDk5+dr9cnPz5favLy8UFBQoNV+79493Lx5U6tPfeeo/R0N9and3lQs97O1tYWjo6PWi4iIiJrHlHZeGjSxGjlyJDIyMrSO/fbbb+jevTsAwMfHB15eXjh06JDUrlarkZKSguDgYABAcHAwiouLkZaWJvU5fPgwqqurMWzYMKnPkSNHUFX1xzxsXFwcevXqJe1ADA4O1vqemj4136NLLERERCQ/U9p5adDEav78+Th27BjeeecdZGVl4ZtvvsHnn3+OqKgoAIBCocC8efPwz3/+E3v27MHZs2cRGRkJb29vTJw4EcB/R7gee+wxvPTSS0hNTUVSUhJmzZqFZ599Ft7e3gCA5557DkqlEjNmzMD58+exfft2rFu3Tmuabu7cuThw4ADWrl2LixcvYuXKlThx4gRmzZqlcyxEREQkP5OqiyUM7McffxT9+vUTtra2onfv3uLzzz/Xaq+urhbLli0Tnp6ewtbWVowdO1ZkZGRo9blx44aIiIgQDg4OwtHRUUybNk3cvn1bq8/p06fFww8/LGxtbUWXLl3E6tWr68SyY8cO8dBDDwmlUin69u0r9u3b1+xYGlNSUiIAiJKSEp0/Q0REREIUl1WKF75MEd0X75VeL3yZIorLKlv9u5tz/zZoHStLwzpWRERELWOInZfNuX8bdFcgERERUXMY+85Lgz8rkIiIiMhcMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEiIiIikgkTKyIiIiKZsPI6ERHJLruwFFdulrfpY0eIjAETKyIikk1xeSXmbEvHkcxC6ViInwfWRwTCyd7GgJERtQ1OBRIRkWzmbEtHUlaR1rGkrCLM3nbKQBERtS0mVkREJIvswlIcySyERgit4xohcCSzEDlFZQaKjKjtMLEiIiJZXLlZ3mj75RtMrMj8MbEiIiJZdHe1b7S9hxsXsZP5Y2JFRESy6OnhgBA/D1grFFrHrRUKhPh5cHcgWQQmVkREJJv1EYEY6euudWykrzvWRwQaKCKitsVyC0REJBsnextsmTEUOUV
luHyjjHWsyOIwsSIiItn5uDOhIsvEqUAiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimTCxIiIiIpIJEysiIiIimRg0sVq5ciUUCoXWq3fv3lL73bt3ERUVBTc3Nzg4OGDSpEnIz8/XOkdubi7Cw8Nhb2+PTp064bXXXsO9e/e0+iQkJGDQoEGwtbWFr68vYmNj68Ty6aefokePHmjfvj2GDRuG1NRUrXZdYiEiIiLLZvARq759+yIvL096/fLLL1Lb/Pnz8eOPP2Lnzp1ITEzE9evX8eSTT0rtGo0G4eHhqKysxNGjR7F582bExsZi+fLlUp+cnByEh4dj9OjRSE9Px7x58/Diiy/i4MGDUp/t27cjOjoaK1aswMmTJzFgwACEhYWhoKBA51iIiIiIIAxoxYoVYsCAAfW2FRcXCxsbG7Fz507p2IULFwQAkZycLIQQYv/+/cLKykqoVCqpz4YNG4Sjo6OoqKgQQgixaNEi0bdvX61zT548WYSFhUnvhw4dKqKioqT3Go1GeHt7i5iYGJ1j0UVJSYkAIEpKSnT+DBERERlWc+7fBh+xyszMhLe3N3r27IkpU6YgNzcXAJCWloaqqiqEhoZKfXv37o1u3bohOTkZAJCcnIyAgAB4enpKfcLCwqBWq3H+/HmpT+1z1PSpOUdlZSXS0tK0+lhZWSE0NFTqo0ssRERERHolVv/4xz9QXl5e5/idO3fwj3/8Q+fzDBs2DLGxsThw4AA2bNiAnJwcPPLII7h9+zZUKhWUSiWcnZ21PuPp6QmVSgUAUKlUWklVTXtNW2N91Go17ty5g6KiImg0mnr71D5HU7HUp6KiAmq1WutFRERE5kuvxGrVqlUoLS2tc7y8vByrVq3S+Tzjx4/H008/jf79+yMsLAz79+9HcXExduzYoU9YRicmJgZOTk7Sq2vXroYOiYiIiFqRXomVEAIKhaLO8dOnT8PV1VXvYJydnfHQQw8hKysLXl5eqKysRHFxsVaf/Px8eHl5AQC8vLzq7Myred9UH0dHR9jZ2cHd3R3W1tb19ql9jqZiqc+SJUtQUlIiva5evarbL4KIiIhMUrMSKxcXF7i6ukKhUOChhx6Cq6ur9HJycsKf/vQnPPPMM3oHU1paikuXLqFz584YPHgwbGxscOjQIak9IyMDubm5CA4OBgAEBwfj7NmzWrv34uLi4OjoCH9/f6lP7XPU9Kk5h1KpxODBg7X6VFdX49ChQ1IfXWKpj62tLRwdHbVeREREZL7aNafzRx99BCEEpk+fjlWrVsHJyUlqUyqV6NGjR6OJxv0WLlyIv/zlL+jevTuuX7+OFStWwNraGhEREXBycsKMGTMQHR0NV1dXODo6Yvbs2QgODsbw4cMBAOPGjYO/vz9eeOEFrFmzBiqVCkuXLkVUVBRsbW0BADNnzsQnn3yCRYsWYfr06Th8+DB27NiBffv2SXFER0dj6tSpCAoKwtChQ/HRRx+hrKwM06ZNAwCdYiEiIiLSq9xCQkKCqKys1OejWiZPniw6d+4slEql6NKli5g8ebLIysqS2u/cuSNeffVV4eLiIuzt7cUTTzwh8vLytM5x+fJlMX78eGFnZyfc3d3FggULRFVVlVaf+Ph4MXDgQKFUKkXPnj3Fpk2b6sSyfv160a1bN6FUKsXQoUPFsWPHtNp1iaUpLLdARERkeppz/1YIIYQ+CVl1dTWysrJQUFCA6upqrbaQkBAZUj7zo1ar4eTkhJKSEk4LEhERmYjm3L+bNRVY49ixY3juuedw5coV3J+XKRQKaDQafU5LREREZNL0SqxmzpyJoKAg7Nu3D507d653hyARERGRpdErscrMzMR3330HX19fueMhIiIiMll61bEaNmwYsrKy5I6FiIiIyKTpNWI1e/ZsLFiwACqVCgEBAbCxsdFq79+/vyzBEREREZkSvXYFWlnVHehSKBRSRXYuXq8fdwUSERGZnlbfFZiTk6NXYERERETmTK/Eqnv37nLHQURERGTy9EqstmzZ0mh7ZGSkXsEQEZFlyC4sxZWb5ejh1gE+7h0MHQ6RbPRaY+Xi4qL1vqqqCuXl5VAqlbC3t8fNmzdlC9CccI0VEVm64vJKzNmWjiOZhdKxED8PrI8IhJO9TSOfJDKc5ty/9Sq3cOvWLa1XaWkpMjIy8PDDD2Pbtm16BU1EROZvzrZ0JGUVaR1LyirC7G2nDBQRkbz0Sqzq4+fnh9WrV2Pu3LlynZKIiMxIdmEpjmQWQnPfRIlGCBzJLEROUZmBIiOSj2yJFQC0a9cO169fl/OURERkJq7cLG+0/fINJlZk+vRavL5nzx6t90II5OXl4ZNPPsHIkSNlCYyIiMxLd1f7Rtt7uHERO5k+vRKriRMnar1XKBTw8PDAmDFjsHbtWjniIiIiM9PTwwEhfh5IyirSmg60Vigw0teduwPJLOiVWFVXV8sdBxERWYD1EYGYve2U1q7Akb7uWB8RaMCoiOSjV2JVW021BoVC0eJgiIjIvDnZ22DLjKHIKSrD5RtlrGNFZkfvxetbtmxBQEAA7OzsYGdnh/79++Nf//qXnLEREZGZ8nHvgNG9OjGpslDZhaWIzygwy52geo1YffDBB1i2bBlmzZolLVb/5ZdfMHPmTBQVFWH+/PmyBklERESmzxIKxOpVed3HxwerVq2q8+iazZs3Y+XKlXxIcwNYeZ2IiCxZ5FepDW5e2DJjqAEja1yrV17Py8vDiBEj6hwfMWIE8vLy9DklERERmTFLKRCrV2Ll6+uLHTt21Dm+fft2+Pn5tTgoIiIiMi+WUiBWrzVWq1atwuTJk3HkyBFpjVVSUhIOHTpUb8JFREREls1SCsTqNWI1adIkpKSkwN3dHbt378bu3bvh7u6O1NRUPPHEE3LHSERERCaupkCs9X3lmawVCoT4eZjNDlG9Fq+Tfrh4nYiILFlJeVWdArGmsCuwOfdvvaYC9+/fD2tra4SFhWkdP3jwIKqrqzF+/Hh9TktERERmzBIKxOo1Ffj6669Do9HUOS6EwOuvv97ioIiIiMh8mXOBWL0Sq8zMTPj7+9c53rt3b2RlZbU4KCIiIiJTpFdi5eTkhOzs7DrHs7Ky0KGD+WWfRERERLrQK7GaMGEC5s2bh0uXLknHsrKysGDBAvz1r3+VLTgiIiIiU6JXYrVmzRp06NABvXv3ho+PD3x8fNCnTx+4ubnh/ffflztGIiIiIpOg165AJycnHD16FHFxcTh9+jTs7OzQv39/hISEyB0fERERkclo1TpWAQEB2L9/P7p27dpaX2FSWMeKiIhaU3ZhKa7cLDfLMgaG1Op1rHR1+fJlVFVVteZXEBERWbzi8krM2ZZucoU3zZFea6yIiIjIeMzZlo6krCKtY0lZRZi97ZSBIrJcTKyIiIhMWHZhKY
5kFkJz38oejRA4klmInKIyA0XWtrILSxGfUWDwn7dVpwKJiIiodV25Wd5o++UbZWa93srYpkE5YkVERGTCurvaN9rew818kyrA+KZBmVgRERGZsJ4eDgjx84C1QqF13FqhQIifh1mPVhnjNKhsiVVxcXGdY5999hk8PT3l+goiogYZy/oKIkNYHxGIkb7uWsdG+rpjfUSggSJqG7pMg7Y1vdZYvfvuu+jRowcmT54MAHjmmWfw/fffw8vLC/v378eAAQMAAM8995x8kRIR1cPY1lcQGYKTvQ22zBiKnKIyXL5RZjF1rIxxGlSvEauNGzdKRT/j4uIQFxeHn376CePHj8drr70ma4BERI0xtvUVRIbk494Bo3t1soikCjDOaVC9EiuVSiUlVnv37sUzzzyDcePGYdGiRTh+/LjewaxevRoKhQLz5s2Tjt29exdRUVFwc3ODg4MDJk2ahPz8fK3P5ebmIjw8HPb29ujUqRNee+013Lt3T6tPQkICBg0aBFtbW/j6+iI2NrbO93/66afo0aMH2rdvj2HDhiE1NVWrXZdYiKjtGOP6CiJqW8Y2DapXYuXi4oKrV68CAA4cOIDQ0FAAgBACGo1Gr0COHz+Ozz77DP3799c6Pn/+fPz444/YuXMnEhMTcf36dTz55JNSu0ajQXh4OCorK3H06FFs3rwZsbGxWL58udQnJycH4eHhGD16NNLT0zFv3jy8+OKLOHjwoNRn+/btiI6OxooVK3Dy5EkMGDAAYWFhKCgo0DkWImpbxri+gojaVs00aPzCUdg0bQjiF47ClhlDDbcUQOghKipKdO/eXYSGhgo3Nzdx+/ZtIYQQ27ZtE4GBgc0+3+3bt4Wfn5+Ii4sTjz76qJg7d64QQoji4mJhY2Mjdu7cKfW9cOGCACCSk5OFEELs379fWFlZCZVKJfXZsGGDcHR0FBUVFUIIIRYtWiT69u2r9Z2TJ08WYWFh0vuhQ4eKqKgo6b1GoxHe3t4iJiZG51iaUlJSIgCIkpISnfoTUeMuFdwW3RfvbfCVXVhq6BDJSF0quC0OX8zn3xHSSXPu33qNWH344YeYNWsW/P39ERcXBwcHBwBAXl4eXn311WafLyoqCuHh4dLIV420tDRUVVVpHe/duze6deuG5ORkAEBycjICAgK0dh+GhYVBrVbj/PnzUp/7zx0WFiado7KyEmlpaVp9rKysEBoaKvXRJRYialvGuL6CjFtxeSUiv0rFmLWJmLbpOEa/n4DIr1JRUs7n2pI89NoVaGNjg4ULF9Y5Pn/+/Gaf69tvv8XJkyfrXZulUqmgVCrh7OysddzT0xMqlUrqc39Jh5r3TfVRq9W4c+cObt26BY1GU2+fixcv6hzL/SoqKlBRUSG9V6vV9fYjIv2tjwjE7G2ntHYFWsI2c9JPY5sdtswYaqCoyJzo/UibzMxMxMfHo6CgANXV1Vpttdc3Nebq1auYO3cu4uLi0L59e31DMVoxMTFYtWqVocMgMmvmus08u7AUV26Wm83PYwxqNjvcr/ZmB/6uqaX0Sqy++OILvPLKK3B3d4eXlxcUtYbhFQqFzolVWloaCgoKMGjQIOmYRqPBkSNH8Mknn+DgwYOorKxEcXGx1khRfn4+vLy8AABeXl51du/V7NSr3ef+3Xv5+flwdHSEnZ0drK2tYW1tXW+f2udoKpb7LVmyBNHR0dJ7tVot7aYkInn5uJtHAsK6XK3H0p+pR21DrzVW//znP/H2229DpVIhPT0dp06dkl4nT57U+Txjx47F2bNnkZ6eLr2CgoIwZcoU6c82NjY4dOiQ9JmMjAzk5uYiODgYABAcHIyzZ89q7d6Li4uDo6Mj/P39pT61z1HTp+YcSqUSgwcP1upTXV2NQ4cOSX0GDx7cZCz3s7W1haOjo9aLiKgxrMvVeoyxmCSZH71GrG7duoWnn366xV/esWNH9OvXT+tYhw4d4ObmJh2fMWMGoqOj4erqCkdHR8yePRvBwcEYPnw4AGDcuHHw9/fHCy+8gDVr1kClUmHp0qWIioqCra0tAGDmzJn45JNPsGjRIkyfPh2HDx/Gjh07sG/fPul7o6OjMXXqVAQFBWHo0KH46KOPUFZWhmnTpgEAnJycmoyFiKglOFXVumo2OyRlFWnVPrNWKDDS152/W5KFXiNWTz/9NH7++We5Y6nXhx9+iMcffxyTJk1CSEgIvLy8sGvXLqnd2toae/fuhbW1NYKDg/H8888jMjIS//jHP6Q+Pj4+2LdvH+Li4jBgwACsXbsWX375JcLCwqQ+kydPxvvvv4/ly5dj4MCBSE9Px4EDB7QWtDcVCxFRS7AuV+sztmKSZH4UQtxXslgHMTEx+OCDDxAeHo6AgADY2GjP+8+ZM0e2AM2JWq2Gk5MTSkpKOC1IRHVkF5ZizNrEBtvjF47iqIpMzG2zA7Wu5ty/9UqsfHx8Gj6hQoHs7OzmntIiMLEioqZEfpXa4FQVywEQGUZz7t96rbHKycnRKzAiImoc63IRmTa961jVqBnwUtxX+ZiIiJrPXOtyEVkKvRavA8CWLVsQEBAAOzs72NnZoX///vjXv/4lZ2xERBbLx70DRvfqxKSKyMToNWL1wQcfYNmyZZg1axZGjhwJAPjll18wc+ZMFBUV6fVoGyIiIiJTp/fi9VWrViEyMlLr+ObNm7Fy5UquwWoAF68T6Y+PeCEiQ2n1xet5eXkYMWJEneMjRoxAXl6ePqckIqoXH/FCRKZErzVWvr6+2LFjR53j27dvh5+fX4uDIiKqwUe8EJEp0WvEatWqVZg8eTKOHDkirbFKSkrCoUOH6k24iIj0wUe8EJGp0WvEatKkSUhJSYG7uzt2796N3bt3w93dHampqXjiiSfkjpGILBQf8UJEpkbvOlaDBw/G119/LWcsRERaurvaN9rew42jVURkXHROrNRqtbQSXq1WN9qXO96ISA49PRwQ4ufR4CNeOA1IRMZG58TKxcUFeXl56NSpE5ydneuttC6EgEKhgEajkTVIIrJcfMQLEZkSnROrw4cPw9XVFQAQHx/fagEREdXGR7wQkSnRObF69NFHpT/7+Piga9eudUathBC4evWqfNEREf1/Pu5MqIjI+Om1K9DHxweFhXW3QN+8eRM+Pj4tDoqIiIjIFOmVWNWspbpfaWkp2rdv3+KgiIiIiExRs8otREdHAwAUCgWWLVsGe/s/tkJrNBqkpKRg4MCBsgZIREREZCqalVidOvXfR0gIIXD27FkolUqpTalUYsCAAVi4cKG8ERIR3YcPZCYiY9WsxKpmN+C0adOwbt061qsiojbFBzJbFibQZIoUQtSquketSq1Ww8nJCSUlJUxKifQQ+VVqg8VCt8wYasDISE5MoMnYNOf+rfcjbU6cOIEdO3YgNzcXlZWVWm27du3S97RERPXiA5ktx5xt6UjKKtI6lpRVhNnbTjGBJqOn167Ab7/9FiNGjMCFCxfwww8/oKqqCufPn8fhw4fh5OQkd4xERHo/kDm7sBTxGQXIKeIDm01BTQKtuW8ypXYCTWTM9Bqxeuedd/Dhhx8iKioKH
Tt2xLp16+Dj44O///3v6Ny5s9wxEhE1+4HMnE4yTbok0ByZJGOm14jVpUuXEB4eDuC/uwHLysqgUCgwf/58fP7557IGSEQE/PFAZuv7auhZKxQI8fOoc7NtbDqJjFdzE2giY6NXYuXi4oLbt28DALp06YJz584BAIqLi1Fe3vj/bRAR6Wt9RCBG+rprHavvgcycTjJdzU2giYyNXlOBISEhiIuLQ0BAAJ5++mnMnTsXhw8fRlxcHMaOHSt3jEREAHR/IDOnkwxHjhIJ6yMCMXvbKa1p3PoSaCJjpFdi9cknn+Du3bsAgDfffBM2NjY4evQoJk2ahKVLl8oaIBHR/Zp6IDOnk9qenGvadE2giYwR61i1IdaxImob2YWlmLPtFH69rkZ1reOsedV6WGOMzFmb1LGqrq5GVlYWCgoKUF1drdUWEhKi72mJqI2ZU3Xr+kZNauN0UutgjTGiP+iVWB07dgzPPfccrly5gvsHvBQKBTQajSzBEVHrMcdyBPXtBLRSAP7ejlgfMYg391bCNW1Ef9BrV+DMmTMRFBSEc+fO4ebNm7h165b0unnzptwxElErMLdyBA3tBKwWwLlragNFZRm4po3oD3qNWGVmZuK7776Dr6+v3PEQURswpqkbuaYiOWpiODUlEhpaY8XfO1kSvRKrYcOGISsri4kVkYkyhiRE7qlIjpoYlimUSDCn9YRkvPRKrGbPno0FCxZApVIhICAANjba/wj2799fluCIqHUYQxIi94N2OWpiWMZcIsEc1xOS8dKr3IKVVd2lWQqFAkIILl5vBMstkDEx5Pb47MJSjFmb2GB7/MJRet2US8qr6oya8AZKLAVBLdXq5RZycnL0CoyIjIchp25aayrSmEdNyDCMaT0hWQa9Eqvu3bvLHQcRtTFDJiGtPRXZVGV2shzGsJ6QLIvOidWePXswfvx42NjYYM+ePY32/etf/9riwIiobRgiCeF6KGorxrCekCyLzmusrKysoFKp0KlTp3rXWEkn5BqrBnGNFdEfuB6K2grXWFFLNef+zWcFtiEmVkR1cT0UtTYm8dRSTKyMFBMrIv2xBhG1FJN40ldz7t96PdJmzpw5+Pjjj+sc/+STTzBv3jydz7Nhwwb0798fjo6OcHR0RHBwMH766Sep/e7du4iKioKbmxscHBwwadIk5Ofna50jNzcX4eHhsLe3R6dOnfDaa6/h3r17Wn0SEhIwaNAg2NrawtfXF7GxsXVi+fTTT9GjRw+0b98ew4YNQ2pqqla7LrEQkfyKyysR+VUqxqxNxLRNxzH6/QREfpWKkvIqQ4dGJsbHvQNG9+rEpIpalV6J1ffff4+RI0fWOT5ixAh89913Op/ngQcewOrVq5GWloYTJ05gzJgxmDBhAs6fPw8AmD9/Pn788Ufs3LkTiYmJuH79Op588knp8xqNBuHh4aisrMTRo0exefNmxMbGYvny5VKfnJwchIeHY/To0UhPT8e8efPw4osv4uDBg1Kf7du3Izo6GitWrMDJkycxYMAAhIWFoaCgQOrTVCxE1DrM7ZmGRGTe9JoKbN++Pc6dO1fnkTZZWVno168f7t69q3dArq6ueO+99/DUU0/Bw8MD33zzDZ566ikAwMWLF9GnTx8kJydj+PDh+Omnn/D444/j+vXr8PT0BABs3LgRixcvRmFhIZRKJRYvXox9+/bh3Llz0nc8++yzKC4uxoEDBwD89xE9Q4YMwSeffAIAqK6uRteuXTF79my8/vrrKCkpaTIWXXAqkKh5WquQKBFRc7T6VKCvr6+UlNT2008/oWfPnvqcEhqNBt9++y3KysoQHByMtLQ0VFVVITQ0VOrTu3dvdOvWDcnJyQCA5ORkBAQESEkVAISFhUGtVkujXsnJyVrnqOlTc47KykqkpaVp9bGyskJoaKjUR5dY6lNRUQG1Wq31IiLd6VKDiIjImOhVIDQ6OhqzZs1CYWEhxowZAwA4dOgQ1q5di48++qhZ5zp79iyCg4Nx9+5dODg44IcffoC/vz/S09OhVCrh7Oys1d/T0xMqlQoAoFKptJKqmvaatsb6qNVq3LlzB7du3YJGo6m3z8WLF6VzNBVLfWJiYrBq1SrdfhFEbchUFoKzBhERmRq9Eqvp06ejoqICb7/9Nt566y0AQI8ePbBhwwZERkY261y9evVCeno6SkpK8N1332Hq1KlITGx46N+ULFmyBNHR0dJ7tVqNrl27GjAisnSm9jBaFhIlIlOj11QgALzyyiv4/fffkZ+fD7Vajezs7GYnVQCgVCrh6+uLwYMHIyYmBgMGDMC6devg5eWFyspKFBcXa/XPz8+Hl5cXAMDLy6vOzrya9031cXR0hJ2dHdzd3WFtbV1vn9rnaCqW+tja2ko7HmteRIZkigvB10cEYqSvu9axtnqmIRFRc+mVWOXk5CAzMxMA4OHhAQcHBwBAZmYmLl++3KKAqqurUVFRgcGDB8PGxgaHDh2S2jIyMpCbm4vg4GAAQHBwMM6ePau1ey8uLg6Ojo7w9/eX+tQ+R02fmnMolUoMHjxYq091dTUOHTok9dElFiJjV/MwWs19+1VqP4w2u7AU8RkFyCkynrVLNc80jF84CpumDUH8wlHYMmOoUY6wERHpNRX4t7/9DdOnT4efn5/W8ZSUFHz55ZdISEjQ6TxLlizB+PHj0a1bN9y+fRvffPMNEhIScPDgQTg5OWHGjBmIjo6Gq6srHB0dMXv2bAQHB0u78MaNGwd/f3+88MILWLNmDVQqFZYuXYqoqCjY2toCAGbOnIlPPvkEixYtwvTp03H48GHs2LED+/btk+KIjo7G1KlTERQUhKFDh+Kjjz5CWVkZpk2bBgA6xUJk7JpaCD77m5M4d/2PDRbGNkXIBysTkSnQK7E6depUvXWshg8fjlmzZul8noKCAkRGRiIvLw9OTk7o378/Dh48iD/96U8AgA8//BBWVlaYNGkSKioqEBYWhv/5n/+RPm9tbY29e/filVdeQXBwMDp06ICpU6fiH//4h9THx8cH+/btw/z587Fu3To88MAD+PLLLxEWFib1mTx5MgoLC7F8+XKoVCoMHDgQBw4c0FrQ3lQsRMauqYXgv17X3rVaM0XIZ6kREelOrzpWTk5OSEhIQGCg9hqHtLQ0jBo1Crdv35YtQHPCOlZkaPU9jNZKAVQ38q8Aa0URkaVr9TpWISEhiImJgUajkY5pNBrExMTg4Ycf1ueURNQG6lsI7u/d+D8SrBVFNYxxDR6RsdFrKvDdd99FSEgIevXqhUceeQQA8J///AdqtRqHDx+WNUAikk/NQvDaD6MVQjRa3Zy1osjUynQQGZJeI1b+/v44c+YMnnnmGRQUFOD27duIjIzExYsX0a9fP7ljJCKZ1X4YbU2tKGuFQquPtUKBED8PTgOSSZbpIDIUvdZYkX64xoqMVUl5FWZvO8URCaqDz2skat79W6+pwBrl5eXIzc1FZWWl1vH+/fu35LRE1MbqmyLkzZIA3Z7XyL8rRH/QK7EqLCzEtGnT8NNPP9XbXntROxGZDtaKovvxeY1EzaPXGqt58+ah
uLgYKSkpsLOzw4EDB7B582b4+flhz549csdIREQGwjV4RM2j14jV4cOH8e9//xtBQUGwsrJC9+7d8ac//QmOjo6IiYlBeHi43HESEZFMsgtLceVmuc5TvusjAuuswePzGonqp1diVVZWhk6dOgEAXFxcUFhYiIceeggBAQE4efKkrAESEZE89C2bwDV4RLrTayqwV69eyMjIAAAMGDAAn332Ga5du4aNGzeic+fOsgZIZE5YYJEMqaVlE2qX6SCi+uk1YjV37lzk5eUBAFasWIHHHnsMW7duhVKpRGxsrJzxEZkFFlgkQ8suLNX6+1dDIwSOZBYip4i7+4jkoFdi9fzzz0t/Hjx4MK5cuYKLFy+iW7ducHd3b+STRJapsZECPuSY2gLLJhC1Db2mAn/55Ret9/b29hg0aBCTKqJ61IwUaO6rxVt7pICotbFsAlHb0CuxGjNmDHx8fPDGG2/g119/lTsmIrOiy0gBUWtj2QSitqFXYnX9+nUsWLAAiYmJ6NevHwYOHIj33nsPv//+u9zxEZk8jhSQsVgfEYiRvtozCyybQCSvFj8rMCcnB9988w22bduGixcvIiQkBIcPH5YrPrPCZwVarsivUpGUVaQ1HWitUGCkrzvXWFGbY9kEouZpzv1blocwazQa/PTTT1i2bBnOnDnDR9o0gImV5TLWhxw3t1AkEZElarOHMCclJWHr1q347rvvcPfuXUyYMAExMTEtOSWRWTK2AottVf6BiRsRWRq9RqyWLFmCb7/9FtevX8ef/vQnTJkyBRMmTIC9feNrSSwdR6zIWLT21CTrdhGROWnO/VuvxetHjhzBa6+9hmvXrmHv3r2IiIhgUkVkItqi/ENLK3wTEZkqvRKrpKQkvPrqq03WrQoPD5cqtBORcWjt8g+s20VElkyvxEpXR44cwZ07d1rzK4iomVq7/APrdhGRJWvVxIqIjE9rF4pk3S4ismRMrIgsUGsWimSFbyKyZC0qt0BEujOm0gOtXf5hfURgnbpdrPBNRJaAiRVRKzPm0gM+7q2T5Blb3S4iorbCqUCiVmbJpQd83DtgdK9OTKqIyGLolViVlem2q+eNN96Aq6urPl9BZBZYeoCIyLLolVh5enpi+vTp+OWXXxrtt2TJEjg7O+vzFURmgaUH5JVdWIr4jAImpERktPRaY/X1118jNjYWY8aMQY8ePTB9+nRERkbC29tb7viITBpLD8jDmNepERHVpteI1cSJE7F7925cu3YNM2fOxDfffIPu3bvj8ccfx65du3Dv3j254yQySSw9IA9LXqdGRKalRYvXPTw8EB0djTNnzuCDDz7A//3f/+Gpp56Ct7c3li9fjvLyxqdBiCxBa9aMsgRttU6N04xEJIcWlVvIz8/H5s2bERsbiytXruCpp57CjBkz8Pvvv+Pdd9/FsWPH8PPPP8sVK8nEmOopWQJjLD1gSn8HdFmn1pKfgdOMRCQnvRKrXbt2YdOmTTh48CD8/f3x6quv4vnnn9daqD5ixAj06dNHrjhJBnLdQEzppmxMWqtmVHOYYhLR2uvUGptm3DJjaIvOTUSWR6/Eatq0aXj22WeRlJSEIUOG1NvH29sbb775ZouCI3m19AZiijdl0maKSUTNOrWkrCKt6UBrhQIjfd1blKzWTDPer/Y0o6GTYSIyLXqtscrLy8Nnn33WYFIFAHZ2dlixYoXegZG85FinwgXEps2Ua2q11jo1lsMgIrnpPGKlVqsbfV+bo6Oj/hFRq2jpOhX+n73pa+21Sq2ptdapsRwGEclN58TK2dkZivu2jN9PCAGFQgGNRtPiwEheLb2BmPJNmf7LHJIIudepteY0IxFZJp0Tq/j4+NaMg1pZS28g5nBTtnRMIuq3PiIQs7ed0hqRZTkMItKXQoj7FlzooKqqCjY29S9WLioqgru7e71tlk6tVsPJyQklJSUGmS4tKa+qcwNpzuLzyK9SG7wpG+vCZ9LW0r8D5syYymEQkXFpzv1br8Rq0qRJ+O677+pMDebn52Ps2LE4d+5cc09pEQydWNXQ9wbCm7L5YBJBRKS75ty/9doVmJubixdffFHrWF5eHkaNGoXevXvrfJ6YmBgMGTIEHTt2RKdOnTBx4kRkZGRo9bl79y6ioqLg5uYGBwcHTJo0Cfn5+XXiCQ8Ph729PTp16oTXXnutzmN1EhISMGjQINja2sLX1xexsbF14vn000/Ro0cPtG/fHsOGDUNqamqzYzEFPu4dMLpXp2bfUGsWEMcvHIVN04YgfuEobJkxlEmVCdL37wARETVOr8Rq//79OHr0KKKjowEA169fx6hRoxAQEIAdO3bofJ7ExERERUXh2LFjiIuLQ1VVFcaNG4eysj+2OM+fPx8//vgjdu7cicTERFy/fh1PPvmk1K7RaBAeHo7KykocPXpUqgS/fPlyqU9OTg7Cw8MxevRopKenY968eXjxxRdx8OBBqc/27dsRHR2NFStW4OTJkxgwYADCwsJQUFCgcyyWgjdlIiKiBgg95ebmim7duon58+cLPz8/MXnyZHHv3j19TyeEEKKgoEAAEImJiUIIIYqLi4WNjY3YuXOn1OfChQsCgEhOThZCCLF//35hZWUlVCqV1GfDhg3C0dFRVFRUCCGEWLRokejbt6/Wd02ePFmEhYVJ74cOHSqioqKk9xqNRnh7e4uYmBidY2lKSUmJACBKSkp06k9ERESG15z7t94PYe7atSvi4uKwdetWDB06FNu2bYO1tXWLkrySkhIAgKurKwAgLS0NVVVVCA0Nlfr07t0b3bp1Q3JyMgAgOTkZAQEB8PT0lPqEhYVBrVbj/PnzUp/a56jpU3OOyspKpKWlafWxsrJCaGio1EeXWIiIiMiy6VxuwcXFpd46VuXl5fjxxx/h5uYmHbt582azA6mursa8efMwcuRI9OvXDwCgUqmgVCq1nkEIAJ6enlCpVFKf2klVTXtNW2N91Go17ty5g1u3bkGj0dTb5+LFizrHcr+KigpUVFRI7xsrqkpERESmT+fE6qOPPmrFMICoqCicO3cOv/zyS6t+T1uKiYnBqlWrDB0GERERtRGdE6upU6e2WhCzZs3C3r17ceTIETzwwAPScS8vL1RWVqK4uFhrpCg/Px9eXl5Sn/t379Xs1Kvd5/7de/n5+XB0dISdnR2sra1hbW1db5/a52gqlvstWbJEWuAP/HfEqmvXrrr8SoiIiMgE6b3G6tKlS1i6dCkiIiKknXM//fSTtK5JF0IIzJo1Cz/88AMOHz4MHx8frfbBgwfDxsYGhw4dko5lZGQgNzcXwcHBAIDg4GCcPXtWa/deXFwcHB0d4e/vL/WpfY6aPjXnUCqVGDx4sFaf6upqHDp0SOqjSyz3s7W1haOjo9aLiIiIzJg+q+MTEhKEnZ2dCA0NFUqlUly6dEkIIURMTIyYNGmSzud55ZVXhJOTk0hISBB5eXnSq7y8XOozc+ZM0a1bN3H48GFx4sQJERwcLIKDg6X2e/fuiX79+olx48aJ9PR0ceDAAeHh4SGWLFki9cnOzhb29vbitddeExcuXBCffvq
psLa2FgcOHJD6fPvtt8LW1lbExsaKX3/9Vbz88svC2dlZa7dhU7E0hbsCiYiITE9z7t96JVbDhw8Xa9euFUII4eDgICVWKSkpokuXLjqfB0C9r02bNkl97ty5I1599VXh4uIi7O3txRNPPCHy8vK0znP58mUxfvx4YWdnJ9zd3cWCBQtEVVWVVp/4+HgxcOBAoVQqRc+ePbW+o8b69etFt27dhFKpFEOHDhXHjh3TatcllsYwsfrDpYLb4vDFfJFdWGroUKgevD5ERH9ozv1br0faODg44OzZs/Dx8UHHjh1x+vRp9OzZE5cvX0bv3r1x9+5d+YbUzIixPNLGkIrLKzFnWzofi2OkeH2IiOpq9UfaODs7Iy8vr87xU6dOoUuXLvqckizEnG3pSMoq0jqWlFWE2dtOGSgiqo3Xh4ioZfRKrJ599lksXrwYKpUKCoUC1dXVSEpKwsKFCxEZGSl3jGQmsgtLcSSzEJr7Bkk1QuBIZiFyisoa+CS1BV4fIqKW0yuxeuedd9C7d2907doVpaWl8Pf3R0hICEaMGIGlS5fKHSOZiSs3yxttv3yDN25D4vUhImo5netY1aZUKvHFF19g2bJlOHfuHEpLSxEYGAg/Pz+54yMz0t3VvtH2Hm58qPP9sgtLceVmOXq4dZAeel3fMTnw+hARtZxeiVUNLy8v3LlzBw8++CDatWvRqcgC9PRwQIifB5KyirSmm6wVCoz0dZc1STB19S0iH/GgG4QAkrNvSMfkXFiu7/VprUSPiMgU6TUVWF5ejhkzZsDe3h59+/ZFbm4uAGD27NlYvXq1rAGSeVkfEYiRvu5ax0b6umN9RKCBIjJO9S0iP3rphlZSBci/sLw516e4vBKRX6VizNpETNt0HKPfT0DkV6koKa+SLR4iIlOjV7mFuXPnIikpCR999BEee+wxnDlzBj179sS///1vrFy5EqdOcQdRfVhu4Q85RWW4fKOMoxz1yC4sxZi1ic36TPzCUbL+HnW5PpFfpTY4urVlxlDZYiEiMrTm3L/1mr/bvXs3tm/fjuHDh0OhUEjH+/bti0uXLulzSrIwPu5MqBrS1CLy+ly+USbr77Op61Ozg/B+tXcQ8voSkSXSayqwsLAQnTp1qnO8rKxMK9EiMjfZhaWIzyho1dIDTS0ir09bLyznDkIiovrpNWIVFBSEffv2Yfbs2QAgJVNffvllgw8kJjJlbVmRvKFF5PUx1MJ/7iAkIqqfXonVO++8g/Hjx+PXX3/FvXv3sG7dOvz66684evQoEhObtzaEyBQ0VpG8NdYTrY8IxOxtp5rcFWiohf+67iDkjkEisjR6LV4HgOzsbMTExOD06dMoLS3FoEGDsHjxYgQEBMgdo9ng4nXT1NRicrkXjtdW3yJyY1n4X1JeVSf5qxnFExB85iARmY3m3L/1SqwiIyMxevRohISE4MEHH9Q7UEvDxMo0xWcUYNqm4w22b5o2BKN71V1zaCnqS/Qa2zG48q/+HMUiIpPS6rsClUolYmJi8OKLL8Lb2xuPPvooRo0ahUcffZTV18nscD1R4+7fQdjUjsHao38cxSIic6PXrsAvv/wSv/32G3Jzc7FmzRo4ODhg7dq16N27Nx544AG5YyQyqJr1RNb37Xi1VigQ4udhkqMurbm7sTnlIuQucEpEZGgteg6Ni4sL3Nzc4OLiAmdnZ7Rr1w4eHh5yxUZkNOpbTG6KFePbYndjc8pFsO4VEZkbvdZYvfHGG0hISMCpU6fQp08faSowJCQELi4urRGnWeAaK9NnLAvH9dVW1dLr+57GWPo6NSIybq2+xmr16tXw8PDAihUr8OSTT+Khhx7SK1AiU2PKFePbslp6fSN8jbH0dWpEZD70SqxOnTqFxMREJCQkYO3atVAqldKo1ahRo5hoGTnWFtKPqf/edKmWLtfP5WRvgy0zhmqN8K349/km614REZk6vetY1Xb69Gl8+OGH2Lp1K6qrq6HRaOSIzewYeiqwLauHmxNz+b0Zsh4X0HjdK1P6PRKR5Wn1qUAhBE6dOoWEhAQkJCTgl19+gVqtRv/+/fHoo4/qFTS1vle+PqlVtRsAjmQWYubXadj28nADRWX82rrqemvRtVp6a6lvFMvcR6pMfZSTiJpPr8TK1dUVpaWlGDBgAB599FG89NJLeOSRR+Ds7CxzeCSX7MLSOklVjeTsG9yV1YC2XJfUFoxhd6Mpr1PTlbmMchJR8+mVWH399dd45JFHuLPNhKTk3Gy0/Vj2DbO/2emjLdcltQVLHDUyBHMZ5SSi5tMrsQoPD5c7Dmp1jS+lUzTaarnMteq6JYwaGYq5jXISUfPoVXmdTM8wH7fG23s23m6pzLHqOrUuXUY5ddGa1fGJqPW0qPI6mY6eHg4Y8aAbjl6qu85qxINuTBAaYQzrksi41V6k3tJRTq7PIjJtspRbIN0YutxC7o0yTPg0CbfKq6RjLvY22BP1MLq66f4YEkvFdUl0v4aSoHvV1UjJvqlXhfu2qo5PRLprzv2bU4EWZOnu81Dfuad1TH3nHt7cfc5AEZkWH/cOGN2rE5MqkjS0SF2I/45q1qbLKGfN+qz7HwVUe30WERk3TgVaCC6oJWqZ+2tSNfbfVHL2DcQvHAUAzRrlNLddqESWiImVheA/2ET6aWi675khDzT6ucs3ypo9wmmuu1CJLAkTKwvR1JxvOysWXCCqT0PTfXeq7jXwif/SJwkydHV8Imo5rrGyENVNtN+r5h4Govs1tubp+OVbGNLdRfZSHOsjAvVan0VExoEjVhaCI1ZEzdfUFPrfRvSAnfJ3WUtxmHN1fD47kSwBEysLwRErouZras2TfxcnbBng3SpJkDlVx2dtLrIknAq0EFwUS9R8ulbeZymOxjX27EQic8PEioioEVzz1DKszUWWhlOBFoLlFoj0Y85rntqCsfzbw/Vd1FaYWFkITgWSKTOGm6I5rXlqS4b+t4fru6itMbGyED09HBDc0w3J2XUfwhzc002qJG3omxfJxxyuJ2+Kps/QtbkaW9/FZy9Sa2BiZUEUDVRU0FRXI/KrVN68zIQ5JSO8KZqH9RGBmL3tlKxlKXTBR3mRITCxshDZhaU4eqnuaBUApF6+VWcXA29epstckhHeFM2HodapGcv6LrIs3BVoIZr6B+b+OlfmvmMnu7AU8RkFZvfzmdMOLF1uimRa2roshaHXd5FlMmhideTIEfzlL3+Bt7c3FAoFdu/erdUuhMDy5cvRuXNn2NnZITQ0FJmZmVp9bt68iSlTpsDR0RHOzs6YMWMGSktLtfqcOXMGjzzyCNq3b4+uXbtizZo1dWLZuXMnevfujfbt2yMgIAD79+9vdizGrKl/YBry67USmSMxrOLySkR+lYoxaxMxbdNxjH4/AZFfpaKkvMrQocnCnJIR3hSppXStQ0YkJ4MmVmVlZRgwYAA+/fTTetvXrFmDjz/+GBs3bkRKSgo6dOiAsLAw3L17V+ozZcoUnD9/HnFxcdi7dy+OHDmCl19+WWpXq9UYN24cunfvjrS0NLz33ntYuXIlPv/8c6nP0aNHERERgR
kzZuDUqVOYOHEiJk6ciHPnzjUrFmPW0D8wTT3JJvbo5dYLygB0LVRoqiNa5pSM8KZIcmAdMmprCiGEUTzLRKFQ4IcffsDEiRMB/HeEyNvbGwsWLMDChQsBACUlJfD09ERsbCyeffZZXLhwAf7+/jh+/DiCgoIAAAcOHMCf//xn/P777/D29saGDRvw5ptvQqVSQalUAgBef/117N69GxcvXgQATJ48GWVlZdi7d68Uz/DhwzFw4EBs3LhRp1h0oVar4eTkhJKSEjg6Osrye2uOkvKqOgtIQ/w8cPtuJU5dbXhkKn7hKLPYNZhdWIoxaxMbbI9fOAou9jYmv/A78qvUBndgmdIaK6Dhv7OmdD3IOLAOGbVEc+7fRrvGKicnByqVCqGhodIxJycnDBs2DMnJyQCA5ORkODs7S0kVAISGhsLKygopKSlSn5CQECmpAoCwsDBkZGTg1q1bUp/a31PTp+Z7dInFFAjUn0NHDOvW6OfOXS8xi+kzXabJzOHRG+b0f+g1i57jF47CpmlDEL9wFLbMGMqkipqNjx2itmK0uwJVKhUAwNPTU+u4p6en1KZSqdCpUyet9nbt2sHV1VWrj4+PT51z1LS5uLhApVI1+T1NxVKfiooKVFRUSO/VanUjP3Hre+Xrk3XqWB3JLMTtu5WNfm7L0cs4eaVY65ip7TIrLq/Ep4ezGu1jrUCju9C+Tc3FsP9f88uYmWOlcBbnJCJTYbQjVuYgJiYGTk5O0qtr164GiyW7sLTe4qAAcOpqCYZ0d6l3LUtQdxccv3zL5HeZzdmWjlO5xfW21azZ0TQxKf76rrMmNVrH/0MnImp7RptYeXl5AQDy8/O1jufn50ttXl5eKCgo0Gq/d+8ebt68qdWnvnPU/o6G+tRubyqW+ixZsgQlJSXS6+rVq0381K0n/mJBo+1/DvCqd/po2ogejX7OFHaZNVSCoMag7s5YHxGo885JU5saJCKitmO0iZWPjw+8vLxw6NAh6ZharUZKSgqCg4MBAMHBwSguLkZaWprU5/Dhw6iursawYcOkPkeOHEFV1R8jDHFxcejVqxdcXFykPrW/p6ZPzffoEkt9bG1t4ejoqPUylM8SLzXafv66ut61LH28G4/ZFHaZNbW26tXRvnCyt2lwF9r9TG20joiI2o5BE6vS0lKkp6cjPT0dwH8XiaenpyM3NxcKhQLz5s3DP//5T+zZswdnz55FZGQkvL29pZ2Dffr0wWOPPYaXXnoJqampSEpKwqxZs/Dss8/C29sbAPDcc89BqVRixowZOH/+PLZv345169YhOjpaimPu3Lk4cOAA1q5di4sXL2LlypU4ceIEZs2aBQA6xWLMsgtLUVDa+Dqq4jv/TTzvnz4yhy3vzSlBUN/C74aYwmgdERG1LYMuXj9x4gRGjx4tva9JdqZOnYrY2FgsWrQIZWVlePnll1FcXIyHH34YBw4cQPv27aXPbN26FbNmzcLYsWNhZWWFSZMm4eOPP5banZyc8PPPPyMqKgqDBw+Gu7s7li9frlXrasSIEfjmm2+wdOlSvPHGG/Dz88Pu3bvRr18/qY8usRirpkZsACCsb8NTmoZ6zpdcmvMQ2NoLv49l38CSXWcbPK8pjNYREVHbMpo6VpbAUHWsmqrfZKUAsmPCmzyPKe8y07cekjnVhCIiIv005/7NxKoNtXViVVPQ01oBrDmQgXPX6y/3sPixXnisX2eTS5b00dzkkAUqiYiIiZWRaqvEqri8sk71cF0wYWiYKY/WERFRy5hF5XXSX33Vw3XBMgINY00oIiLSBRMrM9NUzabGsIwAERFRyzCxMjO67ABsCssIEBER6YeJlZnRtXp4Y1hGgIiISD9MrMyMrtXD62NKRT+JiIiMERMrM7Q+IhBDfVyb/TlTKvpJRERkjAxaeZ1ah5O9DWysrWClAKqbWMNupQD8vR2xPmIQR6qIiIhaiImVGarZGaiLagGcu1Z/4VAiIiJqHk4FmiF9dgZyJyAREVHLMbEyQ/rsDCy6XdEKkRAREVkWJlZmSJ+dgddL7rRiRERERJaBa6zMTM2DlxeOewh3qu7h+OVbOn1uUDeXVo6MiIjI/DGxMhNNPXh5SHcXZOTfhvruvTptTnY2eMTPo7VDJCIiMnucCjQTTT14+WRuMUQDzw98kGUWiIiIZMHEygzo8uBljRC4XaGpt+3k1WI+eJmIiEgGTKzMgBwPXk7JviFDJERERJaNiZUZkOPBy4WlLLdARETUUkysTFxxeSVW7vm1xefxcLCVIRoiIiLLxsTKxDW1aF1Xw3q6yRANERGRZWNiZcKaWrT+2rhe6O5mB13KhP5+q+XrtIiIiCwdEysT1tSi9fd+zsCVG3fQ8F7BPxy6kC9PUERERBaMiZUJk2PReg13rrEiIiJqMSZWJqynhwOCZVobNaCrsyznISIismRMrExcM56z3Kh71bpMGBIREVFjmFiZsOzCUhy9JE9hz3ZWMmVoREREFoyJlQlrTsX1ptImjlgRERG1HBMrE9aci2evtG60nSNWRERELcfEyoRV69jPsX073Kms/wHMNThiRURE1HLtDB0A6U/Xcgvqu/ea7NPDrUNLwyEiIrJ4HLEyYT09HBDi5wFrubYGEhERUYswsTJx6yMCMdLXvcXnuXyjTIZoiIiILBunAk2ck70NtswYipyiMhzLLsKSXef0Og+nAomIiFqOI1Zmwse9AyKGdkdQd5dmf3bEg27wcWdiRURE1FJMrMzMtBE9mv0ZwQ2BREREsmBiZWb6eDs2+zPJ2TeQU8Q1VkRERC3FxMrMNLRTsKkLzcXrRERELcfEygzVt1NwUBNrr7h4nYiIqOW4K9AM1d4pePlGGXq4dYCPewdEfpWKpKwiaGotqrJWKDDS152L14mIiGTAESsz5uPeAaN7dZKSpn9O7AdHO+1c2tGuHd6e2M8Q4REREZkdJlYWZOnuc1Df0X68jfrOPby5W7/aV0RERKSNiVUzffrpp+jRowfat2+PYcOGITU11dAh6SS7sBRHMgu1pgEBQCMEjmQWclcgERGRDJhYNcP27dsRHR2NFStW4OTJkxgwYADCwsJQUFBg6NCadOVmeaPt3BVIRETUckysmuGDDz7ASy+9hGnTpsHf3x8bN26Evb09/vd//9fQoTWpu6t9o+3cFUhERNRyTKx0VFlZibS0NISGhkrHrKysEBoaiuTk5Ho/U1FRAbVarfUylIbqW1krFAjx8+CuQCIiIhkwsdJRUVERNBoNPD09tY57enpCpVLV+5mYmBg4OTlJr65du7ZFqA2qr77VSF93rI8INFBERERE5oV1rFrRkiVLEB0dLb1Xq9UGTa4aqm9FRERE8mBipSN3d3dYW1sjPz9f63h+fj68vLzq/YytrS1sbW3bIrxm8XFnQkVERNQaOBWoI6VSicGDB+PQoUPSserqahw6dAjBwcEGjIyIiIiMBUesmiE6OhpTp05FUFAQhg4dio8++ghlZWWYNm2aoUMjIiIiI8DEqhkmT56MwsJCLF++HCqVCgMHDsSBAwfqLGgnIiIiy6QQ4r5S3
NRq1Go1nJycUFJSAkdHR0OHQ0RERDpozv2ba6yIiIiIZMLEioiIiEgmTKyIiIiIZMLEioiIiEgmTKyIiIiIZMLEioiIiEgmrGPVhmoqW6jVagNHQkRERLqquW/rUqGKiVUbun37NgAY9EHMREREpJ/bt2/Dycmp0T4sENqGqqurcf36dXTs2BEKhcLQ4ZgctVqNrl274urVqyywasR4nUwDr5Np4HUyDkII3L59G97e3rCyanwVFUes2pCVlRUeeOABQ4dh8hwdHfkPjAngdTINvE6mgdfJ8JoaqarBxetEREREMmFiRURERCQTJlZkMmxtbbFixQrY2toaOhRqBK+TaeB1Mg28TqaHi9eJiIiIZMIRKyIiIiKZMLEiIiIikgkTKyIiIiKZMLEioxITE4MhQ4agY8eO6NSpEyZOnIiMjAytPnfv3kVUVBTc3Nzg4OCASZMmIT8/30AREwCsXr0aCoUC8+bNk47xOhmHa9eu4fnnn4ebmxvs7OwQEBCAEydOSO1CCCxfvhydO3eGnZ0dQkNDkZmZacCILY9Go8GyZcvg4+MDOzs7PPjgg3jrrbe0Hp/C62Q6mFiRUUlMTERUVBSOHTuGuLg4VFVVYdy4cSgrK5P6zJ8/Hz/++CN27tyJxMREXL9+HU8++aQBo7Zsx48fx2effYb+/ftrHed1Mrxbt25h5MiRsLGxwU8//YRff/0Va9euhYuLi9RnzZo1+Pjjj7Fx40akpKSgQ4cOCAsLw927dw0YuWV59913sWHDBnzyySe4cOEC3n33XaxZswbr16+X+vA6mRBBZMQKCgoEAJGYmCiEEKK4uFjY2NiInTt3Sn0uXLggAIjk5GRDhWmxbt++Lfz8/ERcXJx49NFHxdy5c4UQvE7GYvHixeLhhx9usL26ulp4eXmJ9957TzpWXFwsbG1txbZt29oiRBJChIeHi+nTp2sde/LJJ8WUKVOEELxOpoYjVmTUSkpKAACurq4AgLS0NFRVVSE0NFTq07t3b3Tr1g3JyckGidGSRUVFITw8XOt6ALxOxmLPnj0ICgrC008/jU6dOiEwMBBffPGF1J6TkwOVSqV1nZycnDBs2DBepzY0YsQIHDp0CL/99hsA4PTp0/jll18wfvx4ALxOpobPCiSjVV1djXnz5mHkyJHo168fAEClUkGpVMLZ2Vmrr6enJ1QqlQGitFzffvstTp48iePHj9dp43UyDtnZ2diwYQOio6Pxxhtv4Pjx45gzZw6USiWmTp0qXQtPT0+tz/E6ta3XX38darUavXv3hrW1NTQaDd5++21MmTIFAHidTAwTKzJaUVFROHfuHH755RdDh0L3uXr1KubOnYu4uDi0b9/e0OFQA6qrqxEUFIR33nkHABAYGIhz585h48aNmDp1qoGjoxo7duzA1q1b8c0336Bv375IT0/HvHnz4O3tzetkgjgVSEZp1qxZ2Lt3L+Lj4/HAAw9Ix728vFBZWYni4mKt/vn5+fDy8mrjKC1XWloaCgoKMGjQILRr1w7t2rVDYmIiPv74Y7Rr1w6enp68Tkagc+fO8Pf31zrWp08f5ObmAoB0Le7frcnr1LZee+01vP7663j22WcREBCAF154AfPnz0dMTAwAXidTw8SKjIoQArNmzcIPP/yAw4cPw8fHR6t98ODBsLGxwaFDh6RjGRkZyM3NRXBwcFuHa7HGjh2Ls2fPIj09XXoFBQVhypQp0p95nQxv5MiRdcqV/Pbbb+jevTsAwMfHB15eXlrXSa1WIyUlhdepDZWXl8PKSvt2bG1tjerqagC8TibH0KvniWp75ZVXhJOTk0hISBB5eXnSq7y8XOozc+ZM0a1bN3H48GFx4sQJERwcLIKDgw0YNQkhtHYFCsHrZAxSU1NFu3btxNtvvy0yMzPF1q1bhb29vfj666+lPqtXrxbOzs7i3//+tzhz5oyYMGGC8PHxEXfu3DFg5JZl6tSpokuXLmLv3r0iJydH7Nq1S7i7u4tFixZJfXidTAcTKzIqAOp9bdq0Sepz584d8eqrrwoXFxdhb28vnnjiCZGXl2e4oEkIUTex4nUyDj/++KPo16+fsLW1Fb179xaff/65Vnt1dbVYtmyZ8PT0FLa2tmLs2LEiIyPDQNFaJrVaLebOnSu6desm2rdvL3r27CnefPNNUVFRIfXhdTIdCiFqlXYlIiIiIr1xjRURERGRTJhYEREREcmEiRURERGRTJhYEREREcmEiRURERGRTJhYEREREcmEiRURERGRTJhYEREREcmEiRUR1TFq1CjMmzfP0GGYhJUrV2LgwIFt9n2xsbFwdnZus+8jouZhYkVEBpWQkIAJEyagc+fO6NChAwYOHIitW7caOiydLVy4UOvhuK1t8uTJ+O2335r1GWNKlP/2t79h4sSJhg6DqNW0M3QARGTZjh49iv79+2Px4sXw9PTE3r17ERkZCScnJzz++OOGDq9JDg4OcHBwaLPvs7Ozg52dXZt9X22VlZVQKpUG+W4iU8ERKyJq1L/+9S8EBQWhY8eO8PLywnPPPYeCggKtPnv27IGfnx/at2+P0aNHY/PmzVAoFCguLm7y/G+88QbeeustjBgxAg8++CDmzp2Lxx57DLt27Wrysz///DPat29f53vmzp2LMWPGAABu3LiBiIgIdOnSBfb29ggICMC2bdu0+ldXV2PNmjXw9fWFra0tunXrhrfffltq//333xEREQFXV1d06NABQUFBSElJAVB3KrBmROb9999H586d4ebmhqioKFRVVUl9KioqsHDhQnTp0gUdOnTAsGHDkJCQ0OTPC9SdCqz5/n/961/o0aMHnJyc8Oyzz+L27dtSPImJiVi3bh0UCgUUCgUuX74MADh37hzGjx8PBwcHeHp64oUXXkBRUZF07lGjRmHWrFmYN28e3N3dERYWptPnvvvuOwQEBMDOzg5ubm4IDQ1FWVkZVq5cic2bN+Pf//63FIuuPzeRqWBiRUSNqqqqwltvvYXTp09j9+7duHz5Mv72t79J7Tk5OXjqqacwceJEnD59Gn//+9/x5ptvtug7S0pK4Orq2mS/sWPHwtnZGd9//710TKPRYPv27ZgyZQoA4O7duxg8eDD27duHc+fO4eWXX8YLL7yA1NRU6TNLlizB6tWrsWzZMvz666/45ptv4OnpCQAoLS3Fo48+imvXrmHPnj04ffo0Fi1ahOrq6gbjio+Px6VLlxAfH4/NmzcjNjYWsbGxUvusWbOQnJyMb7/9FmfOnMHTTz+Nxx57DJmZmc39VQEALl26hN27d2Pv3r3Yu3cvEhMTsXr1agDAunXrEBwcjJdeegl5eXnIy8tD165dUVxcjDFjxiAwMBAnTpzAgQMHkJ+fj2eeeUbr3Js3b4ZSqURSUhI2btzY5Ofy8vIQERGB6dOn48KFC0hISMCTTz4JIQQWLlyIZ555Bo899pgUy4gRI/T6mYmMliAius+jjz4q5s6dW2/b8ePHBQBx+/ZtIYQQixcvFv369dPq8+abbwoA4tatW83+7u3btwulUinOnTunU/+5c+eKMWPGSO8PHjwo
bG1tG/3u8PBwsWDBAiGEEGq1Wtja2oovvvii3r6fffaZ6Nixo7hx40a97StWrBADBgyQ3k+dOlV0795d3Lt3Tzr29NNPi8mTJwshhLhy5YqwtrYW165d0zrP2LFjxZIlSxr9WYUQYtOmTcLJyUnr++3t7YVarZaOvfbaa2LYsGHS+/qu51tvvSXGjRundezq1asCgMjIyJA+FxgY2KzPpaWlCQDi8uXL9cY/depUMWHChCZ/TiJTxTVWRNSotLQ0rFy5EqdPn8atW7ekkZrc3Fz4+/sjIyMDQ4YM0frM0KFD9fqu+Ph4TJs2DV988QX69u2r02emTJmC4cOH4/r16/D29sbWrVsRHh4uTZdpNBq888472LFjB65du4bKykpUVFTA3t4eAHDhwgVUVFRg7Nix9Z4/PT0dgYGBOo2g1ejbty+sra2l9507d8bZs2cBAGfPnoVGo8FDDz2k9ZmKigq4ubnp/B219ejRAx07dtT6vvuna+93+vRpxMfH17s+7NKlS1J8gwcPbtbnxo0bh7FjxyIgIABhYWEYN24cnnrqKbi4uOjzoxGZHCZWRNSgsrIyhIWFISwsDFu3boWHhwdyc3MRFhaGyspKWb8rMTERf/nLX/Dhhx8iMjJS588NGTIEDz74IL799lu88sor+OGHH7Sm3d577z2sW7cOH330EQICAtChQwfMmzdPir+pheD6LBS3sbHReq9QKKSEtLS0FNbW1khLS9NKvgDovQi+se9rSGlpKf7yl7/g3XffrdPWuXNn6c8dOnRo1uesra0RFxeHo0eP4ueff8b69evx5ptvIiUlBT4+Ps35sYhMEhMrImrQxYsXcePGDaxevRpdu3YFAJw4cUKrT69evbB//36tY8ePH2/W9yQkJODxxx/Hu+++i5dffrnZcU6ZMgVbt27FAw88ACsrK4SHh0ttSUlJmDBhAp5//nkA/12o/ttvv8Hf3x8A4OfnBzs7Oxw6dAgvvvhinXP3798fX375JW7evNmsUauGBAYGQqPRoKCgAI888kiLz6cLpVIJjUajdWzQoEH4/vvv0aNHD7Rrp/utQJfPKRQKjBw5EiNHjsTy5cvRvXt3/PDDD4iOjq43FiJzwsXrRNSgbt26QalUYv369cjOzsaePXvw1ltvafX5+9//josXL2Lx4sX47bffsGPHDmnESKFQNPkd8fHxCA8Px5w5czBp0iSoVCqoVCrcvHlT5zinTJmCkydP4u2338ZTTz0FW1tbqc3Pz08aQblw4QL+/ve/Iz8/X2pv3749Fi9ejEWLFmHLli24dOkSjh07hq+++goAEBERAS8vL0ycOBFJSUnIzs7G999/j+TkZJ3jq+2hhx7ClClTEBkZiV27diEnJwepqamIiYnBvn379DpnU3r06IGUlBRcvnwZRUVFqK6uRlRUFG7evImIiAgcP34cly5dwsGDBzFt2rRGE5+mPpeSkoJ33nkHJ06cQG5uLnbt2oXCwkL06dNHiuXMmTPIyMhAUVGR1m5JInPAxIqIGuTh4YHY2Fjs3LkT/v7+WL16Nd5//32tPj4+Pvjuu++wa9cu9O/fHxs2bJB2BdZOcBqyefNmlJeXIyYmBp07d5ZeTz75pM5x+vr6YujQoThz5oy0G7DG0qVLMWjQIISFhWHUqFFSklTbsmXLsGDBAixfvhx9+vTB5MmTpTVKSqUSP//8Mzp16oQ///nPCAgIwOrVq+tM4zXHpk2bEBkZiQULFqBXr16YOHEijh8/jm7duul9zsYsXLgQ1tbW8Pf3l6Zzvb29kZSUBI1Gg3HjxiEgIADz5s2Ds7MzrKwavjU09TlHR0ccOXIEf/7zn/HQQw9h6dKlWLt2LcaPHw8AeOmll9CrVy8EBQXBw8MDSUlJrfIzExmKQgghDB0EEZmXt99+Gxs3bsTVq1cNHQoRUZviGisiarH/+Z//wZAhQ+Dm5oakpCS89957mDVrlqHDIiJqc5wKJKIWy8zMxIQJE+Dv74+33noLCxYswMqVKwFAqtBd3+udd95p8twNfdbBwQH/+c9/Wvkna3st/X0RkWFxKpCIWtW1a9dw586dettcXV2b3GmXlZXVYFuXLl0M9ty81tLS3xcRGRYTKyIiIiKZcCqQiIiISCZMrIiIiIhkwsSKiIiISCZMrIiIiIhkwsSKiIiISCZMrIiIiIhkwsSKiIiISCZMrIiIiIhk8v8AhoMj9uYTLSwAAAAASUVORK5CYII=\n",
500 | "text/plain": [
501 | ""
502 | ]
503 | },
504 | "metadata": {},
505 | "output_type": "display_data"
506 | }
507 | ],
508 | "source": [
509 | "# Investigate trends across features for prediction weekly vaccinations count!\n",
510 | "virginia.plot.scatter(y='weekly_vaccinations_count', x='lag_2_vaccine_interest')"
511 | ]
512 | },
513 | {
514 | "cell_type": "markdown",
515 | "metadata": {},
516 | "source": [
517 | "## Low latency feature retrieval\n",
518 | "\n",
519 | "To make a prediction in real-time, we need to do the following:\n",
520 | "\n",
521 | "1. Fetch our features from Redis using the Feast client\n",
522 | "2. Pass these features to our model\n",
523 | "3. Return these predictions to the user\n",
524 | "\n",
525 | "Here is an example pulling the latest features data from Redis (online store) for the state of *Virginia*."
526 | ]
527 | },
528 | {
529 | "cell_type": "code",
530 | "execution_count": 9,
531 | "metadata": {},
532 | "outputs": [],
533 | "source": [
534 | "# Fetch online feature\n",
535 | "online_feature = data_fetcher.get_online_data(state=\"Virginia\")"
536 | ]
537 | },
538 | {
539 | "cell_type": "code",
540 | "execution_count": 10,
541 | "metadata": {},
542 | "outputs": [
543 | {
544 | "data": {
545 | "text/html": [
546 | "\n",
547 | "\n",
560 | "
\n",
561 | " \n",
562 | " \n",
563 | " | \n",
564 | " lag_1_vaccine_interest | \n",
565 | " lag_2_vaccine_interest | \n",
566 | " lag_1_vaccine_intent | \n",
567 | " lag_2_vaccine_intent | \n",
568 | " lag_1_vaccine_safety | \n",
569 | " lag_2_vaccine_safety | \n",
570 | " lag_1_weekly_vaccinations_count | \n",
571 | " lag_2_weekly_vaccinations_count | \n",
572 | "
\n",
573 | " \n",
574 | " \n",
575 | " \n",
576 | " 0 | \n",
577 | " 10.72945 | \n",
578 | " 11.383411 | \n",
579 | " 1.831109 | \n",
580 | " 2.094199 | \n",
581 | " 1.352006 | \n",
582 | " 1.276527 | \n",
583 | " 6770 | \n",
584 | " 27998 | \n",
585 | "
\n",
586 | " \n",
587 | "
\n",
588 | "
"
589 | ],
590 | "text/plain": [
591 | " lag_1_vaccine_interest lag_2_vaccine_interest lag_1_vaccine_intent \\\n",
592 | "0 10.72945 11.383411 1.831109 \n",
593 | "\n",
594 | " lag_2_vaccine_intent lag_1_vaccine_safety lag_2_vaccine_safety \\\n",
595 | "0 2.094199 1.352006 1.276527 \n",
596 | "\n",
597 | " lag_1_weekly_vaccinations_count lag_2_weekly_vaccinations_count \n",
598 | "0 6770 27998 "
599 | ]
600 | },
601 | "execution_count": 10,
602 | "metadata": {},
603 | "output_type": "execute_result"
604 | }
605 | ],
606 | "source": [
607 | "online_feature"
608 | ]
609 | },
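{
"cell_type": "markdown",
"metadata": {},
"source": [
"`DataFetcher.get_online_data` is a thin wrapper over Feast's `get_online_features`, which reads directly from Redis. The equivalent raw call is sketched below; as before, the feature references are illustrative assumptions:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch of the raw Feast online lookup behind get_online_data.\n",
"# NOTE: the feature references below are assumptions for illustration.\n",
"feature_vector = fs.get_online_features(\n",
"    features=[\n",
"        \"vaccine_search_trends:lag_1_vaccine_interest\",   # assumed feature ref\n",
"        \"vaccine_counts:lag_1_weekly_vaccinations_count\"  # assumed feature ref\n",
"    ],\n",
"    entity_rows=[{\"state\": \"Virginia\"}]\n",
").to_dict()"
]
},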
610 | {
611 | "cell_type": "markdown",
612 | "metadata": {},
613 | "source": [
614 | "### Alternatives for Feature Retrieval\n",
615 | "- Instead of using the Feast SDK (and our Data Fetcher class), you can also setup and use the [Python feature server](https://docs.feast.dev/reference/feature-servers/python-feature-server).\n",
616 | "- If you need the best in class performance for feature retrieval and don't mind the extra setup, Feast also supports a [Go feature server](https://docs.feast.dev/reference/feature-servers/go-feature-server) that is much faster than standard Python.\n",
617 | "\n",
618 | "If included in the project, you can run these HTTP (or gRPC) servers with `feast serve` command. The doc links above show how to set that up."
619 | ]
620 | }
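{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, a request against a running Python feature server looks like the sketch below. It assumes the server was started with `feast serve` on the default port (6566); the feature reference is an illustrative assumption:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import requests\n",
"\n",
"# Sketch: query a Python feature server started with `feast serve`.\n",
"# Assumes the default port and an illustrative feature reference.\n",
"resp = requests.post(\n",
"    \"http://localhost:6566/get-online-features\",\n",
"    json={\n",
"        \"features\": [\"vaccine_counts:lag_1_weekly_vaccinations_count\"],  # assumed ref\n",
"        \"entities\": {\"state\": [\"Virginia\"]}\n",
"    }\n",
")\n",
"resp.json()"
]
}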
621 | ],
622 | "metadata": {
623 | "colab": {
624 | "collapsed_sections": [],
625 | "name": "Fraud Detection Tutorial",
626 | "provenance": [],
627 | "toc_visible": true
628 | },
629 | "kernelspec": {
630 | "display_name": "Python 3 (ipykernel)",
631 | "language": "python",
632 | "name": "python3"
633 | },
634 | "language_info": {
635 | "codemirror_mode": {
636 | "name": "ipython",
637 | "version": 3
638 | },
639 | "file_extension": ".py",
640 | "mimetype": "text/x-python",
641 | "name": "python",
642 | "nbconvert_exporter": "python",
643 | "pygments_lexer": "ipython3",
644 | "version": "3.8.13"
645 | },
646 | "vscode": {
647 | "interpreter": {
648 | "hash": "e1c5a7c9cc0d58080444e081b74a0823c09a12f0209aca730c38726ea6940124"
649 | }
650 | }
651 | },
652 | "nbformat": 4,
653 | "nbformat_minor": 4
654 | }
655 |
--------------------------------------------------------------------------------
/03-triton-vertex-inference-example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "id": "jirdTjhETQW0"
7 | },
8 | "source": [
9 | "# **redis-feast-gcp**: 03 - Triton + Vertex AI Prediction Inference Example\n",
10 | "\n",
11 | "In this notebook, we will test the deployed Triton model on the Vertex AI Prediction endpoint.\n",
12 | "\n",
13 | "**This notebook assumes that you've already set up your Feature Store, model repo in GCP, and deployed your model in Vertex AI with NVIDIA Triton**"
14 | ]
15 | },
16 | {
17 | "cell_type": "markdown",
18 | "metadata": {},
19 | "source": [
20 | ""
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "## Unpacking the Triton Ensemble\n",
28 | "\n",
29 | "Before we test the inference endpoint to forecast Covid vaccinations for the state of Virginia, we will unpack the Triton Ensemble used to create the DAG of operations.\n",
30 | "\n",
31 | "### What is an Ensemble???\n",
32 | "An [Ensemble model](https://github.com/triton-inference-server/server/blob/main/docs/user_guide/architecture.md#ensemble-models) represents a pipeline of one or many operations (models) and connects inputs to outputs of each stage. These are useful for inference workflows that involve several stages like data preprocessing, postprocessing, and other transformations or business logic.\n",
33 | "\n",
34 | "\n",
35 | "\n",
36 | "\n",
37 | "**Checkout the structure of the Triton Model repository below.**"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": 2,
43 | "metadata": {},
44 | "outputs": [
45 | {
46 | "name": "stdout",
47 | "output_type": "stream",
48 | "text": [
49 | "ensemble fetch-vaccine-features predict-vaccine-counts\n"
50 | ]
51 | }
52 | ],
53 | "source": [
54 | "!ls ./docker/triton/models"
55 | ]
56 | },
57 | {
58 | "cell_type": "markdown",
59 | "metadata": {},
60 | "source": [
61 | "There's a model for:\n",
62 | "- `fetch-vaccine-features` - Fetch vaccine count features from Redis at low-latency.\n",
63 | "- `predict-vaccine-counts` - Use XGBoost model (with [Triton FIL](https://developer.nvidia.com/blog/real-time-serving-for-xgboost-scikit-learn-randomforest-lightgbm-and-more/) backend) to forecast the counts.\n",
64 | "- `ensemble` - Wraps the other two - creating the pipeline.\n",
65 | "\n",
66 | "Each model here has a `config.pbtxt`. Let's look at the ensemble model config below:"
67 | ]
68 | },
69 | {
70 | "cell_type": "code",
71 | "execution_count": 3,
72 | "metadata": {},
73 | "outputs": [
74 | {
75 | "name": "stdout",
76 | "output_type": "stream",
77 | "text": [
78 | "name: \"ensemble\"\n",
79 | "platform: \"ensemble\"\n",
80 | "max_batch_size: 256\n",
81 | "input [\n",
82 | " {\n",
83 | " name: \"state\"\n",
84 | " data_type: TYPE_STRING\n",
85 | " dims: 1\n",
86 | " }\n",
87 | "]\n",
88 | "output [\n",
89 | " {\n",
90 | " name: \"prediction\"\n",
91 | " data_type: TYPE_FP32\n",
92 | " dims: 1\n",
93 | " }\n",
94 | "]\n",
95 | "ensemble_scheduling {\n",
96 | " step [\n",
97 | " {\n",
98 | " model_name: \"fetch-vaccine-features\"\n",
99 | " model_version: -1\n",
100 | " input_map {\n",
101 | " key: \"state\"\n",
102 | " value: \"state\"\n",
103 | " }\n",
104 | " output_map {\n",
105 | " key: \"feature_values\"\n",
106 | " value: \"feature_values\"\n",
107 | " }\n",
108 | " },\n",
109 | " {\n",
110 | " model_name: \"predict-vaccine-counts\"\n",
111 | " model_version: -1\n",
112 | " input_map {\n",
113 | " key: \"input__0\"\n",
114 | " value: \"feature_values\"\n",
115 | " }\n",
116 | " output_map {\n",
117 | " key: \"output__0\"\n",
118 | " value: \"prediction\"\n",
119 | " }\n",
120 | " }\n",
121 | " ]\n",
122 | "}"
123 | ]
124 | }
125 | ],
126 | "source": [
127 | "!cat ./docker/triton/models/ensemble/config.pbtxt"
128 | ]
129 | },
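{
"cell_type": "markdown",
"metadata": {},
"source": [
"Under the hood, `fetch-vaccine-features` runs on Triton's [Python backend](https://github.com/triton-inference-server/python_backend); the repo's real implementation is in `docker/triton/models/fetch-vaccine-features/1/model.py`. The sketch below only shows the general shape of such a model -- the online-store lookup is elided and the placeholder feature values are assumptions:\n",
"\n",
"```python\n",
"import numpy as np\n",
"import triton_python_backend_utils as pb_utils\n",
"\n",
"\n",
"class TritonPythonModel:\n",
"    def initialize(self, args):\n",
"        # Connect to the online store (Redis via Feast) here -- elided in this sketch\n",
"        pass\n",
"\n",
"    def execute(self, requests):\n",
"        responses = []\n",
"        for request in requests:\n",
"            # \"state\" matches the input name declared in config.pbtxt\n",
"            state = pb_utils.get_input_tensor_by_name(request, \"state\").as_numpy()\n",
"            # Look up the 8 lag features for `state` in Redis (elided);\n",
"            # zeros stand in for the real feature values\n",
"            features = np.zeros((1, 8), dtype=np.float32)\n",
"            out = pb_utils.Tensor(\"feature_values\", features)\n",
"            responses.append(pb_utils.InferenceResponse(output_tensors=[out]))\n",
"        return responses\n",
"```"
]
},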
130 | {
131 | "cell_type": "markdown",
132 | "metadata": {},
133 | "source": [
134 | "## Create Inference Instances\n",
135 | "\n",
136 | "Before we can test the Vertex AI Prediction endpoint, we need to construct a JSON body that represents an inference request. See the example below:"
137 | ]
138 | },
139 | {
140 | "cell_type": "code",
141 | "execution_count": 4,
142 | "metadata": {},
143 | "outputs": [],
144 | "source": [
145 | "import json\n",
146 | "\n",
147 | "# Create inference instance\n",
148 | "payload = {\n",
149 | " \"id\": \"1\",\n",
150 | " \"inputs\": [\n",
151 | " {\n",
152 | " \"name\": \"state\", ## Triton model input name\n",
153 | " \"shape\": [1, 1], ## Triton model input shape\n",
154 | " \"datatype\": \"BYTES\", ## Triton model input datatype\n",
155 | " \"data\": [[\"Virginia\"]] ## Triton model input data\n",
156 | " }\n",
157 | " ]\n",
158 | "}\n",
159 | "\n",
160 | "# Save to file\n",
161 | "with open(\"instances.json\", \"w\") as f:\n",
162 | " json.dump(payload, f)"
163 | ]
164 | },
165 | {
166 | "cell_type": "markdown",
167 | "metadata": {
168 | "id": "6cJFAJiuGxM3"
169 | },
170 | "source": [
171 | "## Test Endpoint\n",
172 | "\n",
173 | "You can test the Vertex AI Prediction `rawPredict` endpoint using any HTTP tool or library, including `curl`."
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 6,
179 | "metadata": {},
180 | "outputs": [
181 | {
182 | "name": "stdout",
183 | "output_type": "stream",
184 | "text": [
185 | "Activated service account credentials for: [tyler-redis@nifty-time-353221.iam.gserviceaccount.com]\n",
186 | "\n",
187 | "\n",
188 | "Updates are available for some Google Cloud CLI components. To install them,\n",
189 | "please run:\n",
190 | " $ gcloud components update\n",
191 | "\n",
192 | "\n",
193 | "\n",
194 | "To take a quick anonymous survey, run:\n",
195 | " $ gcloud survey\n",
196 | "\n"
197 | ]
198 | }
199 | ],
200 | "source": [
201 | "# Log in to GCloud using the CLI and your service account\n",
202 | "!gcloud auth activate-service-account $SERVICE_ACCOUNT_EMAIL \\\n",
203 | " --key-file=$GOOGLE_APPLICATION_CREDENTIALS \\\n",
204 | " --project=$PROJECT_ID"
205 | ]
206 | },
207 | {
208 | "cell_type": "code",
209 | "execution_count": 7,
210 | "metadata": {},
211 | "outputs": [
212 | {
213 | "name": "stdout",
214 | "output_type": "stream",
215 | "text": [
216 | "Using endpoint [https://us-east1-aiplatform.googleapis.com/]\n",
217 | "4273405872979312640\n"
218 | ]
219 | }
220 | ],
221 | "source": [
222 | "# List out the Vertex AI endpoints\n",
223 | "!echo $(gcloud ai endpoints list \\\n",
224 | " --region=$GCP_REGION \\\n",
225 | " --filter=display_name=vaccine-predictor-endpoint \\\n",
226 | " --format=\"value(name)\")"
227 | ]
228 | },
229 | {
230 | "cell_type": "code",
231 | "execution_count": 8,
232 | "metadata": {},
233 | "outputs": [
234 | {
235 | "name": "stdout",
236 | "output_type": "stream",
237 | "text": [
238 | "{\"id\":\"1\",\"model_name\":\"ensemble\",\"model_version\":\"1\",\"parameters\":{\"sequence_id\":0,\"sequence_start\":false,\"sequence_end\":false},\"outputs\":[{\"name\":\"prediction\",\"datatype\":\"FP32\",\"shape\":[1],\"data\":[40990.265625]}]}"
239 | ]
240 | },
241 | {
242 | "name": "stderr",
243 | "output_type": "stream",
244 | "text": [
245 | "Using endpoint [https://us-east1-aiplatform.googleapis.com/]\n",
246 | " % Total % Received % Xferd Average Speed Time Time Time Current\n",
247 | " Dload Upload Total Spent Left Speed\n",
248 | "100 319 0 215 100 104 181 87 0:00:01 0:00:01 --:--:-- 268\n"
249 | ]
250 | }
251 | ],
252 | "source": [
253 | "%%bash\n",
254 | "\n",
255 | "# Fetch Token\n",
256 | "TOKEN=$(gcloud auth print-access-token)\n",
257 | "\n",
258 | "# Fetch the Endpoint ID\n",
259 | "ENDPOINT_ID=$(gcloud ai endpoints list \\\n",
260 | " --region=$GCP_REGION \\\n",
261 | " --filter=display_name=vaccine-predictor-endpoint \\\n",
262 | " --format=\"value(name)\")\n",
263 | "\n",
264 | "# POST to the endpoint to get a response from the Triton ensemble model\n",
265 | "curl \\\n",
266 | " -X POST \\\n",
267 | " -H \"Authorization: Bearer ${TOKEN}\" \\\n",
268 | " -H \"Content-Type: application/json\" \\\n",
269 | "  https://${GCP_REGION}-aiplatform.googleapis.com/v1/projects/${PROJECT_ID}/locations/${GCP_REGION}/endpoints/${ENDPOINT_ID}:rawPredict \\\n",
270 | " -d \"@instances.json\"\n"
271 | ]
272 | },
273 | {
274 | "cell_type": "markdown",
275 | "metadata": {},
276 | "source": [
277 | "# Summary\n",
278 | "\n",
279 | "We have just built an end-to-end ML system using Feast, Redis Enterprise, and NVIDIA Triton -- all in GCP.\n",
280 | "\n",
281 | "This system generates realtime predictions using up to date feature values and allows us to manage \"point-in-time correct\" datasets from our offline datasource for training and model exploration.\n",
282 | "\n",
283 | "Next steps that you can take after completing this tutorial include:\n",
284 | "\n",
285 | "- Pull this repo and collaboration with your team.\n",
286 | "- Use this tutorial to bootstrap a model for your use case by editing features / model.\n",
287 | "- Incorporate the code in this tutorial into your company's batch pipelines by creating stages that perform feature creation and materialization.\n",
288 | "\n",
289 | "**Redis and Triton are a perfect match: bringing the data layer (optimized for fast data access) close to the computing infrastructure (optimized for fast data processing).**"
290 | ]
291 | }
292 | ],
293 | "metadata": {
294 | "colab": {
295 | "collapsed_sections": [],
296 | "name": "Fraud Detection Tutorial",
297 | "provenance": [],
298 | "toc_visible": true
299 | },
300 | "kernelspec": {
301 | "display_name": "Python 3 (ipykernel)",
302 | "language": "python",
303 | "name": "python3"
304 | },
305 | "language_info": {
306 | "codemirror_mode": {
307 | "name": "ipython",
308 | "version": 3
309 | },
310 | "file_extension": ".py",
311 | "mimetype": "text/x-python",
312 | "name": "python",
313 | "nbconvert_exporter": "python",
314 | "pygments_lexer": "ipython3",
315 | "version": "3.8.13"
316 | },
317 | "vscode": {
318 | "interpreter": {
319 | "hash": "e1c5a7c9cc0d58080444e081b74a0823c09a12f0209aca730c38726ea6940124"
320 | }
321 | }
322 | },
323 | "nbformat": 4,
324 | "nbformat_minor": 4
325 | }
326 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022, Redis, inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | MAKEFLAGS += --no-print-directory
2 |
3 | # Do not remove this block. It is used by the 'help' rule when
4 | # constructing the help output.
5 | # help:
6 | # help: GCP Feast Demo Makefile help
7 | # help:
8 |
9 | SHELL:=/bin/bash
10 |
11 | # help: help - Display this makefile's help information
12 | .PHONY: help
13 | help:
14 | @grep "^# help\:" Makefile | grep -v grep | sed 's/\# help\: //' | sed 's/\# help\://'
15 |
16 | # help:
17 | # help: Commands
18 | # help: -------------
19 |
20 | # help: env - Create an ENV file for secrets and credentials
21 | .PHONY: env
22 | env:
23 | @echo "Generate ENV file"
24 | @rm -rf .env
25 | @./env.sh
26 |
27 | # help: tf-deploy - Provision a Redis Enterprise database instance via Terraform
28 | .PHONY: tf-deploy
29 | tf-deploy:
30 | cp .env re-gcp-mp/env; \
31 | cd re-gcp-mp; \
32 | terraform init; \
33 | terraform plan; \
34 | terraform apply --auto-approve; \
35 | echo 'REDIS_CONNECTION_STRING='`terraform output db_public_endpoint` >> env; \
36 | echo 'REDIS_PASSWORD='`terraform output db_password` >> env; \
37 | cp env ../.env
38 |
39 | # help: tf-destroy - Destroy the Terraform-provisioned Redis Enterprise database instance
40 | .PHONY: tf-destroy
41 | tf-destroy:
42 | cd re-gcp-mp && terraform destroy --auto-approve
43 |
44 | # help: docker - Build required docker images
45 | .PHONY: docker
46 | docker:
47 | @docker build -t redisventures/redis-feast-gcp:1.0.0 -f docker/Dockerfile.base .
48 | @docker compose build --no-cache
49 |
50 | # help: setup - Setup GCP Infra and Feast feature store
51 | .PHONY: setup
52 | setup:
53 | @docker compose run setup sh -c "./setup/setup.sh"
54 |
55 | # help: jupyter - Spin up a jupyter notebook to explore dataset and model
56 | .PHONY: jupyter
57 | jupyter:
58 | @docker compose run --service-ports jupyter
59 |
60 | # help: teardown - Teardown GCP infra and Feast
61 | .PHONY: teardown
62 | teardown:
63 | @docker compose run setup sh -c "./setup/teardown.sh"
64 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # redis-feast-triton-gcp
2 | *An end-to-end machine learning feature store reference architecture using [Feast](https://docs.feast.dev/) and [Redis Enterprise](https://app.redislabs.com/) (as the Online Feature Store) deployed on [Google Cloud Platform](https://cloud.google.com/).*
3 |
4 | >This prototype is a reference architecture. All components are containerized, and various customizations and optimizations might be required before running in production for your specific use case.
5 | ___
6 |
7 | ## Demo ML Application: COVID-19 Vaccination Forecasting
8 | To demonstrate the value of a Feature Store, we provide a demo application that **forecasts the counts of administered COVID-19 vaccine doses** (by US state) **for the next week**.
9 |
10 | The Feature Store fuses together weekly [google search trends data](https://console.cloud.google.com/marketplace/product/bigquery-public-datasets/covid19-vaccination-search-insights) along with lagging [vaccine dose counts](https://github.com/owid/covid-19-data). *Both datasets are open source and provided free to the public.*
11 |
12 | The full system will include:
13 | - GCP infrastructure setup and teardown
14 | - Offline (BigQuery) and Online (Redis Enterprise) Feature Stores using Feast
15 | - Model serving in Vertex AI + NVIDIA Triton Inference Server
16 |
17 | ### Reference Architecture
18 |
19 | 
20 |
21 | The architecture takes advantage of GCP managed services in combination with Feast, Redis, and Triton.
22 |
23 | - **Feast** feature definitions in a **GitHub** repository (here).
24 | - Feature registry persisted in a **Cloud Storage** bucket with **Feast** and *COMING SOON* **Cloud Build** for CI/CD.
25 | - Offline feature data stored in **BigQuery** as the source of record.
26 | - Daily **Cloud Scheduler** tasks to trigger a materialization **Cloud Function** that will migrate the latest feature updates to the Online feature store.
27 | - Model serving with **Vertex AI Prediction** using a custom **NVIDIA Triton Inference Server** container.
28 | - Online feature retrieval from **Redis** (low latency) with **Feast**.
29 |
30 | By the end of this tutorial, you will have all components running in your GCP project.
31 |
32 | ___
33 |
34 | # Getting Started
35 | The demo contains several smaller apps organized by Docker Compose. Below we will cover prereq's and setup tasks.
36 |
37 | ## Prerequisites
38 |
39 | ### Docker
40 | Install Docker on your machine. [Docker Desktop](https://www.docker.com/products/docker-desktop/) is best, thanks to its ease of use, in our opinion.
41 |
42 | ### ☁️ GCP Account Setup
43 |
44 | In order to run this in Google Cloud, you will need a GCP project. The steps are:
45 |
46 | 1. If you don't have one, [create a new GCP project](https://console.cloud.google.com/cloud-resource-manager).
47 | 2. [Make sure that billing is enabled for your project.](https://cloud.google.com/billing/docs/how-to/modify-project)
48 | 3. Acquire a GCP service account credential file and download it to your machine, somewhere safe.
49 | - IAM -> Service Account -> Create service account
50 |
51 | 4. Create a new key for that service account.
52 | - In Service account, go to "keys" pane and create new key.
53 | - Download locally and remember the file path:
54 |
55 | 
56 |
57 |
58 | ### Environment
59 | This demo provisions GCP infrastructure from your localhost, so we need to manage a handful of local environment variables. Thankfully, Docker and a `.env` file handle this for us.
60 |
61 |
62 | Make the env file and enter values as prompted. See template below:
63 | ```bash
64 | $ make env
65 | ```
66 | >PROJECT_ID={gcp-project-id} **(project-id NOT project-number)**
67 |
68 | >GCP_REGION={preferred-gcp-region}
69 |
70 | >GOOGLE_APPLICATION_CREDENTIALS={local-path-to-gcp-creds}
71 |
72 | >SERVICE_ACCOUNT_EMAIL={your-gcp-svc-account-email}
73 |
74 | >BUCKET_NAME={your-gcp-bucket-name} **(must be globally unique)**
75 |
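For reference, a completed `.env` might look like the following (all values below are hypothetical placeholders, not real project settings):

```bash
PROJECT_ID=my-feast-demo-project
GCP_REGION=us-east1
BUCKET_NAME=my-globally-unique-feast-bucket
GOOGLE_APPLICATION_CREDENTIALS=/path/to/credentials.json
SERVICE_ACCOUNT_EMAIL=feast-demo@my-feast-demo-project.iam.gserviceaccount.com
```
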
76 | ### Redis
77 | If you are bringing an existing Redis Enterprise instance from [Redis Enterprise Cloud](https://app.redislabs.com/), add the Redis instance reference to the env file. Make sure to record the public endpoint `{host}:{port}` and password. **There's a 30MB Free Tier**, which will be perfect for this demo.
78 | ```bash
79 | cat <<EOF >> .env
80 | REDIS_CONNECTION_STRING=
81 | REDIS_PASSWORD=
82 | EOF
83 | ```
84 | Then, skip to [Build Containers](#build-containers) section.
85 |
86 | If you want to provision a Redis Enterprise database instance using your existing [Redis Enterprise in Google Cloud Marketplace](https://console.cloud.google.com/marketplace/product/redis-marketplace-isaas/redis-enterprise-cloud-flexible-plan) subscription with the [Make](./Makefile) utility in this repo, follow the steps below:
87 | 1. Collect your Redis Enterprise Cloud [Access Key](https://docs.redis.com/latest/rc/api/get-started/enable-the-api/) and [Secret Key](https://docs.redis.com/latest/rc/api/get-started/manage-api-keys/#secret) in the Redis Enterprise Console
88 | 2. Add the keys collected in step 1 to the env file as follows:
89 | ```bash
90 | cat <<EOF >> .env
91 | REDISCLOUD_ACCESS_KEY=
92 | REDISCLOUD_SECRET_KEY=
93 | REDIS_SUBSCRIPTION_NAME=
94 | REDIS_SUBSCRIPTION_CIDR=
95 | EOF
96 | ```
97 | 3. Run the following command to deploy your Redis Enterprise database instance
98 | ```bash
99 | $ make tf-deploy
100 | ```
101 |
102 |
103 | ### Build Containers
104 | Assuming all of the above steps are done, build the Docker images required to run the different setup steps.
105 |
106 | From the root of the project, run:
107 | ```bash
108 | $ make docker
109 | ```
110 |
111 | **TIP**: Disable Docker BuildKit on Mac machines if you have trouble with this step:
112 |
113 | ```bash
114 | export DOCKER_BUILDKIT=0
115 | ```
116 |
117 | The script will build a [base Docker image](docker/Dockerfile.base) and then build separate images for each setup step: [`setup`](docker/setup/) and [`jupyter`](docker/jupyter/).
118 |
119 | >This will take some time, so grab a cup of coffee.
120 |
121 | ## Infra Setup
122 | The provided [Makefile](./Makefile) wraps bash and Docker commands to make it super easy to run. This particular step:
123 | - Provisions GCP infrastructure
124 | - Generates the feature store
125 | - Deploys the model with Triton Inference Server on Vertex AI
126 |
127 | ```bash
128 | $ make setup
129 | ```
130 | >At the completion of this step, most of the architecture above will be deployed in your GCP project.
131 |
132 | ### About Triton on Vertex AI
133 |
134 | As noted above, Vertex AI allows you to [deploy a custom serving container](https://cloud.google.com/vertex-ai/docs/predictions/use-custom-container) as long as it meets baseline requirements. Additionally, we can use a popular serving framework like NVIDIA's [Triton Inference Server](https://developer.nvidia.com/nvidia-triton-inference-server). NVIDIA and GCP already did the integration legwork so that we can use them together:
135 |
136 | 
137 |
138 | In this architecture, a Triton ensemble model combines both a Python backend step (for Feast feature retrieval) and a FIL backend step (for the XGBoost model). It's packaged, hosted, and served by Vertex AI Prediction.
139 |
140 | ___
141 |
142 | ### Other Components
143 | With the Feature Store in place, utilize the following add-ons to perform different tasks as desired.
144 |
145 | #### Jupyter Notebooks
146 | This repo provides several helper/tutorial notebooks for working with Feast, Redis, and GCP. Open a Jupyter session to explore these resources:
147 |
148 | ```bash
149 | $ make jupyter
150 | ```
151 |
152 | #### Teardown
153 | Cleanup GCP infrastructure and teardown Feature Store.
154 |
155 | ```bash
156 | $ make teardown
157 | ```
158 |
159 | If you provisioned a Redis Enterprise database instance with `make tf-deploy`, you'll also need to run `make tf-destroy` to remove it.
160 |
161 | ### Cleanup
162 | Besides running the teardown container, you can run `docker compose down` periodically after shutting down containers to clean up excess networks and unused Docker artifacts.
163 |
164 | ___
165 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: "3.7"
2 |
3 | services:
4 | setup:
5 | container_name: redis-feast-gcp-setup
6 | build:
7 | context: ./
8 | dockerfile: ./docker/Dockerfile.setup
9 | volumes:
10 | - ${GOOGLE_APPLICATION_CREDENTIALS}:/tmp/keys/credentials.json:ro
11 | - /var/run/docker.sock:/var/run/docker.sock
12 | env_file:
13 | - ".env"
14 | environment:
15 | GOOGLE_APPLICATION_CREDENTIALS: "/tmp/keys/credentials.json"
16 | jupyter:
17 | container_name: redis-feast-gcp-jupyter
18 | build:
19 | context: ./docker
20 | dockerfile: ./Dockerfile.jupyter
21 | volumes:
22 | - ./:/home/jovyan/work
23 | - ${GOOGLE_APPLICATION_CREDENTIALS}:/tmp/keys/credentials.json:ro
24 | ports:
25 | - 8888:8888
26 | env_file:
27 | - ".env"
28 | environment:
29 | GOOGLE_APPLICATION_CREDENTIALS: "/tmp/keys/credentials.json"
--------------------------------------------------------------------------------
/docker/Dockerfile.base:
--------------------------------------------------------------------------------
1 | FROM python:3.8-slim-buster
2 |
3 | WORKDIR /app
4 |
5 | ENV PYTHONUNBUFFERED 1
6 | ENV PYTHONDONTWRITEBYTECODE 1
7 |
8 | # Add Python Virtual Env
9 | RUN python3 -m venv /opt/venv
10 | ENV PATH="/opt/venv/bin:$PATH"
11 |
12 | RUN python3 -m pip install --upgrade pip setuptools wheel
13 |
14 | COPY setup.py ./
15 | COPY setup.cfg ./
16 | COPY ./feature_store ./feature_store/
17 |
18 | RUN pip install -e .
19 |
20 | RUN apt-get update \
21 | && echo "Installing curl" \
22 | && apt-get install -y curl
23 |
24 | # Install GCloud CLI
25 | ENV CLOUDSDK_INSTALL_DIR="/usr/local/gcloud/"
26 | RUN curl -sSL https://sdk.cloud.google.com | bash
27 | ENV PATH="/usr/local/gcloud/google-cloud-sdk/bin:$PATH"
28 |
--------------------------------------------------------------------------------
/docker/Dockerfile.jupyter:
--------------------------------------------------------------------------------
1 | FROM redisventures/redis-feast-gcp:1.0.0 as pythonImg
2 |
3 | FROM jupyter/minimal-notebook:python-3.8 as jupyterImg
4 |
5 | WORKDIR /home/jovyan/work
6 |
7 | USER root
8 |
9 | ENV PYTHONUNBUFFERED 1
10 | ENV PYTHONDONTWRITEBYTECODE 1
11 |
12 | # Copy over Virtual Python Env
13 | COPY --from=pythonImg /opt/venv /opt/venv
14 | ENV PATH="/opt/venv/bin:$PATH"
15 |
16 | RUN python3 -m pip install --upgrade pip setuptools wheel
17 |
18 | COPY ./jupyter/requirements.txt .
19 | RUN python3 -m pip install -r requirements.txt
20 |
21 | # Install curl and tree
22 | RUN apt-get update \
23 | && echo "Installing curl" \
24 | && apt-get install -y curl \
25 | && apt-get install -y tree
26 |
27 | # Copy over GCloud CLI
28 | ENV CLOUDSDK_INSTALL_DIR="/usr/local/gcloud/"
29 | COPY --from=pythonImg $CLOUDSDK_INSTALL_DIR $CLOUDSDK_INSTALL_DIR
30 | ENV PATH="/usr/local/gcloud/google-cloud-sdk/bin:$PATH"
31 | RUN sudo chown jovyan -R ~/
32 |
33 | # Define this parameter to install jupyter lab
34 | ENV JUPYTER_ENABLE_LAB=yes
--------------------------------------------------------------------------------
/docker/Dockerfile.setup:
--------------------------------------------------------------------------------
1 | FROM continuumio/miniconda3:latest as condaImg
2 |
3 | WORKDIR /app
4 |
5 | ENV PYTHONNOUSERSITE=True
6 |
7 | COPY ./docker/setup/environment.yml ./environment.yml
8 |
9 | RUN conda env create -f environment.yml
10 | RUN conda install -y conda-pack
11 | RUN conda pack -n py38 -o python3.8.tar.gz
12 |
13 | FROM redisventures/redis-feast-gcp:1.0.0 as appImg
14 |
15 | WORKDIR /app
16 |
17 | COPY ./docker/setup ./setup/
18 | COPY ./docker/triton ./triton/
19 | COPY ./feature_store ./setup/feature_store/
20 | COPY ./setup.py ./setup/setup.py
21 | COPY ./setup.cfg ./setup/setup.cfg
22 | COPY ./requirements.txt ./setup/requirements.txt
23 | COPY --from=condaImg /app/python3.8.tar.gz ./triton/models/fetch-vaccine-features/python3.8.tar.gz
24 |
25 | RUN apt-get update && \
26 | apt-get install curl -y && \
27 | apt-get install docker.io -y
28 |
29 | ENV PATH="/usr/local/gcloud/google-cloud-sdk/bin:$PATH"
30 |
--------------------------------------------------------------------------------
/docker/Dockerfile.triton:
--------------------------------------------------------------------------------
1 | #
2 | # Multistage build.
3 | #
4 | ARG TRITON_VERSION=2.28.0
5 | ARG TRITON_CONTAINER_VERSION=22.11
6 |
7 | FROM triton_fil_test AS full
8 |
9 | FROM nvcr.io/nvidia/tritonserver:22.11-py3-min
10 |
11 | ARG TRITON_VERSION
12 | ARG TRITON_CONTAINER_VERSION
13 |
14 | ENV TRITON_SERVER_VERSION ${TRITON_VERSION}
15 | ENV NVIDIA_TRITON_SERVER_VERSION ${TRITON_CONTAINER_VERSION}
16 | LABEL com.nvidia.tritonserver.version="${TRITON_SERVER_VERSION}"
17 |
18 | ENV PATH /opt/tritonserver/bin:${PATH}
19 |
20 | ENV TF_ADJUST_HUE_FUSED 1
21 | ENV TF_ADJUST_SATURATION_FUSED 1
22 | ENV TF_ENABLE_WINOGRAD_NONFUSED 1
23 | ENV TF_AUTOTUNE_THRESHOLD 2
24 | ENV TRITON_SERVER_GPU_ENABLED 1
25 |
26 | # Create a user that can be used to run triton as
27 | # non-root. Make sure that this user is given ID 1000. All server
28 | # artifacts copied below are assigned to this user.
29 | ENV TRITON_SERVER_USER=triton-server
30 | RUN userdel tensorrt-server > /dev/null 2>&1 || true && if ! id -u $TRITON_SERVER_USER > /dev/null 2>&1 ; then useradd $TRITON_SERVER_USER; fi && [ `id -u $TRITON_SERVER_USER` -eq 1000 ] && [ `id -g $TRITON_SERVER_USER` -eq 1000 ]
31 |
32 | # Ensure apt-get won't prompt for selecting options
33 | ENV DEBIAN_FRONTEND=noninteractive
34 |
35 | # Common dependencies. FIXME (can any of these be conditional? For
36 | # example libcurl only needed for GCS?)
37 | RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common libb64-0d libcurl4-openssl-dev libre2-5 git gperf dirmngr libgoogle-perftools-dev libnuma-dev curl && rm -rf /var/lib/apt/lists/*
38 |
39 | # Set TCMALLOC_RELEASE_RATE for users setting LD_PRELOAD with tcmalloc
40 | ENV TCMALLOC_RELEASE_RATE 200
41 |
42 | ENV DCGM_VERSION 2.2.9
43 | # Install DCGM. Steps from https://developer.nvidia.com/dcgm#Downloads
44 | RUN curl -o /tmp/cuda-keyring.deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-keyring_1.0-1_all.deb && apt install /tmp/cuda-keyring.deb && rm /tmp/cuda-keyring.deb && apt-get update && apt-get install -y datacenter-gpu-manager=1:2.2.9
45 |
46 | # Extra defensive wiring for CUDA Compat lib
47 | RUN ln -sf ${_CUDA_COMPAT_PATH}/lib.real ${_CUDA_COMPAT_PATH}/lib && echo ${_CUDA_COMPAT_PATH}/lib > /etc/ld.so.conf.d/00-cuda-compat.conf && ldconfig && rm -f ${_CUDA_COMPAT_PATH}/lib
48 |
49 | # python3, python3-pip and some pip installs required for the python backend
50 | RUN apt-get update && apt-get install -y --no-install-recommends python3 libarchive-dev python3-pip libpython3-dev && pip3 install --upgrade pip && pip3 install --upgrade wheel setuptools && pip3 install --upgrade numpy && rm -rf /var/lib/apt/lists/*
51 |
52 | WORKDIR /opt/tritonserver
53 | RUN rm -fr /opt/tritonserver/*
54 | ENV NVIDIA_PRODUCT_NAME="Triton Server"
55 | COPY docker/entrypoint.d/ /opt/nvidia/entrypoint.d/
56 |
57 | ENV NVIDIA_BUILD_ID 48581223
58 | LABEL com.nvidia.build.id=48581223
59 | LABEL com.nvidia.build.ref=672b06cfa8c8be41ca941d58cf70fa73046860e1
60 |
61 | WORKDIR /opt/tritonserver
62 | COPY --chown=1000:1000 --from=full /opt/tritonserver/LICENSE .
63 | COPY --chown=1000:1000 --from=full /opt/tritonserver/TRITON_VERSION .
64 | COPY --chown=1000:1000 --from=full /opt/tritonserver/NVIDIA_Deep_Learning_Container_License.pdf .
65 | COPY --chown=1000:1000 --from=full /opt/tritonserver/bin bin/
66 | COPY --chown=1000:1000 --from=full /opt/tritonserver/lib lib/
67 | COPY --chown=1000:1000 --from=full /opt/tritonserver/include include/
68 | # Copying over backends
69 | COPY --chown=1000:1000 --from=full /opt/tritonserver/backends/python /opt/tritonserver/backends/python
70 | COPY --chown=1000:1000 --from=full /opt/tritonserver/backends/fil /opt/tritonserver/backends/fil
71 |
72 | # Top-level /opt/tritonserver/backends not copied so need to explicitly set permissions here
73 | RUN chown triton-server:triton-server /opt/tritonserver/backends
74 | # Copying over repoagents
75 |
76 | LABEL com.amazonaws.sagemaker.capabilities.accept-bind-to-port=true
77 | COPY --chown=1000:1000 --from=full /usr/bin/serve /usr/bin/.
78 |
79 | RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-sdk -y
80 |
81 | WORKDIR /src
82 |
83 | COPY ./entrypoint.sh ./
84 |
85 | ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/compat/lib.real:/usr/local/hugectr/lib:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/compat/lib:/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/lib:/repos/dist/lib
86 |
--------------------------------------------------------------------------------
/docker/jupyter/requirements.txt:
--------------------------------------------------------------------------------
1 | matplotlib==3.6.2
2 | seaborn==0.12.2
3 | evidently==0.2.1
4 | pandas==1.5.2
5 | xgboost==1.6.2
--------------------------------------------------------------------------------
/docker/setup/.gcloudignore:
--------------------------------------------------------------------------------
1 | img
2 | models
3 | *.ipynb
4 | /opt/venv
--------------------------------------------------------------------------------
/docker/setup/apply.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from feature_store.repo import (
3 | config,
4 | features
5 | )
6 | from feature_store.utils import (
7 | logger,
8 | storage
9 | )
10 |
11 |
12 | if __name__ == '__main__':
13 | # Setup logger
14 | logging = logger.get_logger()
15 |
16 | # Create FeatureStore
17 | logging.info("Fetching feature store")
18 | store = storage.get_feature_store(
19 | config_path=config.REPO_CONFIG,
20 | bucket_name=config.BUCKET_NAME
21 | )
22 |
23 | # Apply
24 | logging.info("Applying feature store objects")
25 | store.apply([
26 | features.state,
27 | features.weekly_vaccinations_fv,
28 | features.vaccine_search_trends_fv,
29 | features.serving_features,
30 | features.training_features
31 | ])
32 |
33 | # Materialize features to the online store
34 | logging.info("Materializing features")
35 | store.materialize_incremental(datetime.now())
36 |
37 | logging.info("Done")
38 |
--------------------------------------------------------------------------------
/docker/setup/create.py:
--------------------------------------------------------------------------------
1 | from feast import RepoConfig
2 | from google.cloud import bigquery
3 | from feature_store.utils import (
4 | logger,
5 | storage
6 | )
7 | from feature_store.repo import (
8 | config,
9 | features
10 | )
11 |
12 |
13 | if __name__ == "__main__":
14 | # Setup logger
15 | logging = logger.get_logger()
16 |
17 | # Create a feature store repo config
18 | logging.info("Creating Feast repo configuration")
19 | repo_config = RepoConfig(
20 | project=config.FEAST_PROJECT,
21 | # Cloud Storage Blob for the Registry
22 | registry=f"gs://{config.BUCKET_NAME}/data/registry.db",
23 | # Google Cloud Project -- GCP
24 | provider="gcp",
25 | # Redis Enterprise as the Online Store
26 | online_store={
27 | "type": "redis",
28 | "connection_string": f"{config.REDIS_CONNECTION_STRING},password={config.REDIS_PASSWORD}"
29 | },
30 | entity_key_serialization_version=2
31 | )
32 |
33 | # Host the config in cloud storage
34 | logging.info("Uploading repo config to cloud storage bucket")
35 | storage.upload_pkl(repo_config, config.BUCKET_NAME, config.REPO_CONFIG)
36 |
37 | # Generate initial features data in offline store
38 | logging.info("Generating initial vaccine features in GCP")
39 | client = bigquery.Client()
40 |
41 | features.generate_vaccine_counts(
42 | logging,
43 | client,
44 | f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.WEEKLY_VACCINATIONS_TABLE}"
45 | )
46 |
47 | features.generate_vaccine_search_trends(
48 | logging,
49 | client,
50 | f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.VACCINE_SEARCH_TRENDS_TABLE}"
51 | )
52 |
53 | logging.info("Done")
54 |
--------------------------------------------------------------------------------
/docker/setup/environment.yml:
--------------------------------------------------------------------------------
1 | name: py38
2 | dependencies:
3 | - pip
4 | - python=3.8
5 | - pip:
6 | - git+https://github.com/RedisVentures/redis-feast-gcp@vertex-triton-support
7 |
--------------------------------------------------------------------------------
/docker/setup/materialize.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from google.cloud import bigquery
3 | from feature_store.repo import (
4 | config,
5 | features
6 | )
7 | from feature_store.utils import (
8 | logger,
9 | storage
10 | )
11 |
12 | def materialize_features(logging):
13 | """
14 | Incrementally materialize ML features from offline store to online store
15 | using Feast.
16 | """
17 | # Load FeatureStore
18 | store = storage.get_feature_store(
19 | config_path=config.REPO_CONFIG,
20 | bucket_name=config.BUCKET_NAME
21 | )
22 |
23 | # Materialize Features to Redis
24 | logging.info("Beginning materialization")
25 | store.materialize_incremental(end_date=datetime.now())
26 |
27 | def main(data, context):
28 | # Setup logger
29 | logging = logger.get_logger()
30 |
31 | # Big Query Client
32 | client = bigquery.Client()
33 |
34 | # Generate Vaccine Count Features
35 | features.generate_vaccine_counts(
36 | logging,
37 | client,
38 | f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.WEEKLY_VACCINATIONS_TABLE}"
39 | )
40 | # Generate Vaccine Search Features
41 | features.generate_vaccine_search_trends(
42 | logging,
43 | client,
44 | f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.VACCINE_SEARCH_TRENDS_TABLE}"
45 | )
46 | # Perform local materialization
47 | materialize_features(logging)
48 |
--------------------------------------------------------------------------------
/docker/setup/setup.sh:
--------------------------------------------------------------------------------
1 | # Auth
2 | gcloud auth activate-service-account $SERVICE_ACCOUNT_EMAIL \
3 | --key-file=$GOOGLE_APPLICATION_CREDENTIALS \
4 | --project=$PROJECT_ID
5 |
6 | # Setup GCP Project Name
7 | echo project_id = $PROJECT_ID > ~/.bigqueryrc
8 |
9 | # Enable APIs
10 | echo "\nEnabling GCP APIs"
11 | gcloud services enable artifactregistry.googleapis.com
12 | gcloud services enable ml.googleapis.com
13 | gcloud services enable aiplatform.googleapis.com
14 | gcloud services enable bigquery.googleapis.com
15 | gcloud services enable cloudscheduler.googleapis.com
16 | gcloud services enable cloudfunctions.googleapis.com
17 |
18 | # Create Cloud Storage Bucket
19 | echo "\nCreating cloud storage bucket"
20 | gsutil ls -b gs://$BUCKET_NAME || gsutil mb gs://$BUCKET_NAME
21 |
22 | # Create BigQuery Dataset
23 | echo "\nCreating biqquery dataset"
24 | bq --location=us mk --dataset $PROJECT_ID:gcp_feast_demo
25 |
26 | # Deploy Cloud Function
27 | echo "\nCreating cloud function for materialization"
28 | gcloud functions deploy feast-update-features \
29 | --source=./setup \
30 | --entry-point=main \
31 | --memory=1024MB \
32 | --allow-unauthenticated \
33 | --runtime python38 \
34 | --trigger-resource feast-schedule \
35 | --trigger-event google.pubsub.topic.publish \
36 | --timeout 540s \
37 | --set-build-env-vars GOOGLE_FUNCTION_SOURCE="materialize.py" \
38 | --set-env-vars PROJECT_ID=$PROJECT_ID
39 |
40 | # Create Cloud Scheduler Job
41 | echo "\nCreating cloud scheduler task for triggering materialization daily"
42 | gcloud scheduler jobs create pubsub feast-daily-job \
43 | --location $GCP_REGION \
44 | --schedule "0 22 * * *" \
45 | --topic feast-schedule \
46 | --message-body "This job schedules feature materialization once a day."
47 |
48 | # Create & Apply the Feature Store
49 | echo "\nCreating Feature Store"
50 | python setup/create.py
51 | python setup/apply.py
52 |
53 | ## Create Artifact Registry
54 | echo "\nCreating GCP Artifact Repository for Custom Triton Serving Container"
55 | ARTIFACT_REPOSITORY_NAME=nvidia-triton
56 |
57 | gcloud artifacts repositories create $ARTIFACT_REPOSITORY_NAME \
58 | --repository-format=docker \
59 | --location=$GCP_REGION \
60 | --description="NVIDIA Triton Docker repository"
61 |
62 | # Setup Vertex AI and Triton
63 | echo "\nUploading Triton Models to Cloud Storage"
64 | CONTAINER_IMAGE_URI=$GCP_REGION-docker.pkg.dev/$PROJECT_ID/$ARTIFACT_REPOSITORY_NAME/vertex-triton-inference
65 | NGC_TRITON_IMAGE_URI=ghcr.io/redisventures/tritonserver-python-fil:22.11-py3
66 | MODEL_STORAGE_URI=gs://$BUCKET_NAME/models
67 |
68 | ## Upload Triton Model Repository Contents
69 | gsutil -m cp -r ./triton/models gs://$BUCKET_NAME/
70 | gsutil rm $MODEL_STORAGE_URI/ensemble/1/.gitkeep
71 |
72 | # Pull and Upload Triton Image
73 | echo "\nPulling Triton Docker Image"
74 | docker pull $NGC_TRITON_IMAGE_URI
75 | docker tag $NGC_TRITON_IMAGE_URI $CONTAINER_IMAGE_URI
76 |
77 | echo "\nPushing Triton Docker Image to GCP"
78 | gcloud auth configure-docker $GCP_REGION-docker.pkg.dev --quiet
79 | docker push $CONTAINER_IMAGE_URI
80 |
81 | # Create Vertex AI Model
82 | echo "\nCreating Vertex AI Model"
83 | ENDPOINT_NAME=vaccine-predictor-endpoint
84 | DEPLOYED_MODEL_NAME=vaccine-predictor
85 |
86 | gcloud ai models upload \
87 | --region=$GCP_REGION \
88 | --display-name=$DEPLOYED_MODEL_NAME \
89 | --container-image-uri=$CONTAINER_IMAGE_URI \
90 | --artifact-uri=$MODEL_STORAGE_URI \
91 | --container-env-vars="REDIS_CONNECTION_STRING=$REDIS_CONNECTION_STRING","REDIS_PASSWORD=$REDIS_PASSWORD","PROJECT_ID=$PROJECT_ID","GCP_REGION=$GCP_REGION","BUCKET_NAME=$BUCKET_NAME"
92 |
93 | # Create Endpoint
94 | echo "\nCreating Vertex AI Endpoint"
95 | gcloud ai endpoints create \
96 | --region=$GCP_REGION \
97 | --display-name=$ENDPOINT_NAME
98 |
99 | ## Lookup Endpoint and Model IDs
100 | echo "\nDeploying Model to Endpoint"
101 | ENDPOINT_ID=$(gcloud ai endpoints list \
102 | --region=$GCP_REGION \
103 | --filter=display_name=$ENDPOINT_NAME \
104 | --format="value(name)")
105 |
106 | MODEL_ID=$(gcloud ai models list \
107 | --region=$GCP_REGION \
108 | --filter=display_name=$DEPLOYED_MODEL_NAME \
109 | --format="value(name)")
110 |
111 | # Deploy Model to the Endpoint on Vertex
112 | gcloud ai endpoints deploy-model $ENDPOINT_ID \
113 | --region=$GCP_REGION \
114 | --model=$MODEL_ID \
115 | --display-name=$DEPLOYED_MODEL_NAME \
116 | --machine-type=n1-standard-2 \
117 | --service-account=$SERVICE_ACCOUNT_EMAIL
--------------------------------------------------------------------------------
/docker/setup/teardown.py:
--------------------------------------------------------------------------------
1 | from feature_store.repo import config
2 | from feature_store.utils import (
3 | logger,
4 | storage
5 | )
6 |
7 |
8 | if __name__ == '__main__':
9 | # Setup logging
10 | logging = logger.get_logger()
11 |
12 | # Create FeatureStore
13 | logging.info("Fetching feature store")
14 | store = storage.get_feature_store(
15 | config_path=config.REPO_CONFIG,
16 | bucket_name=config.BUCKET_NAME
17 | )
18 |
19 | # Teardown
20 | logging.info("Tearing down feature store")
21 | store.teardown()
22 |
23 | logging.info("Done")
--------------------------------------------------------------------------------
/docker/setup/teardown.sh:
--------------------------------------------------------------------------------
1 | # Auth
2 | gcloud auth activate-service-account $SERVICE_ACCOUNT_EMAIL \
3 | --key-file=$GOOGLE_APPLICATION_CREDENTIALS \
4 | --project=$PROJECT_ID
5 |
6 | # Cleanup BigQuery
7 | bq rm -t -f "gcp_feast_demo.vaccine_search_trends"
8 | bq rm -t -f "gcp_feast_demo.us_weekly_vaccinations"
9 | bq rm -r -f -d "gcp_feast_demo"
10 |
11 | # Cleanup Cloud Function and Scheduler
12 | gcloud functions delete feast-update-features --quiet
13 | gcloud scheduler jobs delete feast-daily-job \
14 | --project=$PROJECT_ID \
15 | --location=$GCP_REGION \
16 | --quiet
17 |
18 | # Teardown Vertex AI resources
19 | ENDPOINT_NAME=vaccine-predictor-endpoint
20 | DEPLOYED_MODEL_NAME=vaccine-predictor
21 | ARTIFACT_REPOSITORY_NAME=nvidia-triton
22 |
23 | ENDPOINT_ID=$(gcloud ai endpoints list \
24 | --region=$GCP_REGION \
25 | --filter=display_name=$ENDPOINT_NAME \
26 | --format="value(name)")
27 |
28 | DEPLOYED_MODEL_ID=$(gcloud ai endpoints describe $ENDPOINT_ID \
29 | --region=$GCP_REGION \
30 | --format="value(deployedModels.id)")
31 |
32 | gcloud ai endpoints undeploy-model $ENDPOINT_ID \
33 | --region=$GCP_REGION \
34 | --deployed-model-id=$DEPLOYED_MODEL_ID
35 |
36 | gcloud ai endpoints delete $ENDPOINT_ID \
37 | --region=$GCP_REGION \
38 | --quiet
39 |
40 | MODEL_ID=$(gcloud ai models list \
41 | --region=$GCP_REGION \
42 | --filter=display_name=$DEPLOYED_MODEL_NAME \
43 | --format="value(name)")
44 |
45 | gcloud ai models delete $MODEL_ID \
46 | --region=$GCP_REGION \
47 | --quiet
48 |
49 | gcloud artifacts repositories delete $ARTIFACT_REPOSITORY_NAME \
50 | --location=$GCP_REGION \
51 | --quiet
52 |
53 | # Teardown Feast
54 | echo "Tearing down Feast infrastructure"
55 | python setup/teardown.py
--------------------------------------------------------------------------------
/docker/triton/README.md:
--------------------------------------------------------------------------------
1 | # Creating Custom Triton Image
2 |
3 | We provide this custom image for you to use, hosted in our GitHub Container Registry at RedisVentures. **But if you wish to re-create it, follow these steps. This is NOT required.**
4 |
5 | The project requires a Docker image with NVIDIA Triton along with the following customizations:
6 |
7 | - Python and FIL backends.
8 | - Smaller size -- down to ~8GB instead of ~21GB.
9 | - Custom `ENTRYPOINT` and required libraries.
10 |
11 | >If doing this yourself, we recommend performing these steps in a Linux VM environment with x86_64 CPU architecture.
12 |
13 | ## Pull Repos and Create FIL Backend
14 | ```bash
15 | git clone https://github.com/triton-inference-server/server.git
16 | git clone https://github.com/triton-inference-server/fil_backend.git
17 |
18 | cd fil_backend/
19 |
20 | BASE_IMAGE=nvcr.io/nvidia/tritonserver:22.11-py3 ./build.sh
21 | ```
22 |
23 | This will output a Docker image, `triton_fil_test`, that contains the Triton FIL backend installed over the `22.11` base Triton image.
24 |
25 | ## Create Custom Triton Image
26 | Now we want to build a custom Triton image with all other backends ripped out (to make it smaller).
27 | ```bash
28 | cd ../server/
29 |
30 | python3 compose.py --backend python --backend fil --container-version 22.11 --dry-run
31 | ```
32 | The `--dry-run` option creates the Dockerfile without building it. We've included the generated [Dockerfile](../Dockerfile.triton) here.
33 |
34 | We added some additional parts from line 80 onwards to handle the custom [`ENTRYPOINT`](./entrypoint.sh) and required libraries:
35 | ```dockerfile
36 | RUN echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] http://packages.cloud.google.com/apt cloud-sdk main" | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key --keyring /usr/share/keyrings/cloud.google.gpg add - && apt-get update -y && apt-get install google-cloud-sdk -y
37 |
38 | WORKDIR /src
39 |
40 | COPY ./entrypoint.sh ./
41 |
42 | ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/compat/lib.real:/usr/local/hugectr/lib:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/cuda/compat/lib:/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64:/usr/local/lib:/repos/dist/lib
43 |
44 | ENTRYPOINT ./entrypoint.sh
45 | ```
46 |
47 | ## Build Image, Tag, Push
48 | Easiest part last:
49 |
50 | ```bash
51 | docker build -t $GCP_REGION-docker.pkg.dev/$PROJECT_ID/nvidia-triton/vertex-triton-inference:latest -f docker/Dockerfile.triton .
52 | ```
53 |
54 | Push image:
55 | ```bash
56 | gcloud auth configure-docker $GCP_REGION-docker.pkg.dev
57 | docker push $GCP_REGION-docker.pkg.dev/$PROJECT_ID/nvidia-triton/vertex-triton-inference:latest
58 | ```
--------------------------------------------------------------------------------
/docker/triton/entrypoint.sh:
--------------------------------------------------------------------------------
1 | # Set up a global error handler
2 | err_handler() {
3 | echo "Error on line: $1"
4 | echo "Caused by: $2"
5 | echo "That returned exit status: $3"
6 | echo "Aborting..."
7 | exit $3
8 | }
9 |
10 | trap 'err_handler "$LINENO" "$BASH_COMMAND" "$?"' ERR
11 |
12 |
13 | if [ -z "${AIP_STORAGE_URI}" ]
14 | then
15 | echo 'AIP_STORAGE_URI not set. Exiting ....'
16 | exit 1
17 | fi
18 |
19 | MODEL_REPOSITORY=/model
20 |
21 | echo "Copying model ensemble from ${AIP_STORAGE_URI} to ${MODEL_REPOSITORY}"
22 | mkdir ${MODEL_REPOSITORY}
23 | gsutil -m cp -r ${AIP_STORAGE_URI}/* ${MODEL_REPOSITORY}
24 |
25 | # gsutil does not copy empty dirs so create a version folder for the ensemble
26 | ENSEMBLE_DIR=$(ls ${MODEL_REPOSITORY} | grep ens)
27 | mkdir ${MODEL_REPOSITORY}/${ENSEMBLE_DIR}/1
28 |
29 | echo "Starting Triton Server"
30 | tritonserver --vertex-ai-default-model=ensemble --model-repository=$MODEL_REPOSITORY --backend-config=python,shm-default-byte-size=16777216 --log-verbose=3 --log-info=1 --log-warning=1 --log-error=1
--------------------------------------------------------------------------------
/docker/triton/models/ensemble/1/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/docker/triton/models/ensemble/1/.gitkeep
--------------------------------------------------------------------------------
/docker/triton/models/ensemble/config.pbtxt:
--------------------------------------------------------------------------------
1 | name: "ensemble"
2 | platform: "ensemble"
3 | max_batch_size: 256
4 | input [
5 | {
6 | name: "state"
7 | data_type: TYPE_STRING
8 | dims: 1
9 | }
10 | ]
11 | output [
12 | {
13 | name: "prediction"
14 | data_type: TYPE_FP32
15 | dims: 1
16 | }
17 | ]
18 | ensemble_scheduling {
19 | step [
20 | {
21 | model_name: "fetch-vaccine-features"
22 | model_version: -1
23 | input_map {
24 | key: "state"
25 | value: "state"
26 | }
27 | output_map {
28 | key: "feature_values"
29 | value: "feature_values"
30 | }
31 | },
32 | {
33 | model_name: "predict-vaccine-counts"
34 | model_version: -1
35 | input_map {
36 | key: "input__0"
37 | value: "feature_values"
38 | }
39 | output_map {
40 | key: "output__0"
41 | value: "prediction"
42 | }
43 | }
44 | ]
45 | }
--------------------------------------------------------------------------------
/docker/triton/models/fetch-vaccine-features/1/model.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import sys
3 | import json
4 | import io
5 |
6 | # triton_python_backend_utils is available in every Triton Python model. You
7 | # need to use this module to create inference requests and responses. It also
8 | # contains some utility functions for extracting information from model_config
9 | # and converting Triton input/output types to numpy types.
10 | import triton_python_backend_utils as pb_utils
11 | from feature_store.repo import config
12 | from feature_store.utils import (
13 | DataFetcher,
14 | logger,
15 | storage
16 | )
17 |
18 | logging = logger.get_logger()
19 |
20 |
21 |
22 | class TritonPythonModel:
23 | """Your Python model must use the same class name. Every Python model
24 | that is created must have "TritonPythonModel" as the class name.
25 | """
26 |
27 | def initialize(self, args):
28 | """`initialize` is called only once when the model is being loaded.
29 | Implementing `initialize` function is optional. This function allows
30 | the model to initialize any state associated with this model.
31 |
32 | Parameters
33 | ----------
34 | args : dict
35 | Both keys and values are strings. The dictionary keys and values are:
36 | * model_config: A JSON string containing the model configuration
37 | * model_instance_kind: A string containing model instance kind
38 | * model_instance_device_id: A string containing model instance device ID
39 | * model_repository: Model repository path
40 | * model_version: Model version
41 | * model_name: Model name
42 | """
43 |
44 | # You must parse model_config. JSON string is not parsed here
45 | self.model_config = model_config = json.loads(args['model_config'])
46 |
47 | # Get OUTPUT0 configuration
48 | output0_config = pb_utils.get_output_config_by_name(
49 | model_config, "feature_values")
50 |
51 | # Convert Triton types to numpy types
52 | self.output0_dtype = pb_utils.triton_string_to_numpy(
53 | output0_config['data_type'])
54 |
55 | logging.info("Loading feature store")
56 | self.fs = storage.get_feature_store(
57 | config_path=config.REPO_CONFIG,
58 | bucket_name=config.BUCKET_NAME
59 | )
60 | logging.info("Loading feature store")
61 | self.data_fetcher = DataFetcher(self.fs)
62 |
63 | def execute(self, requests):
64 | """`execute` MUST be implemented in every Python model. `execute`
65 | function receives a list of pb_utils.InferenceRequest as the only
66 | argument. This function is called when an inference request is made
67 | for this model. Depending on the batching configuration (e.g. Dynamic
68 | Batching) used, `requests` may contain multiple requests. Every
69 | Python model, must create one pb_utils.InferenceResponse for every
70 | pb_utils.InferenceRequest in `requests`. If there is an error, you can
71 | set the error argument when creating a pb_utils.InferenceResponse
72 |
73 | Parameters
74 | ----------
75 | requests : list
76 | A list of pb_utils.InferenceRequest
77 |
78 | Returns
79 | -------
80 | list
81 | A list of pb_utils.InferenceResponse. The length of this list must
82 | be the same as `requests`
83 | """
84 |
85 | output0_dtype = self.output0_dtype
86 |
87 | responses = []
88 |
89 | # Every Python backend must iterate over every one of the requests
90 | # and create a pb_utils.InferenceResponse for each of them.
91 | for request in requests:
92 | # Get Input
93 | input_tensor = pb_utils.get_input_tensor_by_name(request, "state")
94 | state = input_tensor.as_numpy().reshape(1)
95 | logging.info(state)
96 |
97 | # Fetch feature data from Feast db
98 | feature_vector = self.data_fetcher.get_online_data(state=state[0].decode('utf-8'))
99 | feature_out = feature_vector.to_numpy().reshape(-1, 8)
100 | logging.info(feature_vector)
101 |
102 | # Create InferenceResponse
103 | inference_response = pb_utils.InferenceResponse(
104 | output_tensors=[pb_utils.Tensor(
105 | "feature_values",
106 | feature_out.astype(output0_dtype)
107 | )]
108 | )
109 | responses.append(inference_response)
110 |
111 | # Return a list of pb_utils.InferenceResponse
112 | return responses
113 |
114 | def finalize(self):
115 | """`finalize` is called only once when the model is being unloaded.
116 | Implementing `finalize` function is OPTIONAL. This function allows
117 | the model to perform any necessary clean ups before exit.
118 | """
119 | logging.info('Cleaning up...')
--------------------------------------------------------------------------------
/docker/triton/models/fetch-vaccine-features/config.pbtxt:
--------------------------------------------------------------------------------
1 | name: "fetch-vaccine-features"
2 | backend: "python"
3 | max_batch_size: 256
4 | input [
5 | {
6 | name: "state"
7 | data_type: TYPE_STRING
8 | dims: [ 1 ]
9 | }
10 | ]
11 |
12 | output [
13 | {
14 | name: "feature_values"
15 | data_type: TYPE_FP32
16 | dims: [ 8 ]
17 | }
18 | ]
19 |
20 | parameters: {
21 | key: "EXECUTION_ENV_PATH",
22 | value: {string_value: "$$TRITON_MODEL_DIRECTORY/python3.8.tar.gz"}
23 | }
24 |
25 | instance_group [{ kind: KIND_CPU }]
--------------------------------------------------------------------------------
/docker/triton/models/predict-vaccine-counts/config.pbtxt:
--------------------------------------------------------------------------------
1 | name: "predict-vaccine-counts"
2 | backend: "fil"
3 | max_batch_size: 256
4 | input [
5 | {
6 | name: "input__0"
7 | data_type: TYPE_FP32
8 | dims: 8
9 | }
10 | ]
11 | output [
12 | {
13 | name: "output__0"
14 | data_type: TYPE_FP32
15 | dims: 1
16 | }
17 | ]
18 | instance_group [{ kind: KIND_CPU }]
19 | parameters [
20 | {
21 | key: "model_type"
22 | value: { string_value: "xgboost_json" }
23 | },
24 | {
25 | key: "output_class"
26 | value: { string_value: "false" }
27 | },
28 | {
29 | key: "storage_type"
30 | value: { string_value: "AUTO" }
31 | },
32 | {
33 | key: "use_experimental_optimizations"
34 | value: { string_value: "true" }
35 | }
36 | ]
--------------------------------------------------------------------------------
/env.sh:
--------------------------------------------------------------------------------
1 | # Create an ENV file
2 |
3 | touch .env
4 |
5 | while read p; do
6 | echo $p
7 | read -u 1 input  # stdin is redirected from the template, so read the user's answer from fd 1 (the terminal)
8 | echo $p$input >> .env
9 | done <.env.template
--------------------------------------------------------------------------------
/feature_store/README.md:
--------------------------------------------------------------------------------
1 | # Feature Store Overview
2 | A quick view of what's in this package:
3 |
4 | * [`repo/`](repo/) contains Feast feature definitions and configuration.
5 | * [`utils/`](utils/) contains helper utilities and functions that can be used throughout.
6 |
7 | TBD -- Need to fill this out a good bit.
8 |
9 |
10 | >Our offline features will be stored in GCP BigQuery. During setup, we've already created a dataset `gcp_feast_demo` where our cloud function can load two new tables:
11 |
12 | - `gcp_feast_demo.us_weekly_vaccinations` - Weekly vaccination counts across the United States by State.
13 | - `gcp_feast_demo.vaccine_search_trends` - Weekly vaccine search trends series with three search categories (interest, intent, and safety) by US state.
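
As a quick usage sketch (assuming the store was already deployed via `make setup` and the standard `.env` variables are set), the helpers in [`utils/`](utils/) can fetch online features like this:

```python
from feature_store.repo import config
from feature_store.utils import DataFetcher, storage

# Load the Feast FeatureStore from the pickled repo config in Cloud Storage
store = storage.get_feature_store(
    config_path=config.REPO_CONFIG,
    bucket_name=config.BUCKET_NAME
)

# Fetch the online (Redis) feature vector for a single US state
fetcher = DataFetcher(store)
feature_vector = fetcher.get_online_data(state="Virginia")
print(feature_vector)
```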
--------------------------------------------------------------------------------
/feature_store/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/feature_store/__init__.py
--------------------------------------------------------------------------------
/feature_store/repo/.feastignore:
--------------------------------------------------------------------------------
1 | *.ipynb
2 | *__pycache__
3 | *.ipynb_checkpoints
4 | *.pkl
--------------------------------------------------------------------------------
/feature_store/repo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/feature_store/repo/__init__.py
--------------------------------------------------------------------------------
/feature_store/repo/config.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | PROJECT_ID = os.environ["PROJECT_ID"]
4 | GOOGLE_APPLICATION_CREDENTIALS = os.getenv("GOOGLE_APPLICATION_CREDENTIALS")
5 | REDIS_CONNECTION_STRING = os.getenv("REDIS_CONNECTION_STRING", "localhost:6379")
6 | REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", "")
7 | BUCKET_NAME = os.getenv("BUCKET_NAME", "gcp-feast-demo")
8 | GCP_REGION = os.getenv("GCP_REGION", "us-east1")
9 | FEAST_PROJECT = os.getenv("FEAST_PROJECT", "feature_store")
10 | REPO_CONFIG = "data/repo_config.pkl"
11 | BIGQUERY_DATASET_NAME = "gcp_feast_demo"
12 | MODEL_NAME = "predict-vaccine-counts"
13 | MODEL_FILENAME = "xgboost.json"
14 | VACCINE_SEARCH_TRENDS_TABLE = "vaccine_search_trends"
15 | WEEKLY_VACCINATIONS_TABLE = "us_weekly_vaccinations"
16 | DAILY_VACCINATIONS_CSV_URL = "https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv"
17 |
--------------------------------------------------------------------------------
/feature_store/repo/features.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import tempfile
3 |
4 | from datetime import timedelta
5 | from google.cloud import bigquery
6 | from feast import (
7 | BigQuerySource,
8 | Entity,
9 | FeatureService,
10 | FeatureView,
11 | Field
12 | )
13 | from feast.types import (
14 | Float32,
15 | Int64
16 | )
17 | from feature_store.repo import config
18 | from feature_store.utils import storage
19 |
20 |
21 | # Define an entity for the state. You can think of an entity as a primary key used to
22 | # fetch features.
23 | state = Entity(name="state", join_keys=["state"])
24 |
25 | # Defines a data source from which feature values can be retrieved. Sources are queried when building training
26 | # datasets or materializing features into an online store.
27 | vaccine_search_trends_src = BigQuerySource(
28 | name="vaccine_search_trends_src",
29 | # The BigQuery table where features can be found
30 | table=f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.VACCINE_SEARCH_TRENDS_TABLE}",
31 | # The event timestamp is used for point-in-time joins and for ensuring only
32 | # features within the TTL are returned
33 | timestamp_field="date"
34 | )
35 |
36 | # Feature views are a grouping based on how features are stored in either the
37 | # online or offline store.
38 | vaccine_search_trends_fv = FeatureView(
39 | # The unique name of this feature view. Two feature views in a single
40 | # project cannot have the same name
41 | name="vaccine_search_trends",
42 | # The list of entities specifies the keys required for joining or looking
43 | # up features from this feature view. The reference provided in this field
44 | # correspond to the name of a defined entity (or entities)
45 | entities=[state],
46 | # The timedelta is the maximum age that each feature value may have
47 | # relative to its lookup time. For historical features (used in training),
48 | # TTL is relative to each timestamp provided in the entity dataframe.
49 | # TTL also allows for eviction of keys from online stores and limits the
50 | # amount of historical scanning required for historical feature values
51 | # during retrieval
52 | ttl=timedelta(weeks=52 * 10), # Set to be very long for example purposes only
53 | # The list of features defined below acts as a schema: it both defines the
54 | # features for materialization into a store, and is used as a reference
55 | # during retrieval for building a training dataset or serving features
56 | schema=[
57 | Field(name="lag_1_vaccine_interest", dtype=Float32),
58 | Field(name="lag_2_vaccine_interest", dtype=Float32),
59 | Field(name="lag_1_vaccine_intent", dtype=Float32),
60 | Field(name="lag_2_vaccine_intent", dtype=Float32),
61 | Field(name="lag_1_vaccine_safety", dtype=Float32),
62 | Field(name="lag_2_vaccine_safety", dtype=Float32)
63 | ],
64 | source=vaccine_search_trends_src,
65 | )
66 |
67 |
68 | weekly_vaccinations_src = BigQuerySource(
69 | name="weekly_vaccinations_src",
70 | table=f"{config.PROJECT_ID}.{config.BIGQUERY_DATASET_NAME}.{config.WEEKLY_VACCINATIONS_TABLE}",
71 | timestamp_field="date"
72 | )
73 |
74 | weekly_vaccinations_fv = FeatureView(
75 | name="weekly_vaccinations",
76 | entities=[state],
77 | ttl=timedelta(weeks=52 * 10),
78 | schema=[
79 | Field(name="lag_1_weekly_vaccinations_count", dtype=Int64),
80 | Field(name="lag_2_weekly_vaccinations_count", dtype=Int64),
81 | Field(name="weekly_vaccinations_count", dtype=Int64)
82 | ],
83 | source=weekly_vaccinations_src,
84 | )
85 |
86 |
87 | serving_features = FeatureService(
88 | name="serving_features",
89 | features=[
90 | vaccine_search_trends_fv,
91 | weekly_vaccinations_fv[[
92 | "lag_1_weekly_vaccinations_count",
93 | "lag_2_weekly_vaccinations_count"
94 | ]]
95 | ],
96 | )
97 |
98 | training_features = FeatureService(
99 | name="training_features",
100 | features=[
101 | vaccine_search_trends_fv,
102 | weekly_vaccinations_fv
103 | ],
104 | )
105 |
106 |
107 | def generate_vaccine_search_trends(
108 | logging,
109 | client: bigquery.Client,
110 | table_id: str
111 | ):
112 | """
113 | Generate and upload weekly vaccine search trends features derived from a public
114 | Google dataset stored in BigQuery.
115 |
116 | Args:
117 | client (bigquery.Client): GCP bigquery Client.
118 | table_id (str): Table ID for this feature set.
119 | """
120 | job_config = bigquery.QueryJobConfig(
121 | destination=table_id,
122 | write_disposition='WRITE_TRUNCATE'
123 | )
124 | sql = f"""
125 | WITH vaccine_trends AS (
126 | SELECT
127 | date,
128 | sub_region_1 as state,
129 | avg(sni_covid19_vaccination) as lag_1_vaccine_interest,
130 | avg(sni_vaccination_intent) as lag_1_vaccine_intent,
131 | avg(sni_safety_side_effects) as lag_1_vaccine_safety
132 | FROM
133 | `bigquery-public-data.covid19_vaccination_search_insights.covid19_vaccination_search_insights`
134 | GROUP BY
135 | date, state
136 | ),
137 | weekly_trends AS (
138 | SELECT
139 | TIMESTAMP(date) as date,
140 | state,
141 | lag_1_vaccine_interest,
142 | lag(lag_1_vaccine_interest)
143 | over (partition by state order by date ASC) as lag_2_vaccine_interest,
144 | lag_1_vaccine_intent,
145 | lag(lag_1_vaccine_intent)
146 | over (partition by state order by date ASC) as lag_2_vaccine_intent,
147 | lag_1_vaccine_safety,
148 | lag(lag_1_vaccine_safety)
149 | over (partition by state order by date ASC) as lag_2_vaccine_safety
150 | FROM
151 | vaccine_trends
152 | )
153 | SELECT
154 | date,
155 | state,
156 | lag_1_vaccine_interest,
157 | lag_2_vaccine_interest,
158 | lag_1_vaccine_intent,
159 | lag_2_vaccine_intent,
160 | lag_1_vaccine_safety,
161 | lag_2_vaccine_safety
162 | FROM
163 | weekly_trends
164 | WHERE
165 | state IS NOT NULL AND
166 | lag_1_vaccine_interest IS NOT NULL AND
167 | lag_2_vaccine_interest IS NOT NULL AND
168 | lag_1_vaccine_intent IS NOT NULL AND
169 | lag_2_vaccine_intent IS NOT NULL AND
170 | lag_1_vaccine_safety IS NOT NULL AND
171 | lag_2_vaccine_safety IS NOT NULL
172 | ORDER BY
173 | date ASC,
174 | state;
175 | """
176 | query_job = client.query(sql, job_config=job_config)
177 | query_job.result()
178 | logging.info("Generated weekly vaccine search trends features")
179 |
180 | def generate_vaccine_counts(
181 | logging,
182 | client: bigquery.Client,
183 | table_id: str
184 | ):
185 | """
186 | Generate and upload vaccine count features from a CSV to BigQuery.
187 |
188 | Args:
189 | client (bigquery.Client): GCP bigquery Client.
190 | table_id (str): Table ID for this feature set.
191 | """
192 | # Generate temp dir
193 | tmpdir = tempfile.gettempdir()
194 | input_filename = f"{tmpdir}/us_state_vaccinations.csv"
195 | output_filename = f"{tmpdir}/us_weekly_vaccinations.csv"
196 | output_storage_filename = "data/us_weekly_vaccinations.csv"
197 |
198 | # Download the CSV file from URL
199 | storage.download_file_url(
200 | filename=input_filename,
201 | url=config.DAILY_VACCINATIONS_CSV_URL
202 | )
203 |
204 | logging.info("Loading us_state_vaccinations.csv")
205 | df = pd.read_csv(input_filename)[['date', 'location', 'daily_vaccinations']]
206 | logging.info(f"Loaded {len(df)} daily vaccination records")
207 |
208 | logging.info("Cleaning dataset")
209 | df['date'] = df['date'].astype('datetime64[ns]')
210 |
211 | logging.info("Truncating records and filling NaNs")
212 | df = df[(~df.location.isin(['United States', 'Long Term Care'])) & (df.date >= '2021-1-1')].fillna(0)
213 | logging.info(f"{len(df)} daily records remaining")
214 |
215 | logging.info("Rolling up counts into weeks starting on Mondays")
216 | df = df.groupby([pd.Grouper(freq='W-Mon', key='date'), 'location'])['daily_vaccinations'].sum().reset_index()
217 | df.rename(columns={'daily_vaccinations': 'lag_1_weekly_vaccinations_count', 'location': 'state'}, inplace=True)
218 | logging.info(f"{len(df)} weekly vaccine count records for {len(df.state.value_counts())} total states & territories")
219 |
220 | logging.info("Creating lagged features")
221 | df['weekly_vaccinations_count'] = df.groupby('state').lag_1_weekly_vaccinations_count.shift(periods=-1)
222 | df['lag_2_weekly_vaccinations_count'] = df.groupby('state').lag_1_weekly_vaccinations_count.shift(periods=1)
223 | df.sort_values(['date', 'state'], inplace=True)
224 |
225 | logging.info("Saving dataframe...")
226 | df['weekly_vaccinations_count'] = df['weekly_vaccinations_count'].astype('Int64', errors='ignore')
227 | df['lag_1_weekly_vaccinations_count'] = df['lag_1_weekly_vaccinations_count'].astype('Int64', errors='ignore')
228 | df['lag_2_weekly_vaccinations_count'] = df['lag_2_weekly_vaccinations_count'].astype('Int64', errors='ignore')
229 | df['date'] = df['date'].dt.strftime("%Y-%m-%d %H:%M:%S")
230 |
231 | logging.info("Uploading CSV")
232 | # Save back to tempfile
233 | df.to_csv(output_filename, index=False)
234 |
235 | # Upload to cloud storage
236 | storage.upload_file(
237 | local_filename=output_filename,
238 | remote_filename=output_storage_filename,
239 | bucket_name=config.BUCKET_NAME
240 | )
241 |
242 | # Load bq job config
243 | job_config = bigquery.LoadJobConfig(
244 | schema=[
245 | bigquery.SchemaField("date", "TIMESTAMP"),
246 | bigquery.SchemaField("state", "STRING"),
247 | bigquery.SchemaField("lag_1_weekly_vaccinations_count", "INTEGER"),
248 | bigquery.SchemaField("weekly_vaccinations_count", "INTEGER"),
249 | bigquery.SchemaField("lag_2_weekly_vaccinations_count", "INTEGER")
250 | ],
251 | skip_leading_rows=1,
252 | max_bad_records=2,
253 | source_format=bigquery.SourceFormat.CSV,
254 | write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE
255 | )
256 | # Start the job
257 | logging.info("Running query")
258 | load_job = client.load_table_from_uri(
259 | f"gs://{config.BUCKET_NAME}/{output_storage_filename}",
260 | table_id,
261 | job_config=job_config
262 | )
263 | # Wait for job to complete
264 | load_job.result()
265 | logging.info("Generated weekly vaccine count features")
--------------------------------------------------------------------------------
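The sources, feature views, and feature services above make up the Feast repo
definition. A minimal sketch of how they get registered with Feast (illustrative
only; in this project the scripts under setup/ such as apply.py drive
registration, and `state` is the entity declared earlier in this file):

    from feast import FeatureStore
    from feature_store.repo.features import (
        state,
        vaccine_search_trends_fv,
        weekly_vaccinations_fv,
        serving_features,
        training_features,
    )

    # Load the store from a local repo config (feature_store.yaml under
    # repo_path); the setup scripts build an equivalent RepoConfig instead.
    store = FeatureStore(repo_path="feature_store/repo")

    # Register entities, feature views, and feature services in the registry.
    store.apply([
        state,
        vaccine_search_trends_fv,
        weekly_vaccinations_fv,
        serving_features,
        training_features,
    ])

--------------------------------------------------------------------------------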
/feature_store/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .data_fetcher import DataFetcher
2 | from .triton_model_repo import TritonGCSModelRepo
3 | from .redis_model_repo import RedisModelRepo
--------------------------------------------------------------------------------
/feature_store/utils/data_fetcher.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 |
3 | from feast import FeatureStore
4 | from typing import Optional
5 |
6 |
7 | class DataFetcher:
8 | X_cols = [
9 | 'lag_1_vaccine_interest',
10 | 'lag_2_vaccine_interest',
11 | 'lag_1_vaccine_intent',
12 | 'lag_2_vaccine_intent',
13 | 'lag_1_vaccine_safety',
14 | 'lag_2_vaccine_safety',
15 | 'lag_1_weekly_vaccinations_count',
16 | 'lag_2_weekly_vaccinations_count'
17 | ]
18 |
19 | y_col = ['weekly_vaccinations_count']
20 |
21 | def __init__(self, fs: FeatureStore):
22 | """
23 | DataFetcher is a generic helper class to abstract the fetching of
24 | data from the offline and online ML feature sources a la Feast.
25 |
26 | Args:
27 | fs (FeatureStore): Feast FeatureStore object.
28 | """
29 | self._fs = fs
30 | self.serving_feature_svc = self._fs.get_feature_service("serving_features")
31 | self.training_feature_svc = self._fs.get_feature_service("training_features")
32 |
33 | def get_online_data(self, **entities) -> pd.DataFrame:
34 | """
35 | Fetch ML features from the online store for the given entity kwargs (e.g. state="Virginia").
36 |
37 | Returns:
38 | pd.DataFrame: DataFrame consisting of the serving feature set.
39 | """
40 | try:
41 | features = self._fs.get_online_features(
42 | features=self.serving_feature_svc,
43 | entity_rows=[entities]
44 | ).to_df()
45 | return features[self.X_cols]
46 | except Exception as why:
47 | print(why)
48 |
49 | def get_training_data(
50 | self,
51 | entity_df: Optional[pd.DataFrame] = None,
52 | entity_query: Optional[str] = None
53 | ) -> pd.DataFrame:
54 | """
55 | Fetch point-in-time correct ML Features from the
56 | offline data source.
57 |
58 | Args:
59 | entity_df (pd.DataFrame, optional): DataFrame of entities to include in the training set. Defaults to None.
60 | entity_query (str, optional): SQL query used to build the entity df from the offline data source. Defaults to None.
61 |
62 | Returns:
63 | pd.DataFrame: DataFrame consisting of historical training data.
64 | """
65 | try:
66 | if entity_df is not None:
67 | return self._fs.get_historical_features(
68 | features=self.training_feature_svc,
69 | entity_df=entity_df
70 | ).to_df()
71 | if entity_query:
72 | # Otherwise query the offline source of record
73 | return self._fs.get_historical_features(
74 | features=self.training_feature_svc,
75 | entity_df=entity_query
76 | ).to_df()
77 | except Exception as why:
78 | print(why)
79 |
--------------------------------------------------------------------------------
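A hedged sketch of DataFetcher in action, assuming a store rehydrated via
storage.get_feature_store (the bucket name, pickle path, and query below are
placeholders):

    from feature_store.utils import DataFetcher
    from feature_store.utils.storage import get_feature_store

    fs = get_feature_store(
        config_path="data/feature_store.pkl",    # assumed remote path
        bucket_name="my-feature-store-bucket",   # assumed bucket
    )
    fetcher = DataFetcher(fs)

    # Low-latency online features for one entity (served from Redis).
    online_df = fetcher.get_online_data(state="Virginia")

    # Point-in-time correct training set from the offline store (BigQuery),
    # using SQL to produce the entity dataframe.
    training_df = fetcher.get_training_data(
        entity_query="""
            SELECT state, date AS event_timestamp
            FROM `my-project.my_dataset.weekly_vaccinations`
        """
    )

--------------------------------------------------------------------------------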
/feature_store/utils/logger.py:
--------------------------------------------------------------------------------
1 | def get_logger():
2 | import logging
3 |
4 | # Configure the root logger; callers use the returned logging module directly
5 | logging.basicConfig(
6 | level=logging.INFO,
7 | format="%(asctime)5s:%(filename)25s"
8 | ":%(lineno)3s %(funcName)30s(): %(message)s",
9 | )
10 | return logging
--------------------------------------------------------------------------------
/feature_store/utils/redis_model_repo.py:
--------------------------------------------------------------------------------
1 | import pickle
2 | import redis
3 |
4 |
5 | class RedisModelRepo:
6 | model_prefix = "model"
7 | versions = "versions"
8 | latest = "latest"
9 | latest_version = None
10 | model_name = None
11 |
12 | def __init__(
13 | self,
14 | host: str,
15 | port: str,
16 | password: str,
17 | model_name: str
18 | ):
19 | """
20 | RedisModelRepo is a basic storage and versioning layer for ML models using
21 | Redis as the backend.
22 |
23 | Args:
24 | host (str): Redis host.
25 | port (str): Redis port.
26 | password (str): Redis password.
27 | """
28 | self.redis_client = redis.Redis(
29 | host=host,
30 | port=port,
31 | password=password
32 | )
33 | self.model_name = model_name
34 | self.latest_version = self.redis_client.hlen(self.model_versions())
35 |
36 | @classmethod
37 | def from_config(cls, config):
38 | host, port = config.REDIS_CONNECTION_STRING.split(":")
39 | return cls(
40 | host=host,
41 | port=port,
42 | password=config.REDIS_PASSWORD,
43 | model_name=config.MODEL_NAME
44 | )
45 |
46 | def model_versions(self) -> str:
47 | return f"{self.model_prefix}:{self.model_name}:{self.versions}"
48 |
49 | def save_version(self, model) -> int:
50 | """
51 | Persist the model in the database and increment
52 | the version count.
53 |
54 | Args:
55 | model: Model object to store.
56 |
57 | Returns:
58 | int: Model version number.
59 | """
60 | pickle_out = pickle.dumps(model)
61 | new_version = self.latest_version + 1
62 | res = self.redis_client.hset(
63 | name=self.model_versions(),
64 | key=str(new_version),
65 | value=pickle_out
66 | )
67 | if res:
68 | # TODO some checks... increment version
69 | self.latest_version = new_version
70 | return self.latest_version
71 |
72 | def fetch_version(self, version: int):
73 | """
74 | Fetch model by version.
75 |
76 | Args:
77 | version (int): Model version number to fetch.
78 | """
79 | # Look up the requested version in the Redis hash
80 | res = self.redis_client.hget(
81 | name=self.model_versions(),
82 | key=str(version)
83 | )
84 | if res:
85 | pickle_out = pickle.loads(res)
86 | return pickle_out
87 |
88 | def fetch_all_versions(self) -> dict:
89 | """
90 | Fetch all model versions.
91 |
92 | Returns:
93 | dict: Dictionary of model_version : model object.
94 | """
95 | res = self.redis_client.hgetall(name=self.model_versions())
96 | if res:
97 | return {k: pickle.loads(v) for k, v in res.items()}
98 |
99 | def fetch_latest(self):
100 | """
101 | Fetch the latest model version.
102 | """
103 | res = self.redis_client.hget(
104 | name=self.model_versions(),
105 | key=str(self.latest_version)
106 | )
107 | if res:
108 | pickle_out = pickle.loads(res)
109 | return pickle_out
--------------------------------------------------------------------------------
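A short sketch of the versioning flow (connection details and model name here
are placeholders; from_config derives the real values from the project config):

    from feature_store.utils import RedisModelRepo

    repo = RedisModelRepo(
        host="localhost",
        port="6379",
        password=None,
        model_name="vaccine-demand-model",
    )

    # Any picklable object can be stored; each save bumps the version counter.
    v1 = repo.save_version({"weights": [0.1, 0.2, 0.3]})
    v2 = repo.save_version({"weights": [0.2, 0.3, 0.4]})

    assert repo.fetch_latest() == repo.fetch_version(v2)
    all_versions = repo.fetch_all_versions()  # hash keys are bytes: {b"1": ..., b"2": ...}

--------------------------------------------------------------------------------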
/feature_store/utils/storage.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import pickle
3 |
4 | from feast import FeatureStore
5 | from google.cloud import storage
6 | from typing import Any
7 |
8 |
9 | def get_feature_store(
10 | config_path: str,
11 | bucket_name: str
12 | ) -> FeatureStore:
13 | """
14 | Fetch the Feast Feature Store using the repo config stored
15 | in GCS.
16 |
17 | Returns:
18 | FeatureStore: Feast FeatureStore
19 | """
20 | return FeatureStore(
21 | config=fetch_pkl(
22 | remote_filename=config_path,
23 | bucket_name=bucket_name
24 | )
25 | )
26 |
27 | def get_blob(
28 | remote_filename: str,
29 | bucket_name: str
30 | ):
31 | """
32 | Grab a pointer to the GCS blob in bucket.
33 |
34 | Args:
35 | remote_filename (str): Path to the remote file within the GCS bucket.
36 | bucket_name (str): Name of the GCS bucket.
37 | """
38 | storage_client = storage.Client()
39 | bucket = storage_client.bucket(bucket_name)
40 | blob = bucket.blob(remote_filename)
41 | return blob
42 |
43 |
44 | def upload_file(
45 | local_filename: str,
46 | bucket_name: str,
47 | remote_filename: str
48 | ) -> None:
49 | """
50 | Upload a local file to GCS (Google Cloud Storage) bucket
51 |
52 | Args:
53 | local_filename (str): Path to the local file to upload to GCS.
54 | bucket_name (str): Name of the GCS bucket.
55 | remote_filename (str): Path to the remote file within the GCS bucket.
56 | """
57 | blob = get_blob(remote_filename, bucket_name)
58 | blob.upload_from_filename(local_filename)
59 |
60 | def upload_pkl(
61 | obj: Any,
62 | bucket_name: str,
63 | remote_filename: str,
64 | ) -> None:
65 | """
66 | Upload an object to GCS as a pickle file.
67 |
68 | Args:
69 | obj (Any): Some object.
70 | bucket_name (str): Name of the GCS bucket.
71 | remote_filename (str): Path to the remote file within the GCS bucket.
72 | """
73 | blob = get_blob(remote_filename, bucket_name)
74 | pickle_out = pickle.dumps(obj)
75 | blob.upload_from_string(pickle_out)
76 |
77 | def fetch_pkl(
78 | bucket_name: str,
79 | remote_filename: str
80 | ) -> Any:
81 | """
82 | Fetch a pickled object from GCS.
83 |
84 | Args:
85 | bucket_name (str): Name of the GCS bucket.
86 | remote_filename (str): Path to the remote file within the GCS bucket.
87 |
88 | Returns:
89 | Any: Some object.
90 | """
91 | # Get the blob and download
92 | blob = get_blob(remote_filename, bucket_name)
93 | pickle_in = blob.download_as_string()
94 | obj = pickle.loads(pickle_in)
95 | return obj
96 |
97 |
98 | def download_file_url(
99 | filename: str,
100 | url: str
101 | ):
102 | """
103 | Download a file by iterating over chunks of content and
104 | saving it to a local file.
105 |
106 | Args:
107 | filename (str): Filename to store the resulting data in.
108 | url (str): URL to fetch the file from.
109 | """
110 | with requests.get(url, stream=True) as r:
111 | r.raise_for_status()
112 | with open(filename, 'wb') as f:
113 | for chunk in r.iter_content(chunk_size=8192):
114 | f.write(chunk)
--------------------------------------------------------------------------------
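These helpers are thin wrappers over google.cloud.storage and requests; a
round-trip sketch (bucket, paths, and URL are placeholders):

    from feature_store.utils import storage

    BUCKET = "my-feature-store-bucket"

    # Pickle an arbitrary object up to GCS, then pull it back down.
    storage.upload_pkl(
        obj={"model": "xgboost", "trained_at": "2022-01-01"},
        bucket_name=BUCKET,
        remote_filename="artifacts/metadata.pkl",
    )
    meta = storage.fetch_pkl(
        bucket_name=BUCKET,
        remote_filename="artifacts/metadata.pkl",
    )

    # Stream a remote CSV to a local file in 8 KB chunks.
    storage.download_file_url(
        filename="/tmp/us_state_vaccinations.csv",
        url="https://example.com/us_state_vaccinations.csv",
    )

--------------------------------------------------------------------------------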
/feature_store/utils/triton_model_repo.py:
--------------------------------------------------------------------------------
1 | from .logger import get_logger
2 | from google.cloud import storage
3 |
4 |
5 | logging = get_logger()
6 |
7 | class TritonGCSModelRepo:
8 | repo_name = "models"
9 | versions = []
10 | latest_version = 0
11 | model_name = None
12 |
13 | def __init__(
14 | self,
15 | bucket_name: str,
16 | model_name: str,
17 | model_filename: str
18 | ):
19 | """
20 | TritonGCSModelRepo is a basic storage and versioning layer for ML models using
21 | GCS as the backend.
22 | """
23 | self.bucket_name = bucket_name
24 | self.model_name = model_name
25 | self.model_filename = model_filename
26 | self.storage_client = storage.Client()
27 | self.bucket = self.storage_client.bucket(bucket_name)
28 | self._refresh()
29 |
30 | def _refresh(self):
31 | self.versions = self.list_versions()
32 | self.latest_version = len(self.versions)
33 |
34 | def _version_path(self, version: int) -> str:
35 | return f"{self.repo_name}/{self.model_name}/{version}"
36 |
37 | def create(self, config: str):
38 | path = f"{self.repo_name}/{self.model_name}/config.pbtxt"
39 | blob = self.bucket.blob(path)
40 | if not blob.exists():
41 | logging.info(f"Creating Model Repository for {self.model_name}")
42 | blob.upload_from_string(config)
43 | else:
44 | logging.info(f"Model Repository for {self.model_name} already exists.")
45 |
46 | def list_versions(self):
47 | blobs = [
48 | blob.name for blob in
49 | self.bucket.list_blobs(prefix=f"{self.repo_name}/{self.model_name}")
50 | if ".pbtxt" not in blob.name
51 | ]
52 | return blobs
53 |
54 | def save_version(self, model_path: str, version: int = None) -> int:
55 | """
56 | Persist the model in GCS and increment
57 | the version count.
58 |
59 | Args:
60 | model_path (str): Local path of the model file to upload.
61 | version (int, optional): Version to assign; defaults to latest + 1.
62 | Returns:
63 | int: Model version number.
64 | """
65 | if not version:
66 | version = self.latest_version + 1
67 | logging.info(f"Saving new model version {version}.")
68 | path = f"{self._version_path(version)}/{self.model_filename}"
69 | blob = self.bucket.blob(path)
70 | blob.upload_from_filename(model_path)
71 | logging.info(f"Saved model version {version}.")
72 | self._refresh()
73 | return version
74 |
--------------------------------------------------------------------------------
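A sketch of publishing a model into the GCS-backed layout (the bucket and local
model path are assumptions; the model and file names match the triton/models
directory in this repo). The resulting object layout is the one Triton expects:
models/<model_name>/config.pbtxt plus models/<model_name>/<version>/<model_filename>.

    from feature_store.utils import TritonGCSModelRepo

    repo = TritonGCSModelRepo(
        bucket_name="my-feature-store-bucket",
        model_name="predict-vaccine-counts",
        model_filename="xgboost.json",
    )

    # Upload the Triton config once, then publish a new model version.
    with open("triton/models/predict-vaccine-counts/config.pbtxt") as f:
        repo.create(config=f.read())

    version = repo.save_version(model_path="/tmp/xgboost.json")

--------------------------------------------------------------------------------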
/img/RedisFeastTriton.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/img/RedisFeastTriton.png
--------------------------------------------------------------------------------
/img/redis-feast-gcp-architecture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/img/redis-feast-gcp-architecture.png
--------------------------------------------------------------------------------
/img/redis.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/img/redis.gif
--------------------------------------------------------------------------------
/img/triton-inference-server.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/img/triton-inference-server.png
--------------------------------------------------------------------------------
/img/triton-vertex.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/redis-applied-ai/redis-feast-gcp/24d402a01dcae3c998bd2be45348929d2423dc6a/img/triton-vertex.png
--------------------------------------------------------------------------------
/re-gcp-mp/main.tf:
--------------------------------------------------------------------------------
1 | terraform {
2 | required_providers {
3 | rediscloud = {
4 | source = "RedisLabs/rediscloud"
5 | }
6 | }
7 | }
8 |
9 | locals {
10 | envs = { for tuple in regexall("(.*)=(.*)", file("env")) : tuple[0] => tuple[1] }
11 | }
12 |
13 | provider "rediscloud" {
14 | api_key = local.envs["REDISCLOUD_ACCESS_KEY"]
15 | secret_key = local.envs["REDISCLOUD_SECRET_KEY"]
16 | }
17 |
18 | resource "rediscloud_subscription" "mc-example" {
19 | name = local.envs["REDIS_SUBSCRIPTION_NAME"]
20 | memory_storage = "ram"
21 | payment_method = "marketplace"
22 |
23 | cloud_provider {
24 | provider = "GCP"
25 | cloud_account_id = 1
26 |
27 | region {
28 | region = local.envs["GCP_REGION"]
29 | networking_deployment_cidr = local.envs["REDIS_SUBSCRIPTION_CIDR"]
30 | preferred_availability_zones = []
31 | }
32 | }
33 |
34 | creation_plan {
35 | average_item_size_in_bytes = 1
36 | memory_limit_in_gb = 1
37 | quantity = 1
38 | replication = false
39 | support_oss_cluster_api = false
40 | throughput_measurement_by = "operations-per-second"
41 | throughput_measurement_value = 25000
42 | modules = []
43 | }
44 | }
45 |
46 | resource "rediscloud_subscription_database" "mc-example" {
47 | subscription_id = rediscloud_subscription.mc-example.id
48 | name = "online-store"
49 | protocol = "redis"
50 | memory_limit_in_gb = 1
51 | replication = true
52 | data_persistence = "aof-every-1-second"
53 | throughput_measurement_by = "operations-per-second"
54 | throughput_measurement_value = 25000
55 | average_item_size_in_bytes = 0
56 | depends_on = [rediscloud_subscription.mc-example]
57 | }
58 |
59 |
--------------------------------------------------------------------------------
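The locals block above parses a plain `env` file of KEY=VALUE lines via
regexall. A sketch of its expected shape (the keys are the ones looked up
above; the values are placeholders):

    REDISCLOUD_ACCESS_KEY=<your-api-key>
    REDISCLOUD_SECRET_KEY=<your-secret-key>
    REDIS_SUBSCRIPTION_NAME=redis-feast-gcp
    GCP_REGION=us-east1
    REDIS_SUBSCRIPTION_CIDR=10.0.0.0/24

--------------------------------------------------------------------------------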
/re-gcp-mp/outputs.tf:
--------------------------------------------------------------------------------
1 | output "db_public_endpoint" {
2 | value = rediscloud_subscription_database.mc-example.public_endpoint
3 | description = "The Redis DB endpoint"
4 | }
5 |
6 | output "db_password" {
7 | value = rediscloud_subscription_database.mc-example.password
8 | sensitive = true
9 | description = "The Redis DB Password"
10 | }
11 |
--------------------------------------------------------------------------------
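Because `db_password` is marked sensitive, a plain `terraform output` masks its
value; `terraform output -raw db_password` prints the bare string when the
password is needed downstream (for example, to configure the Redis connection
for the feature store).

--------------------------------------------------------------------------------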
/requirements.txt:
--------------------------------------------------------------------------------
1 | ./
--------------------------------------------------------------------------------
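The lone `./` entry points pip at this repository itself, so
`pip install -r requirements.txt` installs the local feature_store package
together with the pinned dependencies declared in setup.cfg below.

--------------------------------------------------------------------------------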
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = feature_store
3 | version = 0.0.1
4 | description = End-to-end demo of Feast deployed on GCP with Redis.
5 | long_description = file: README.md
6 | long_description_content_type = text/markdown
7 |
8 | url = https://github.com/RedisVentures/gcp-feast-demo
9 | project_urls =
10 | Source = https://github.com/RedisVentures/gcp-feast-demo
11 |
12 | author = Redis Ventures
13 | author_email = tyler.hutcherson@redis.com
14 | license = MIT
15 | keywords = redis, feast, ai, machine learning, feature store, gcp
16 | classifiers =
17 | Programming Language :: Python :: 3.7
18 | Programming Language :: Python :: 3.8
19 | Programming Language :: Python :: 3.9
20 |
21 | [options]
22 | packages = find:
23 | setup_requires =
24 | setuptools>=39.2
25 | include_package_data = True
26 | python_requires = >=3.7
27 | install_requires =
28 | google-cloud-bigquery==2.34.4
29 | google-cloud-bigquery-storage==2.14.1
30 | feast[gcp, redis]==0.22.0
31 | requests==2.28.1
32 | ipython==7.34.0
33 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup
2 |
3 | setup()
--------------------------------------------------------------------------------