├── .gitignore
├── 1_max_margin_contrastive_loss_vs_baseline.ipynb
├── 2_comparing_contrastive_losses.ipynb
├── Main_contrast_loss-regression.ipynb
├── Plot_learning_curves.ipynb
├── README.ipynb
├── README.md
├── lars_optimizer.py
├── losses.py
├── main.py
├── main_ce_baseline.py
├── model.py
├── requirements.txt
├── supcontrast.py
└── test_supcontrast_loss.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .ipynb_checkpoints
3 | refs/
4 | venv/
5 | logs/
6 | runs/
7 | figs/
8 | img/
9 | .DS_Store
10 |
--------------------------------------------------------------------------------
/Plot_learning_curves.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 11,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd\n",
10 | "\n",
11 | "import matplotlib.pyplot as plt\n",
12 | "%matplotlib inline\n",
13 | "import seaborn as sns\n",
14 | "sns.set_context('talk')"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 4,
20 | "metadata": {},
21 | "outputs": [
22 | {
23 | "name": "stdout",
24 | "output_type": "stream",
25 | "text": [
26 | "run-baseline_fashion_mnist_20200427-210140_test-tag-accuracy.csv\r\n",
27 | "run-baseline_mnist_20200427-205525_test-tag-accuracy.csv\r\n",
28 | "run-contrast_loss_model_fashion_mnist_20200427-210420_test-tag-accuracy.csv\r\n",
29 | "run-contrast_loss_model_mnist_20200427-205809_test-tag-accuracy.csv\r\n"
30 | ]
31 | }
32 | ],
33 | "source": [
34 | "!ls runs/"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 7,
40 | "metadata": {},
41 | "outputs": [
42 | {
43 | "data": {
44 | "text/html": [
45 | "
\n",
46 | "\n",
59 | "
\n",
60 | " \n",
61 | " \n",
62 | " | \n",
63 | " Wall time | \n",
64 | " Step | \n",
65 | " Value | \n",
66 | " model | \n",
67 | "
\n",
68 | " \n",
69 | " \n",
70 | " \n",
71 | " 0 | \n",
72 | " 1.588035e+09 | \n",
73 | " 0 | \n",
74 | " 0.9456 | \n",
75 | " MLP | \n",
76 | "
\n",
77 | " \n",
78 | " 1 | \n",
79 | " 1.588035e+09 | \n",
80 | " 1 | \n",
81 | " 0.9506 | \n",
82 | " MLP | \n",
83 | "
\n",
84 | " \n",
85 | " 2 | \n",
86 | " 1.588035e+09 | \n",
87 | " 2 | \n",
88 | " 0.9616 | \n",
89 | " MLP | \n",
90 | "
\n",
91 | " \n",
92 | " 3 | \n",
93 | " 1.588035e+09 | \n",
94 | " 3 | \n",
95 | " 0.9685 | \n",
96 | " MLP | \n",
97 | "
\n",
98 | " \n",
99 | " 4 | \n",
100 | " 1.588035e+09 | \n",
101 | " 4 | \n",
102 | " 0.9634 | \n",
103 | " MLP | \n",
104 | "
\n",
105 | " \n",
106 | "
\n",
107 | "
"
108 | ],
109 | "text/plain": [
110 | " Wall time Step Value model\n",
111 | "0 1.588035e+09 0 0.9456 MLP\n",
112 | "1 1.588035e+09 1 0.9506 MLP\n",
113 | "2 1.588035e+09 2 0.9616 MLP\n",
114 | "3 1.588035e+09 3 0.9685 MLP\n",
115 | "4 1.588035e+09 4 0.9634 MLP"
116 | ]
117 | },
118 | "execution_count": 7,
119 | "metadata": {},
120 | "output_type": "execute_result"
121 | }
122 | ],
123 | "source": [
124 | "df1 = pd.read_csv('runs/run-baseline_mnist_20200427-205525_test-tag-accuracy.csv')\n",
125 | "df1.head()"
126 | ]
127 | },
128 | {
129 | "cell_type": "code",
130 | "execution_count": 8,
131 | "metadata": {},
132 | "outputs": [],
133 | "source": [
134 | "df2 = pd.read_csv('runs/run-contrast_loss_model_mnist_20200427-205809_test-tag-accuracy.csv')"
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": 14,
140 | "metadata": {},
141 | "outputs": [
142 | {
143 | "data": {
144 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaQAAAEwCAYAAAD4uwVgAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOzdd3zURfrA8c+T3khCCL1DEBBQBEFBEDwUBbEc2LCAWM5yeujpz4Z6iHqW82wneirWEzh7AUTFUxQLRVBAegk9lPQEEtLm98d8N9kkm7LJhk153q/Xvnb3W2cp++zMPDMjxhiUUkopfwvwdwGUUkop0ICklFKqntCApJRSql7QgKSUUqpe0ICklFKqXtCApJRSql7QgKRUEyAiV4uIEZGR/i6LUhXRgKQaHREZ6Xz5GhF5oYJjWolInnPM4jL7Fjvbt4tIiIdzpzv7T/ZwzzvLHBsjIveLyG8iki4i2SKSKCKfiMh1zjFXu5W3qsdi/EBE+jufu4s/7l8Z58/vNn+XQ9VekL8LoFQdygUuF5E7jDFHy+y7ChCgoJLzuwI3Ac/V5OYiEg2sALoBHwCvA3nO+2HAVGAW8L1THnfTgF4eth+oSVl8oD/wN2AxsMNPZajI1UAX4Fn/FkPVlgYk1Zh9DEwELgDeK7NvCvA5MKqCc3OAROB+EXndGJNVg/tfD/QAbjPGlAtqItIGwBizHdheZt91QC9jzDs1uK9SDZI22anGbBWwBht8ionIYKAP8EYl5xYB9wLxwF01vH8P5/l/nnYaY/bX8LqVEpHrRWSjiBwVka1Oc5Z4OK6diPzTaU5ME5FcEVkvIneLSKDbcdMp+bP61q358E1nfzMReURElolIstt9HxeRiDL3DBCR20RkjYhkiUimiGwSkddEJLjMsSeLyMdu19wkItNEJMjtmB3ACKBzmabNkT75w1THlNaQVGP3OvC0iLQ3xux1tl0DHATmV3aiMeYzEfkBuF1EZtYggGxznqeIyN3GmMqaB33CCT7PAKuB+4AI4E7s5y3rBGA8tia5DQgGzgEexzYr3uAc9xHQFvgT8Hdgg7Pd9fnaA9cBHwJzsM2gI7CB/CTgbLd7TgNmAPOAfwOF2KbR84FQIN/5HOc6990K/BNIBYY45/YHLnaudxvwGPaHw+1u99mAaniMMfrQR6N6ACMBg/0ibgEcBe5z9oUD6cBTzvtsYHGZ8xcD2c7roc61/u22f7qz7WRP93Tb1hzY5Ww/gO1HuhvbfxRQxWdYbP97evW5Y4HDwHogwm17B+dzGmCk2/ZwQDxc5z/YQNHWbdvVZc932xcCBHvY/rBzzmC3bauA9VV8jjBgP7ZvLajMvts9fI7FwA5//7vTR+0f2mSnGjVjTArwGfYLFWyNIAZbc6rO+T8BnwDXishxXt47DRgIPAFkABOwtY8lwDYRGe3N9aphNLZGNNMYc8StHHuA2R7Kl2Ocb3QRCRGROBGJB77ENuefXPYcT4wxecYYV80mSESaO9f52jnkFLfDM4D2IjKskkueBbTGNhPGiki864Ht93N9VtXIaEBSTcEbQA/nS/AaYLkxZr0X59+L7YN5zNsbG2MOGWPuMcYch21WOg9bA+kMfCwiCd5esxLdnOeNHvaV+7xO8LhfRDZjMxJTgENO+cDW8KpFRG4WkTXY2miqc53FHq5zn3OvJSKyV0Rmi8jlZdLrezvPrzvXcX+4Plvr6pZNNRzah6Sagi+Bvdi05TOwqdzVZozZKCJvANeJyClVnlDxdVKw/VbzRWQ39sv5MuCRml6zlp4GbgXeBR7F9jPlAwOwtbpq/WAVkb9i+3m+Ap4H9mHT29sDb7pfxxjzs4h0x/YrneE8LsdmMw4zxqRSkoDxf8BvFdx2X3U/pGo4NCCpRs8YUygib2NrOjnA3Bpc5m/YL84nKPnlXxtLnef2PriWiyt1vBflM/uO93D8VcD3xpjL
3DdWUGurbCXPq7Bjk8YYY4rcrnOOp4ONMdnYBIgPneNuBmYC1wL/ALY4hx42xnzt6RpelE01INpkp5qKfwMPATcaYzK9PdkYsw87QHYEMLY654jIEBGJrWD3hc6zN02HVVmEDbh/dk+3FpEO2GBaViFl0sFFJJLS2Wou2c5zXAXXMe7XclKz7yl7oNMPVNaqMtf+Eltbu0dEyt1PRMJFpFmZsjUXkXKp7aph0RqSahKMMbuw2XG18QQ29XlQNY+/ApvyvQBYju2jaYENaGdgg1G1kiuqwxiTJiIPAE8BPzm1wgjgRmyt46Qyp3wA3CAi72ITEFpj+9hSPFx+BXZs1jQRaY7N5ks0xixzrvMYsFBEPgKisQEw38N1NojIUmAZttnNlU6eB/zX+RyHRWQSNplkk4i8jk3/jsXW/sYDf6SkproUGAe8ICI/YQPkN8YYT6nuqh7TgKRUNRljMkTkUWzfS3X8G5tifgbwV2xSw1Hsl+tDwNPGmMM+LuM/RSTbud9jwG5sgMqgfPD7K5AFXIKdzWI38Ao2+JRqKjPG7BKRa7Bp6y9hxyy9hQ0s/8DWjq7F1iL3Y/ul3qB8DfCf2ID8F2y240FsQHnMGLPa7X5fisggbC3rSqAlkIYd+/Q0dsCzyzPYhI6LsME3APtnrgGpgREn61MppZTyK+1DUkopVS9oQFJKKVUvaEBSSilVL2hAUkopVS9oll0NiEgBNph7PZ5FKaWasGigyBjjMfZoll0NiEgRIDExMf4uilJKNRgZGRlgZ7H32DqnNaSayYyJiYlJT0/3dzmUUqrBiI2NJSMjo8KWJe1DUkopVS9oQFJKKVUvaEBSSilVL2hAUkopVS9oQFJKKVUvaEBSSilVL2jat1JKNXXGQMFRKMixz/k5UJBrH/m5nrcPnAI+XhNRA5JSqn4pLHD7MvTwxZifW2b/0fLbK/sizc+FgEAICrOPYNdzOASFQlB4ybaKthcVer5vReVx315wFCSggnuF2W1Boc59K9huijx/vurcv6I/F2/1vxKCQnz6V68BSamm5GgWpO+yX0Y1ZqAwvwZfghUFmTIBpKjAZx9X+YgElg+QhXkakJRSlTAGsg9AaiKk7YC0ROe183wk2d8lrL3AUM81Ca9rOJXUwjw2W7nXsAKqV5PxtD0oDDA1COhurwMCKv98lW13L0+54yvYHxh8TP5qNSAp1dAU5EHG7tKBxhV80nZA/pFjUw4JqMaXmjdfmNX4gg8MtV/GqlHSgKSUPxQVwdFMyEmD3HTISXd7nVbmvfNw7cvLrt49olpD8y7QvCvEdbXPzbvYR0hk7cofGGJ/Nfu4U1s1bX4NSCISCswArgKaA6uBacaY/1Xj3EnAncBxQBrwPnCfMSa7zHFtnXucBbQC9gIfAo8bY3R2VFV9xkDqdtjzC+Rm1KLPJMf25Zii2pUnIAhiO7kFnC6lX9c26Ch1jPm7hvQmMAF4FtgKXA0sFJERxpifKzpJRKY65ywC/g10AKYCfUTkTOOsqSEikcBPQBQwE9gDnATcAZwGDK+TT6Uah6JC2L8Wdv1sHzt/hsMH6+ZeEgBhsRDeHMKd57DYMq+dfWGxENMeojtAoL//CyvlO3771ywig4HLgNuNMc86294GfgeeAE6v4LxQ4CHgG+Bst+DzEzAPuAD4xDl8HNAFGGeMWeB2jRzgDhHpaoxJ9P2nUw1Sfi7sXQm7frLBZ/dyyMsqf1xYDDRr630HsXv/SFh06UAT2kybv1ST58+fVxcB+cAs1wZjTK6IvAY8KiJtjTFJHs7rA8QA7xq31QWNMfNFJBu4lJKAFO08Hyhzjf3Oc07tP4ZqsHLSYfeyktrPvlU2lbWs6PbQaQh0HgKdhkLLXtqxrlQd8GdAOgnYWLbPB1gOCNAf8BSQQp1nT8EkBxjg9n4JUAQ8JyJ3ULrJ7k1jzP7yl1CNhjG2rydrP2QllTxn7La1nwPrAA8rJsf3LAk+nYfYfhqlVJ3zZ0Bqi00w
KMsVhNpVcN4W7LfIacB/XBtFpCfQEgh3bTPGbBSRPwFPAe59Uq8AN1VUMBGpKtlB1y73t7zD5QON+3PmPvtcUEUlWAKh7YnQeaitBXUaApEtjs1nUEqV4s+AFA54Gi6e67a/HGNMsoi8B1wrIpuAT4H2wL+wTYBlz9sDLAUWAruwiQx/AVKBe2v5GVRdKSqEjD3lx9mkJkL6Tlvz8YYE2DToZm1s/0+bfjb4dBgEoVF18hGUUt7xZ0DKoaT5zV2Y2/6K3IANPE87D4B3gG3AKNdBInIaMB8YZIz5zdn8iYhkAn8TkTeNMZvKXtwYE1tZwZ0alNaSaivvMKTtLD+bQNoOO71NUX71rhMRb4NMszYlAadZG4huV/I+sqWdv0wpVW/5MyAlYZvtynJt21fRicaYDOACEemEzaLbaYzZ6WTabXE79AZgn1swcvkMmA4MAcoFJFUHCvPt+J3t38KOHyBlq53ipkoCMR3suBr3wZ0xHWygiWrt8/m0lFL+4c+A9BswVUSiyiQ2nOI8r67qAsaYXdhmOEQkFhgIPON2SGvA089i18RMOoijrrgGkW77xj4Sl3hOoQabEl024Lhex3bSgKNUE+HPL+QPsDMtXIcd5OoaYzQF+NEYs8/Z1gmIMMZsrOJ6j2Ez6l5227YZGC0iw40xS9y2T3Sef631p1AljqRC4new7Vv7yNhV/phWx0O3M6BN35JZBaJa6xgcpZR3AUlEIoCe2Cl4DHAI2GSM8Xo2R2PMMhF5H3jSmd5nGzAZ6IydscHlbWAENhXcVY5pQG9gGVAAXAiMBm4oM9D1BWyAmy8iL2BrUyOwAekLY8xKb8ut3BTkwZ4VJbWgfb9SLo06sqUNQN3/AN1GQrSnVlqllKpGQBKR5tgAcTG2SazsOQUishJ4D3jLGJPmxf0nAQ87z82BNcBYY8yPVZy3BjsjwwXO+1XAGGPMF+4HGWM2ichA4BHsfHltsH1TT2L7kJS30nbA5i9LmuHyD5feHxhqU6i7O0GoVR8dRKqUqhZxm+yg9A6RGOAB4GZs5tsm7FiebUAKtsYSByQAp2JrTrnYOeMecRIPGiURSY+JiYlJT28Cc7MWFdmaz6bP7ePg+vLHtO5rA1C3M2wwCvaYsa+UauJiY2PJyMjIqCiTubIa0jbsOKHHgHeqmvNNRLphayF/wjaTxdesyMrv8nMg8XsnCH0B2WUmtIiIh4QzS5rhmrX2RymVUo1MZQFpBvCyMaZaax0bY7YDD4nI48CNviicOoYOJ9umuE2f2+a4sou8xfeEnmOg17nQfqCO6VFK+VyFAckY83xNLugEsOdqXCJ17CRvcWpBC+0ko+7r80iAncmg5xjoORZadPdfOZVSTUK1s+xE5EHgI2PM7xXs7wNMMMbM8FXhVB04kgo/PgcbF0DKltL7giMhYZQNQD1G65xuSjUy+YVFfPrbPg5k5hITHkxMeDCxEcElr8NDaBYWRECAf4ZheJP2PR27iJ7HgAT0Bf6GbepT9VHqdnhngn12iWpT0hTXZbhdu0cp1ehsOZDFHe+vZs2eyvPNRCA6LLhUwIoODyY2vPS2C/q3JyzYt033vhwYG4YdE6Tqoz0rYc4lcCTZLhA35M/Qaxy07a9p2Uo1YoVFhtd/SOQfX20ir6AIEejbLobDRwtIz8knIyefwqKSbGtjIMPZXplxJ1S0IEPNVRqQRCQacE/Pa+HMnFBWHHAFsNuHZVO+sukLeP9quxRDeHOY+C50OqXK05RSDdvOlMP83/trWL4jFYCOceH846ITObVbSXO8MYbsowVk5OSTfiSfzJz84kDl2mZf5xVvy84tICLE94lNVdWQbgcedJUbO8XPsxUcK8BdPiqX8pVfXocFd9iEhdjOcOWHEN/D36VSStUhYwyzl+3i759v4EheIQCXn9KJ+8b2Jiq09Ne+iNAsLJhmYcF0aO6P0paoKiAtdp4FG5g+xs6S4M4A2cBSY8xPPi2dqjlj4JtHYMlT9n3b
/nDF+xDVyr/lUkrVqX3pOdz94RqWbEkGoE10GI9P6MfInvX//36lAckY8x3wHYCIdAb+bYxZdiwKpmqhIA/m/QVWz7XvE86Ci9/UheiUasSMMXy0ai/T560jK9d2548/qT1/O68PMRHBVZxdP1Q7qcEYM6UuC6J8JDcT3ptk1x0CGDAJzn0GAnWlDXXsGWPYk5bD6j3prN2TwZo9GaxPyiQ4UGgdHeY8QmnVrOR16+gwWkWH0iIylEA/pR83NIeyjnLfx2tZtN6uMdYiMoRH/9iPc/q28XPJvOPtbN/NsP1Ko7FrDU0yxvwsIvHYOe/eq8YyEaquZCbB7IvhwFr7fuR9MOIuXdpBHRPGGJIyclmzJ4O1e9Od5wzSj3jO1krOzmPdvswKrxcYILSMCi0OUsXBKzqMkce1pFW0DlEA+HxtEtM+Xkua8+d8Tp82PPrHvrSI8rQgd/3mzcDYlsAPQDfseKRu2GXEMcYki8hkbEbeX+ugnKoqBzfaMUaZe0AC4fzn4aQr/V0q1YgdzMotrvWs3Wufk7M9zzQWERJI33Yx9OsQQ9/20QSIcCAzlwOZR9mfmctB5/WBzFyOFtgZQwqLDPszc9mfmQuUHjsTEx7Mq5NOZnDXuLr+mPVW+pE8Hvx0HZ+ttotrR4cFMeOCvlzQvx3SQH+EelNDegS7fMMp2HWFDpbZ/ykwykflUt7Y8SP8dyLkZkBIFFzylp38VCkf+3VXGrN+SGTVzjSSMnI9HhMSFECfdtGc0D6Gfh1iOaFDDN1bRlWr+c0YQ2ZOAQeyctmfkcuBzFwOZh11gpcNWlsPZpORk8+Vry3jmUv6c+4JTW+NrW82HuCeD9dyMMv+ABhxXEuemHACbWIadq3Rm4A0DnjRGLNKRDzNKbOd0gvrqWPh94/g4xugMM+uvHr5e9Cuv79LpRqZ3/dm8PSizXyzsfTv0OBAoVebaPp1iHECUAzHtW5GcGDNBluLCDERwcREBHNc62Yej9mdeoTJbyxn+6HD3DJ3FUkZvblueLca3a8hMcawalc6s5fu5KNf9wIQGRLI/eOO57JBHRtsrcidNwEpHttUV5Ei7GwN6lj56QX4app93aKHHWPUvLN/y6QalQ1JmTyzaDNfOZ3lAH3aRXPZoI6c0CGWnm2a+Xz6mKp0jIvgwxuHct3bv7ByZxqPLNjAvvRc7j+3t9/mYKsrxhjW7s1g/pokFqxJYm96TvG+U7rG8dTFJ9IxLsKPJfQtbwLSfqCyKZ9PwjblqbpWVAhfToNlL9n3HU+FiXMhoum2pyvf2nIgi2e/3sKCtUnF23q1acZtZx7H2X1a+/3XePPIEGZfdwq3/fc3vli3n9d/TORAZi7/vOTEYx4gfc0Yw4akLOav2ceCtUnsTCm9FEyvNs244tTOXDG4U6MLwN4EpM+Ba0XkX0Ce+w4ROQW7DHlFszgoX8nPhY+uhw2f2fe9z4fxr+qkqMonth/K5vn/beHT1ftwLSad0CqK287swdi+bevVF2BYcCAzrxjAw/PX8+ZPO1iwNomDWbm8OulkYiNC/F08r205kMW8NUnMX7OP7YcOl9rXvWUk553YjnEntCWhleemzMagwiXMyx0o0gZYCQQCnwHXAu8AIcB4YB8w0BiTWjdFrT/8toS5MfDBNbDuI/v+lJvg7Ed1sTxVa7tSjvD8N1v4aNUeXPNsdo2PZOqoHpx3Yrt6PR7IGMOsJYk8+vkGwAbQN6cMokNz3zdlGWPYfCCb7KMFxTNfx4QHExJUsz6zxOTDzF+9j/lrkth0IKvUvs4tIhh3QlvGndCOXm2a+b1W6gtVLWFe7YAEICIdgReAcwHX34DB1p5uMsbsqWV5GwS/BaQfn4dFD9jXox6E4Xcc2/urRmdP2hFmfruV93/ZQ4ETiTrGhfOXP/Tgjye1J6iGyQn+8Nnqfdz53mryCoto2SyUN64eRN/2MT65dn5hEQt/389rPySyenf5//cRIYHEhttlGtzXGIqNCCEm
vPTyDZGhQSxPTGX+mn3lxmG1jw0vDkJ920c3iiDkzqcBqfgkOwt4T+wcd1ubQq3InV8C0rZv4Z3xdpLUEyfChS/pgFdVY/szcpn57Vb+u2IX+YX2O6BdTBi3jurBRQM71DhLzt+Wbk/hT2//QmZuAZEhgbx05UBOP65lja+XcSSfuSt28dZPOypMc6+tNtFhjO3XlnEntuWkjrGNLgi5q5OA1NQd84CUthNeGQk5qdD2RLjmSwgOPzb3Vo3KwaxcXlq8jdnLdpHnDEBtHR3KLWckcMmgjoQGNfzm380Hsrj69eXsy8glKEB4bHw/Lj65o1fX2JF8mDd+TOT9lXuKZ8sWgdHHt+baYd04oUNMqaUZ0o+ULM1QftmG0o/CIkN8VChj+7XhvBPbMbBT83rVN1eXfBaQRCQBSDDGfOG27RTgfux6SG8ZY17xQZnrvWMakPJz4LXRsH8NRLSAPy2GWE9LUilVsZTso7z8/Xbe/nkHufk2EMVHhXLzyO5cfkqnBp+ZVtaBzFwmv76cjfttv8wdZx3HLX9IqLT2YYxh6fZUXvshkf9tPFCc1BEZEsglgzoyZWhXOrWoXb+UMYbDeYVEBAc2mSDkzpcB6UMgzhhzhvM+HtgMRAE5zvMEY8wnPil5PXbMApIxdtDrmndBAuCqT6DbiLq9p2pU0o/k8cr323nzpx3Fv/SbRwRz44juXDWkMxEhjXfS3azcfG56ZxU/bLXLMEwc3JGHL+hbrl8sr6CIeav38fqPiaX6dNrHhjPltC5cMqgj0WENY7bs+q6qgOTNv8aTAfca0EQgGuiPDUyLgalAow9Ix8yyl20wAjjrYQ1GqtoycvJ57YdEXv8hkeyjdimCmPBg/nR6NyYP7VJukbbGqFlYMK9fPYh7PlzDR7/uZe7y3ezPyOWFywcQGRpE6uE85izbyVs/7+RQVskcfAM6xXLd8G6MPr51g0rqaAy8+VfZEpva7XIO8KMx5ncAEfkvMM2HZWvadvwAX95nX/edAEP+7N/yqAYh+2gBb/yQyKtLtpPprInTLDSI64Z3Y8qwLk3ul35IUAD/vORE2saGMfPbbXy76RATX11Kn3YxfLRqT/FEroEBwpi+bbh2WFdO6uTnZVObMG8C0mHsbN6ISCAwDHjebX8OtsakaitjD7w3GUwhtO4L5/9LM+pUpY7kFfDWTzt55fttxcsQRIYEcs2wrlw3rFuDWaCtLogI/3d2L9rGhPPgp7+zxpmhHKBZWBCXD+7EpKFdaB+riUL+5k1AWgdMEpG3gYuxfUaL3PZ3Bg75sGxNU34uvHsVHEmGsFi49B0IifR3qVQ9lZtfyDtLd/Lv77aRnG0nUAkPDmTy0C786fRuxEU2vBkL6sqVp3amTXQYt7/3Gy0iQ5hyWlcuGtiByCbQfNlQeJPUcC52iQnXT/VfgUHGuYCILAf2GWMurIuC1id1ltRgDHx2C/z6DiBw5Qe6jITy6GhBIf9dvpuZ324tXoIgNCiAK0/tzI0jutOyWcNbnO1YKSgsIjBAGvV4n/rKZ0kNxpgFIvIH4ALsalkvuAWjFsAe4G0flLnp+uV1JxgBox7QYKTK2ZuewzcbDvDi4m3FAzVDAgOYOLgjN5+RQGtdRbVKmqhQf3lVVzXGfA9872F7CnY+O1VTu5bBwrvt697nwzBdeLepM8aw9WA2y3eksiIxlRU70kotPxAUIFwyqCO3nJFAO+3/UI2ANp7WB5lJ8N5VUJQPLXvBhS9qEkMTlF9YxLp9maxITGX5jlR+2ZFanKDgLjIkkHEntOOWPyQ0qrVwlNKA5G8FefD+ZMg+AKHRcOlsCG2808urEkfyCvh1VzrLE1NZsSOVX3elk5NfWO64FpEhDOoSx6CucQzuEkfvts202Uk1ShqQ/O2Le2D3Mvt6/KsQn+Df8qg6dbSgkJcW2/Ew6/ZmFM+w7a5TXASDusQxuGtzBnWJo2t8pHbAqyZBA5I/rfoP/PKafT3yXuh5jn/Lo+rc
U19u4tUlicXvRaBn62YM7hpna0Fd4mgTo4kJqmnSgOQve1bCAidx4bgxcPpd/i2PqnN703N466edAIw7oS3jB7RnYKe4Jj1oVSl31Q5IInI6sMEY43HwqzPZ6vFOJp6qTPZBePdKKMyDFgkw/mUI0D6Bxu7ZRZvJKyyiVbNQ/nHRiYSHNK4ZtpWqLW++Bb8Fzqpk/yjnGFWZwnx4/2rI2gchUTaJIcw3q1qqqh3KOspf5v7Kt5sOHtP7bjmQxYer7ILKU8/socFIKQ+8CUhV9aoGAkW1KEvT8NUDsPNH+/rCl6BVL/+Wp4l57YdEPlu9j1tmr2J/Ha0A6smTX26iyEDX+Egu8XKxOKWaCm/biSqbZ2gokFyLsjR+xkCQM6XL8Dvg+PP9W54m6OftKQAczivk4fnrj8k9V+5MZdH6AwDcObpng10eXKm6VmkfkohMxa5x5PKsiDzq4dDm2Jm+X/dh2RofETjrIUgYBZ1P83dpmpzsowX8vjej+P2CtUlcsvkQI45rWWf3NMbwxMJNAJzQIYax/drU2b2Uauiq+qmWDux0HgApbu9djx3AEuAB4C/e3FxEQkXkCRHZJyI5IrJUREZV89xJIrJGRHJFJElEnheRqDLHTBcRU8nDP1Gh6+kQoH0Ix9ovO1IpLDIECJzUyc7t+OCnv5PrYTCqr3y76SDLd6QCcPc5vXQ8kVKVqLSGZIx5C3gLQEQSgXuMMZ/58P5vAhOAZ4GtwNXAQhEZYYz5uaKTnJrbs9jlL/4NdMDW5PqIyJmmZArzj5zrlvV37PIZK3zzMVRDsHS7DQz92sfwj4tO4Jxnl7Az5QgvLt7GX886zuf3KywyPPmFrR0N7xHPaQnxPr+HUo2JN7N9d/XljUVkMHAZcLsx5lln29vA78ATwOkVnBcKPAR8A5ztNuP4T8A87GzknzhlXgOsKXN+R2wAm2WMyfPlZ1L121Kn/+jUbi1IaNWM60/vxkuLt/Hvxdu4sH87urWMquIK3vn0t71s3J8F2NqRUqpy1e5dFZEWItK7zLauIvIvEZktImd7ee+LgHxglmuDMSYXeA0YJiJtKzivDxADvOtWE8IYMx/IBi6t4r4TsRmDs70sr2rAso8WsNbpPzq1WwsA/vKHHqjP8OIAACAASURBVLSPDSevsIgHP11HddcGq46jBYX886vNgB0E27e9pvYrVRVv0n2ew2m+A3D6a5YAf8Z+yS9wBs9W10nARmNMdpnty7EBo38F57lWHsvxsC8HGFDFfa8AduNhGQ3VeLn3H53cpTkA4SGBPHR+HwB+2JrMvDVJPrvf7KW72JueQ1CAcMfonj67rlKNmTcBaQjwudv7S4F2wFjneQPgzfw3bQFP3wCube0qOG8LNv28VEKCiPQEWlZyHiLSBzgBmGsq+TksIumVPbA1NNWALEu0/Ud928fQLKxkqp4zj2/NWce3BuDh+evJzC2/3IO3snLzeeFb23V56aCOdI3XJeiVqg5vAlJrbM3CZQzwizHmC2PMfmyCwkleXC8cOOphe67b/nKMMcnAe8C1InK7iHQTkeHAu9gmwMpWKrvCedbmuibGvf+orOnn9yE8OJBDWUd52mlmq41XlySSejiP8OBApo7qUevrKdVUeBOQyn7ZjwC+c3ufDpT/316xHEqa39yFue2vyA3Y2trTwDZs89tabFJD2SZAAMTm214O/O4kO1TIGBNb2QO7hLtqIA4fLWDNHlf/UVy5/e1jw5l6pg0cb/+8o9RYJW8dyjrKrCXbAbhmWBda6ZLiSlWbNwFpMzBBrPOBOOB/bvs7AqleXC8J22xXlmvbvopONMZkGGMuADpjA2MXY8xVzrlbKjjtNOd4rR01Mb/sTHPrPyofkACuHdaV41pHUWRg2ie/U+hhnaLqeOGbLRzJKyQ2IpgbRnSvTbGVanK8CUgzsV/+acAHwHZKB6Th2FpKdf0G9Co7mBU4xXleXdUFjDG7jDHfG2N2ikgsMLBMmdxdge17
muNFGVUj4Gqu69s+hugwz0s9BAcG8MiF/QBYvTuduct3eX2fXSlHmOOc9+eRCRXeSynlWbUDkjHmbWAy9gv/HWCMMSYfbEo4EIvt26muD4Bg4DrXBmeM0RTgR2PMPmdbJxGpziCOx7CTu75cdoeIBAMXAz8YY7z/plENmisgndLVc+3IZXDXOC4a2AGAJ7/YyKEsT12cFXt60SbyCw3tYsK4akjnmhVWqSbMqwX6jDH/Af7jYXsKtnbizbWWicj7wJPOmKNt2IDXGTtjg8vb2JpZ8ZwrIjIN6A0sAwqAC4HRwA3GmETKOxvbv6XNdU3M4aMFrN1TevxRZe4d04tF6w+QkZPPY59v4OlLKxp9UNr6fZl8utq2Mt921nGEBevUUEp5q0bTDotIgoicJiK1TX+ehB3fNAl4HltjGmuM+bGK89YAx2GnAHoSmwgxxhjzSgXHX4FNyni/luVt8goKi1i4NomMnNqnRx8LK3emUVBF/5G7FlGh3DPGVsg/+nUvP29LqdZ9nvxyI8ZAj1ZRTBjQoVZlVqqp8iogicg4EdkGbMJmtg10trcSka0icpE31zPG5Bpj/s8Y09YYE2aMGWyM+brMMSONMVJm2zzn2GbOY4Qx5otK7jPRGBNijPEm6UJ58Nz/tnDT7FVc++YKn85sUFdczXV92sUQE169Pp1LT+5YPPnqA5/+Tl5B5ct8Ld2ewuJNdiHl/zu7J4EBOoGqUjXhzdRBI4GPsZl0D+HWhGaMOYhtcrvMx+VT9UhufiH/WWonfv9lZxqfra4wEbLeKBl/VHXtyCUgQHjkwr4ECGw9mM2sH7ZXeKwxhscXbgRgYOfmxYNslVLe86aG9CA28+0UbMZdWT9T9bQ9qgH79Le9pB8paap7fOFGjuQV+LFElTuS5z7+yJshcrZGdfVQO5/w8//bwu7UIx6P+3LdAX7bnQ7o8hJK1ZY3AWkQMNsYU1H7xR5AVx9rpIwxvPmTrR2dltCCkKAAkjJy+fd3Fdce/M3VfyTV7D8q66+jj6N1dCi5+UU8NG9duf0FhUX840tbO/pDr1YMriKLTylVOW8CUgCep/pxiQd0OYdGasWONDYkZQJw19m9uH64rT28/N029qR5rj34W0n/UXS1+4/cRYUG8eA4O/nq1xsO8tW6/aX2f7hqD9sOHUbE9h0ppWrHm4C0ATv4tSLjqMZgVtUwvfXTDgD6d4zlxI6x3DwygVbNQjlaUFTch1LfuBbkO7Wrd8117sb2a8PpzhLnD81bX9xEmZtfyDOL7KQgF/ZvT++20bUsrVKq0oDkDEp1zV/3GnCRiFzrdp4RkQgReR47G3hFadeqAduXnsMXTu3g6qFdAIgMDSpOj56/JonlifUrgfFIXgGrnb4db/uP3IkIM87vQ0hQAHvTc3jufzYIvfXTDvZn5hIcKHWy2qxSTVFVNaRE4I8AxpiXsDNqv0rJEhBzsRON3gK8aYzRgaeN0OxlOyksMsRHhTK2X8n0gxf2b8+JHW169EPz1tV4/re64N5/NKiWfTtd4iP588gEAF5bksiKHam8uHgbAFec0pmOcRG1Lq9SquqAVHb8z5XABOz0QRuxKeCfAxcbY66tkxIqv8rNL2TucrvqyOWndCIkqOSfTECA8Lfzjgdg3b5MPli52+M1/MHVf3R825r1H5V148hudI2PpKDIcOWsZWTk5BMZEsgtf0io9bWVUpbXMzUYYz42xkwwxvQxxhxvjLnAGPNhXRRO+d/8NUmkHs4jKEC48pRO5fYP6NSc8Se1B+AfX27yyQJ3vrDM1X9Ui+Y6d6FBgTx8QV8AjjoDZa8/vRvxUZ5WUFFK1USNpg5STYMxpjiZYWy/thWu7XPXOb0IDw4kOTuPF77ZegxL6NmRvAJW76l9/1FZw3rEc96JdkHiFpEhXDe8m8+urZSq3uSqw0Wk2pOwOrOCq0Zg1a501jqL1U12khk8aRMTxp/P6M5TX23mjR8TmTi4k1+X7V61M538Qtt/
NLgG448q88gFfWkXG8ZZvVsTFerV3MRKqSpU53/Un5xHVQSb6KABqZFw1Y76tY9hgDO3W0WuG96N/67YzZ60HB5dsJ5ZkwcdgxJ6Vqr/KMK3axLFRARz75jePr2mUsqqTkB6BVha1wVR9cuBzFw+X5sE2NpRVVPihAUHct/Y3tw8exVfbzjI95sPFY/fOdZK5q/zXXOdUqruVScgLTHG6CqrTczsZbsoKDLERYYw7gRPK82XN6ZvG07pGseyxFQenr+ez6cOJzjw2HZT5uQVFvcfVbUgn1KqftGkBlVOXkERc5bZhXUnDu5Y7cXmRIQHzzueAIEtB7OZ7cwMfiyt2pVW0n+kAUmpBkUDkirn87VJJGcfJTBAuPJU75bi7tMuhksH2fTwZ77eQtrhYzu9oau5rnebaGIjQo7pvZVStaMBSZXzppPMcE6fNrSNCa/8YA/uHH0czcKCyMjJ55mvN/u4dJXT/iOlGq5KA5IxJkD7j5qW1bvTi9f3qSzVuzItokKZOqoHAO8s3cmm/Vm+Kl6lcvIKi8vuzYJ8Sqn6ocKAJCKjanpRETmzpucq/3KlevduG82gLs1rfJ1JQ7rQrWUkRQZmzF93TJY71/4jpRq2ympIX4jINyIyTkSq7NUWkWAR+aOIfIed3041MIeyjjJvjV2W/OqhnWu1+mlIUAAPnGvnuftxawqL1h/wSRkr42qu66X9R0o1SJWlfZ8EPA18BhwSka+B5cA27KSqAsQBPYBTgVFALPAV0L8Oy6zqyNzlu8gvNMRGBHNB//a1vt4ZvVoxsmdLFm86xKOfb2BEz5aEBlUvY68mSuav09qRUg1RhQHJGPM7MFpEhgA3AxcAE7GzMbgTIBP4CHjJGLOijsqq6lB+YRGzl9k07UsHVT/Vuyr3n3s8P2z5np0pR3jjxx3cOKK7T65bVun+I01oUKohqnJgrDHmZ+Bnp9luIHA80BIbmA4BvwO/GmOK6rKgqm598ft+DmQeJUDgKi9TvSuT0CqKSUO68PqPifzrf1sYP6A9rZp5nqS1Nn7dlUZeYREiOiBWqYbKm0lTC7FNdsvrrjjKX1zJDGf2bk2H5r5dcG7qqB588tteUg/n8Y8vNvGPi0/06fVB+4+Uagx0HJLi970Z/LIzDShZotyXYiKCuWO0Xeb7g1V7WONM7eNLS53+I60dKdVwaUBSxbWj41pHMaR73fS/XDaoE73aNMMYeGjeep+mgefma/+RUo2BBqQmLiX7KJ+utqne1ZnVu6YCA+w8dwArd6bxmXNPX1jl9B+B1pCUasg0IDVx/12xm7yCIqLDgvjjSbVP9a7M0O7xjOnbBoAHPvmdnSmHfXJdV3NdrzbNaB6p/UdKNVQakJqwgsKi4hm5Lzm5IxEhdb8C6t/O60N8VAiZuQXc8J+VHMkrqPU1df46pRoHDUhN2KL1B9iXkYuInernWGgTE8bMywcQFCBs3J/FXR+sqVV/Um5+Ib/t0v4jpRqDagckEblfRKq3UptqEFyzev+hZys6tfBtqndlTunWgvvPtcuAz1+TxKwliTW+lnv/kc5fp1TD5k0NaQawS0TmiciF1ZnfTtVfG5IyWZZo+15qOqt3bUwe2oXxTp/VYws38OPW5BpdZ5lb/1Gc9h8p1aB5E5BOAV4DhgMfAntE5HEROa5OSqbq1Ns/7wCgW8tIhiXEH/P7iwh/H9+PPu2iKTJwy5xV7Ek74vV1tP9Iqcaj2gHJGLPCGHMj0BaYAmwG7gI2iMj3InKViHi/mps65tKP5PHxr3sBOxA2IKBuUr2rEhYcyMtXDaR5RDBpR/K54T8ryc0vrPb5ufmF/KrrHynVaHid1GCMyTHGvG2MGQH0BJ4EugNvAkki8qKI6Gzf9di7K3aTm19EVGgQ4wd08GtZOjSPYOblAwgQWLcvk/s+XlvtJIdfd6WTV+DqP9IaklINXW2z7BKBlcAG7KzfUcD1wEoRWaBJEPVPYZHhP06q90UDOxAVWvep3lUZ
mhDPvWNsksNHq/YWzxxRlZL567T/SKnGoEYBSUT6iMjTwD7gXaAX8AjQDegIPAqcAbzuo3IqH/nfhgPsScsBYNIQ383qXVvXDe/KeSe2A+DhBRtY5gSbyrgCks7OoFTj4E3ad5SIXC8iS4E1wK3AMuBCoJMx5kFjzC5jzH5jzIPAdOD0uii0qrnP1yYBMLxHPN1aRvm5NCVEhCcm9KNXm2YUFhn+PGcVSRk5FR5fuv9Im+uUagy8qSEdAP6NTWqYAXQ1xpxnjJlXwVpIOwFNcqhHjDH8sNXWKs7s3drPpSkvIiSIl68aSHRYEMnZedz4ziqOFnhOcvhtt3v/kdaQlGoMvAlIi4DzsYHoIWPMnsoONsa8a4zRmSDqkU0HskjOPgrAsB7HPtW7Ojq3iOT5iSchAqt3p/PgJ+s8Jjm4mut6tm5Gi6jQY11MpVQd8Cbt+0JjzAJfrgwrIqEi8oSI7BORHBFZKiKjqnnuJBFZIyK5IpIkIs+LiMc2KBE5TkTeFZFDzn02iMhdvvocDcUPW+zg07YxYXSLj/RzaSo2smcr7hzdE4B3f9nNnOW7yh1TMv5Ia0dKNRbe9CGNEpHHKtn/mIic4eX93wRuB94BpgJFwEIRGVJFWaYCbwH7gb8CbwDXAp9KmfUTRGQAsALoAjwG/AX4BJt80aT84MyGcFpCfJ0tM+ErN4/szjl97Mzg0z9bx0pnAUGw/UerdP46pRodb3J+7wYyKtnf1Tnm2+pcTEQGA5cBtxtjnnW2vQ38DjxBBQkRIhIKPAR8A5xtnPYcEfkJmAdcgA04ONMb/Qf4H3CRL2t3Dc3RgsLiaXaG19PmOnciwlOXnMjWmdlsPZjNTe+sZP6tw2gVHab9R0o1Ut708ZwILK1k/zLnmOq6CMgHZrk2GGNysdMTDatkDFMfIAZ417h1Lhhj5gPZwKVux44GjgemGWOKnEzBJtmv9euudHKcWRCGdq//AQkgKtQmOTQLDeJg1lFunr2KvIKi4sB6XOso7T9SqhHx5ss5BqhsRbUcoLkX1zsJ2GiMyS6zfTl2kG1Fsz24voE85QTnAAPc3p8JZALtRWQTkAVkicirInLspreuB1yTl/Zq04yWzRrOl3j3llE8c6n9p/DLzjQenr9e569TqpHyJiDtBQZWsn8gtk+nutoCSR62u7a1q+C8LYABTnPfKCI9gZZlzkvANkt+CnwFTABewvY3zamoYCKSXtkDG5wblCVOQoM/JlKtrTOPb83UUT0A+M/SnSxN1ICkVGPkTUBaAEwWkTPL7nAy4yYDn3txvXDgqIftuW77yzHGJAPvAdeKyO0i0k1EhmNnjMgvc14UEAG8bYy51RjzkTHmTuAp4AIR8aaJscHKyMlnzR6bBFBf072rMnVUD0b1agWAq6FW+4+Ualy8CUiPAoeAL0Vkvog84jzmY2sfh4CHvbheDiXNb+7C3PZX5AZs8Hsa2AZ8D6zFJjW4NwG6rjG3zPmznefT8MAYE1vZg8qTO+qdn7elUGQgJDCgwX6JBwQIz1zWn65OunqPVlHEa/+RUo1KtbPsjDEHRGQotslrDDDWtQtYCNxijPHUBFeRJGyzXVmubfsqKUsGtobTCZvOvdMYs9PJtNtS5h5gZ5lw53rvTZ9Xg+XqPxrQOZaIEP9PplpT0WHBvH71IP751SYuObnJZe0r1eh59e1kjNkJjBWR5tj+GYCtxpi0Sk6ryG/AVBGJKpPYcIrzvLoa5dkF7AIQkVhsP9YzboesBK4D2gOb3La71lw4VINyNziu8UcNsf+orK7xkbxw+YCqD1RKNTg1SoE2xqQ5C/atqGEwAvgACMYGDKB4jNEU4EdjzD5nWycR6VWN6z2GHVj7stu2z4A8bBKDu+uxNbtvalj2BmNP2hESk21y5LAeLf1cGqWUqliN2m+cKXpi8RDQnFpLlYwxy0TkfeBJZ8zRNmxiRGfgardD3wZGYFPBXfefBvTGjn0qwM44Phq4wRiT6HaPfSLy
OPCgiIRgA9BQ4ErgRWPM1up+5obK1VwXHRZEv/YNLjlQKdWEeBWQROQy4H5sMKhIoBeXnIRNhJiE7c9ZA4w1xvxYxXlrsDMyXOC8XwWMMcZ84eHY6UA6cLNz/F7sZ6hwGqTGxDW799Du8QT6aalypZSqjmoHJBG5EDt2ZzO2WexG530QtoayBpsaXm3OzAz/5zwqOmakh23zsBl11bmHwfYrPVPVsY1NUZEpriGd1kDTvZVSTYc3fUh3Ypcq7w886Gx73RhzGXAy0BObqKDqiQ37M0k9nAfA8EaQ0KCUaty8CUgnAG85tRrXJKWBAMaY34FXgHt9WzxVG67lJtrHhtO5RZOaKUkp1QB5E5ACgRTntWvAqXsv+Sagry8KpXzDPd27vi83oZRS3gSkPdgMOIwxOcBBSs9t15PKJ19Vx1BufiHLE+2s2A11uiClVNPiTZbdT9jZs139R58Bt4lIDjaw/ZlqJhqourdqZxpHnTWDhnbXSUiVUvWfNwHpReCPIhLu1JCmAYOxadUA67CJD6oecDXX9WkXrWsGKaUaBG/msluBXQrc9f4Q0F9ETgAKgQ1NeUXW+qYxTReklGoaqhWQRCQSuANYZoz50n2fMWZNXRRM1Vz6kTzW7rUTkmv/kVKqoahWUoMx5jBwH6BTLDcAP21LwRgICQpgUJeGudyEUqrp8SbLbhvQpq4KonzH1Vw3qEtzwoK9mclJKaX8x5uA9CJwvYhoylY95xoQe5r2HymlGhBvsuyygFRgk4i8hV0I70jZg4wxb/uobKoGdqUcYVeq/WsZnqDLTSilGg5vAtKbbq9vr+AYg10uQvmJq7kuNiKY49tF+7k0SilVfd4EpDPqrBTKZ4pn99blJpRSDYw345C+q8uCqNorLDL8uE37j5oCYwzJycnk5uZSVKTD/5R/BQQEEBYWRnx87ebNrNGKsap+Wr8vk/Qj+QAM1/FHjZYxhr1795KVlUVoaCiBgZpJqfwrPz+f7Oxsjh49Svv27WsclLxZoO/Bqo/CGGMerlFJVK0t2XoIgE5xEXSM0+UmGqvk5GSysrJo3bo1cXE6zkzVD6mpqRw4cIDk5GRatqxZQpU3NaTplewzgDjPGpD8pLj/SJvrGrXc3FxCQ0M1GKl6JS4ujvT0dHJzc2t8DW8CUtcKzu+OzbqLASbXuCSqVnLzC1mxIw3Q5rrGrqioSJvpVL0UGBhYqz5Nb5Iadlawa5uILAK+B6ZgpxhSx9iKHankFRQhAkO66dhlpVTD481MDRUyxhjgA2CSL66nvOcaf9SvfQzNI0P8XBqllPKeTwKSIwTQn+Z+otMFKaUaOp8EJBE5GZgKbPDF9ZR3Ug/nsW5fJgDDNSApVSe6dOnChRde6O9ieDR9+vRyqdYiwvTp0/1ToBqqdkASke0VPNKBZUBLtP/IL1zZdaFBAQzo3NzPpVGq5t58801EBBHhl19+Kbc/Ly+PuLg4RISrr766ePuOHTsQEZ599tlKr++6togQEBBAhw4dOP/881m1apWvP4qqAW+y7HZh07rdGWAVsBl4xRizw0flUl5wBaTBXeN0uQnVKISGhjJnzhxOPvnkUtsXLlxIRkYGwcHBNb722WefzZVXXklRURGbNm1i5syZDBkyhKVLl3LSSSfVtuj1Rk5ODkFBDWvuA2+y7EbWYTlUDRljWLJFlytXjcvYsWN59913eeqppwgIKGnImTt3LiNHjmTlypU1vnavXr248sori98PHTqUcePG8dJLL/HKK6/Uqtz1SVhYmL+L4DVfJjUoP9iZcoS96TmALleuGo+JEyeyb98+vvuuZArN7Oxs5s2bx8SJE316r5EjRwK22a86Fi5cyIknnkhYWBj9+vXj888/L7U/NTWVO++8k379+hEVFUV0dDRjxoxh9erV5a71r3/9iz59+hAREUHz5s05+eSTmTNnTqlj1q1bx/jx44mLiyM8PJxTTz2VRYsWVVnOsn1Irn6mxMREJk2aRExMDDEx
MUyZMoUjR8qtJMQbb7zBgAEDCA8PJz4+nsmTJ3PgwIFq/RnVlDdTB10KnGuM8Zja7ayRNM8Y84GvCqeqtsRprmsRGULvNrrcRFNVUFhEUkbNR8j7WtuYMIICa/57t0ePHpx88snMnTuXM86wCw18+umnFBQUMGHCBO68805fFZXt27cD0KJF1UnCGzdu5IorruCmm25i8uTJzJo1iwsuuIDvvvuOoUOHFl/vk08+4eKLL6Zr164cOHCAl19+mREjRrB+/XratWsHwKuvvspf/vIXrr32Wm677TaOHDnC6tWrWbZsGZdffjkAa9euZdiwYXTu3Jl7772XsLAwZs+ezZgxY/jyyy8ZNWqU1593woQJdO/enccff5xVq1Yxa9YsWrVqxRNPPFF8zEMPPcSMGTOYOHEif/rTn0hKSuK5555jxYoVrFy5kvDwcK/vWx3eNDDegl3GvCKFwK3Y8UjqGPnRaa4bmhBPgC430WQlZeQy/Mlv/V2MYkvuOqPW8ylOnDiRRx99lJkzZxIcHMycOXM455xzaN68dok7ubm5JCcnF/ch3X67Xd5twoQJVZ67adMmPvvsM8477zwApkyZQo8ePZg2bRrffmv//Pv168fmzZtLNTVeddVV9OrVi9dee40HHngAgAULFjB27FhmzZpV4f1uu+02EhISWLp0aXG/2U033cSAAQOYNm1ajQLSoEGDePnll4vfp6Sk8NprrxUHpB07dvDwww/z5JNPcscddxQfN2bMGIYOHcpbb73FjTfe6PV9q8ObnzC9gV8r2f8rcHztiqO8UVhk+Gmbq/9Ih4CpxuXSSy8lPT2dL774gpSUFBYtWuST5rqXX36Zli1b0rp1a04//XQ2btzIo48+ykUXXVTluZ06dSoORgDNmzdn4sSJfP/99xw+fBiwCRmuYFRYWEhKSgpRUVH07NmzVDZfbGws69atY+PGjR7vlZqayrfffsvFF19MRkYGycnJJCcnk56ezujRo1mxYoXHpraqlA0mw4cPJyUlhcxMO3Tk448/xhjD+PHji++ZnJxMQkICbdu2ZfHixV7fs7q8qSFFYmtBFTFAs9oVR3lj7d4MMnMLABjWQ5crb8raxoSx5K76s4Zm25jad6i3b9+e4cOHM2fOHPbu3UtISAjnn39+ra87fvx4brrpJkSEmJgY+vTpU+0mqISEhHLbevToQVFREbt376ZXr14UFRXx3HPP8eKLL5KYmEhhYcnXpnuz4N13383XX39N79696d27N2effTYTJ05k8ODBAGzduhVjDPfeey/33nuvx/KkpKQQEeFdTbRTp06l3rtqnGlpaURHR7NlyxaKioro1q2bx/MPHTrk1f284U1ASgSGAS9UsH8YNjVcHSM/bLH/MLrGR9I+tm7adFXDEBQY0CiXHJk4cSJ//etf2bZtG+eff77XX76edOzYkTPPPNMHpfPs73//Ow888ADXXHMNDz/8MHFxcQQEBHDbbbeVmni0d+/ebNq0iQULFvDFF1/w3//+l2effZYZM2bwwAMPFB979913V1jemizzUNHEvHYGuJLJexcuXOhxXaPaNplWxpuA9DFwj4gsMsa85r5DRK4BLgb+4cvCqcr9ULzchDbXqcbp4osv5tZbb2XFihU8+GB1lmSrW1u3bi23bcuWLQQEBNCxY0cAPvjgA8444wxee63U1yTp6enEx5fOhI2MjOSSSy7hkksuIT8/n4suuogZM2Zw1113FddQwsLC6jSAltW9e3cKCwvp0aMHXbp0OWb3Be/6kB7HTg30ioj8LiLvOI+1wKvAJuDvdVFIVd6RvAJW7UwHYFiCNtepxikuLo6ZM2cyffp0zj77bH8Xh127djFv3rzi92lpacydO5fhw4cTGRkJ2BqIq7bh8v7777N3795S21JSUkq9Dw4Opk+fPhQVFZGXl0erVq04/fTTeemllzw2k9VV09kf//hHAgICmDFjRrl9RUVFpKam1sl9wbuBsVkichrwGHAp
JQkMacBLwP3GmEzfF1F5sjwxlbzCIgIEhnTXGpJqvK6//vpqH7to0SKys7PLbb/ssss89v94q2fP+VGcnAAAGRRJREFUnkyePJmbb76ZFi1a8Oqrr5KRkcHDD5esSzpu3DhmzJjBlClTGDp0KGvXrmX27Nnl+mRGjx5NmzZtGDp0KG3atGHjxo288MILnHvuuTRrZrvjZ86cyfDhw+nbty/XXXcdXbt2JSkpiSVLlpCbm8v3339f689UVkJCAjNmzOD+++9n27ZtnHfeeURGRrJt2zY+/PBDpk2bxnXXXefz+4J3TXYYYzKAm0Xkz4Cr7plsyv4cUHXONV3QCR1iiQmv+TQqSjUmn3/+ebmBqgD9+/f3SUDq1asXzzzzDHfffTebN28mISGBjz/+mOHDhxcfc99993H48GHmzJnDu+++y4ABA1iwYAH33HNPqWvdcMMNzJ49m2eeeYbs7Gw6dOjArbfeyv333198TN++fVmxYgXTp09n1qxZpKen07p1awYOHMjUqVNr/XkqMm3aNHr06MGzzz7L3/72NwICAujUqRMXXnghZ511Vp3dVzSWeE9E0mNiYmLS09P9VoZznv2ejfuzuOWMBO48u6ffyqGOvZ077VqZnTt39nNJlCqtqn+bsbGxZGRkZBhjYj3t92a27z+LyNeV7P9KRG6o7vVUzR3KOsrG/VmATheklGo8vElquBrYUsn+zcA1tSqNqhbXYNjw4EBO6uTxh4ZSSjU43gSkHsDaSvavc45Rdcy1Ouwp3eIIDdLlJpRSjYM3ASkYqGz4dVgV+5UPGGOKxx/pchNKqcbEm4C0GagsvWI0lU++Wo6IhIrIEyKyT0RyRGSpiFRrtkARmSQia0QkV0SSROR5EYkqc0wXETEVPM7xpqz1xfbkw8WzOmv/kVKqMfEmIM0FRovIwyIS4tooIsEi8hA2IM2p8GzP3gRuB94BpgJFwEIRGVLZSSIyFXgL2A/8FXgDuBb4VDzNdWGvf1WZR/nFSRqA7zfbwXDxUaH0bK1TByqlGg9vxiE9A4wBpsH/t3fv0VXU1wLHvzsJSYCaAAKVSM2D8H6I0CuogMhDLCqo7Q1QFXRJobpKFdqSi9xbQCxXUB7Kw0Vb5VpUBHGByuXhCwVU9CoFFkJQHkGrKO+AQBKS7PvHzDmcJCfv5JwJ2Z+1Zg3nN7+Z2Rkm2Wdmfr/f8KCI+IaobQc0ATYDs8u7MRG5FhgOjFfVeW7ZP4BdwEygTwnrxQDTgPeAQb4+UCLyEfAmMBRYXWS1z1X1xfLG5lXncvP46ybn3S03tW0WdJwpY4yprcp9haSqF3Cugv4D+BdwjTt9A0wEBqhqbgX2/SvgAuB/GYiqZgPPAb1EpEUJ63UE4oHlgR1yVXUN8CPOKBLFiEjDwCu72mjRxv0czsomJiqC3/e39iPGmEtLRUdquADMcqdiRCRGVXPKublrgAxVLTrOx6eAAF2Bw0HWi3Hn54MsOw90C1I+HecKT0XkEyBdVUscc0NEyurxGl/G8mp36PhZ/9XR2BtbXZIjOxtj6rbKv2M4gIh0F5FFwHcVWK0FwROOryyhhPW+wnn30g1FYmgLNCuyXgGwAfgjMMSdJwLviEhvapHpa3aTm1/AlY3q8+CNrcIdjjHGVLsKXSEFEpEmwD04nWE741zVfFmBTdQHgl1NZQcsL0ZVj4nICuABEdkLvA5cCczHuQVYP6Du10Ch1nQi8gqwG2f08kJJLWC9UnubuldQIbtK2rj3CO/sOQLA5FvbUz/a+h4ZYy49Fb5CEpFBIrIc+BbnNpivkUFnVW1XgU2d5+Ltt0CxActLMhZYC8zBaWq+CafT7ps4z5FKpKrf4bQY7Ckinr/vlZtXwPQ3dwNwXcrl/KLTFWGOyBgTDklJSdx3333hDqNGlSshuf15HhORQziJoC+w0l08WVUfU9UvKrjvwzi37YrylZV4+09Vs1R1KM7ttxuBJFW91123tOGNfL7B+dk9P+7Okg8P
cuDYWSIjhGlDO1rLOlNn7Nu3jzFjxpCcnExMTAzx8fH+9wPl5lak/VT5bd26lalTpxKugZMzMjKYOnUqmZmZYdl/uJV6y05E7sa5JXcjkA+sAcbhJKVE4O4q7Hs78LCI/KRIw4Ye7rzMfkLuLbmv3VgbAd1xrtrKkoLz85ysUMQh9sPpbJ5518mvI69LpI31OzJ1xJtvvklaWhoNGjRg5MiRdOzYkfPnz7Np0ybGjRvHwYMHmTUraNuqKtm6dSvTpk3jvvvuo1Gj0H9fzcjIYNq0afTt27fY21r37t1LRES1PPb3rLKeIS0FDgCPAMtU1f+Kw2r4pr4Sp5HBaMDXDykGuB/40L21hohcBTRQ1YySNuT6b5xGDIsDYmymqoVeqygiqcAIYJOqlnZbMOyeWJfB2dx8Lm8YzSMD2oQ7HGNCYv/+/YwYMYKUlBQ2btxI8+bN/cvGjRvH7t27a+TFdBWVn59PXl4eMTHBnjxUv1DtJ6xUtcQJ5zlOHvA2ztVQ/YBlrXASwF2lbaOM7a8AcnE6wo4BPnQ/3xBQ530nzELrTcYZfWEc8CBOSzoFxhSptwT4APgz8BuchgyngLNAtyrEfSo+Pl5r0v8dPK6J6Ws0MX2NvvLpoRrdl6ldMjMzNTMzM9xh1JixY8cqoB9//HGZdS9cuKBTp07V5ORkjY6O1pSUFH3sscc0Ly+vUD1AH374YV25cqV26NBBo6OjtUOHDrpu3Tp/nSlTpqj7d6TQdPDgwULbeOGFF7Rt27YaGRmpGzduVFXVJ598Uq+77jpt0qSJxsbGardu3fTVV18tFu9bb72lN9xwg8bHx2vDhg21TZs2OmnSJFVVXbJkSdD9+/aRmJioo0aNUlXVTz/9VAF98cUXi+3jlVdeUUA3b97sL9u1a5feeeed2rhxY42NjdUePXroW2+9Vebxraiyzs34+HgFTmlJf1tLWqDOH95GwO+AbW7yycLpuNoHSK2GhBQLPInzPCkbpw/SgCJ1giWk2926Z9zpA+CWINsf4S47itMC74ibBDtWNmYNQULKyy/QwU9v0sT0NXr7/M2an19QY/sytc+lnpASEhK0VatW5ao7atQoBXT48OG6YMECHTZsmAI6evToQvUA7dq1qyYkJOjjjz+u8+bN05SUFG3QoIEeO3ZMVVV37Nih99xzjwI6d+5cXbp0qS5dulR//PFH/zbat2+vV1xxhU6fPl3nz5+vGRkZqqrasmVLfeihh3TBggU6Z84cvfbaaxXQNWvW+GPYtWuXRkdH60033aQLFizQZ599VidMmKC9e/dWVdX9+/frI488ooA++uij/v1///33qlo4IamqJicn65AhQ4odk7vuuktbtmypBQXO342dO3dqXFycdu7cWWfNmqXPPPOM9ujRQyMjI/Wdd94p13EurxpNSFr4j3A3YCFwAuf5y/fu/P7ybuNSmWo6Ib209ZD/6mjboRM1th9TOwX9pc+7oHoi0ztT3oVK/WxZWVkK6NChQ8usu337dgX0t7/9baFy3xXWjh07/GWAxsTE6IEDB/xlO3bsUEDnz5/vL5s7d26hq6JAgEZGRurevXuLLTt37lyhz7m5udqpUyft169foW3HxcUVu3oLtGrVqkJXRYGKJqT09HSNiYlR5wWsjjNnzmj9+vV1/Pjx/rJ+/fppt27dNDc311924cIF7dy5s/bo0aPEWCqjqgmp3P2QVHUbsE1EJgC/xBnMtC/wd3ew05XAKq14azsT4NS5XJ7c4Dwu+1X3llxzVeMwR2RqhdPfwtNdwh3FRQ/vhMYVf8X66dOnAbjssrIb8KxduxaACRMmFCofP348ixcvZu3atXTpcvGYDBo0iOTkZP/nLl26EBcXx4EDB8odX79+/WjTpvjz3Pr1L3abPHnyJPn5+fTu3Ztly5b5yxs1asTZs2dZv349t956a7n3WZK0tDRmzpzJ66+/zr333gs4jUHOnz/PsGHOCGonTpxg48aNzJgxg6ysrELr
33zzzcydO5dz587RoIE3esBUuMmGquao6suq2h/nOdJfgMbAY9TSEbS9ZM7bX3Ly3AV+EhPFxFvahjscY0IqLi4OgDNnzpRZ99ChQ0RFRdGqVeGRS1JTU4mKiuLQoUOFyq+66qpi22jcuDEnT5a/sW1gQgu0Zs0aevbsSWxsLE2aNKFZs2Y8++yzhZLAsGHD6NmzJ7fddhsJCQmMHDmSVatW+e66VFi3bt1ITU1l+fLl/rIVK1aQmJhIjx5OY+V9+/ahqkyaNIlmzZoVmmbPnk1BQQHHjx8vaRchV+mRGgBUNRP4s4hMAQZhrzCvkj2HT/PiVueX6JEBrWl+mb3v0JRT3JXOVYlXxF1ZudXi4mjRogW7du2q5oAgMjL4CCcVSQiBV0I+mzdvZsiQIfTp04dFixbRokUL6tWrx5IlS3j55ZcLrbtp0yY2btzI2rVrWb9+PUuXLmXgwIGsW7euxPhKk5aWxlNPPcWpU6eIiopi/fr1jBs3zr+8oKAAgPT0dAYMGBB0G82aNavwfmtKlRKSjzr/o+vdyVSCqjLljS8oUGjVrCEjr0sKd0imNomMqtQtMi+67bbb+Nvf/sYnn3zi/6YfTGJiInl5eezfv5/WrS+Ofr9//37y8vJITKz48ahMd5bXXnuN2NhYNmzYUKhp9pIlS4rVjYiIoH///vTv35/Zs2cza9Ys0tPTef/99+nfv3+F95+WlsaMGTNYvXo10dHRZGdn+2/XAaSkpAAQGxtbYkLykku7l1UtsmbnYT49eAKAqUM6Eh1l/zWmbpo4cSINGjRg9OjRHD16tNjyPXv2sHjxYgYPHgzAvHnzCi1/+umnASr1nKZhw4YAFRqpITIyEhEhPz/fX5aZmcnq1YVfyxbs1ljXrl0ByM7OrtT+r776atq2bcvy5ctZsWIFrVq1onv37v7lzZs3949uEexYBisLp2q5QjJVcy43jxlr9wAwqONP6d3aO5fQxoRaamoqL730EsOHD6d9+/aFRmrYsmULr732GhMmTGDs2LGMGjWKRYsWcfLkSXr16sWWLVtYtmwZDzzwAJ07d67wvn1/zCdPnszw4cOpV68et99+uz9RBHPrrbcyZ84cbrnlFn79619z5MgRFi5cSGpqKjt3XryNOn36dDZt2sTgwYNJSkri6NGjLFq0iJYtW9KrVy/ASTBRUVHMnDmTrKwsYmJi6NevX6HOwUWlpaXxxBNPEBERwR/+8IdiyxcuXEjv3r3p1KkTo0ePJjk5mcOHD7N582ays7M90cnYr6TmdzaFrtn3rPV7NDF9jbaZvFa/Pn622rZrLk2Xej8kn4yMDH3ggQc0MTFRo6OjNS4uTvv06aOLFy/2N2H2dYxNSkrSevXqaVJSUqkdY4sq2pRaVXXGjBl65ZVXakRERNCOscE899xz2rp1a42JidF27drpkiVL/B1tfd59910dOnSoJiQkaHR0tCYkJOiIESP0yy+/LLSt559/XlNSUjQyMrLEjrGBdu3a5e9Eu3379qDxffXVV3r33Xdr8+bNNTo6Wn/2s5/pHXfcUaifVHWoarNv0Uq28KjLRORUfHx8fHUMwHjo+FkGztlEbn4Bv+/fmgkDbYggUzpf67HKPCMxpiaVdW42atSILKfjVNCBAu1BRZjZi/eMMcZhCSmM7MV7xhhzkSWkMMnNK+Ax98V717eyF+8ZY4wlpDB5/sODHHRfvDd1iL14zxhjLCGFwQ+ns5lvL94zxphCLCGFgb14zxhjirOEFGKfZZ5g1T+/BWDiLW2Jr18vzBGZ2iYiIqLQqADGeEV+fn6VXrNuCSnEfCMydGkZz793/1mYozG1UWxsLDk5OZw4cSLcoRjjd+LECXJycoiNrfyg0DZ0UIjNSevK4/+7m4duSiUiwhoymIpr2rQpOTk5/PDDD5w6dapSo0QbU53y8/PJycnhsssuo2nTppXejo3UUAnVOVKDMZWhqhw7dozs7Gz/KwaMCZeIiAhiY2Np2rRpqS2G
yxqpwa6QjKmFRMRT77ExpjrYMyRjjDGeYAnJGGOMJ1hCMsYY4wmWkIwxxniCJSRjjDGeYM2+K0FECgCJj48PdyjGGFNrZGVlgfMW3aAXQ5aQKkFE8nCuLk9XchO+TJZVPRHVOXb8qsaOX9XY8au8OKBAVYN2ObKEFAYicgqgpM5hpnR2/KrGjl/V2PGrOfYMyRhjjCdYQjLGGOMJlpCMMcZ4giUkY4wxnmAJyRhjjCdYQjLGGOMJlpCMMcZ4gvVDMsYY4wl2hWSMMcYTLCEZY4zxBEtIxhhjPMESkjHGGE+whBQiIhIjIjNF5DsROS8iW0Wkf7jjqi1EpK+IaAlTu3DH5yUi0kJEnhCRjSJyxj1GfUuoO0REtolItoh8LSJTRCToSMx1RXmPn4hklnA+PhGGsC8JdfrEC7H/AX4JzAP2AfcB60TkRlX9OIxx1TbzgM+LlH0XjkA8rC2QjnOe7QSuD1ZJRH4BrAbeA8YBnYE/A03dz3VVuY6f63OcczLQrhqK65JnCSkERORaYDgwXlXnuWX/wDlxZwJ9whhebfOBqq4OdxAe9znQVFWPi8gdwKoS6j0F/BMYpKr5ACJyGpgkIs+o6lehCddzynv8AP6lqi+GKK5Lnt2yC41fAReAv/sKVDUbeA7oJSItwhVYbSQil9X120qlUdUzqnq8tDoi0gHoACz2JSPXIpy/C7+swRA9rTzHL5B7O75BTcZUV1hCCo1rgAxV/bFI+aeAAF1DH1KttRTnTb3nReQtEekc7oBqqWvc+WeBhar6HfCvgOWmdDcDZ4GzIrJfRMaEO6DazL5lhkYL4Nsg5YfdeUIIY6mtcoGVwDrgGNAF+COwRUT+TVW/DGdwtZDvqvxwkGWHsXOyPHYCm4EvgWbAb4DFItJEVa1hQyVYQgqN+kBOkPLsgOWmFKr6EfBRQNEbIvImzjf8KcDdYQms9vKdcyWdl3YLqgyqOiTws4gsAbYA/yUiz6pqVngiq73sll1onAdigpTHBiw3FaSqO4B3AGs+X3G+c66k89LOyQpyn8XNw0nm14U5nFrJElJoHObiLZJAvjJrtlx53wBNwh1ELeS7VVfSeWnnZOV8487tnKwES0ihsR1oJyI/KVLew53vCHE8l5IU4Gi4g6iFtrvznwcWikgC0DJguamYFHdu52QlWEIKjZVAPWC0r0BEYoD7gQ/dlk2mFCLSLEhZL+AmYEPoI6rdVPULIAMYIyKRAYseBAqA18ISWC0hIk1EJKJIWSzwJ+AMYJ3dK8EaNYSAqn4iIq8Cs9w+R/uBUUAizogNpmzLReQcTsOGY0AnYIz776lhjMuTROQ/3X+2d+f3ugn8lKoucMv+BLwBbBCR5TjH9Hc4fZPqdKvFchy/IcBkEVkJZAKX4/xOtwEeDNLFw5SDvaAvRNxvT9OBe4DGOE1GH1XVd8IaWC0hIr/HaUmXCsQBR3CujKaq6tfhjM2LRKSkX+xDqpoUUO8OnFaK7XFuMz0PTFfVvBoP0sPKOn4i0h3ni9A1OE2+c4BtwFOquiY0UV56LCEZY4zxBHuGZIwxxhMsIRljjPEES0jGGGM8wRKSMcYYT7CEZIwxxhMsIRljjPEES0jGGGM8wRKSMSYoEXlfRDLDHYepOywhGRNCItJXRLSUqU6PkGDqNhvLzpjwWAasDVJeEOpAjPEKS0jGhMc2VX0x3EEY4yV2y84YDxKRJPcW3lQRGSEiO0UkW0S+dsuKfZkUkS4iskpEjrt1d4vIxCKvl/DVvUJEnhGRAyKSIyJHRORtERkYpG6CiCwTkZMick5ENohIm5r62U3dZVdIxoRHAxFpGqQ8V1VPB3wegvPSt4XA9+7nKTivLrnfV0lEfg58AFwIqHs7MBO4GmekdF/dJOBD4KfAP4DPgIZAT2AA8HbA/hsCm4CtwKNAMvAw8LqIdHJf221M9VBVm2yyKUQT0BfQUqY1br0k93M+0C1gfQFWuct6BpR/COQBXYrUXeHW7R9QvtYt
GxQkvoiAf7/v1ptYpM6fSlrfJpuqMtktO2PC46/AwCDT5CL13lbVbb4PqqrALPfjnQAi0hy4HnhDVXcWqfuXInWbALcA61W12Jt2VbVoo4oC4JkiZe+589Zl/pTGVIDdsjMmPL7S8r2ccU+Qst3uPMWdJ7vzL0pYvyCgbirOldM/yxnnd6qaXaTsuDu/vJzbMKZc7ArJGFOa0p4RSciiMHWCJSRjvK19kLIO7vyAOz/ozjsGqdsO5/fcV3cfzvOfrtUVoDHVxRKSMd42UES6+T6IiAAT3Y+rAVT1CPARcLuIdCpSd5L7cZVb9wSwDviFiAwoujN3HWPCwp4hGRMe3UTknhKWrQ749w7gPRFZCBwGhuI0zV6qqh8H1HsYp9n3Zrfu98BtwCDgZVV9N6Du73AS2DoReQH4HKgP9AAygfQq/mzGVIolJGPCY4Q7BdMapwk3wBvAXpwrnbbAEWC6O/mp6mcicj0wDXgIp//QAZzkMrtI3YNuv6X/AgYDI4GTOMnvr1X9wYypLHFahhpjvMTtvHoQmKaqU8MajDEhYs+QjDHGeIIlJGOMMZ5gCckYY4wn2DMkY4wxnmBXSMYYYzzBEpIxxhhPsIRkjDHGEywhGWOM8QRLSMYYYzzBEpIxxhhP+H/ei3jqYrg/iQAAAABJRU5ErkJggg==\n",
145 | "text/plain": [
146 | ""
147 | ]
148 | },
149 | "metadata": {
150 | "needs_background": "light"
151 | },
152 | "output_type": "display_data"
153 | }
154 | ],
155 | "source": [
156 | "fig, ax = plt.subplots()\n",
157 | "ax.plot(df1['Step'], df1['Value'], label='MLP baseline')\n",
158 | "ax.plot(df2['Step'], df2['Value'], label='Contrastive')\n",
159 | "\n",
160 | "ax.set(xlabel='Epoch', ylabel='Accuracy (Test set)', title='MNIST dataset');\n",
161 | "ax.legend();\n",
162 | "fig.savefig('figs/mnist_test_acc_curves.png')"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": 15,
168 | "metadata": {},
169 | "outputs": [],
170 | "source": [
171 | "df1 = pd.read_csv('runs/run-baseline_fashion_mnist_20200427-210140_test-tag-accuracy.csv')\n",
172 | "df2 = pd.read_csv('runs/run-contrast_loss_model_fashion_mnist_20200427-210420_test-tag-accuracy.csv')"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": 16,
178 | "metadata": {},
179 | "outputs": [
180 | {
181 | "data": {
182 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAaQAAAEwCAYAAAD4uwVgAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nOydeXiU1fXHPycLSSAhEBL2TXbZlE0QlaVY9wVFq6iI1KXa2rrU2p+1tbi2Wlu1bhVxV6xLVdwVlUVlB9kJ+74HSMhC9vP7476TDGGSzCQTEuB8nud93sx9733vmWF4v3PPPfdcUVUMwzAMo7aJqG0DDMMwDANMkAzDMIw6ggmSYRiGUScwQTIMwzDqBCZIhmEYRp3ABMkwDMOoE5ggGUclIjJMRFRErguy/jQR2VizVhmhIiKvioitPTEAEyQjDPiJQ3nHoNq2sS4gIuP9PpPLyqlziV+d8WWu+crfKqftNBHJKqfP/mXKO4jIBBFJFZEcEdkvIitF5DURGe7VebWSf1ctz9YjhYiMrK2+K8P77EfWth1HE1G1bYBxTPE28HmA8rVH2pAAnAVIbRvhkQuMA94PcO2X3vXYCtqPFpF/qOqiqnTuidN0oAB4HVgOxAGdcZ9TJjAVeAH4pkzzN4BU4OEy5UuqYksYGAmMBcbXUv8V8VfgNeCj2jbkaMEEyQgnC1X1zdo2IhCqml/bNvjxIfALEWmhqjt8hSLSHDgHeBe4qpy2S4EuwKPA2VXs/69AfeBkVV1c9qJnB6o6C5hV5tobwK66+u9sHN2Yy844IojIKZ4LaLXnIsoUkR9F5JIAdduIyMsisklE8kRkt4jMFJGx5dx7nIgs9+puEpG7A9QJOIckIkNEZIqIZIjIQRFZKCLXl9deRFqKyNueiytHRL4SkS4hfhxvAsXAtWXKrwXUu14em4HngLNEZESI/froDOwNJEYAqrqzivctFxGJFZF/iMh273OeKyJnlVM3qO+KiEzDjY783Zkl84oi0k1EnvO+G5nevRaIyA0B+kwSkSdEZJ2I5IrIXq/uHwLUvUJEfvC75xx/F6yItJfSebGx/rZV9fM7XrARkhFO6otIcpmyPFXNBC4BuuF+/W8CmuAeJh+IyNWqOglARKKAKUAr3IN3NZAI9AbOwLlA/LkZaAa8BKQD1wCPishW3z3LQ0QuxI1WdgL/xLmqrgQmikgHVb23TJMGwAxgNvAn4ATgNmCyiPRU1aJKPh8fu4HPcG67R/3KxwGfAnsqaf8wzrX3qIgM0NATUq4DuorIpar6QYhtq8rbOPfaJ8BXQEfgA2BDgLpBfVdwn0ME7nsxxq/9TO88DBiC+0w34P79LgdeFJEUVf2bX5v3vLr/wbkf44ATvXv8w1dJRB4C7gW+BP6C+2FxCfCeiNyqqs/i/v3G4Nyb3wMTgvuIDFTVDjuqdeD+02o5x3+9Og0CtKsPrAJW+JX19trdHWSf24HEMvfcA8wqU38asNHvdSTuYZcOtPQrrwf8CBQBncu0P8wu4A9e+dlBfE7jvbr9gQu9vwd71wZ7ry/wriswvkx7BT71/v6T9/rKMjZmldenX9mpQL5Xvhp4GbgFODGI96DAtBC/H2d57V4tUz7S9z0pUx7Ud8Urf7Vs+0ruE+F9ThlAtFeW6NnxXCXvo69X75EA1z4CDgAJZT6rVyu6px2HHuayM8LJBODnZY6HAFQ121dJROqLSBPcQ+Y74EQRaehdzvDOw0WkaRB9vqKqvjaoag5uBNO5knb9gLbAy6q63a99PvAY7sF1cZk2xcC/y5R9550r668sX+BGZuO81+OAHV55MDyJE+OHRCQ6lI7VzQ31w402E72+nwNWiMgMEekQyv2CwBdp9g//QlX9CCcyZe0L9rtSIWXuE+vdJwn4GmiIG4UBHATygIEi0r6CW16NE5nXRCTZ/wA+BhJwYm9UEXPZGeFkjaqWjcoC
wBOXh3AP+UBC0wg4oKqbRORh4B5gh4gsAr4F3lPVeQHarQ9Qthfn5qmIE7zz8gDXfGVlH8zbVTU3QF8E0d8hqGqhFyDwKxH5E3AF8LyqFolUHgyoqjniwp0n4NyWT4fY/1LgOgARaQcMBW7Aub8mi0g/DV8gSAecmK8OcG0l0NW/INjvSmWdikg8boT4C6BNgCqNwf0IEZHbgaeADSKyAid+H6nqt371T8RFaqZW0G2zyuwyysdGSEaNI+4J+zVuHuA13MP3HNwIyjcfUPJdVNU/40Yct+PmO24A5oqI/3yLj2DnbcJBRX1VJaT8Zdwv9bdwv65frkL7VODPIpJQhf4BUNVNqvo6TpR+BHoCp1T1ftUh1O9KJUwC7sQtRbja7z5PlL2Pqv4HaA/cCCwELgO+EZH/+puHGyH57hPoCPiDzAgOGyEZR4LewEnAA6r6V/8LgSKeAFR1Pe5X/9MiEoubCL9bRP6pqrvDYJNvZNUjwLXuZerUCKqaKiKzcA+ymap6mPuqkvZFInIPLjDjrjDYoyIyBzgNF1QSLtbjHv5dOHxEemKZ16F+VwIGdIhII9x83BuqenOZa2cGaqMuBH8iLqglEheUMNr7zs0D1uDEaLOqrgx0D6N62AjJOBL4RhaHjCJEpCcuQsm/LLHsnIjnJvM9ABqHyaaFuBDqceKtu/H6j6Y0UGFymPqqiP8D7se5KEPGm4eZiRsJBDPnhoj83ItmLFsehwtAAFhRFXvKwfc5HhJCLS6LQdcydYP+rnhkedeTgrxPC9yI27+svojU9y9TFzHpW+zru/cb3vkRT7AOQUTKuuuy/NoaQWAjJONIsBL3y/hu7z/+Ktyv5V/hFnr286s7HJggIv/z6mV5128A5oQ6iigPb3RxK250MU9EJuDCvq8ABuEiqdaEo69K7JiBCyWvDn/EhRefCGRXUhecy6qJiHyM+/xzcHMsV+H+XV735pjCgqp+JSKf4NbkJOFCpjvi/v2X4VyEPkL5roALYLkVeE5EPsNln5ijqhtE5GvgGhE5CMwD2nn32cChc35dgOki8qFnz37cZ3mLV/d7733M8+btxgOLROQ9XGBJC8+u83BRmv62nSkif8T9+FFV9XcBGmUwQTJqHO/hfz7wOG5uoAHuP/5YnHvG/yGzGLc+ZRjO7x+J+8/8CG6tUDjt+kTc4tI/436918M9EG9Q1ZfC2VdNoqo/eOJyUZBN7sQFDJwOjMIFCWTgRgSP4kKpw80VuECFq3EuyqXApTgRLBGkEL8r4NY39cGtH7sc5/UZhxOSa4C/40Lsx+JcbvfiROsVv3tswc3HDcdFBMYA24AXgUe9yE2fffeLyHzgd7g5zga4dWXLvDJ/fg086/Xpm+MzQaoAUbXFw4ZhGEbtY3NIhmEYRp3ABMkwDMOoE5ggGYZhGHUCEyTDMAyjTmBRdlVARApxYl5p+hLDMAyjhIZAsaoG1B6LsqsCIlIMSGJiYm2bYhiGcdSQkZEBbj1WQO+cjZCqxoHExMTE9PT02rbDMAzjqKFRo0ZkZGSU61myOSTDMAyjTmCCZBiGYdQJTJAMwzCMOoEJkmEYhlEnMEEyDMMw6gQmSIZhGEadwATJMAzDqJziYti3HlI/g+//6V6HGVuHZBiGYZSiCge2w+6VsHuFO+9ZCXtWQUFOab2eo6Bx+7B2bYJkGIZxvJKdVio6/kdeRvltImMgpSvkVlCnipggGYZh1CbFxVCQDXlZkJ8FeZnuyM/yyjLLXMuC4oKq96cKWTud8GTvKb+eREJyZ2h6IjTt7s4pJ0LSCRARWfX+K8AEyTCMmkfV/aLOTnMPQS2CqDiIjoUo74iOg6gYVx5RB6a3i4shN93ZW3J49udnQ3GhO4oKoLjIiURJmXcuKSvy6nllBbmlApOfDdRmTlFxrjef6PiOJp3cv8cRxATJMIyqkZ8DOWmHPqgP+3sPZO9151B+1UfWc8IUFeOJViDx8v3tf93XJu5wkfOvV3CwApu9
v3P2OuGsLerFuyMmAWL8/q4XD1H1qnfv2EZ+o56uUK9BeGyuJiZIhmEcSn4OZO7wjp3ufKDM6+w97hd+TVGU7468musiZOIaQ4MUd9SLh8ho57qKiIaIKHdERpX+XVLmu+5XNyoGYhr6CU081EsoFZ/oBnVjlHiEMUEyjOOJokI3ib1/Y6m4+IvPgR0VT2iXR2QMxDeFBslQP9l7cCeXPsAPeZ3sHsqFuVCY50Yrhbml58Jc59IqPOidcw+9XnDQtSu5HmS9ojLqFt2gjI2B7PXK6jdxwmLUKCZIhnEsk50GW+bC1rmwZR5sX3ho6G5l1G8CCS28o7nfufmhD/F68SASmm31GhxZV1FxcaloRcXUGTeVUYoJkmEcK/hGP1vnOhHaMhf2bwhct158qbg0bOknNi0OFZ0jPKldo0REQL367jDqJCZIhnG0kr0Xts4rFaBtC134cFkioqFFb2h9CrQZ4M6JrUMf0RhGDWOCZBh1HVXI2OK3cHEFbJ0P+9YFrh/fvFR42pwCLU52kWWGUccxQTKMuoIqZO12grMn1W8FfapbHBmIiCho3tsJT+sB7pzYxkY/xlGJCZJhBEvOPti1DHYtdyHJwax5KXvdt8L94P7D07XsXgEH95Xff0Q0JHdxa0d8LriWJ7v7GsYxgAmSYZTF5yLbsQR2Li09MjZX/94RUS5EOtBcjw+JgKQOh6dsadLRQo+NYxoTJOP4pqjAZTHeuRR2Lik9l5s4UpxYxCQEt9alLL7UMj4S23rC061UfJK72KjHOC4xQTKOH4oKnOBsnQ87F7sR0J5U534LRFSsE4nmvbyjNzTr4VbSl0dxsROlihZxFua5tTsp3SC2Yc28V8M4CglJkESkPtAVaIrLBrgHWKWqIay0M4wjRNbuMotCf3KCEIi4JCc6LXo74WneC5p0dqlgQiEiAiLibIRjGFWg0v9tItIYuA64HOgXoE2hiCwA3gVeU9X94TbSMCqlqMAFHGzxW5eTvilw3YatXTBAi5NKxadhS4tMM4xaplxBEpFE4C/Ar4FYYBXwFrAO2AsIkAR0AgYB/wIeEZFngYdUNfy7NxmGj6w9pcKz1Rv9BEqJE1nPrcPxD4tu2PLI22sYRqVUNEJah8u1+zfgTVUtJweJQ0Q6AGOAm4BxQHK4jDSOc1Rh33rYPAs2zXTnfesD123YqlR4Wp/iXHDHUvobwziGqUiQHgBeUNWgEsCr6nrgfhH5O3BzOIwzjlOKi92anE0zYfNMd87adXi9yHrO7XZISpxWR95ewzDCQrmCpKr/rsoNPQF7qsoWGccfhfmwY5ETnk0zYcvswGHXsY2g7anQbjC0HeTmfywljmEcMwQdQiQi9wEfqOqycq73AEap6gPhMs44BlF12zZvX1gqQFvnB45+S2hRKkDtBrvFocfhpmWGcbwQSkzreGAtEFCQgJ7AX3GuPuNYRdWlucna5XYMzcvyzgf8/s5yudfyMgOUZZW/LXRSB2jriU+7U6HxCRb5ZhjHEeFcGBsLFFZayzj6UHUh1cv+B8s+KD+cOiTELTJtN7h0FJTQPAz3NQzjaKVCQRKRhkAjv6ImItI2QNUk4GpgSxhtM2qbPath+QdOiNJWH3pNIqBegstaUC/e75zgjgrLEiC5E8Q1rp33ZRhGnaSyEdIdwH3e3wo86R2BEODuMNll1Bb7N7pR0LIPYNfSQ68ldYCeo6DHpS7nmrnTDMMII5UJ0jTvLDhh+hBYUqaOAlnAbFWdGVbrjCPDge2w/EMnQtvmH3otsQ30uMQJUYuTTIQMw6gxKhQkVZ0OTAcQkXbAf1R1zpEwzKhhsvbAio+cEG2aiftd4RHfzIlQj0vdIlOLbDMM4wgQdFCDqo6rSUOMI8DB/bDyUzcntGE6aHHptbgk6H6xGwm1G1y6kZxhGMYRItRs3wm4eaWzgGbAtao6S0SScTnv3lXV1PCbaVSZvExY9YUTobXfQnFB6bWYhnDihW4k1GGobf5mGEat
EsrC2BTgB6ADbj1SByAOQFXTRGQsLiLvzhqw0wiFgoOw+isnQmu+dvvy+IiuD13PdSLU6UzLdGAYRp0hlBHSQ0BzYCCwGdhd5vpkYESY7DJCpTAP1n3nAhNWfe4Wo/qIjIHOP3fuuC5nQ70GtWenYRhGOYQiSBcAz6nqQhFpEuD6ety+ScaRoqjQzQUt/wBWfnJo/reIKOj4MzcS6nYexCbWnp2GYRhBEIogJeNcdeVRjMvWEDQiEoNLNTQGaAwsBu5V1W+DaHsm8GegFxABpAJPqOq7ZeolevVGAq2BncBXwAOquj0Ue+sEqrB5Nix9D1ZMhpy00msSAe1PdyOhEy+C+km1Z6dhGEaIhCJIO4GOFVzvg3PlhcKrwCjcYtu1uBHWFyIyVFVnlddIRC4APgZm4vLnAVwJvCMiCar6klcvAic+PYHngNVAF1wAxggR6Rns9hp1grxM+PROWPruoeVtBjkR6n4xJDSrHdsMwzCqSSiC9DlwvYg8DeT7XxCRgcC1lJ/F4TBE5BSciNyhqk96Za/jkrc+CgypoPlvgB3ACJ+giMiLOLfhtcBLXr0BuDmvW1X1Wb++NwNPA4OBqcHaXKtsXwTvjyvdmK55b+h1uVsv1KhN7dpmGIYRBkIRpPuBi4CfcKMTBcaKyI3ApcB2nJAEy2VAATDRV6CquSLyEvCwiLRQ1R3ltG0I7Pcf3ahqnojsBw6WqQdQdne3nd45wJ4HdQxVmPMCTPkLFOW7TenOehhOudGyJhiGcUwRysLYnSIyCHgG+CUundAYnDB9DtyiqvtC6LsPkKqqWWXK53r3Phk3CgrEdOAeEXkQ5/YD5+7rglsn5WMBLq3RgyKyD1gFdAUexI2M6nbWiZx9MPk3LmoOoEknuOwVty23YRjGMUZIC2NVdQtwsZcFvCtOONaGKEQ+WgDbApT7RKhlBW0fxs1n3YsLWAAnPBep6hQ/e/eJyJXAi4B/oMQnwBWq6pcvpxQRSa/E9poPWds0C/53PRzwPqLeV8L5/3TZsg3DMI5BqrQfkqoeAOZVs+84IFBAQa7f9fLIwwUovIdL+BoJ3AS8KyIjVNXftt24kdJMYAVu5HU38DIwujpvoEYoLoLv/wXTHnGpfaIbOCE6ue6ZahiGEU5CydTQCeikql/6lQ3EjVCSgNdUdUIIfR8EYgKUx/pdL4+ngVOAAaouIZuIvAssxwVWnOaVdcBlLL9KVSd7bSeLyEbgVRF52X9E5UNVG5Ut88cbQYV/lJS5Ez64ETbMcK+b9YLLX4HkzmHvyjAMo64RShrnR4E/+l54+eu+AM7GhVU/LyIjQ7jfDpzbriy+soBrhESkHnAD8KlPjABUtcCz5xQR8QntdTjR+7zMbT72zqeFYG/NsvYbeP60UjEacCPc8I2JkWEYYWVnRi6rd2VSzoxFrRKKy64/4D8CGo2LYjsZ5z6bBtwGfBTk/RYBt4lIfJnAhoHeeXE57Zrg7A6Ujjrau+YLP2vm/V1WeH1ZRMO5hXvVKCqA7x6EH59yr2MT4aJnoPtFtWuXYRjHHLszcznriekcyC2kecNYhndLYVjXppzeKZkGMbX/OAxlhJTCoaOWc4AfVXWZquYD/wW6h3C/93HCcIOvwMvcMM6773avrK2IdPNrtxtIBy4VkWi/tvHAhcAyb7QETigjgMvL9O2bkPkpBHvDz/5N8PI5pWLU+hS4+QcTI8MwaoRJczZzILcQgJ0Hcnl77hZ+9cYC+jwwhWsmzuGlHzawIS271uwLRRKzcdm8EZFI4HTg337XD1K67qdSVHWOiLwHPCYiLYB1wFigHYfmxHsdGIo36lHVIhF5HJfsdZaIvIkbLV2PSw10l1/bV73XL4lIf1xQQ1+cCC6h1HV35FkxGSb/FvK8/HOn3wHD77UtIAzDqBHyCot4c7ZLpnPlgDZ0SGnAd6m7mb9xP/lFxfywNo0f1qbx4KcraN+kPsO7NWV416YM7JBETNSR2R9NgvUjisg03ET+
mbgRx7PAcFWd4V1/CLc/UtugOxeJxa0JugaXy24J8CdV/aZMv0NVVcq0vQrnIuyCmydaAjyqqh+WqdcKly9vONAK2IsL+/6Tqu4N1tYy90xPTExMTE+vLDo8AAUH4as/wfyX3esGKXDJC9DJEqUbhlFzfLBwK3e+u5jICOH7u4fTspELZD6QW8APa9L4LnU301btIS3r0ODn+vUiOa1TMsO7NmV4txRaJFYUAF0xjRo1IiMjI6O8wLFQBOl83BYTPmH4CRflpt71ucB2VQ0lsOGopFqC9N51bttwgA7D4JIJln/OMIwaRVW56JkfWbotg/N7t+DZq/oGrFdcrCzbnsHU1D18t2o3S7amU1YiTmzRkOFdUxh9SlvaJNUPyY7KBCmUTA2ficjPgIuBDOAZPzFqAmzFudeMihjyB1gzxbnoTr8TIkKZxjMMwwidhZv3s3Sbmx745Wnty60XESH0bt2I3q0bcduZnUnLymP6qj1MXbWbGav3cCC3kJU7DrByxwHO7tE8ZEGqjKBHSEYp1RohgUsJZFtDGIZxhPjNpIV8tmQHvVol8vGtpyFVyINZWFTMgk37mbpqD4u27GfSDYOIiAjtPmEbIRlhxMTIMIwjxI6Mg3y5zOWTHnda+yqJEUBUZAQDOzRhYIdA+7OGB/MXGYZhHMO8MWsTRcVKcnwM5/cOlIug7mCCZBiGcYySW1DE23NdqPfVA9sesfDtqmKCZBiGcYwyedE29ucUEB0pXD0o6BU5tYYJkmEYxjGIqvLKjxsBuKB3S5omxFbcoA4QtCCJyBARSangerKIVLTtuGEYhnGEmLV+L6k7MwEXzHA0EMoIaSrw8wquj/DqGIZhGLXMq97oqF+7xvRuXeGOOnWGUASpsljBSKC4kjqGYRhGDbNlXw5TVu4C4LrB7WvXmBAIdQ6polW0g4G0athiGIZhhIHXZm5EFZo3jOWcns1r25ygqXBhrIjchktg6uNJEXk4QNXGuEzfL4fRNsMwDCNEsvMKeWf+FgDGnNqO6MijJ3atskwN6cAm7+/2uEzZu8rUUWAZMBt4IpzGGYZhGKHxwcKtZOYWEhMVwehT6n6otz8VCpKqvga8BiAiG4D/U9Xa20PIMIzjmszcAn739k80blCPR0f1PuK//g/kFnDnO4tQhdvO7FznggWKi5VXZ24EYOTJrUhqUK92DQqRULJ9n1CThhiGYVTGk9+sYeqqPQA0qBfFgyN7HrG+i4qV297+qaT/b1N3c9FJLfnD2V3DnvW6qny/No11e9yOr9cdJaHe/oSyDqmJiJxYpuwEEXlaRN4SkbPDb55hGIYjdeeBkl//AG/M3sSbszeV3yDMPPZlaokYtfUE6OPF2xnxz+k89OkK0nPyj5gt5fHKjxsAGNQhiRNbBL2Bd50hlPHuU3juOwARiQe+B34DjAY+s4WxhmHUBKrKfR8tp6hY6ZDcgJ93d5tajv94ObPWVWnj55D4YOFWXpixHoAbTj+BqXcN4/HLT6JFYiz5RcVM/GEDQx6byoQZ68gtKKpxewKxfk8W0zzBHHfa0enQCkWQTgU+93t9BdASOM87rwTuDp9phmEYjo8WbWPuxn0A3H9xD5684mS6NU+gsFj59VsL2Lw3p8b6/mnzfv7vg6UADOmSwv+d243ICOGyfq2Zetcw7j6nKwkxURzILeSRz1MZ8c/pfPTTNoqLj+xec695o8fWjeM488SjcxfqUASpGbDF7/W5wHxV/VJVdwKvAn3CaJthGAYHcgt4+LNUAM7v1YIzOqfQICaKF6/tT1KDeuzPKeDG1+eTlVcY9r53ZuTyqzcWkF9YTIfkBjw9ug9RfoEUsdGR/HpYJ6b9YRjXDW5PVISwLf0gt7+ziIue/YGZa4/M0swDuQW8v2ArAGNPbU9kiBvn1RVCEaQCIM7v9VBgut/rdKDmdm4yDOO45Ikpq0nLyiMuOpJ7zy+dxm6TVJ/nru5LVISwalcmd7yzKKyjktyCIm56Yz67
M/NIiI3ixbH9SYyLDli3SXwM4y/qwTd3DuX8Xm7PoWXbDnDVxDlc98pcUnceCJtdgXhv/lay84uoXy+SXwxoU6N91SShCNJqYJQ4LgKSgG/9rrcB9oXTOMMwjm9WbD9Q4or63YjOtGwUd8j1QR2a8MDFLtJuyopd/GvK6rD0q6rc/f4SlmzNIELgmav60jElvtJ27ZMb8OzVffnw14MZ0L4xANNW7eG8p77n7vcXszMjNyz2+VNUrCWf0ai+rcsVzaOBUATpWdyoaD/wPrCeQwXpDGBp+EwzDON4RlW5b/IyihU6pDTg+tMDT9RfNbAt157aDoBnpq7lk8Xbq93389PX8bF3nz+ddyJDu5S70UFA+rRtzLu/OpUJY/rRIaUBxQrvzt/KsMen8vhXq8jMLai2jT6+S93N5n1uDm3sUZS3LhBBC5Kqvg6MxYnQm8C5qloALiQcaAS8WxNGGoZx/PHBwm3M37QfgAcu6km9qPIfV3+5oDundnAzBne9t5ilWzOq3O83K3bxj69WAXBZv9blCmFliAhn9WjO17cP4aGRPUmOr0duQTHPTF3L8Men8dFP21Ctvovx1Zku1HtIlxQ6Na18FFeXkXB8IMcbIpKemJiYmJ6eXtumGEaNoarsycxj1a5MVu3MZPWuTFbvymLd7ix6tkpkwrX9SIitGfdQxsECRvxzGmlZ+ZzfuwXPXtW30jb7s/O5+Nkf2bwvh+YNY/n4t6eFvCnd6l2ZXPLsj2TnF9G3bSPevmlQ2Lb9zsor5MUZ65kwYz0HvdDwwR2b8ODInkG5AwOxamcmZz85A4BXxg1geNemYbG1pmjUqBEZGRkZqhowxUWVBElEOuGi7papatV/ihylmCAZxxrpOflOdHZnsXpnJqt2OQFKzynftXRK+yRe++UpxNULzwPbn/EfL+fVmRupXy+Sb38/lBaJcZU3wgnKpc/NJCuvkD5tG/H2jYOIjQ7OPn9Ba5EYy+RbQxe0YNiRcZAHPlnBF8t2AlAvMoKbh3Xk18M6Bm2rj3s+WMLbc7fQIbkB39w5lIg6Hl0XVkESkQtwC2Tbe0U/V9XvRKQpMBOX6+79atpc5zFBMo5mNu3NZs76fSWis2pnJrsz88qtHyFusr5rswQ6N1JOuI4AACAASURBVEtAgKe+XQPAGZ2TmTi2f9hGEQDLt2dw4dM/UKxwz7nd+NXQjiG1/2bFLm58Yz6qbpL/8ct7I1Lxg7qgqJhrX5rLrPV7iY2O4P2bB9OzVWJ13kalfJe6i/smL2fr/oMAtG9Snwcu7smQIOer9mfnc+rfvyW3oJj7L+pxVMwfVSZIQeeyE5FhwIfAIlzGhvG+a6q6W0TWAVfiAh4Mw6iD/LAmjXGvzqWgKPAP0VaN4ujaPIEuzRLo2jyeLs0S6JgSf9gv9+T4evxl8nK+X5PGb976ieev6RuWRKfFxcp9k5dTrNCpaXyVMg6c2b0Zfzi7K499uYr/LdxKt+YJ3DikQ4VtHvx0BbPWu4wP/7jspBoXI4CfdWvGqR2Sefq7NUyYsZ6Ne3O49uW5XHhSS/5y/ok0bVjx6Oy/87aQW1BMQkwUo/q1rnF7jwRBCxJwH7AYGIjb/2h8meuzgGvDY5ZhGOFmX3Y+d767iIIiJTEuml6tEg8Rns7NEoiPCe6RMObU9hwsKOKRz1P5ZuUu7nhnEU9d2afaCzL/t3ArC0oCGXpUGMhQEbcM7ciqnZlMXrSdv32xkk7N4sudX3lrziZen+Vy4v32Z5248KSWVTO+CsTVi+Tuc7oxsk8r/vzhMuZu3Mcni7czLXU3fzinK1cPbBfwMy0sKuaNWRsBuLx/m6D/3eo6obyLAcB9qlpczvB3K3D0bE1oGMcRqsof/7eE3Zl5NKgXyce3nka7Jg2qdc+bhnQkJ7+IJ79Zw6dLdhAXHcmjo3pXeR4jI6eAv3/hMjJceFJLBndKrrJtIsKjo3qzIS2bJVsz+N2kn/jwN6cdFoU2e/1e/jp5OQA/
796MO87sUuU+q0OXZgm886tBvL9gK498vpL9OQXcN3k57y/YysMje9Gr9aEjtq9X7GJ7Ri4iR9cW5ZURys+PCKB8RzMkA7Wf7tYwjMP477wtTFnh9ta8/+Ke1RYjH7eN6MyvPHfYewu2cv8ny6scyvzPKavYm51Pg3qR3HveiZU3qITY6EgmjOlPSkIMmXmF3Pj6fDL8gjS27MvhljcXUFisdG2WwBNXnFyrQQEiwuX92/Dd74dxRX+XbWHJ1gwufvYHxn+8nAN+a5d8Wb1HdGtG2yZ1Y+uLcBCKIK3ELX4tjwtwLj3DMOoQ6/Zk8cAnKwC4oHcLRvVtFbZ7iwj/d263koWpr83axKNfrgpZlJZtyyjZSuL2M7vQPDE80W3NE2OZMKYf9aIi2JCWza1vL6SwqJhsT6D25xTQuH40E8f2rzNur8YN6vHoZb15/+ZT6dosgWKFV2du5Mx/TufTJdtZti2DeRudW3PcUbjnUUVUKEgi0lZEfPGWLwGXicj1fu1UROqLyL9x2cAn1JyphmGESn5hMbf/dxEHC4pomRjLwyN7VRpxFioiwvgLe3CZN7H+n+nrePq7tUG3Ly5W/uJlZOjcND7sG8v1aduYv1/aC4Dv16Tx8OcrufPdRaTuzCQqQnju6n51ZoM9f/q3T+LT353OPed2Iy46kt2Zedw66SeueWkOAF2bJTC447GVPrSyEdIG4BIAVX0eeAd4EVgDKPA2kAHcCryqqm/VnKmGAUu3ZnD2EzP41RvzWbMrs7bNqfP8a8pqlm7LQAT+dcXJJNavmYWsERFuzuaC3i1K+p34/fqg2r6/YCs/bXZLKB64uGeNbEt+ad/WJa7FV37cyFfLnfty/EU9OLUOP9SjIyP41dCOTLlzCGee6IIyfGvDrjutfdh/XNQ2lf3LH/JuVfUaYBQufVAqLpnq58Dlqnp9jVhoGB5rdmVy7ctzWLUrk6+W7+LsJ2dwzwdL2X0g/AkrjwVmrkvjhRnrABd1NqhDzT54IyOEJ644ueTB+dBnKyvd0TU9J5+/f+kCGS4+uWWNisPd53RjeNfSNT5jBrXjmkHtaqy/cNK6cX0mjh3AhDH96JjSgD5tGzHy5PC5XusKFS6MFZFi4BpVnXTkTKr72MLYI8+WfTlc9p+Z7DqQR1KDeiTGRbMhLRuA+vUiufGMDtw0pAMN6sg8QG2TnpPPOU9+z84DufRuncj7Nw+ucgh1qOQWFHHj6/P5fo3bC+ifl59U7jqZP3+0lDdnbyY+Jopvfz+UZpWsvakuB3ILuPfDZTSuH81fLuheI6Mxo3wqWxhr/xpGnWfXgVyunjiHXQfySIiJ4vVfnsLXdwzhgYt70KRBPXLyi3jq2zUM/cc03py9icKi4to2uVZRVf704VJ2HsglLjqSp67sc8TECFx02wtj+pVsv/CH9xfz+dIdh9VbujWDt+ZsBuD2MzvXuBgBNIyN5unRfWrMNWhUj2BGSC/gFr0GhZcV/JjGRkhHjv3Z+VwxYRard2URGx3BG9cPZED7pJLrmbkFvDB9PRN/WE9ugROijikN+OM53fh592bHnI89GN6bv4U/vL8EgEdH9eKKAW1rxY7M3AKumTiHxVsziIoQJlzbj591c1trFxcrlzw/k8Vb0unaLIFPf3e6CcRxQLVy2XmCFGz8pgCqquHPtFjHMEE6MmTmFnD1xDks2ZpBdKQwceyAcvel2ZFxkCemrOa9BVvxfaVPaZ/EPed1o0/bxkfQ6tplY1o25//7e7LzizinR3Oev6ZvrYpyek4+V06YTerOTOpFRfDKdQM4rVMy/527mf/7wG2f9s5NgxhYw/NbRt0gHIL0AjA72A5V9bWQrTzKMEGqeXILirj25bnM3bCPCIFnr+rLud7W0BWRuvMAf/8ilWmr9pSUnd+rBXef0zVsi0HrKgVFxVz2n1ks3pJOs4YxfHnbEBo3qFfbZpGWlccvXpjF+j3ZxEVH8vToPvzh/cXszyngkj6teOKK
k2vbROMIEQ5BsqCGMpgg1Sz5hcX86o35TPVE5fHLTypZ4xIsP65N45HPV7J8+wEAoiOFqwe243cjOpNUBx7SNcE/v15Vsv7nrRsGclo1Uu+Em50ZuVz+wky27DtYUpYQE8W3dw2tkS0ejLqJBTUYRxVFxcod7y4qEaPxF3YPWYwATuuUzCe3ns6TV5xMq0ZxFBQpr87cyNDHpvLctLXk5BeG2/RaZe6GfTw71YnRTUM61CkxApcxYdINg2jhl4Hhjp93MTEyDqFWBUlEYkTkURHZLiIHRWS2iIwIsu2ZIjJNRPaKyH4RmSUivyinbgsRmej1kysi60TkX+F9N0Z1UVXu/XApny1xEVm//3kXrqvC9gM+IiKEkX1a8e3vh/Kn87rRMDaKzLxCHvtyFSc/MIUxL83hxRnrSd15ICxbSdcWGQcLuOOdRRQr9GjZkN+fVTsJQiujTVJ93rxhIB1SGjCkS0pJuiHD8FGrLjsReRu30PZJYC1wHdAfGKqq5Ub2eRsFfozbFPC/XvGVwGnADar6kl/ddsCPwAHgdWAP0Aboqqqjq2i3uezCjKry8GcrmfiDSxp505AO3HNut7BOyKfn5PPMd2t5fdYm8suEhqckxHBG52SGdE7h9M7JJMfHhK3fmua2//7E5EXbiY2O4NPfnk6npgm1bZJhBKRGtjAPByJyCjAHuENVn/TKYoFlwHZVHVJB2y+A3kAHVc3zymKA9cBaVR3qV/crIBEYrqoHA92vCrabIIWZp75ZwxPfrAZg9ClteeSSnjUWHZaek8+Pa/fy/Zo9zFi9h+0Zh2d66NGyIWd0TmFI52T6tW8c8o6oxcXK7sw8Nu3NZtO+HDbvzfHO2Wzdf5BWjeM4u0dzzu3ZnA4p8ZXfsBw++mkbt7+zCICHRvY8ajIPGMcnVRYkERmhqt9WpVMROVNVv6mkzmPA7UCSqmb5ld8DPAy0UtXDV9O5Oj8Ciaras0z5MmCrqp7jvT4RWAGcp6pfiEh9IF9VqzWBYIIUXl7+YQMPfOqyUV94UkuevOLkam/0Fiyqyro92Xy/Zg/fr0lj1rq9HCwoOqROXHQkAzskMaRzCkO6JNMxJR4RIb+wmG3pB9m0N5vN+3LYtNcdm/e51751UZXRrXkC5/Rsznm9WtC5aXzQQrxlXw7nPfU9mXmFnHliU168tv9xue7KOHqojiAVAN8D/wK+UNWigBVL60fjtqC4HThVVSsMZRKRKUAzVe1dpnwE8A2eiJTT9hHgHuAh4FWv+Drgj8D5qjrFq/db4N/ACOAxoB9uz6aPgV+r6h6qgAlS+Hh3/hbu9hZxjujWlP+M6VerCyTzCotYsGk/369J4/s1e1i27cBhdZo3jCUqUtiefpDiIBwMzRrG0C6pAW2b1KddUn1aNopj6bYMvly2k51l8vB1SGnAeT1bcE7P5vRo2bBcgSksKuaKCbNZsGk/yfExfHX7GTQ5ityMxvFJdQSpJ06MzsTNu3wDzAXW4ZKqCpAEdAYG4R76jYCvgd+r6oqKDPNGM9tU9ewy5d2B5ZSZCypTpwHwMnA5pQlgs3BJXr/0q/cU8Dtgr2fX/4DuwL041+DAQEIrIpUpTWJiYiImSNXj86U7uHXSQooVBnVI4tVxpxAbXbfWVadl5fHj2jRmrHYCtTvz8D0qoyKENkn1aZtUn3ZNfOcGtGtSnzaN6xNXL/B7Ki5WftqSzpfLdvD50p1sSz/Uo9w2qT7n9mzOOT2bc3KbRoeIk7+L87VfnlLugmHDqEtUew5JRE4Ffg1cDMRzeOYGwQUMfAA8r6rzgjFMRNYBy1X1ojLlHXCi91tVfaactlHAX4EuwIdAJHAT0AcY4bNBRF4Cfgl8qarn+rX/NfAsMFJVJwe4vwlSDTNt1W5ufH0+BUXKSa0TeevGQXVmg7TyUFVW78pi9vq9REUK7ZKc6LRIjCWqmqM6VWXZtgN8sWwHXyzbWZI41kfLxFjO7tmcc3u2IELgigmzKSpWfnna
Cdx3Yfdq9W0YR4qwBTWISCTO5dUdSMEJ0x7cSOMnVQ0po2U1R0jPA6cAA3z9ei7D5cAeVT3NK3sG+A0w1j/HnogkAunAP1T17lDs9tqby64azN2wj2tfnkNuQTFdmyXw35sG1YmMAnUFVWXVrky+WLqTL5btYPWurID1ujVP4KPfnFbnRpWGUR6VCVLQP0k919Zc7wgHO4BAuWB8ZdsDNRKResANwCP+IqiqBV703a9FJMoLXPAFRezyv4eqZohIHnD8JDmrI2zam831r84jt6CYdk3q88b1p5gYlUFE6Na8Id2aN+SOn3dh7e4svvRGTr7ME/WiInjqyj4mRsYxRW36SBYBt4lIvH+UHTDQOy8up10TnN2B/idGe9d8zvYF3vmQnaxEJBmIwY3wjCPIe/O3kplXSHJ8DG9eP5CmR2DLgaOdTk3jufVnnbn1Z53ZvDeH6at3071lQ7o2t/VGxrFFbWZqeB8nIDf4Cry1ROOAH1V1u1fWVkS6+bXbjXO3Xeq56Xxt44ELgWWqWuAVTwPSgHEi4v9eb/TOFYamG+Hnu9TdAIzq24o2SfVr2Zqjj7ZN6jPm1Pb0a5dUeWXDOMqotRGSqs4RkfeAx0SkBS6QYSzQDhfC7eN1YCjeqEdVi0TkcVzI9ywReRM3WroeaA3c5ddHroj8EXgJ+EpEPgJOBG4BPlPV72r2XRr+7MzIZcUO53Ia3q1pLVtjGEZdo7bDmq4FHvTOjYEluPVHP1bUSFUfFpENwG24aLsYr+2lqvphmbovi0g+bo3Sv3Ah4E8Cfw7zezEqYdoqNzpKiI2iXzubvjMM41BqVZBUNRf4g3eUV2dYOeWTgKBy7Knqm8CbVTDRCCM+d92Qzim2O6hhGIdhTwXjiJBXWMQPa9MAc9cZhhGYoAVJRP7szfUYRsjM27CfnHyXFMOyChiGEYhQRkgPAJtF5BMRGektlDWMoPC5605qnUhKguVcMwzjcEIRpIG4aLUzcDnhtorI30Wkbu4GZtQppnoBDeauMwyjPIIWJFWdp6o34zIpjANWA3cDK0VkhoiMEZG4GrLTOIrZkJZdkptteFcTJMMwAhNyUIOqHlTV171N8LritnXoiNsGYoeIPCciJ4fXTONoZqrnrkuOj6FXq8RatsYwjLpKdaPsNuDS86zELVyNx2VBWCAin1kQhAGl7rphXVOIOEIb7xmGcfRRJUESkR4i8i9cAtR3gG64zAkdgDa4HV+H4/YsMo5jsvMKmbN+H2DuOsMwKibohbFerrjRuBQ9A4Bi4EtgAi4Nj//2E/eJSBYui4JxHPPj2jTyi4qJihDO6JJc2+YYhlGHCSVTwy4gFtiKCwF/SVW3VlB/E2BBDsc5Pndd//aNaRgbXUltwzCOZ0Jx2U0BLgJOUNX7KxEjVPUdVbVMEHWMrLxC/vThUqavrvmdN1SVqamun59ZuLdhGJUQStj3SFUt65ozjjLenrOZSXM287u3fyK3oKhG+1q5I5OdB3IBmz8yDKNyQkkdNEJE/lbB9b+JyPDwmGXUFEu2ZQCQcbCATxYH3JQ3bPjcda0bx9GpaXyN9mUYxtFPKC61PwKdKrh+glfHqMOs2J5R8vebszfVaF++9Uc/69YUEQv3NgyjYkIRpJOA2RVcn+PVMeooOfmFrPcyJgAs3prB4i3pNdLX/ux8Fm7eD5i7zjCM4AhFkBKB7AquH8RtsmfUUVbuyETV/d3W2z68pkZJM9bsoVghNjqCUzs2qZE+DMM4tghFkLYB/Sq43g/YWT1zjJrE565r1SiOm4Z0AODjxdtJz8kPe18+d93gjsnERltieMMwKicUQfoMGCsiZ5a9ICIjgLHA5+EyzAg/y7cfAKB7y4aM7NOK+Jgo8gqLeX9BhRH8IVNUrCVh5cO72t5HhmEERyiC9DCwB/hKRD4VkYe841Pga+/agzVhpBEeVuxwgtSjZUPiY6K4tG8rwLntios1bP0s2rKf/TkFgG03YRhG8ISyDmkXMBj4
CjgX+JN3nAt8AZymqjtqwkij+hQUFZO6MxOA7i0aAnDNoHYAbNybU7K9eDjwLYbt0iye1o3rh+2+hmEc24SUSUFVN6nqeUAybsO+gUCyql6gqhtrwD4jTKzbk0V+oVvT3MPbAqJLswQGnpAEwBthDG7w7Q5royPDMEKhSql9VHW/t2HfPFXdH26jjPCzfJtz1zWqH03LxNiS8jGnulHStyt3sT39YLX72ZmRW+IatHBvwzBCoarbT8SLSGsRaVv2CLeBRnjwBTT0aNnwkEWqZ3VvTkpCDMUKb8/dXO1+pnnZGRJio+jXzlYBGIYRPCEJkohcKSLLgAxcNu8NAQ6jDrJihwv57tHy0B1b60VFMHpAGwDenrulxK1XVXzuuiFdUoiOtNy6hmEETyi57EYCk3BbVryA2yH2beA9oAC3c+wDNWCjUU1UlRV+I6SyjB7YlsgIIS0rj6+WV30pWV5hUUlwhLnrDMMIlVB+wt6F26r8ZOA+r+xlVb0S6A90BRaF1zwjHGzdf5ADuYVAaYSdPy0S4zjzRCcg1QlumLthHzn5RYi47coNwzBCIRRB6g28pqq5uN1iASIBVHUZbufYe8JrnhEOlnsZGmKjI+iQEjjr9phB7QEnKqu88PBQ8YV7927diOT4mCrdwzCM45dQBCkS2Ov97QvH8p+QWAX0DIdRRnjxBTR0a96QyIjAWbcHd2xCh+QGQNXz2/m2m7DsDIZhVIVQBGkr0A5AVQ8Cuzk0t11XKk6+atQSyyuYP/IRESFc7S2U/WDhVrLyCkPqY0NaNhu8TOK2O6xhGFUhFEGaCfjnsfsYuF1E7hOR8cBvgGnhM80IF6UBDYkV1rusb2tioyPIzi/iw5+2hdSHL5lqcnwMPSvpxzAMIxChCNJzwDQRifNe34tz043HBTmswwU+GHWIvVl5JduId69ghASQWD+ai0/y8tvN2oRq8PntfO66YV1TiCjHLWgYhlERoeSym6eqf/LcdajqHlU9GRd11ws4SVW31JCdRhXxuesiI4RuzRMqre/L3LBqVybzNgaXhCM7r5A56/cB5q4zDKPqBCVIItLAc82dXfaaqi5R1eWqWr0VlUaN4BOkjikNgtqXqGerRE5q0wgIPgT8x7Vp5BcVExUhnN45uerGGoZxXBOUIKlqNi6zd5uaNccIN76Q78rmj/wZ4wU3fLlsB7szcyut73PXDWifRMPY6CpYaRiGEdoc0jqgeU0ZYtQM/nsgBcsFvVvQqH40BUXKu/Mq9sKqasn6o+HdLNzbMIyqE2pQw40i0qSmjDHCS3ZeYUkodqAMDeURGx3JL/q7wfCkOZspLCrfG7tyR2ZJ0ITNHxmGUR2iQqibCewDVonIa8AaIKdsJVV9PUy2GdUkdecBfIFylUXYleXqgW2ZMGM92zNy+S51N2f1CDw49rnr2iTF0bGcLBCGYRjBEIogver39x3l1FHABKmO4AtoaNUojkb164XUtl2TBgztksL01Xt4Y/amcgWpZDO+rk0P2dbCMAwjVEIRpOE1ZoVRI/g25Qtl/sifMYPaMX31Hr5fk8aGtGxO8FIL+difnc9Pm11ouO0OaxhGdQlakFR1ek0aYoSf0oCGqmVOGN6tKa0axbEt/SCT5mzi3vO7H3J9xpo9FKtL2npqB5taPJKoKmlpaeTm5lJcbCsujNolIiKC2NhYkpOTq+UpsR3UjlEKiopLsnaHOn/kIzJCuGqg2wT43flbyS0oOuS6z103uGNyUGucjPCgqmzbto20tDQKCgpq2xzDoKCggLS0NLZt2xZShpeyBD1CEpH7Kq+FquqDIdwzBrep3xigMbAYuFdVvw2i7ZnAn3FZIiKAVOAJVX23gjYDgVm4zQUbq2p6sLYebazdnUW+Fx1XVZcdwBUD2vDkN6vJOFjAJ4u3c7kXfVdUrExf7Qv3NnfdkSQtLY3MzEyaNWtGUlJSbZtjGADs27ePXbt2kZaWRkpK1ZaAhDKHNL6Ca4p7yCsQtCDh
AiVGAU8Ca4HrgC9EZKiqziqvkYhcgEvuOhP4q1d8JfCOiCSo6ksB2gjwb1xkYIOy1481fAENjetH0yIxtsr3SY6P4bxeLZi8aDtvzt5UIkiLtuwnPcf9Ordw7yNLbm4uMTExJkZGnSIpKYn09HRycytfTF8eobjsTghwdAbOAb4GZgPdgr2ZiJyCE5G7VfVuVZ0A/AzYDDxaSfPfADuAEar6jKo+A4wAtgPXltNmLNAJOEysjkX8MzRUN/rNl7lh8dYMFm9xg0qfu65rswRaNYort60RfoqLi4mMNBepUfeIjIys1pxmKMlVNwU41qnq18B5QBEwLoS+LwMKgIl+feTiBON0EWlRQduGwH5VzfNrmwfsp3TzwBJEJAH4G26UF1zG0KOcYPZACpZ+7RqXJGb1bd7ny84wzLIzGIYRJsIS1KBuFut9yh+dBKIPkKqqWWXK5+LcfydX0HY60ENEHhSRjt7xINAF+GeA+n8BMoDnQ7DvqEVVWekJUlUDGvwRkZIs4B8v3k7qzgMlEXw/62ruOsMwwkMoc0iVUQ8IJfa3BRBoF7gd3rllBW0fBjri9mT6s1eWBVykqlP8K4pIZ+A2YJSqFgbjvhKRyoId6vQOdFv2HSTT2/E1HCMkgJEnt+Jvn6eSlVfIHe8sBiAhNop+7RqH5f6GYRhhGSGJSH/cQ39lCM3igLwA5bl+18sjD1gNvAeMBq4BFgLvisiAMnWfAGao6qch2HZU45s/iouO5ITk8KTzaRATxai+bvO+ld7oaEiXFKIibeWAcXzQvn17Ro4cWdtmBGT8+PGHzRWLCOPHj68dg6pI0E8TEVlfzpEOzAFScFtUBMtBICZAeazf9fJ4GjdvNVpV/6uqb+G2V9+Ji9jz2XwOLujizhDsQlUbVXTg3H91Ft/8UbcWCUSGcffWa7zgBh/mrjPCzauvvoqIICLMnz//sOv5+fkkJSUhIlx33XUl5Rs3bkREePLJJw9r44/v3iJCREQErVu35qKLLmLhwoXhfitGFQjFZbcZF9btj+JGJquBCaq6MYT77cC57criK9seqJGI1ANuAB7x3xRQVQtE5Avg1yISpaqFwGO48PBMEWnvVW3knduKSJyq7uAYozTCLjzuOh+dmyUwqEMSs9fvQwSGdrWABqNmiImJYdKkSfTv3/+Q8i+++IKMjAyio6u+79bZZ5/NNddcQ3FxMatWreLZZ5/l1FNPZfbs2fTp06e6ptcZDh48SFRUOGdlap5QUgcNC3Pfi4DbRCS+TGDDQO+8uJx2TXB2B4p7jfau+YYFbXELZy8JUHcxbmQ3KES76zy+gIPuLcI/1XXz0I7M2bCPn3VtSnJ8oAGuYVSf8847j3feeYfHH3+ciIhSR87bb7/NsGHDWLBgQZXv3a1bN6655pqS14MHD+aCCy7g+eefZ8KECdWyuy4RG1v19Ye1RW1OALyPE5AbfAVe5oZxwI+qut0raysi/uubdgPpwKUiEu3XNh64EFimqr58KlfjxMj/eMe7dg1wdw28r1olLSuPXQfc1Fy4R0gAw7o25fu7h/PMVX3Dfm/D8DF69Gi2b9/O9OmlKTSzsrL45JNPGD16dFj7GjZsGODcfsHwxRdfcNJJJxEbG0uvXr34/PPPD7m+b98+7rrrLnr16kV8fDwNGzbk3HPPZfHiw39jP/300/To0YP69evTuHFj+vfvz6RJkw6ps3z5ci699FKSkpKIi4tj0KBBTJky5bB7laXsHJJvnmnDhg1ce+21JCYmkpiYyLhx48jJOWwnIV555RX69u1LXFwcycnJjB07ll27dgX1GVWVUFIHXQGcr6oBQ7u9PZI+UdX3g7mfqs4RkfeAx7w1R+twi1fb4TI2+HgdGIo36lHVIhF5HHgImCUib+JGS9cDrYG7/Pr4LICdvnDyz47F1EG++aPICKGrt3Yo3LRuXL9G7mtUj8KiYnZkVH2VfLhpkRhb5aCXzp07079/f95++22GD3cbDUyePJnC
wkJGjRrFXXfdVckdgmf9+vUANGlSeZBwamoqV199Nbfccgtjx45l4sSJXHzxxUyfPp3BgweX3O+j8ltXgAAAIABJREFUjz7i8ssv54QTTmDXrl288MILDB06lBUrVtCypQsgfvHFF/nd737H9ddfz+23305OTg6LFy9mzpw5XHXVVQAsXbqU008/nXbt2nHPPfcQGxvLW2+9xbnnnstXX33FiBEjQn6/o0aNomPHjvz9739n4cKFTJw4kaZNm/Loo6X5CO6//34eeOABRo8ezU033cSOHTt46qmnmDdvHgsWLCAurmYWw4fiYLwVJxrlUQT8FjfyCZZrcamGrsXlslsCnKeqP1bUSFUfFpENuMi+v+KCI5YAl6rqhyH0f8zhmz/qlBJvCU+PM3Zk5HLGY1Nr24wSvr97OG2Sqv7jZfTo0Tz88MM8++yzREdHM2nSJM455xwaN67eUoPc3FzS0tJK5pDuuMNt7zZq1KhK265atYqPP/6YCy+8EIBx48bRuXNn7r33XqZOdZ99r169WL169SGuxjFjxtCtWzdeeukl/vKXvwDw2Wefcd555zFx4sTDO/K4/fbb6dSpE7Nnzy6ZN7vlllvo27cv9957b5UEacCAAbzwwgslr/fu3ctLL71UIkgbN27kwQcf5LHHHuP3v/99Sb1zzz2XwYMH89prr3HzzTeH3G8whPLz5UTgpwqu/wR0r+D6Yahqrqr+QVVbqGqsqp6iqt+UqTNMVQ8LFVPVSao6UFUbq2p9VR0UjBip6nhVlWNxdAThzdBgGLXJFVdcQXp6Ol9++SV79+5lypQpYXHXvfDCC6SkpNCsWTOGDBlCamoqDz/8MJdddlmlbdu2bVsiRgCNGzdm9OjRzJgxg+zsbMAFZPjEqKioiL179xIfH0/Xrl0PieZr1KgRy5cvJzU1NWBf+/btY+rUqVx++eVkZGSQlpZGWloa6enpnHXWWcybNy+gq60yyorJGWecwd69ezlwwD07PvzwQ1SVSy+9tKTPtLQ0OnXqRIsWLZj2/+3deXwV1fn48c+TkIXFBNn3JJCwGEAEFcRAFVCsICIqQqngQl2/FLG/kgIqASpftVjQCr4oKm6AIHyxFVlEC7K4lNaCiKyBACJFhBDW7M/vj7k33iQ3+3Jvkuf9es1ruGfOzJw7TPLknDlzzsaNJT5ncZWkhlQXpxZUEAUqpo3IFFt5jtBgqpbm4aFsnug/82iWZVBfgJYtW9KnTx8WL17MsWPHCA4OZsiQIWUu17Bhw3j00UcREcLDw4mNjS12E1R0dHS+tJiYGLKzszl69CgdO3YkOzubl156iXnz5nHo0CGysn7+tenZLBgfH88nn3xCp06d6NSpEwMHDmTkyJFce+21ABw4cABVZdKkSUyaNMlreU6dOkWdOiWrhbZp0ybXZ3eNMzk5mbCwMPbv3092djZt27b1uv/JkydLdL6SKElAOgTEAa8UsD0Op2u4KYKqkpGlBNcq3z4lF9IyOXTK+SvNAlLNUyswoExNZP5o5MiRPPnkkyQmJjJkyJAS//L1pnXr1gwYMKAcSufdzJkzefrpp3nggQeYMWMGDRo0ICAggCeeeCLXwKOdOnVi7969fPTRR6xdu5b33nuPOXPmMH36dJ5++umcvPHx8QWWtzTTPBQ0MK97HiP34L1r1qzxOjBzWZtMC1OSgLQS+IOIrM87vYOIPADcDfypPAtXHa3ZeZxXNhzgF+0bM/GWYg+OXiy7j5/FPTdWbAV0+Tamst19992MGzeObdu28cwzxZmSrWIdOHAgX9r+/fsJCAigdWtnapbly5dz44038vrruScWOHPmDI0aNcqVVrduXYYPH87w4cPJyMjgrrvuYvr06UycODGnhhIaGlqhATSvdu3akZWVRUxMDJGRkZV2XijZM6TncIYG+quIfCsi77qWncACYC8wsyIKWZ3s+e85dv1wliX/PJJvBtaycj8/anV5bcLrlP7FQWP8RYMGDZg7dy4J
CQkMHDjQ18XhyJEjfPjhhzmfk5OTWbJkCX369KFuXWeatcDAwHyzpr7//vscO5Z76M5Tp07l+hwUFERsbCzZ2dmkp6fTpEkT+vbty6uvvuq1mayims7uuOMOAgICmD59er5t2dnZnD59ukLOCyV7MfaciFyPM43DPfzcgSEZZxTtp1T1bPkXsXoZ1asN8zYeIPliBn/bfox7rmlT9E7FVFEjNBjjS7/5zW+KnXf9+vWcP593AgEYMWKE1+c/JdWhQwfGjBnDY489RsOGDVmwYAEpKSnMmPHzvKSDBw9m+vTp3H///fTu3ZudO3eyaNGifM9kbr75Zpo1a0bv3r1p1qwZe/bs4ZVXXmHQoEFcdpnzOH7u3Ln06dOHzp07M3bsWKKiojh+/DibN28mNTWVTZs2lfk75RUdHc306dN56qmnSExM5LbbbqNu3bokJiayYsUKpkyZwtixY4s+UCmUaFwJVU3BGZrnccBd9/xJyzKJeg3T5LJQbuvagv/7zzEWbk1i+NWtyzyBnpt7hIbYFtZcZ2qm1atX53tRFaBbt27lEpA6duzI7NmziY+PZ9++fURHR7Ny5Ur69OmTk2fy5MlcuHCBxYsXs3TpUrp3785HH33EH/7wh1zHevjhh1m0aBGzZ8/m/PnztGrVinHjxvHUU0/l5OncuTPbtm0jISGB1157jTNnztC0aVN69OjB+PHjy/x9CjJlyhRiYmKYM2cOU6dOJSAggDZt2jB06FBuuummCjuvWCwpORE5Ex4eHn7mTOl6jn/z/RmGvOK8arX4Nz3p3a5REXsULSMrm9hn1pGelc1ro69mwBVNy3xM458OH3YmSYyIiCgipzGVq6h7s379+qSkpKS4BqnOpySjfT8uIp8Usv1jEXm4uMerybq2qp8zj9CbW5PK5Zj7T5wnPcvplRPb0prsjDFVT0k6NdwH7C9k+z7ggTKVpga5//pIANbvPsHR0yV/uS0v9/OjBnWDaRZW9QZVNMaYkgSkGGBnIdt3ufKYYhgY24xmYaGowlufJ5X5eJ4jNJTXMyljjKlMJQlIQfw8eZ43oUVsNx6CAgO49zqnnXXpv45ywTXleGl9ZyM0GGOquJIEpH1AYd0rbqbwwVdNHiOvbUNIrQDOpWbyf19/X+rjZGerxxxIFpCMMVVTSQLSEuBmEZnhmrUVABEJEpFpOAFpcYF7m3wa1A1maLeWACz8PIns7NL1eDyafJHzrhqWdfk2xlRVJQlIs4FNwBTgBxHZIiJbcKYifxrYArxY/kWs3u6PiwTg4MkLbD7wU6mO4X5+VDsokKhGdcuraMYYU6mKHZBcs7DeDPwB+B64yrUcxZl5dYCqpldEIauzjs3CuK6tMwLwwq2HSnUMdw+7Ts0vIzDAOjQYY6qmEg03raoZqvqCqnZT1bqu5SpVnaWqGa4pyE0J3efqAr5x70kST+Yf9qQoP/ews+Y6Y0zVVS7zH4hIDxGZB/xQHseraQZ0akrrBs58LG+Xogu49bAzxlQHpQ5IItJARH4rItuBfwKPABU3c1M1FhggjLkuEoDl//6es6kZxd735Lk0fjyXBtigqsaYqq3EAUlEBorIUuAYTkeHEGAa0EVVy3eCnxrk7qtbUyc4kAvpWSzbdrTY+7mfHwUGCO2b2oS9xlRXkZGR3Hfffb4uRoUqVkASkUgRmS4ih4HVwA3ActfmKao6XVV3VVAZa4Tw2kHc2b0VAG9/cZisYnYBdz8/imlSj9Ag7zNBGlMVHThwgIceeoioqChCQkIIDw/PmR8oPb1i+k99+eWXJCQkUNqBk8tqz549JCQkkJSU5JPz+1qhAUlERonIp8ABIB74F3AH0BJIAKxLVzka0zsSgCOnL/KPPT8Wax97fmSqow8//JAuXbqwYsUKhg4dyty5c/njH/9I06ZN803RUJ6+/PJLpk2b5tOANG3aNK8Bae/evSxYsKDyC1WJipoP6R3gIPAEsERVc6Y4tPHSyl90k3r0bd+YTftO
snDrIW4qxhQSNkKDqW4SExMZOXIkbdu2ZcOGDTRp0iRn27hx4/juu+8qZGK6ksrKyiIzM5OQkMrpXFxZ5/Gloprs0oBI4HbgFhGpXeElquHco4B/nniKvf89V2je82mZHPrpAmBdvk318ac//YkLFy7w+uuv5wpGbldccQWPPPIIAJmZmUybNo22bdsSEhJCu3btmDFjBllZWbn2ERGeeOIJVqxYQWxsLCEhIcTGxrJ27dqcPAkJCUyYMAGAqKgoRAQRyamtuI/x9ttv07FjR0JCQvjiiy8AmDVrFr1796Zhw4bUrl2bHj16sHz5cvJav349cXFx1K9fn3r16tGhQwcmT54MwJtvvskdd9wBwI033phz/o0bNwK5nyFt27YNEWHRokX5zrF06VJEhC1btuSk7dq1i2HDhtGgQQNq165Nr169WL9+fZH/F5WtqBpSc+DXONNKvAPME5HlwFtYF+8K8YuYxrRtVJeDP13gzc8P8b/DuhaYd/fxn2eMtyY7U118+OGHtGvXjl69ehWZd+zYsbz11luMGDGCuLg4Nm/ezDPPPMORI0fyNW999tlnvP/++zz22GPUq1ePl19+mTvvvJMjR47QsGFDhg0bRmJiIu+++y6zZ8+mUSNn4szGjRvnHOPjjz9m6dKlPP7449SvX5/mzZsD8NJLLzFkyBBGjRpFeno67733HnfffTerVq1i0KBBgBMUBg8ezPXXX8+zzz5LYGAg+/fvzwkcffv25YknnmDOnDlMnjyZTp06AeSsPV1zzTVERUWxbNkyRo0alWvbsmXLaNWqFddffz0AO3fuJC4ujoiICCZNmkRoaCiLFi3il7/8JevWraN///7F+n+pFKparAXoDswFTgNZwH9d6/uLe4zqsgBnwsPDtaK8ufWQRsSv0vZTVuvp82kF5lu45aBGxK/SuOc/rbCyGP+TlJSkSUlJ+TdkZqieTvKfJTOjxN8tJSVFAb399tuLzLt9+3YF9JFHHsmV/vDDDyugO3bsyEkDNCQkRA8ePJiTtmPHDgX0L3/5S07a7NmzFdBDhw7lOx+ggYGBunfv3nzbLl68mOtzenq6du7cWfv165fr2GFhYZqZmVngd1q5cqUCumHDhnzbIiIidMyYMTmf4+PjNSQkRJ0JWB3nzp3T2rVr64QJE3LS+vXrp927d9f09PSctIyMDO3SpYv27NmzwLKURoH3pkt4eLgCZ7SA361F1ZA8A9fXwNci8iRwJ/AgTm+710RkPE6vu5Vqve3K7M4erZi1bi/n0jJ5b9tRHr2hndd8OSM0NLfmOgOcPQYvFVyjrnTjv4HLSzbN+tmzzj192WVFv8KwevVqAJ588slc6RMmTGD+/PmsXr2arl1/vh4DBw4kKioq53PXrl0JCwvj4MGDxS5fv379aN++fb702rV/fpqRnJxMVlYWffr0YcmSJTnp9evX58KFC6xduzan1lQWw4cP5/nnn+dvf/sb9957L+DULi9dusQ999wDwOnTp9mwYQMzZ84kJSUl1/4333wzs2fP5uLFi9SpU6fM5SkPJX4PSVXTVHWxqvYH2gHPApcD04Ed5Vy+GqleSC2GX9MagHe+SCLTNTV5Xu4ODfZCrKkuwsKce/ncucKfnwIcPnyYWrVq0a5d7j/YoqOjqVWrFocPH86V3qZNm3zHuPzyy0lOTi52+TwDmqdVq1bRq1cvQkNDadCgAY0bN+bVV1/NFQTuueceevXqxeDBg2nRogWjR49m5cqV7laXEuvevTvR0dEsXbo0J23ZsmVERETQs2dPwOk6r6pMmjSJxo0b51pefPFFsrOzOXXqVEGnqHTFriF5o6pJwDMiMhUYiE1hXm7GXBfJG1sP8UNKKut2nWBQ1+a5tqdnZrPvhPNDa8+PDABhLZ1aib8Ia1nyXcLCaN68Od9++225Fycw0Pt7eiUJCJ41IbfNmzczZMgQ+vbty7x582jevDlBQUEsXLiQxYsX59p306ZNbNiwgdWrV7N2
7VreeecdbrrpJtasWVNg+QozfPhwZs2axZkzZ6hVqxZr165l3LhxOduzs50/ZuPj4xkwYIDXY3g+I/O1MgUkN3X+R9e6FlMO2jSsQ/+OTflk9wne/PxQvoC0/8dzZGQ5P0jWw84AEFirxE1k/mjw4MEsWLCAr776KucvfW8iIiLIzMwkMTGRmJiYnPTExEQyMzOJiCj5tSjN6ywrVqwgNDSUdevW5eqavXDhwnx5AwIC6N+/P/379+fFF1/khRdeID4+no0bN9K/f/8Sn3/48OHMnDmTDz74gODgYFJTU3Oa6wDatm0LQGhoaIEByZ+Uy+CqpmI84OoCvi0pmW+P5W7/dT8/alg3mKZh1f/9BFNzTJw4kTp16jB27FhOnsw/PObu3buZP38+t956KwBz5szJtf2ll14CKNVzmrp1nfnESvJibGBgICKSq6t5UlISH3zwQa583prGunXrBkBqamqpzn/llVfSoUMHli5dyrJly2jXrh09evTI2d6kSZOc0S28XUtvab5ULjUkUzGua9eQDk0vY++Jc7yx9RB/Ht4tZ5vnCA32krKpTqKjo1m0aBEjRoygU6dOjB49mtjYWC5dusSWLVtYsWIFTz75JA8//DBjxoxh3rx5JCcnExcXx5YtW1iyZAkPPvggXbp0KfG53b/Mp0yZwogRIwgKCuK2227LCRTeDBo0iD//+c/ccsst/OpXv+LHH39k7ty5REdH8803Pzehzpgxg02bNnHrrbcSGRnJyZMnmTdvHq1atSIuLg5wAkytWrV4/vnnSUlJISQkhH79+nl9H8tt+PDhPPfccwQEBPC73/0u3/a5c+fSp08fOnfuzNixY4mKiuL48eNs3ryZ1NRUv3jJOEdB3e9s8V23b0+LvzqsEfGrNGbyav3xbGpO+l2vbtWI+FX6v6t3V0o5jP8oqmttdbFnzx598MEHNSIiQoODgzUsLEz79u2r8+fPz+nCnJGRoQkJCRoZGalBQUEaGRmp06dPz9e1GtDx48fnO0fertSqqjNnztSWLVtqQEBAri7gBR1DVfX111/XmJgYDQkJ0Y4dO+rChQt16tSp6vyKdXz66ad6++23a4sWLTQ4OFhbtGihI0eO1H379uU61htvvKFt27bVwMDAXF3AvZVVVfXbb79VQAHdvn271/Lt379fR40apU2aNNHg4GBt3bq1Dh06VFetWuU1f2mVtdu3aCl7eNRkInImPDw8vDLGu7qUnsV1z33KmYsZTBjQnvEDYsjOVrpO+5jzaZm8PPIqhlzZosLLYfyHu/dYaZ6RGFORiro369evT4rz4lR9b9vtGZKfqx0cyIhrnO6q7351mPTMbI6cvsj5tEzAunwbY6oPC0hVwOjrIggMEE6eS2P1zuM5HRrqBAcS1bDgtm1jjKlKLCBVAS3q1+aW2GYALNx6KGdSvk7NwwgIsA4NxpjqwQJSFXGfqwv4ju9TWPmfY4A11xljqhcLSFXE1RGX07mlE4COpzjvLNgcSMaY6sQCUhUhItzfO/c4WjZCgzGmOrGAVIUMvrI5jeoFA1ArQGjfrJ6PS2R8ISAgIN8EdMb4g6ysLAICSh9WLCBVISG1Avl1L6d/f2zLcEJqlXwwRlP1hYaGkpaWxunTp31dFGNynD59mrS0NEJDQ0t9DJ8OHSQiITjTVtyLM4XFDmCKqn5ajH0HAE8BXXAC6x5gtqou88jTGmfepluBGJwJBXcCfyzOOfzR4zdG07BeCNe1bejrohgfadSoEWlpaZw4cYIzZ86UapRoY8pTVlYWaWlpXHbZZTkz7ZaGr2tIbwITgHeB8UA2sEZEritsJxEZDHyME1CnAk/jBJulIvKgR9bbgYnAAZzgNQMIAz4RkXvL9ZtUkqDAAO7tFUF0E2uuq6lEhJYtW9KoUSOCgoJ8XRxjCAoKolGjRrRs2bJMY2v6bOggEbkW+AqYoKpzXGmhwLfAD6rat5B91wBdgbaqmuZKCwEOAgdU9ReutFjghKr+
5LFvCLAdqK2qkaUse6UNHWSMMdWFPw8ddBeQAbzmTlDVVOB1IE5Emhe0I04tJ9kdjFz7pgHJwCWPtF2ewcgj32ogQkTyz7ZljDHGJ3z5DOkqYI+qns+T/k9AgG7A8QL2/QyYJCIzcJr9AO4D2uM0ARalGXAeSPW2UUSKqvpYf2tjjClnvgxIzYFjXtLdQaiwIayfBdoBU3CeDYETYIao6vrCTioi0cAw4D21oc6NMcZv+DIg1QbSvKSnemwvSBqwD3gfWAkEAg8By0Skv6pu87aTiNRx7XMBmFzQwQtq3/Q4zhmslmSMMeXKlwHpEuBt7u1Qj+0F+QtwLXCNqmYDiMgyYBcwB7g+7w4iEgi8B3QCBqpqQc2BxhhjfMCXAek4TrNdXu60H7ztJCLBwFhgpjsYAahqhqv33WMiUktVM/PsugAYBIxU1c/KWPawlJQU6tcvtCJljDHGQ0pKCjid0rzyZUDaDowXkXp5Ojb0dK13FLBfQ5xye3sbMMi1LVdHeBH5E3A/8FvPF2fLIBsISElJOVvK/d3NfSnlUJaayK5f2dj1Kxu7fqUXhvP70ytfvofUE/iS3O8hheC8h3RCVeNcaW2AOqq6x/U5EPgJp4Z1papmuNLrAbtx5mvv4nGe3wMv4NSoplTW9yuMuxdfUc+qjHd2/crGrl/Z2PWrOD6rIanqVyLyPvCC652jRGAMEIHThdvtbeAXuGo9qpolIrOAPwJfiMi7OLWlB4FWwP9z7ygid+AEo/3AbhH5dZ5irFTVCxXw9YwxxpSQT8eyA0bjDOczGmcsu2+AW1V1a2E7qeqzInIIZ7ihqTidI74BhqnqSo+sV7rWMcA7Xg4VhdPjzhhjjI/5rMmuJrMqf9nY9Ssbu35lY9ev4vh6cFVjjDEGsIBkjDHGT1hAMsYY4xcsIBljjPEL1qnBGGOMX7AakjHGGL9gAckYY4xfsIBkjDHGL1hAMsYY4xcsIFUiEQkRkedF5AcRuSQiX4pIf1+XqyoQkRtERAtYOvq6fP5ERJqLyHMiskFEzrmu0Q0F5B0iIl+LSKqIHBGRqSLi6yHFfKq4109Ekgq4H5/zQbGrhRp94/nAm8CdOJMIHsAZRHaNiPxCVb/wYbmqkjnAv/OkeZ07qwbrAMTj3GPfAL29ZRKRXwIfAP8AxgFdgGeARq7PNVWxrp/Lv3HuSU/fVlC5qj0LSJVERK4FRpB7uo23cW7e54G+PixeVfKZqn7g60L4uX8DjVT1lIgMBVYWkG8W8B+cGZSzAETkLDBJRF5W1f2VU1y/U9zrB/C9qr5bSeWq9qzJrvLcBWQAr7kTVDUVeB2Ic03BYYpBRC6r6c1KhVHVc6p6qrA8InIFcAUw3x2MXObh/F64swKL6NeKc/08uZri61RkmWoKC0iV5ypgT57ZcQH+iTPXU7fKL1KV9A5wFrgkIh+LSJeidjBeXeVa/8szUVV/AL732G4KdzPOFDYXRCRRRB7ydYGqMvsrs/I0B455ST/uWreoxLJURenAcmANzozBXXEmY9wiIteo6j5fFq4KctfIj3vZdhy7H4vjG2AzsA9oDPwGmC8iDVTVOjaUggWkylMbSPOSnuqx3RRAVT8HPvdI+ruIfIjzF/5UYJRPClZ1ue+3gu5Ja4IqgqoO8fwsIguBLcDTIvKqqqb4pmRVlzXZVZ5LODPb5hXqsd2UgKruAD4BrOt8ybnvt4LuSbsfS8j1LG4OTjC/zsfFqZIsIFWe4/zcTOLJnWZdl0vnKNDA14WogtxNdQXdk3Y/ls5R19ruyVKwgFR5tgMdRaRenvServWOSi5PddEWOOnrQlRB213rqz0TRaQF0MpjuymZtq613ZOlYAGp8iwHgoCx7gQRCQHuB7a6ejeZAohIYy9pccCNwLrKL1HVpqq7gD3AQyIS6LHpUSAbWOGTglURItJARALypIUCvwfOAfaieylYp4ZKoqpficj7wAuud44SgTFABM6IDaZwS0XkIk7Hhp+AzsBD
rn8n+LBcfklEnnL9s5Nrfa8rgJ9R1Vdcab8H/g6sE5GlONf0f3DeTarRvRaLcf2GAFNEZDmQBDTE+XluDzzq5fUOUww2QV8lcv0FNQP4NXA5TrfRyar6iU8LVgWIyG9xetJFA2HAjzg1owRVPeLLsvkjESnoB/uwqkZ65BuK00uxE04z0xvADFXNrPBC+rGirp+I9MD5Q+gqnC7facDXwCxVXVU5pax+LCAZY4zxC/YMyRhjjF+wgGSMMcYvWEAyxhjjFywgGWOM8QsWkIwxxvgFC0jGGGP8ggUkY4wxfsECkjHGKxHZKCJJvi6HqTksIBlTiUTkBhHRQpYaPUKCqdlsLDtjfGMJsNpLenZlF8QYf2EByRjf+FpV3/V1IYzxJ9ZkZ4wfEpFIVxNegoiMFJFvRCRVRI640vL9MSkiXUVkpYiccuX9TkQm5plewp23mYi8LCIHRSRNRH4UkfUicpOXvC1EZImIJIvIRRFZJyLtK+q7m5rLakjG+EYdEWnkJT1dVc96fB6CM+nbXOC/rs9TcaYtud+dSUSuBj4DMjzy3gY8D1yJM1K6O28ksBVoCrwN/AuoC/QCBgDrPc5fF9gEfAlMBqKA8cDfRKSza9puY8qHqtpiiy2VtAA3AFrIssqVL9L1OQvo7rG/ACtd23p5pG8FMoGuefIuc+Xt75G+2pU20Ev5Ajz+vdGVb2KePL8vaH9bbCnLYk12xvjGX4GbvCxT8uRbr6pfuz+oqgIvuD7eASAiTYDewN9V9Zs8eZ/Nk7cBcAuwVlXzzbSrqnk7VWQDL+dJ+4drHVPktzSmBKzJzhjf2K/Fm5hxt5e071zrtq51lGu9q4D9sz3yRuPUnP5TzHL+oKqpedJOudYNi3kMY4rFakjGmMIU9oxIKq0UpkawgGSMf+vkJe0K1/qga33ItY71krcjzs+5O+8BnOc/3cqrgMaUFwtIxvi3m0Sku/uDiAgw0fXxAwBV/RH4HLhNRDrnyTvJ9XGlK+9pYA3wSxEZkPfOCA/6AAAA+ElEQVRkrn2M8Ql7hmSMb3QXkV8XsO0Dj3/vAP4hInOB48DtOF2z31HVLzzyjcfp9r3Zlfe/wGBgILBYVT/1yPs/OAFsjYi8BfwbqA30BJKA+DJ+N2NKxQKSMb4x0rV4E4PThRvg78BenJpOB+BHYIZryaGq/xKR3sA04DGc94cO4gSXF/PkPeR6b+lp4FZgNJCME/z+WtYvZkxpidMz1BjjT1wvrx4Cpqlqgk8LY0wlsWdIxhhj/IIFJGOMMX7BApIxxhi/YM+QjDHG+AWrIRljjPELFpCMMcb4BQtIxhhj/IIFJGOMMX7BApIxxhi/YAHJGGOMX/j/iJNXtxKtt94AAAAASUVORK5CYII=\n",
183 | "text/plain": [
184 | ""
185 | ]
186 | },
187 | "metadata": {
188 | "needs_background": "light"
189 | },
190 | "output_type": "display_data"
191 | }
192 | ],
193 | "source": [
194 | "fig, ax = plt.subplots()\n",
195 | "ax.plot(df1['Step'], df1['Value'], label='MLP baseline')\n",
196 | "ax.plot(df2['Step'], df2['Value'], label='Contrastive')\n",
197 | "\n",
198 | "ax.set(xlabel='Epoch', ylabel='Accuracy (Test set)', title='Fashion MNIST dataset');\n",
199 | "ax.legend();\n",
200 | "fig.savefig('figs/fashion_mnist_test_acc_curves.png')"
201 | ]
202 | },
203 | {
204 | "cell_type": "code",
205 | "execution_count": null,
206 | "metadata": {},
207 | "outputs": [],
208 | "source": []
209 | }
210 | ],
211 | "metadata": {
212 | "kernelspec": {
213 | "display_name": "venv",
214 | "language": "python",
215 | "name": "venv"
216 | },
217 | "language_info": {
218 | "codemirror_mode": {
219 | "name": "ipython",
220 | "version": 3
221 | },
222 | "file_extension": ".py",
223 | "mimetype": "text/x-python",
224 | "name": "python",
225 | "nbconvert_exporter": "python",
226 | "pygments_lexer": "ipython3",
227 | "version": "3.7.1"
228 | }
229 | },
230 | "nbformat": 4,
231 | "nbformat_minor": 4
232 | }
233 |
--------------------------------------------------------------------------------
/README.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "## Preliminary\n",
8 | "\n",
9 | "Let $\\mathbf{x}$ be the input feature vector and $y$ be its label. Let $f(\\cdot)$ be a encoder network mapping the input space to the latent space and $\\mathbf{z} = f(\\mathbf{x})$ be the latent vector. \n"
10 | ]
11 | },
12 | {
13 | "attachments": {},
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "## Types of contrastive loss functions\n",
18 | "\n",
19 | "### 1. Max margin contrastive loss (Hadsell et al. 2006)\n",
20 | "\n",
21 | "$$ \\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}) = \n",
22 | "\\mathbb{1}_{y_i=y_j} \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 + \n",
23 | "\\mathbb{1}_{y_i \\neq y_j} \\max(0, m - \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert_2)^2$$\n",
24 | "\n",
25 | ", where $m > 0$ is a margin. The margin imposes a lower bound on the distance between a pair of samples with different labels. \n",
26 | "\n",
27 | "### 2. Triplet loss (Weinberger et al. 2006)\n",
28 | "\n",
29 | "Triplet loss operates on a triplet of vectors whose labels follow $y_i = y_j$ and $y_i \\neq y_k$. That is to say two of the three ($\\mathbf{z_i}$ and $\\mathbf{z_j}$) shared the same label and a third vector $\\mathbf{z_k}$ has a different label. In triplet learning literatures, they are termed anchor, positive, and negative, respectively. Triplet loss is defined as:\n",
30 | "\n",
31 | "$$ \\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}, \\mathbf{z_k}) = \n",
32 | "\\max(0, \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 - \n",
33 | " \\left\\lVert \\mathbf{z_i} - \\mathbf{z_k} \\right\\rVert^2_2 + m)\n",
34 | "$$\n",
35 | ", where $m$ again is the margin parameter that requires the delta distances between anchor-positive and anchor-negative has to be larger than $m$. The intuition for this loss function is to push negative samples outside of the neighborhood by a margin while keeping positive samples within the neighborhood. Graphically:\n",
36 | "\n",
37 | "\n",
38 | "\n",
39 | "#### Triplet mining\n",
40 | "\n",
41 | "Based on the definition of the triplet loss, a triplet may have the following three scenarios before any training: \n",
42 | "- **easy**: triplets with a loss of 0 because the negative is already more than a margin away from the anchor than the positive, i.e. $ \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 + m < \n",
43 | " \\left\\lVert \\mathbf{z_i} - \\mathbf{z_k} \\right\\rVert^2_2 $\n",
44 | "- **hard**: triplets where the negative is closer to the anchor than the positive, i.e. $ \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 >\n",
45 | " \\left\\lVert \\mathbf{z_i} - \\mathbf{z_k} \\right\\rVert^2_2$ \n",
46 | "- **semi-hard**: triplets where the negative lies in the margin, i.e. $ \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 <\n",
47 | " \\left\\lVert \\mathbf{z_i} - \\mathbf{z_k} \\right\\rVert^2_2 < \\left\\lVert \\mathbf{z_i} - \\mathbf{z_j} \\right\\rVert^2_2 + m$\n",
48 | "\n",
49 | "In the FaceNet (Schroff et al. 2015) paper, which uses triplet loss to learn embeddings for faces, the authors argued that triplet mining is crucial for model performance and convergence. They also found that hardest triplets led to local minima early on in training, specifically resulted in a collapsed model, whereas semi-hard triplets yields more stable results and faster convergence.\n",
50 | "\n",
51 | "\n",
52 | "\n",
53 | "### 3. Multi-class N-pair loss (Sohn 2016)\n",
54 | "\n",
55 | "Multi-class N-pair loss is a generalization of triplet loss allowing joint comparison among more than one negative samples. When applied on a pair of positive samples $\\mathbf{z_i}$ and $\\mathbf{z_j}$ sharing the same label ($y_i = y_j$) from a mini-batch with $2N$ samples, it is computed as:\n",
56 | "\n",
57 | "$$ \\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}) = \n",
58 | "\\log(1+\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k} - \\mathbf{z_i} \\mathbf{z_j})})\n",
59 | "$$\n",
60 | ", where $z_i z_j$ is the cosine similarity between the two vectors. \n",
61 | "\n"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {},
67 | "source": [
68 | "With some algebraic manipulation, multi-class N-pair loss can be written as the following:\n",
69 | "\n",
70 | "\\begin{equation}\n",
71 | "\\begin{split}\n",
72 | "\\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}) & = \\log(1+\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k} - \\mathbf{z_i} \\mathbf{z_j})}) \\\\\n",
73 | " & = -\\log \\frac{1}{1+\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k} - \\mathbf{z_i} \\mathbf{z_j})}} \\\\\n",
74 | " & = -\\log \\frac{1}{1+\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\frac{\\exp(\\mathbf{z_i} \\mathbf{z_k})}{\\exp(\\mathbf{z_i} \\mathbf{z_j})}}} \\\\\n",
75 | " & = -\\log \\frac{\\exp(\\mathbf{z_i} \\mathbf{z_j})}{\\exp(\\mathbf{z_i} \\mathbf{z_j}) + \\sum_{k=1}^{2N}\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k})}\n",
76 | "\\end{split}\n",
77 | "\\end{equation}\n"
78 | ]
79 | },
80 | {
81 | "cell_type": "markdown",
82 | "metadata": {},
83 | "source": [
84 | "### 4. Supervised NT-Xent loss (Khosla et al. 2020)\n",
85 | "\n",
86 | "- Self-supervised NT-xent loss (Chen et al. 2020 in SimCLR paper) \n",
87 | "\n",
88 | "NT-Xent is coined by Chen et al. 2020 and is short for normalized temperature-scaled cross entropy loss. It is a modification of Multi-class N-pair loss with addition of the temperature parameter ($\\tau$).\n",
89 | "\n",
90 | "$$\n",
91 | "\\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}) = \n",
92 | "-\\log \\frac{\\exp(\\mathbf{z_i} \\mathbf{z_j} / \\tau)}{\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k} / \\tau)}}\n",
93 | "$$\n",
94 | "\n",
95 | "- Supervised NT-xent loss\n",
96 | "\n",
97 | "$$\n",
98 | "\\mathcal{L}(\\mathbf{z_i}, \\mathbf{z_j}) = \n",
99 | "\\frac{-1}{2N_{y_i}-1} \\sum_{j=1}^{2N} \\log \\frac{\\exp(\\mathbf{z_i} \\mathbf{z_j} / \\tau)}{\\sum_{k=1}^{2N}{\\mathbb{1}_{k \\neq i} \\exp(\\mathbf{z_i} \\mathbf{z_k} / \\tau)}}\n",
100 | "$$\n",
101 | "\n"
102 | ]
103 | },
104 | {
105 | "attachments": {},
106 | "cell_type": "markdown",
107 | "metadata": {},
108 | "source": []
109 | },
110 | {
111 | "cell_type": "markdown",
112 | "metadata": {},
113 | "source": [
114 | "# References\n",
115 | "- [Hadsell, R., Chopra, S., & LeCun, Y. (2006, June). Dimensionality reduction by learning an invariant mapping.](http://yann.lecun.com/exdb/publis/pdf/hadsell-chopra-lecun-06.pdf) In 2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR'06) (Vol. 2, pp. 1735-1742). IEEE.\n",
116 | "- [Weinberger, K. Q., Blitzer, J., & Saul, L. K. (2006). Distance metric learning for large margin nearest neighbor classification.](https://papers.nips.cc/paper/2795-distance-metric-learning-for-large-margin-nearest-neighbor-classification.pdf) In Advances in neural information processing systems (pp. 1473-1480).\n",
117 | "- [Schroff, F., Kalenichenko, D., & Philbin, J. (2015). Facenet: A unified embedding for face recognition and clustering.](https://arxiv.org/abs/1503.03832) In Proceedings of the IEEE conference on computer vision and pattern recognition (pp. 815-823).\n",
118 | "- [Sohn, K. (2016). Improved deep metric learning with multi-class n-pair loss objective.](https://papers.nips.cc/paper/6200-improved-deep-metric-learning-with-multi-class-n-pair-loss-objective) In Advances in neural information processing systems (pp. 1857-1865).\n",
119 | "- [Chen, T., Kornblith, S., Norouzi, M., & Hinton, G. (2020). A simple framework for contrastive learning of visual representations.](https://arxiv.org/pdf/2002.05709.pdf) arXiv preprint arXiv:2002.05709.\n",
120 | "- [Khosla, P., Teterwak, P., Wang, C., Sarna, A., Tian, Y., Isola, P., ... & Krishnan, D. (2020). Supervised Contrastive Learning.](https://arxiv.org/pdf/2004.11362.pdf) arXiv preprint arXiv:2004.11362."
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": null,
126 | "metadata": {},
127 | "outputs": [],
128 | "source": []
129 | }
130 | ],
131 | "metadata": {
132 | "kernelspec": {
133 | "display_name": "venv",
134 | "language": "python",
135 | "name": "venv"
136 | },
137 | "language_info": {
138 | "codemirror_mode": {
139 | "name": "ipython",
140 | "version": 3
141 | },
142 | "file_extension": ".py",
143 | "mimetype": "text/x-python",
144 | "name": "python",
145 | "nbconvert_exporter": "python",
146 | "pygments_lexer": "ipython3",
147 | "version": "3.7.1"
148 | }
149 | },
150 | "nbformat": 4,
151 | "nbformat_minor": 4
152 | }
153 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Contrastive loss functions
2 |
3 | Experiments with different contrastive loss functions to see if they help supervised learning.
4 |
For detailed reviews and intuitions, please check out these posts:
6 | - [Contrastive loss for supervised classification](https://towardsdatascience.com/contrastive-loss-for-supervised-classification-224ae35692e7)
7 | - [Contrasting contrastive loss functions](https://medium.com/@wangzc921/contrasting-contrastive-loss-functions-3c13ca5f055e)
8 |
--------------------------------------------------------------------------------
/lars_optimizer.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | # Copyright 2020 The SimCLR Authors.
3 | #
4 | # Licensed under the Apache License, Version 2.0 (the "License");
5 | # you may not use this file except in compliance with the License.
6 | # You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | # ==============================================================================
16 | """Functions and classes related to optimization (weight updates)."""
17 |
18 | from __future__ import absolute_import
19 | from __future__ import division
20 | from __future__ import print_function
21 |
22 | import re
23 |
24 | import tensorflow.compat.v1 as tf
25 |
26 | EETA_DEFAULT = 0.001
27 |
28 |
class LARSOptimizer(tf.train.Optimizer):
    """Layer-wise Adaptive Rate Scaling for large batch training.

    Introduced by "Large Batch Training of Convolutional Networks" by Y. You,
    I. Gitman, and B. Ginsburg. (https://arxiv.org/abs/1708.03888)

    NOTE(review): this is a TF1-style optimizer (`tf.compat.v1`); it is not
    compatible with Keras/TF2 training loops (see the comment in main.py).
    """

    def __init__(self,
                 learning_rate,
                 momentum=0.9,
                 use_nesterov=False,
                 weight_decay=0.0,
                 exclude_from_weight_decay=None,
                 exclude_from_layer_adaptation=None,
                 classic_momentum=True,
                 eeta=EETA_DEFAULT,
                 name="LARSOptimizer"):
        """Constructs a LARSOptimizer.

        Args:
          learning_rate: A `float` for learning rate.
          momentum: A `float` for momentum.
          use_nesterov: A 'Boolean' for whether to use nesterov momentum.
          weight_decay: A `float` for weight decay.
          exclude_from_weight_decay: A list of `string` for variable screening, if
            any of the string appears in a variable's name, the variable will be
            excluded for computing weight decay. For example, one could specify
            the list like ['batch_normalization', 'bias'] to exclude BN and bias
            from weight decay.
          exclude_from_layer_adaptation: Similar to exclude_from_weight_decay, but
            for layer adaptation. If it is None, it will be defaulted the same as
            exclude_from_weight_decay.
          classic_momentum: A `boolean` for whether to use classic (or popular)
            momentum. The learning rate is applied during momentum update in
            classic momentum, but after momentum for popular momentum.
          eeta: A `float` for scaling of learning rate when computing trust ratio.
          name: The name for the scope.
        """
        # `False` disables locking in the base tf.train.Optimizer.
        super(LARSOptimizer, self).__init__(False, name)

        self.learning_rate = learning_rate
        self.momentum = momentum
        self.weight_decay = weight_decay
        self.use_nesterov = use_nesterov
        self.classic_momentum = classic_momentum
        self.eeta = eeta
        self.exclude_from_weight_decay = exclude_from_weight_decay
        # exclude_from_layer_adaptation is set to exclude_from_weight_decay if the
        # arg is None.
        if exclude_from_layer_adaptation:
            self.exclude_from_layer_adaptation = exclude_from_layer_adaptation
        else:
            self.exclude_from_layer_adaptation = exclude_from_weight_decay

    def apply_gradients(self, grads_and_vars, global_step=None, name=None):
        """Apply gradients with per-layer trust-ratio scaling.

        Args:
          grads_and_vars: iterable of (gradient, variable) pairs.
          global_step: optional step variable; created if None. Incremented
            once per (grad, param) pair via the assignment below.
          name: optional name for the returned group op.

        Returns:
          A `tf.group` op that applies all parameter/momentum/step updates.
        """
        if global_step is None:
            global_step = tf.train.get_or_create_global_step()
        new_global_step = global_step + 1

        assignments = []
        for (grad, param) in grads_and_vars:
            if grad is None or param is None:
                continue

            param_name = param.op.name

            # Per-parameter momentum accumulator, created lazily by name.
            # NOTE(review): relies on TF1 variable scoping; calling this twice
            # without reuse would raise on the duplicate "/Momentum" variable.
            v = tf.get_variable(
                name=param_name + "/Momentum",
                shape=param.shape.as_list(),
                dtype=tf.float32,
                trainable=False,
                initializer=tf.zeros_initializer())

            # L2 weight decay is folded directly into the gradient.
            if self._use_weight_decay(param_name):
                grad += self.weight_decay * param

            if self.classic_momentum:
                # Classic momentum: scale the gradient by (lr * trust_ratio)
                # before accumulating into the momentum buffer.
                trust_ratio = 1.0
                if self._do_layer_adaptation(param_name):
                    w_norm = tf.norm(param, ord=2)
                    g_norm = tf.norm(grad, ord=2)
                    # trust_ratio = eeta * ||w|| / ||g||, guarded against
                    # zero norms (falls back to 1.0).
                    trust_ratio = tf.where(
                        tf.greater(w_norm, 0), tf.where(
                            tf.greater(g_norm, 0), (self.eeta *
                                                    w_norm / g_norm),
                            1.0),
                        1.0)
                scaled_lr = self.learning_rate * trust_ratio

                next_v = tf.multiply(self.momentum, v) + scaled_lr * grad
                if self.use_nesterov:
                    update = tf.multiply(
                        self.momentum, next_v) + scaled_lr * grad
                else:
                    update = next_v
                next_param = param - update
            else:
                # "Popular" momentum: accumulate the raw gradient first and
                # apply (lr * trust_ratio) to the momentum-based update.
                next_v = tf.multiply(self.momentum, v) + grad
                if self.use_nesterov:
                    update = tf.multiply(self.momentum, next_v) + grad
                else:
                    update = next_v

                trust_ratio = 1.0
                if self._do_layer_adaptation(param_name):
                    w_norm = tf.norm(param, ord=2)
                    v_norm = tf.norm(update, ord=2)
                    trust_ratio = tf.where(
                        tf.greater(w_norm, 0), tf.where(
                            tf.greater(v_norm, 0), (self.eeta *
                                                    w_norm / v_norm),
                            1.0),
                        1.0)
                scaled_lr = trust_ratio * self.learning_rate
                next_param = param - scaled_lr * update

            assignments.extend(
                [param.assign(next_param),
                 v.assign(next_v),
                 global_step.assign(new_global_step)])
        return tf.group(*assignments, name=name)

    def _use_weight_decay(self, param_name):
        """Whether to use L2 weight decay for `param_name`."""
        if not self.weight_decay:
            return False
        if self.exclude_from_weight_decay:
            for r in self.exclude_from_weight_decay:
                # Exclusion patterns are regexes matched anywhere in the name.
                if re.search(r, param_name) is not None:
                    return False
        return True

    def _do_layer_adaptation(self, param_name):
        """Whether to do layer-wise learning rate adaptation for `param_name`."""
        if self.exclude_from_layer_adaptation:
            for r in self.exclude_from_layer_adaptation:
                if re.search(r, param_name) is not None:
                    return False
        return True
168 |
--------------------------------------------------------------------------------
/losses.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import tensorflow as tf
3 | import tensorflow_addons as tfa
4 |
5 |
def pdist_euclidean(A):
    '''Compute the pairwise Euclidean distance matrix for the rows of `A`.

    Uses the identity ||a - b||^2 = ||a||^2 - 2*a.b + ||b||^2.
    https://stackoverflow.com/questions/37009647/compute-pairwise-distance-in-a-batch-without-replicating-tensor-in-tensorflow

    Args:
        A: tensor of shape [n, d].
    Returns:
        tensor of shape [n, n] where entry (i, j) is ||A_i - A_j||_2.
    '''
    r = tf.reduce_sum(A*A, 1)

    # turn r into column vector
    r = tf.reshape(r, [-1, 1])
    D = r - 2*tf.matmul(A, tf.transpose(A)) + tf.transpose(r)
    # Floating point error can make entries of D slightly negative (e.g. on
    # the diagonal), and tf.sqrt of a negative yields NaN — clamp at zero.
    return tf.sqrt(tf.maximum(D, 0.0))
15 |
16 |
def square_to_vec(D):
    '''Convert a squareform pdist matrix to its vector form.

    Args:
        D: square tensor of shape [n, n].
    Returns:
        tensor of shape [n * (n - 1) // 2] holding the strict upper
        triangle of `D`, row by row.
    '''
    size = D.shape[0]
    rows, cols = np.triu_indices(size, k=1)
    upper_triangle_indices = list(zip(rows, cols))
    return tf.gather_nd(D, upper_triangle_indices)
24 |
25 |
def get_contrast_batch_labels(y):
    '''
    Make contrast labels for every pair of samples in the batch.

    A pair is labeled 1 when the two samples have the same class label,
    0 otherwise.
    y: tensor with shape: (batch_size, )
    returns:
        tensor with shape: (batch_size * (batch_size-1) // 2, )
    '''
    # Pairwise "distance" between labels is zero exactly when they match.
    label_column = tf.reshape(tf.cast(y, tf.float32), [-1, 1])
    pairwise_label_dist = square_to_vec(pdist_euclidean(label_column))
    return tf.cast(pairwise_label_dist == 0, tf.int32)
38 |
39 |
def get_contrast_batch_labels_regression(y):
    '''
    Make contrast labels for regression by taking all the pairwise in y.
    y: tensor with shape: (batch_size, )
    returns:
        tensor with shape: (batch_size * (batch_size-1) // 2, )

    Raises:
        NotImplementedError: always — placeholder for future work.
    '''
    raise NotImplementedError
48 |
49 |
def max_margin_contrastive_loss(z, y, margin=1.0, metric='euclidean'):
    '''
    Wrapper for the maximum margin contrastive loss (Hadsell et al. 2006)
    `tfa.losses.contrastive_loss`
    Args:
        z: hidden vector of shape [bsz, n_features].
        y: ground truth of shape [bsz].
        margin: lower bound imposed on the distance between negative pairs.
        metric: one of ('euclidean', 'cosine'). 'cosine' treats 1 - z @ z.T
            as the distance, which assumes `z` is l2-normalized —
            TODO confirm with callers.
    Returns:
        scalar loss tensor, averaged over all pairs in the batch.
    Raises:
        ValueError: if `metric` is not one of the supported options.
    '''
    # compute pair-wise distance matrix
    if metric == 'euclidean':
        D = pdist_euclidean(z)
    elif metric == 'cosine':
        D = 1 - tf.matmul(z, z, transpose_a=False, transpose_b=True)
    else:
        # Previously an unknown metric fell through and crashed below with
        # an opaque NameError on `D`; fail fast with a clear message.
        raise ValueError("Unsupported metric: {!r}".format(metric))
    # convert squareform matrix to vector form
    d_vec = square_to_vec(D)
    # make contrastive labels
    y_contrasts = get_contrast_batch_labels(y)
    loss = tfa.losses.contrastive_loss(y_contrasts, d_vec, margin=margin)
    # NOTE(review): possible exploding/vanishing gradients on large batches?
    return tf.reduce_mean(loss)
71 |
72 |
def multiclass_npairs_loss(z, y):
    '''
    Wrapper for the multiclass N-pair loss (Sohn 2016)
    `tfa.losses.npairs_loss`
    Args:
        z: hidden vector of shape [bsz, n_features].
        y: ground truth of shape [bsz].
    '''
    # Inner-product similarity matrix between all pairs in the batch
    # (cosine similarity when z is l2-normalized).
    similarity = tf.matmul(z, z, transpose_b=True)
    return tfa.losses.npairs_loss(y, similarity)
85 |
86 |
def triplet_loss(z, y, margin=1.0, kind='hard'):
    '''
    Wrapper for the triplet losses
    `tfa.losses.triplet_hard_loss` and `tfa.losses.triplet_semihard_loss`
    Args:
        z: hidden vector of shape [bsz, n_features], assumes it is l2-normalized.
        y: ground truth of shape [bsz].
        margin: margin between the anchor-positive and anchor-negative
            distances.
        kind: one of ('hard', 'soft', 'semihard') selecting the mining
            strategy / margin variant.
    Returns:
        scalar loss tensor.
    Raises:
        ValueError: if `kind` is not one of the supported options.
    '''
    if kind == 'hard':
        loss = tfa.losses.triplet_hard_loss(y, z, margin=margin, soft=False)
    elif kind == 'soft':
        # soft-margin variant of hard mining (see tfa docs for `soft=True`)
        loss = tfa.losses.triplet_hard_loss(y, z, margin=margin, soft=True)
    elif kind == 'semihard':
        loss = tfa.losses.triplet_semihard_loss(y, z, margin=margin)
    else:
        # Previously an unknown kind fell through and crashed with an
        # UnboundLocalError on `loss`; fail fast with a clear message.
        raise ValueError("Unsupported triplet loss kind: {!r}".format(kind))
    return loss
102 |
103 |
def supervised_nt_xent_loss(z, y, temperature=0.5, base_temperature=0.07):
    '''
    Supervised normalized temperature-scaled cross entropy loss.
    A variant of Multi-class N-pair Loss from (Sohn 2016)
    Later used in SimCLR (Chen et al. 2020, Khosla et al. 2020).
    Implementation modified from:
        - https://github.com/google-research/simclr/blob/master/objective.py
        - https://github.com/HobbitLong/SupContrast/blob/master/losses.py
    Args:
        z: hidden vector of shape [bsz, n_features].
            (presumably l2-normalized so z @ z.T is cosine similarity —
            TODO confirm with callers)
        y: ground truth of shape [bsz].
        temperature: divisor applied to the similarity logits.
        base_temperature: reference temperature; the final loss is scaled
            by temperature / base_temperature.
    Returns:
        scalar loss tensor, averaged over anchors that have at least one
        positive in the batch.
    '''
    batch_size = tf.shape(z)[0]
    # Single view per sample (no augmented copies), unlike SimCLR's 2 views.
    contrast_count = 1
    anchor_count = contrast_count
    y = tf.expand_dims(y, -1)

    # mask: contrastive mask of shape [bsz, bsz], mask_{i,j}=1 if sample j
    # has the same class as sample i. Can be asymmetric.
    mask = tf.cast(tf.equal(y, tf.transpose(y)), tf.float32)
    anchor_dot_contrast = tf.divide(
        tf.matmul(z, tf.transpose(z)),
        temperature
    )
    # # for numerical stability
    # Subtracting the per-row max leaves the softmax unchanged but prevents
    # overflow in tf.exp below.
    logits_max = tf.reduce_max(anchor_dot_contrast, axis=1, keepdims=True)
    logits = anchor_dot_contrast - logits_max
    # # tile mask
    # logits_mask zeroes the diagonal so a sample is never contrasted
    # against itself (neither as positive nor in the denominator).
    logits_mask = tf.ones_like(mask) - tf.eye(batch_size)
    mask = mask * logits_mask
    # compute log_prob
    exp_logits = tf.exp(logits) * logits_mask
    log_prob = logits - \
        tf.math.log(tf.reduce_sum(exp_logits, axis=1, keepdims=True))

    # compute mean of log-likelihood over positive
    # this may introduce NaNs due to zero division,
    # when a class only has one example in the batch
    # The boolean indexing [mask_sum > 0] drops anchors with no positives,
    # avoiding the 0/0 division.
    mask_sum = tf.reduce_sum(mask, axis=1)
    mean_log_prob_pos = tf.reduce_sum(
        mask * log_prob, axis=1)[mask_sum > 0] / mask_sum[mask_sum > 0]

    # loss
    loss = -(temperature / base_temperature) * mean_log_prob_pos
    # loss = tf.reduce_mean(tf.reshape(loss, [anchor_count, batch_size]))
    loss = tf.reduce_mean(loss)
    return loss
151 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | '''
2 | Script to run various two-stage supervised contrastive loss functions on
3 | MNIST or Fashion MNIST data.
4 |
5 | Author: Zichen Wang (wangzc921@gmail.com)
6 | '''
7 | import argparse
8 | import datetime
9 | import numpy as np
10 | import tensorflow as tf
11 | import tensorflow_addons as tfa
12 | import pandas as pd
13 | from sklearn.decomposition import PCA
14 | import matplotlib.pyplot as plt
15 | import seaborn as sns
16 |
17 | from model import *
18 | import losses
19 |
# Fix RNG seeds for reproducibility across numpy and TensorFlow.
SEED = 42
np.random.seed(SEED)
tf.random.set_seed(SEED)

# Maps each --loss CLI identifier to a human-readable name used in
# figure titles.
LOSS_NAMES = {
    'max_margin': 'Max margin contrastive',
    'npairs': 'Multiclass N-pairs',
    'sup_nt_xent': 'Supervised NT-Xent',
    'triplet-hard': 'Triplet hard',
    'triplet-semihard': 'Triplet semihard',
    'triplet-soft': 'Triplet soft'
}
32 |
33 |
def parse_option():
    '''Build and parse the command-line arguments for two-stage training.

    Returns:
        argparse.Namespace holding the training, loss, dataset, model
        architecture, and output options.
    '''
    p = argparse.ArgumentParser('arguments for two-stage training ')

    # training params
    p.add_argument('--batch_size_1', type=int, default=512,
                   help='batch size for stage 1 pretraining')
    p.add_argument('--batch_size_2', type=int, default=32,
                   help='batch size for stage 2 training')
    p.add_argument('--lr_1', type=float, default=0.5,
                   help='learning rate for stage 1 pretraining')
    p.add_argument('--lr_2', type=float, default=0.001,
                   help='learning rate for stage 2 training')
    p.add_argument('--epoch', type=int, default=20,
                   help='Number of epochs for training in stage1, the same number of epochs will be applied on stage2')
    p.add_argument('--optimizer', type=str, default='adam',
                   help='Optimizer to use, choose from ("adam", "lars", "sgd")')

    # loss functions
    p.add_argument('--loss', type=str, default='max_margin',
                   help='Loss function used for stage 1, choose from ("max_margin", "npairs", "sup_nt_xent", "triplet-hard", "triplet-semihard", "triplet-soft")')
    p.add_argument('--margin', type=float, default=1.0,
                   help='margin for tfa.losses.contrastive_loss. will only be used when --loss=max_margin')
    p.add_argument('--metric', type=str, default='euclidean',
                   help='distance metrics for tfa.losses.contrastive_loss, choose from ("euclidean", "cosine"). will only be used when --loss=max_margin')
    p.add_argument('--temperature', type=float, default=0.5,
                   help='temperature for sup_nt_xent loss. will only be used when --loss=sup_nt_xent')
    p.add_argument('--base_temperature', type=float, default=0.07,
                   help='base_temperature for sup_nt_xent loss. will only be used when --loss=sup_nt_xent')

    # dataset params
    p.add_argument('--data', type=str, default='mnist',
                   help='Dataset to choose from ("mnist", "fashion_mnist")')
    p.add_argument('--n_data_train', type=int, default=60000,
                   help='number of data points used for training both stage 1 and 2')

    # model architecture
    p.add_argument('--projection_dim', type=int, default=128,
                   help='output tensor dimension from projector')
    p.add_argument('--activation', type=str, default='leaky_relu',
                   help='activation function between hidden layers')

    # output options
    p.add_argument('--write_summary', action='store_true',
                   help='write summary for tensorboard')
    p.add_argument('--draw_figures', action='store_true',
                   help='produce figures for the projections')

    return p.parse_args()
91 |
92 |
93 | def main():
94 | args = parse_option()
95 | print(args)
96 |
97 | # check args
98 | if args.loss not in LOSS_NAMES:
99 | raise ValueError('Unsupported loss function type {}'.format(args.loss))
100 |
101 | if args.optimizer == 'adam':
102 | optimizer1 = tf.keras.optimizers.Adam(lr=args.lr_1)
103 | elif args.optimizer == 'lars':
104 | from lars_optimizer import LARSOptimizer
105 | # not compatible with tf2
106 | optimizer1 = LARSOptimizer(args.lr_1,
107 | exclude_from_weight_decay=['batch_normalization', 'bias'])
108 | elif args.optimizer == 'sgd':
109 | optimizer1 = tfa.optimizers.SGDW(learning_rate=args.lr_1,
110 | momentum=0.9,
111 | weight_decay=1e-4
112 | )
113 | optimizer2 = tf.keras.optimizers.Adam(lr=args.lr_2)
114 |
115 | model_name = '{}_model-bs_{}-lr_{}'.format(
116 | args.loss, args.batch_size_1, args.lr_1)
117 |
118 | # 0. Load data
119 | if args.data == 'mnist':
120 | mnist = tf.keras.datasets.mnist
121 | elif args.data == 'fashion_mnist':
122 | mnist = tf.keras.datasets.fashion_mnist
123 | print('Loading {} data...'.format(args.data))
124 | (x_train, y_train), (x_test, y_test) = mnist.load_data()
125 | x_train, x_test = x_train / 255.0, x_test / 255.0
126 | x_train = x_train.reshape(-1, 28*28).astype(np.float32)
127 | x_test = x_test.reshape(-1, 28*28).astype(np.float32)
128 | print(x_train.shape, x_test.shape)
129 |
130 | # simulate low data regime for training
131 | n_train = x_train.shape[0]
132 | shuffle_idx = np.arange(n_train)
133 | np.random.shuffle(shuffle_idx)
134 |
135 | x_train = x_train[shuffle_idx][:args.n_data_train]
136 | y_train = y_train[shuffle_idx][:args.n_data_train]
137 | print('Training dataset shapes after slicing:')
138 | print(x_train.shape, y_train.shape)
139 |
140 | train_ds = tf.data.Dataset.from_tensor_slices(
141 | (x_train, y_train)).shuffle(5000).batch(args.batch_size_1)
142 |
143 | train_ds2 = tf.data.Dataset.from_tensor_slices(
144 | (x_train, y_train)).shuffle(5000).batch(args.batch_size_2)
145 |
146 | test_ds = tf.data.Dataset.from_tensor_slices(
147 | (x_test, y_test)).batch(args.batch_size_1)
148 |
149 | # 1. Stage 1: train encoder with multiclass N-pair loss
150 | encoder = Encoder(normalize=True, activation=args.activation)
151 | projector = Projector(args.projection_dim,
152 | normalize=True, activation=args.activation)
153 |
154 | if args.loss == 'max_margin':
155 | def loss_func(z, y): return losses.max_margin_contrastive_loss(
156 | z, y, margin=args.margin, metric=args.metric)
157 | elif args.loss == 'npairs':
158 | loss_func = losses.multiclass_npairs_loss
159 | elif args.loss == 'sup_nt_xent':
160 | def loss_func(z, y): return losses.supervised_nt_xent_loss(
161 | z, y, temperature=args.temperature, base_temperature=args.base_temperature)
162 | elif args.loss.startswith('triplet'):
163 | triplet_kind = args.loss.split('-')[1]
164 | def loss_func(z, y): return losses.triplet_loss(
165 | z, y, kind=triplet_kind, margin=args.margin)
166 |
167 | train_loss = tf.keras.metrics.Mean(name='train_loss')
168 | test_loss = tf.keras.metrics.Mean(name='test_loss')
169 |
170 | # tf.config.experimental_run_functions_eagerly(True)
171 | @tf.function
172 | # train step for the contrastive loss
173 | def train_step_stage1(x, y):
174 | '''
175 | x: data tensor, shape: (batch_size, data_dim)
176 | y: data labels, shape: (batch_size, )
177 | '''
178 | with tf.GradientTape() as tape:
179 | r = encoder(x, training=True)
180 | z = projector(r, training=True)
181 | loss = loss_func(z, y)
182 |
183 | gradients = tape.gradient(loss,
184 | encoder.trainable_variables + projector.trainable_variables)
185 | optimizer1.apply_gradients(zip(gradients,
186 | encoder.trainable_variables + projector.trainable_variables))
187 | train_loss(loss)
188 |
189 | @tf.function
190 | def test_step_stage1(x, y):
191 | r = encoder(x, training=False)
192 | z = projector(r, training=False)
193 | t_loss = loss_func(z, y)
194 | test_loss(t_loss)
195 |
    # --- Stage 1: representation learning with the contrastive loss ---
    print('Stage 1 training ...')
    for epoch in range(args.epoch):
        # Reset the metrics at the start of the next epoch
        train_loss.reset_states()
        test_loss.reset_states()

        for x, y in train_ds:
            train_step_stage1(x, y)

        for x_te, y_te in test_ds:
            test_step_stage1(x_te, y_te)

        template = 'Epoch {}, Loss: {}, Test Loss: {}'
        print(template.format(epoch + 1,
                              train_loss.result(),
                              test_loss.result()))

    if args.draw_figures:
        # projecting data with the trained encoder, projector
        # NOTE(review): called with default training=False; equivalent to an
        # inference pass as long as the models have no train-only layers.
        x_tr_proj = projector(encoder(x_train))
        x_te_proj = projector(encoder(x_test))
        # convert tensor to np.array
        x_tr_proj = x_tr_proj.numpy()
        x_te_proj = x_te_proj.numpy()
        print(x_tr_proj.shape, x_te_proj.shape)

        # check learned embedding using PCA: fit on the train projections,
        # then visualize the first two components of the test projections
        pca = PCA(n_components=2)
        pca.fit(x_tr_proj)
        x_te_proj_pca = pca.transform(x_te_proj)

        x_te_proj_pca_df = pd.DataFrame(x_te_proj_pca, columns=['PC1', 'PC2'])
        x_te_proj_pca_df['label'] = y_test
        # PCA scatter plot, colored by class label
        fig, ax = plt.subplots()
        ax = sns.scatterplot('PC1', 'PC2',
                             data=x_te_proj_pca_df,
                             palette='tab10',
                             hue='label',
                             linewidth=0,
                             alpha=0.6,
                             ax=ax
                             )

        # Shrink the axes to make room for the legend on the right.
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        title = 'Data: {}\nEmbedding: {}\nbatch size: {}; LR: {}'.format(
            args.data, LOSS_NAMES[args.loss], args.batch_size_1, args.lr_1)
        ax.set_title(title)
        fig.savefig(
            'figs/PCA_plot_{}_{}_embed.png'.format(args.data, model_name))

        # density plot for PCA
        g = sns.jointplot('PC1', 'PC2', data=x_te_proj_pca_df,
                          kind="hex"
                          )
        plt.subplots_adjust(top=0.95)
        g.fig.suptitle(title)

        g.savefig(
            'figs/Joint_PCA_plot_{}_{}_embed.png'.format(args.data, model_name))
258 |
259 | # Stage 2: freeze the learned representations and then learn a classifier
260 | # on a linear layer using a softmax loss
261 | softmax = SoftmaxPred()
262 |
263 | train_loss = tf.keras.metrics.Mean(name='train_loss')
264 | train_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='train_ACC')
265 | test_loss = tf.keras.metrics.Mean(name='test_loss')
266 | test_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='test_ACC')
267 |
268 | cce_loss_obj = tf.keras.losses.SparseCategoricalCrossentropy(
269 | from_logits=True)
270 |
271 | @tf.function
272 | # train step for the 2nd stage
273 | def train_step(x, y):
274 | '''
275 | x: data tensor, shape: (batch_size, data_dim)
276 | y: data labels, shape: (batch_size, )
277 | '''
278 | with tf.GradientTape() as tape:
279 | r = encoder(x, training=False)
280 | y_preds = softmax(r, training=True)
281 | loss = cce_loss_obj(y, y_preds)
282 |
283 | # freeze the encoder, only train the softmax layer
284 | gradients = tape.gradient(loss,
285 | softmax.trainable_variables)
286 | optimizer2.apply_gradients(zip(gradients,
287 | softmax.trainable_variables))
288 | train_loss(loss)
289 | train_acc(y, y_preds)
290 |
291 | @tf.function
292 | def test_step(x, y):
293 | r = encoder(x, training=False)
294 | y_preds = softmax(r, training=False)
295 | t_loss = cce_loss_obj(y, y_preds)
296 | test_loss(t_loss)
297 | test_acc(y, y_preds)
298 |
    # Optionally log stage-2 metrics for TensorBoard; one run directory per
    # launch, keyed by timestamp.
    if args.write_summary:
        current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        train_log_dir = 'logs/{}/{}/{}/train'.format(
            model_name, args.data, current_time)
        test_log_dir = 'logs/{}/{}/{}/test'.format(
            model_name, args.data, current_time)
        train_summary_writer = tf.summary.create_file_writer(train_log_dir)
        test_summary_writer = tf.summary.create_file_writer(test_log_dir)

    # --- Stage 2: train the linear classifier on frozen representations ---
    print('Stage 2 training ...')
    for epoch in range(args.epoch):
        # Reset the metrics at the start of the next epoch
        train_loss.reset_states()
        train_acc.reset_states()
        test_loss.reset_states()
        test_acc.reset_states()

        for x, y in train_ds2:
            train_step(x, y)

        if args.write_summary:
            with train_summary_writer.as_default():
                tf.summary.scalar('loss', train_loss.result(), step=epoch)
                tf.summary.scalar('accuracy', train_acc.result(), step=epoch)

        for x_te, y_te in test_ds:
            test_step(x_te, y_te)

        if args.write_summary:
            with test_summary_writer.as_default():
                tf.summary.scalar('loss', test_loss.result(), step=epoch)
                tf.summary.scalar('accuracy', test_acc.result(), step=epoch)

        # Per-epoch console report (accuracies printed as percentages).
        template = 'Epoch {}, Loss: {}, Acc: {}, Test Loss: {}, Test Acc: {}'
        print(template.format(epoch + 1,
                              train_loss.result(),
                              train_acc.result() * 100,
                              test_loss.result(),
                              test_acc.result() * 100))
338 |
339 |
if __name__ == '__main__':
    # Script entry point.
    main()
342 |
--------------------------------------------------------------------------------
/main_ce_baseline.py:
--------------------------------------------------------------------------------
1 | '''
2 | Script to run baseline MLP with cross-entropy loss on
3 | MNIST or Fashion MNIST data.
4 |
5 | Author: Zichen Wang (wangzc921@gmail.com)
6 | '''
7 | import argparse
8 | import datetime
9 | import numpy as np
10 | import tensorflow as tf
11 | import tensorflow_addons as tfa
12 | import pandas as pd
13 | from sklearn.decomposition import PCA
14 | import matplotlib.pyplot as plt
15 | import seaborn as sns
16 |
17 | from model import *
18 |
# Fix both NumPy and TensorFlow seeds for reproducible shuffling and init.
SEED = 42
np.random.seed(SEED)
tf.random.set_seed(SEED)
22 |
23 |
def parse_option():
    """Parse command-line arguments for the baseline MLP training script."""
    parser = argparse.ArgumentParser('arguments for training baseline MLP')
    add = parser.add_argument

    # training params
    add('--batch_size', type=int, default=32,
        help='batch size training')
    add('--lr', type=float, default=0.001,
        help='learning rate training')
    add('--epoch', type=int, default=20,
        help='Number of epochs for training')

    # dataset params
    add('--data', type=str, default='mnist',
        help='Dataset to choose from ("mnist", "fashion_mnist")')
    add('--n_data_train', type=int, default=60000,
        help='number of data points used for training both stage 1 and 2')

    # model architecture
    add('--projection_dim', type=int, default=128,
        help='output tensor dimension from projector')
    add('--activation', type=str, default='leaky_relu',
        help='activation function between hidden layers')

    # output options
    add('--write_summary', action='store_true',
        help='write summary for tensorboard')
    add('--draw_figures', action='store_true',
        help='produce figures for the projections')

    return parser.parse_args()
60 |
61 |
def main():
    """Train the baseline MLP with cross-entropy and optionally plot embeddings.

    Workflow:
        0. Parse args; load and preprocess (Fashion-)MNIST; optionally
           sub-sample the training set to simulate a low-data regime.
        1. Train the MLP with a sparse categorical cross-entropy loss,
           optionally writing TensorBoard summaries per epoch.
        2. If --draw_figures, PCA-plot the last hidden layer on the test set.
    """
    args = parse_option()
    print(args)

    # `learning_rate` is the supported keyword; `lr` is a deprecated alias.
    optimizer = tf.keras.optimizers.Adam(learning_rate=args.lr)
    # 0. Load data
    if args.data == 'mnist':
        mnist = tf.keras.datasets.mnist
    elif args.data == 'fashion_mnist':
        mnist = tf.keras.datasets.fashion_mnist
    else:
        # Fail fast instead of hitting a NameError on `mnist` below.
        raise ValueError('Unknown dataset: {}'.format(args.data))
    print('Loading {} data...'.format(args.data))
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    # Scale pixels to [0, 1] and flatten each 28x28 image to a 784-d vector.
    x_train, x_test = x_train / 255.0, x_test / 255.0
    x_train = x_train.reshape(-1, 28*28).astype(np.float32)
    x_test = x_test.reshape(-1, 28*28).astype(np.float32)
    print(x_train.shape, x_test.shape)

    # simulate low data regime for training
    n_train = x_train.shape[0]
    shuffle_idx = np.arange(n_train)
    np.random.shuffle(shuffle_idx)

    x_train = x_train[shuffle_idx][:args.n_data_train]
    y_train = y_train[shuffle_idx][:args.n_data_train]
    print('Training dataset shapes after slicing:')
    print(x_train.shape, y_train.shape)

    train_ds = tf.data.Dataset.from_tensor_slices(
        (x_train, y_train)).shuffle(5000).batch(args.batch_size)

    test_ds = tf.data.Dataset.from_tensor_slices(
        (x_test, y_test)).batch(args.batch_size)

    # 1. the baseline MLP model
    mlp = MLP(normalize=True, activation=args.activation)
    # BUG FIX: MLP ends in a softmax layer, so it outputs probabilities,
    # not logits; `from_logits` must be False or the cross-entropy is
    # computed on a double-softmax.
    cce_loss_obj = tf.keras.losses.SparseCategoricalCrossentropy(
        from_logits=False)

    train_loss = tf.keras.metrics.Mean(name='train_loss')
    train_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='train_ACC')

    test_loss = tf.keras.metrics.Mean(name='test_loss')
    test_acc = tf.keras.metrics.SparseCategoricalAccuracy(name='test_ACC')

    @tf.function
    def train_step_baseline(x, y):
        # One optimization step on a single training batch.
        with tf.GradientTape() as tape:
            y_preds = mlp(x, training=True)
            loss = cce_loss_obj(y, y_preds)

        gradients = tape.gradient(loss,
                                  mlp.trainable_variables)
        optimizer.apply_gradients(zip(gradients,
                                      mlp.trainable_variables))

        train_loss(loss)
        train_acc(y, y_preds)

    @tf.function
    def test_step_baseline(x, y):
        # Forward pass only; accumulate evaluation loss and accuracy.
        y_preds = mlp(x, training=False)
        t_loss = cce_loss_obj(y, y_preds)
        test_loss(t_loss)
        test_acc(y, y_preds)

    model_name = 'baseline'
    if args.write_summary:
        # One TensorBoard run directory per launch, keyed by timestamp.
        current_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        train_log_dir = 'logs/%s/%s/%s/train' % (
            model_name, args.data, current_time)
        test_log_dir = 'logs/%s/%s/%s/test' % (
            model_name, args.data, current_time)
        train_summary_writer = tf.summary.create_file_writer(train_log_dir)
        test_summary_writer = tf.summary.create_file_writer(test_log_dir)

    for epoch in range(args.epoch):
        # Reset the metrics at the start of the next epoch
        train_loss.reset_states()
        train_acc.reset_states()
        test_loss.reset_states()
        test_acc.reset_states()

        for x, y in train_ds:
            train_step_baseline(x, y)

        if args.write_summary:
            with train_summary_writer.as_default():
                tf.summary.scalar('loss', train_loss.result(), step=epoch)
                tf.summary.scalar('accuracy', train_acc.result(), step=epoch)

        for x_te, y_te in test_ds:
            test_step_baseline(x_te, y_te)

        if args.write_summary:
            with test_summary_writer.as_default():
                tf.summary.scalar('loss', test_loss.result(), step=epoch)
                tf.summary.scalar('accuracy', test_acc.result(), step=epoch)

        # Per-epoch console report (accuracies printed as percentages).
        template = 'Epoch {}, Loss: {}, Acc: {}, Test Loss: {}, Test Acc: {}'
        print(template.format(epoch + 1,
                              train_loss.result(),
                              train_acc.result() * 100,
                              test_loss.result(),
                              test_acc.result() * 100))

    # 2. Check learned embedding
    if args.draw_figures:
        # Only compute the full-dataset hidden projections when the plots
        # are actually requested; this forward pass is wasted work otherwise.
        # get the projections from the last hidden layer before output
        x_tr_proj = mlp.get_last_hidden(x_train).numpy()
        x_te_proj = mlp.get_last_hidden(x_test).numpy()
        print(x_tr_proj.shape, x_te_proj.shape)
        # do PCA for the projected data: fit on train, visualize test
        pca = PCA(n_components=2)
        pca.fit(x_tr_proj)
        x_te_proj_pca = pca.transform(x_te_proj)

        x_te_proj_pca_df = pd.DataFrame(x_te_proj_pca, columns=['PC1', 'PC2'])
        x_te_proj_pca_df['label'] = y_test
        # PCA scatter plot, colored by class label
        fig, ax = plt.subplots()
        ax = sns.scatterplot('PC1', 'PC2',
                             data=x_te_proj_pca_df,
                             palette='tab10',
                             hue='label',
                             linewidth=0,
                             alpha=0.6,
                             ax=ax
                             )

        # Shrink the axes to make room for the legend on the right.
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
        title = 'Data: %s; Embedding: MLP' % args.data
        ax.set_title(title)
        fig.savefig('figs/PCA_plot_%s_MLP_last_layer.png' % args.data)
        # density plot for PCA
        g = sns.jointplot('PC1', 'PC2', data=x_te_proj_pca_df,
                          kind="hex"
                          )
        plt.subplots_adjust(top=0.95)
        g.fig.suptitle(title)
        g.savefig('figs/Joint_PCA_plot_%s_MLP_last_layer.png' % args.data)
207 |
208 |
if __name__ == '__main__':
    # Script entry point.
    main()
211 |
--------------------------------------------------------------------------------
/model.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 |
3 |
class UnitNormLayer(tf.keras.layers.Layer):
    '''Normalize vectors (euclidean norm) in batch to the unit hypersphere.

    Each row of the input is divided by its own L2 norm.
    '''

    def __init__(self):
        super(UnitNormLayer, self).__init__()

    def call(self, input_tensor):
        # keepdims keeps shape (batch, 1) so the division broadcasts
        # row-wise; the floor guards against division by zero (the previous
        # version produced NaNs for all-zero rows).
        norm = tf.norm(input_tensor, axis=1, keepdims=True)
        return input_tensor / tf.maximum(norm, 1e-12)
14 |
15 |
class DenseLeakyReluLayer(tf.keras.layers.Layer):
    '''A dense (linear) layer whose output is passed through a LeakyReLU.'''

    def __init__(self, n, alpha=0.3):
        super(DenseLeakyReluLayer, self).__init__()
        self.dense = tf.keras.layers.Dense(n, activation=None)
        self.lrelu = tf.keras.layers.LeakyReLU(alpha=alpha)

    def call(self, input_tensor):
        # Linear transform first, then the leaky rectifier.
        return self.lrelu(self.dense(input_tensor))
28 |
29 |
class Encoder(tf.keras.Model):
    '''An encoder network, E(·), which maps an augmented image x to a
    representation vector, r = E(x) ∈ R^{DE}. Optionally L2-normalizes r.
    '''

    def __init__(self, normalize=True, activation='relu'):
        super(Encoder, self).__init__(name='')
        if activation == 'leaky_relu':
            self.hidden1 = DenseLeakyReluLayer(256)
            self.hidden2 = DenseLeakyReluLayer(256)
        else:
            self.hidden1 = tf.keras.layers.Dense(256, activation=activation)
            self.hidden2 = tf.keras.layers.Dense(256, activation=activation)

        self.normalize = normalize
        if self.normalize:
            self.norm = UnitNormLayer()

    def call(self, input_tensor, training=False):
        # Two hidden layers, then an optional projection onto the unit sphere.
        h = self.hidden1(input_tensor, training=training)
        h = self.hidden2(h, training=training)
        return self.norm(h) if self.normalize else h
53 |
54 |
class Projector(tf.keras.Model):
    '''
    A projection network, P(·), which maps the normalized representation vector r into a vector z = P(r) ∈ R^{DP}
    suitable for computation of the contrastive loss.

    Args:
        n: output dimension of the projection (D_P).
        normalize: if True, L2-normalize the projected vectors.
        activation: activation for the layers; 'leaky_relu' selects
            DenseLeakyReluLayer, anything else is passed to Dense.
    '''

    def __init__(self, n, normalize=True, activation='relu'):
        super(Projector, self).__init__(name='')
        # BUG FIX: `n` was previously ignored — both layers were hard-coded
        # to 256 units, so the --projection_dim argument had no effect.
        # The second (output) layer now actually projects to `n` dimensions.
        if activation == 'leaky_relu':
            self.dense = DenseLeakyReluLayer(256)
            self.dense2 = DenseLeakyReluLayer(n)
        else:
            self.dense = tf.keras.layers.Dense(256, activation=activation)
            self.dense2 = tf.keras.layers.Dense(n, activation=activation)

        self.normalize = normalize
        if self.normalize:
            self.norm = UnitNormLayer()

    def call(self, input_tensor, training=False):
        x = self.dense(input_tensor, training=training)
        x = self.dense2(x, training=training)
        if self.normalize:
            x = self.norm(x)
        return x
80 |
81 |
class SoftmaxPred(tf.keras.Model):
    '''For stage 2, simply a softmax on top of the Encoder.

    Note: the Dense layer uses a softmax activation, so `call` returns class
    probabilities (not logits); pair it with a loss using from_logits=False.
    '''

    def __init__(self, num_classes=10):
        super(SoftmaxPred, self).__init__(name='')
        self.dense = tf.keras.layers.Dense(num_classes, activation='softmax')

    def call(self, input_tensor, training=False):
        # Returns (batch, num_classes) probabilities summing to 1 per row.
        return self.dense(input_tensor, training=training)
92 |
93 |
class MLP(tf.keras.Model):
    '''A simple baseline MLP with the same architecture as Encoder + Softmax/Regression output.

    Args:
        num_classes: number of output classes (classification mode).
        normalize: if True, L2-normalize the last hidden layer.
        regress: if True, use a single linear output unit instead of softmax.
        activation: activation for the hidden layers; 'leaky_relu' selects
            DenseLeakyReluLayer, anything else is passed to Dense.
    '''

    def __init__(self, num_classes=10, normalize=True, regress=False, activation='relu'):
        super(MLP, self).__init__(name='')
        if activation == 'leaky_relu':
            self.hidden1 = DenseLeakyReluLayer(256)
            self.hidden2 = DenseLeakyReluLayer(256)
        else:
            self.hidden1 = tf.keras.layers.Dense(256, activation=activation)
            self.hidden2 = tf.keras.layers.Dense(256, activation=activation)
        self.normalize = normalize
        if self.normalize:
            self.norm = UnitNormLayer()
        if not regress:
            # Classification head outputs probabilities (softmax), not logits.
            self.output_layer = tf.keras.layers.Dense(
                num_classes, activation='softmax')
        else:
            self.output_layer = tf.keras.layers.Dense(1)

    def _hidden_repr(self, input_tensor, training=False):
        # Shared hidden path used by both call() and get_last_hidden(),
        # removing the previous code duplication.
        x = self.hidden1(input_tensor, training=training)
        x = self.hidden2(x, training=training)
        if self.normalize:
            x = self.norm(x)
        return x

    def call(self, input_tensor, training=False):
        x = self._hidden_repr(input_tensor, training=training)
        return self.output_layer(x, training=training)

    def get_last_hidden(self, input_tensor):
        '''Get the last hidden layer (inference mode) before prediction.'''
        return self._hidden_repr(input_tensor, training=False)
131 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | absl-py==0.9.0
2 | appnope==0.1.0
3 | astor==0.8.1
4 | attrs==19.3.0
5 | backcall==0.1.0
6 | bleach==3.1.4
7 | cachetools==4.1.0
8 | certifi==2020.4.5.1
9 | chardet==3.0.4
10 | cycler==0.10.0
11 | decorator==4.4.2
12 | defusedxml==0.6.0
13 | entrypoints==0.3
14 | gast==0.2.2
15 | google-auth==1.14.1
16 | google-auth-oauthlib==0.4.1
17 | google-pasta==0.2.0
18 | grpcio==1.28.1
19 | h5py==2.10.0
20 | idna==2.9
21 | importlib-metadata==1.6.0
22 | ipykernel==5.2.1
23 | ipython==7.13.0
24 | ipython-genutils==0.2.0
25 | ipywidgets==7.5.1
26 | jedi==0.17.0
27 | Jinja2==2.11.2
28 | joblib==0.14.1
29 | jsonschema==3.2.0
30 | jupyter==1.0.0
31 | jupyter-client==6.1.3
32 | jupyter-console==6.1.0
33 | jupyter-core==4.6.3
34 | Keras-Applications==1.0.8
35 | Keras-Preprocessing==1.1.0
36 | kiwisolver==1.2.0
37 | Markdown==3.2.1
38 | MarkupSafe==1.1.1
39 | matplotlib==3.2.1
40 | mistune==0.8.4
41 | nbconvert==5.6.1
42 | nbformat==5.0.6
43 | notebook==6.1.5
44 | numpy==1.18.3
45 | oauthlib==3.1.0
46 | opt-einsum==3.2.1
47 | pandas==1.0.3
48 | pandocfilters==1.4.2
49 | parso==0.7.0
50 | pexpect==4.8.0
51 | pickleshare==0.7.5
52 | prometheus-client==0.7.1
53 | prompt-toolkit==3.0.5
54 | protobuf==3.11.3
55 | ptyprocess==0.6.0
56 | pyasn1==0.4.8
57 | pyasn1-modules==0.2.8
58 | Pygments==2.6.1
59 | pyparsing==2.4.7
60 | pyrsistent==0.16.0
61 | python-dateutil==2.8.1
62 | pytz==2019.3
63 | pyzmq==19.0.0
64 | qtconsole==4.7.3
65 | QtPy==1.9.0
66 | requests==2.23.0
67 | requests-oauthlib==1.3.0
68 | rsa==4.0
69 | scikit-learn==0.22.2.post1
70 | scipy==1.4.1
71 | seaborn==0.10.0
72 | Send2Trash==1.5.0
73 | six==1.14.0
74 | tensorboard==2.1.1
75 | tensorflow==2.3.1
76 | tensorflow-addons==0.9.1
77 | tensorflow-estimator==2.1.0
78 | termcolor==1.1.0
79 | terminado==0.8.3
80 | testpath==0.4.4
81 | tornado==6.0.4
82 | traitlets==4.3.3
83 | typeguard==2.7.1
84 | urllib3==1.25.9
85 | wcwidth==0.1.9
86 | webencodings==0.5.1
87 | Werkzeug==1.0.1
88 | widgetsnbextension==3.5.1
89 | wrapt==1.12.1
90 | zipp==3.1.0
91 |
--------------------------------------------------------------------------------
/supcontrast.py:
--------------------------------------------------------------------------------
1 | # From https://github.com/HobbitLong/SupContrast/blob/master/losses.py
2 | """
3 | Author: Yonglong Tian (yonglong@mit.edu)
4 | Date: May 07, 2020
5 | """
6 | from __future__ import print_function
7 |
8 | import torch
9 | import torch.nn as nn
10 |
11 |
class SupConLoss(nn.Module):
    """Supervised Contrastive Learning: https://arxiv.org/pdf/2004.11362.pdf.
    It also supports the unsupervised contrastive loss in SimCLR"""

    def __init__(self, temperature=0.07, contrast_mode='all',
                 base_temperature=0.07):
        # temperature: softmax temperature applied to the similarity logits.
        # contrast_mode: 'all' uses every view as an anchor; 'one' uses only
        #     the first view of each sample.
        # base_temperature: reference temperature used to rescale the loss.
        super(SupConLoss, self).__init__()
        self.temperature = temperature
        self.contrast_mode = contrast_mode
        self.base_temperature = base_temperature

    def forward(self, features, labels=None, mask=None):
        """Compute loss for model. If both `labels` and `mask` are None,
        it degenerates to SimCLR unsupervised loss:
        https://arxiv.org/pdf/2002.05709.pdf
        Args:
            features: hidden vector of shape [bsz, n_views, ...].
            labels: ground truth of shape [bsz].
            mask: contrastive mask of shape [bsz, bsz], mask_{i,j}=1 if sample j
                has the same class as sample i. Can be asymmetric.
        Returns:
            A loss scalar.
        """
        device = (torch.device('cuda')
                  if features.is_cuda
                  else torch.device('cpu'))

        if len(features.shape) < 3:
            raise ValueError('`features` needs to be [bsz, n_views, ...],'
                             'at least 3 dimensions are required')
        if len(features.shape) > 3:
            # Flatten any trailing feature dimensions into a single vector.
            features = features.view(features.shape[0], features.shape[1], -1)

        batch_size = features.shape[0]
        if labels is not None and mask is not None:
            raise ValueError('Cannot define both `labels` and `mask`')
        elif labels is None and mask is None:
            # Unsupervised (SimCLR) case: each sample is its own class.
            mask = torch.eye(batch_size, dtype=torch.float32).to(device)
        elif labels is not None:
            labels = labels.contiguous().view(-1, 1)
            if labels.shape[0] != batch_size:
                raise ValueError(
                    'Num of labels does not match num of features')
            # mask[i, j] = 1 iff samples i and j share a label.
            mask = torch.eq(labels, labels.T).float().to(device)
        else:
            mask = mask.float().to(device)

        contrast_count = features.shape[1]
        # Stack all views into one [bsz * n_views, dim] tensor.
        contrast_feature = torch.cat(torch.unbind(features, dim=1), dim=0)
        if self.contrast_mode == 'one':
            anchor_feature = features[:, 0]
            anchor_count = 1
        elif self.contrast_mode == 'all':
            anchor_feature = contrast_feature
            anchor_count = contrast_count
        else:
            raise ValueError('Unknown mode: {}'.format(self.contrast_mode))

        # compute logits (temperature-scaled pairwise dot products)
        anchor_dot_contrast = torch.div(
            torch.matmul(anchor_feature, contrast_feature.T),
            self.temperature)
        # for numerical stability: subtract the per-row max before exp
        logits_max, _ = torch.max(anchor_dot_contrast, dim=1, keepdim=True)
        logits = anchor_dot_contrast - logits_max.detach()

        # tile mask to cover all anchor/contrast view combinations
        mask = mask.repeat(anchor_count, contrast_count)
        # mask-out self-contrast cases (zero on the diagonal entries)
        logits_mask = torch.scatter(
            torch.ones_like(mask),
            1,
            torch.arange(batch_size * anchor_count).view(-1, 1).to(device),
            0
        )
        mask = mask * logits_mask

        # compute log_prob over all non-self contrast pairs
        exp_logits = torch.exp(logits) * logits_mask
        log_prob = logits - torch.log(exp_logits.sum(1, keepdim=True))

        # compute mean of log-likelihood over positive
        # NOTE(review): if an anchor has no positives (mask.sum(1) == 0,
        # e.g. a class with a single example in the batch), this divides by
        # zero and the loss becomes NaN — see test_supcontrast_loss.py.
        mean_log_prob_pos = (mask * log_prob).sum(1) / mask.sum(1)

        # loss, rescaled by temperature / base_temperature and averaged
        loss = - (self.temperature / self.base_temperature) * mean_log_prob_pos
        loss = loss.view(anchor_count, batch_size).mean()

        return loss
101 |
--------------------------------------------------------------------------------
/test_supcontrast_loss.py:
--------------------------------------------------------------------------------
1 | from losses import supervised_nt_xent_loss
2 | from supcontrast import SupConLoss
3 | import torch
4 | import tensorflow as tf
5 | import unittest
6 | import numpy as np
7 | np.random.seed(42)
8 |
9 |
class TestSupContrastLoss(unittest.TestCase):
    '''Check that the TensorFlow implementation of the supervised contrastive
    loss (supervised_nt_xent_loss) agrees with the reference Torch
    implementation (SupConLoss).
    '''

    @staticmethod
    def _random_unit_rows(n_rows, n_cols=128):
        # Draw a random matrix and scale every row to unit L2 norm.
        mat = np.random.randn(n_rows, n_cols)
        mat /= np.linalg.norm(mat, axis=1).reshape(-1, 1)
        return mat.astype(np.float32)

    def setUp(self):
        self.batch_size = 128
        self.X = self._random_unit_rows(self.batch_size)
        self.y = np.random.choice(np.arange(10), self.batch_size, replace=True)

        # very small batch where there could be only class with only one example
        self.batch_size_s = 8
        self.X_s = self._random_unit_rows(self.batch_size_s)
        self.y_s = np.random.choice(
            np.arange(10), self.batch_size_s, replace=True)

        self.temperature = 0.5
        self.base_temperature = 0.07

    def _tf_loss(self, X, y):
        # TensorFlow implementation under test.
        return supervised_nt_xent_loss(tf.constant(X),
                                       tf.constant(y),
                                       temperature=self.temperature,
                                       base_temperature=self.base_temperature)

    def _torch_loss(self, X, y, batch_size):
        # Reference Torch implementation; expects [bsz, n_views, dim].
        scl = SupConLoss(temperature=self.temperature,
                         base_temperature=self.base_temperature)
        return scl.forward(features=torch.Tensor(X.reshape(batch_size, 1, 128)),
                           labels=torch.Tensor(y))

    def test_nt_xent_loss_equals_sup_con_loss(self):
        l1 = self._tf_loss(self.X, self.y)
        l2 = self._torch_loss(self.X, self.y, self.batch_size)
        print('\nLosses from normal batch size={}:'.format(self.batch_size))
        print('l1 = {}'.format(l1.numpy()))
        print('l2 = {}'.format(l2.numpy()))
        self.assertTrue(np.allclose(l1.numpy(), l2.numpy()))

    def test_nt_xent_loss_and_sup_con_loss_small_batch(self):
        # on very small batch, the SupConLoss would return NaN
        # whereas supervised_nt_xent_loss will ignore those classes
        l1 = self._tf_loss(self.X_s, self.y_s)
        l2 = self._torch_loss(self.X_s, self.y_s, self.batch_size_s)
        print('\nLosses from small batch size={}:'.format(self.batch_size_s))
        print('l1 = {}'.format(l1.numpy()))
        print('l2 = {}'.format(l2.numpy()))
        self.assertTrue(np.isfinite(l1.numpy()))
        self.assertTrue(np.isnan(l2.numpy()))
71 |
72 |
if __name__ == "__main__":
    # Allow running this test module directly: `python test_supcontrast_loss.py`.
    unittest.main()
75 |
--------------------------------------------------------------------------------