├── Lab-2 ├── Thyroid.ipynb ├── Thyroid2 (1).ipynb ├── Thyroid2.ipynb ├── kali └── lab_2 (1).ipynb ├── Lab-3 ├── Lab03 ├── Lab3-comm.ipynb └── Lab3.ipynb ├── Lab-4 ├── Lab-4-comm.ipynb ├── Lab-4-updated.ipynb ├── Lab-4.ipynb └── ss ├── Lab-5 ├── Lab-5 (1).ipynb ├── Lab-5.ipynb └── sss ├── Lab-7 ├── Lab-7.ipynb └── ss ├── Lab6 ├── Lab-6 (1).ipynb └── ss ├── Lab_1 (1).ipynb └── lab_2.ipynb /Lab-2/kali: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /Lab-2/lab_2 (1).ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 18, 6 | "metadata": { 7 | "colab": { 8 | "base_uri": "https://localhost:8080/" 9 | }, 10 | "id": "uzfbpsfgPudm", 11 | "outputId": "c65f5337-21a7-4511-bca2-1892d49eed9e" 12 | }, 13 | "outputs": [ 14 | { 15 | "name": "stdout", 16 | "output_type": "stream", 17 | "text": [ 18 | "Dimensionality of the vector space: 3\n", 19 | "Number of vectors in the vector space: 10\n", 20 | "Rank of Matrix A: 3\n", 21 | "\n", 22 | "Matrix A:\n", 23 | "[[20 6 2]\n", 24 | " [16 3 6]\n", 25 | " [27 6 2]\n", 26 | " [19 1 2]\n", 27 | " [24 4 2]\n", 28 | " [22 1 5]\n", 29 | " [15 4 2]\n", 30 | " [18 4 2]\n", 31 | " [21 1 4]\n", 32 | " [16 2 4]]\n", 33 | "\n", 34 | "Matrix C:\n", 35 | "[[386]\n", 36 | " [289]\n", 37 | " [393]\n", 38 | " [110]\n", 39 | " [280]\n", 40 | " [167]\n", 41 | " [271]\n", 42 | " [274]\n", 43 | " [148]\n", 44 | " [198]]\n", 45 | "\n", 46 | "Pseudo-Inverse of A:\n", 47 | "[[-0.01008596 -0.03124505 0.01013951 0.0290728 0.0182907 0.01161794\n", 48 | " -0.00771348 0.00095458 0.01743623 -0.00542016]\n", 49 | " [ 0.09059668 0.07263726 0.03172933 -0.09071908 -0.01893196 -0.06926996\n", 50 | " 0.05675464 0.03152577 -0.07641966 0.00357352]\n", 51 | " [ 0.00299878 0.15874243 -0.05795468 -0.06609024 -0.06295043 0.03348017\n", 52 | " 
0.01541831 -0.01070461 0.00029003 0.05938755]]\n", 53 | "\n", 54 | "Cost of each product available for sale (Matrix X):\n", 55 | "[[ 1.]\n", 56 | " [55.]\n", 57 | " [18.]]\n" 58 | ] 59 | } 60 | ], 61 | "source": [ 62 | "import numpy as np\n", 63 | "import pandas as pd\n", 64 | "\n", 65 | "df = pd.read_excel('Lab Session Data.xlsx', sheet_name=0)\n", 66 | "df = df.iloc[:,:5]\n", 67 | "A = df[['Candies (#)', 'Mangoes (Kg)', 'Milk Packets (#)']].to_numpy()\n", 68 | "C = df['Payment (Rs)'].to_numpy()\n", 69 | "\n", 70 | "C = C.reshape(-1, 1)\n", 71 | "\n", 72 | "dimensionality = A.shape[1]\n", 73 | "print(f\"Dimensionality of the vector space: {dimensionality}\")\n", 74 | "\n", 75 | "num_vectors = A.shape[0]\n", 76 | "print(f\"Number of vectors in the vector space: {num_vectors}\")\n", 77 | "\n", 78 | "rank_A = np.linalg.matrix_rank(A)\n", 79 | "print(f\"Rank of Matrix A: {rank_A}\")\n", 80 | "\n", 81 | "A_pseudo_inv = np.linalg.pinv(A)\n", 82 | "\n", 83 | "X = A_pseudo_inv @ C\n", 84 | "\n", 85 | "print(\"\\nMatrix A:\")\n", 86 | "print(A)\n", 87 | "\n", 88 | "print(\"\\nMatrix C:\")\n", 89 | "print(C)\n", 90 | "\n", 91 | "print(\"\\nPseudo-Inverse of A:\")\n", 92 | "print(A_pseudo_inv)\n", 93 | "\n", 94 | "print(\"\\nCost of each product available for sale (Matrix X):\")\n", 95 | "print(X)\n" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 19, 101 | "metadata": { 102 | "id": "miX7qSvFQVS4" 103 | }, 104 | "outputs": [], 105 | "source": [ 106 | "df['Class'] = np.where(df['Payment (Rs)'] > 200, 'RICH', 'POOR')" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": 20, 112 | "metadata": {}, 113 | "outputs": [ 114 | { 115 | "name": "stdout", 116 | "output_type": "stream", 117 | "text": [ 118 | " Customer Candies (#) Mangoes (Kg) Milk Packets (#) Payment (Rs) Class\n", 119 | "0 C_1 20 6 2 386 RICH\n", 120 | "1 C_2 16 3 6 289 RICH\n", 121 | "2 C_3 27 6 2 393 RICH\n", 122 | "3 C_4 19 1 2 110 POOR\n", 123 | "4 C_5 24 4 2 280 
RICH\n", 124 | "5 C_6 22 1 5 167 POOR\n", 125 | "6 C_7 15 4 2 271 RICH\n", 126 | "7 C_8 18 4 2 274 RICH\n", 127 | "8 C_9 21 1 4 148 POOR\n", 128 | "9 C_10 16 2 4 198 POOR\n" 129 | ] 130 | } 131 | ], 132 | "source": [ 133 | "print(df)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 21, 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stdout", 143 | "output_type": "stream", 144 | "text": [ 145 | "Mean Price: 1560.663453815261\n", 146 | "Variance Price: 58732.365352539186\n" 147 | ] 148 | } 149 | ], 150 | "source": [ 151 | "#4Q\n", 152 | "#i)\n", 153 | "import pandas as pd\n", 154 | "import statistics\n", 155 | "\n", 156 | "\n", 157 | "df = pd.read_excel('Lab Session Data.xlsx', sheet_name=1)\n", 158 | "\n", 159 | "\n", 160 | "prices = df['Price']\n", 161 | "\n", 162 | "\n", 163 | "mean_price = statistics.mean(prices)\n", 164 | "variance_price = statistics.variance(prices)\n", 165 | "\n", 166 | "print(f\"Mean Price: {mean_price}\")\n", 167 | "print(f\"Variance Price: {variance_price}\")\n" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 22, 173 | "metadata": {}, 174 | "outputs": [ 175 | { 176 | "name": "stdout", 177 | "output_type": "stream", 178 | "text": [ 179 | "Mean Price on Wednesdays: 1550.706\n", 180 | "Population Mean Price: 1560.663453815261\n" 181 | ] 182 | } 183 | ], 184 | "source": [ 185 | "#4Q\n", 186 | "#ii)\n", 187 | "wednesdays = df[df['Day'] == 'Wed']\n", 188 | "wednesday_prices = wednesdays['Price']\n", 189 | "\n", 190 | "mean_wednesday_price = statistics.mean(wednesday_prices)\n", 191 | "\n", 192 | "print(f\"Mean Price on Wednesdays: {mean_wednesday_price}\")\n", 193 | "print(f\"Population Mean Price: {mean_price}\")\n" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 23, 199 | "metadata": {}, 200 | "outputs": [ 201 | { 202 | "name": "stdout", 203 | "output_type": "stream", 204 | "text": [ 205 | "Mean Price in April: 1698.9526315789474\n", 206 | 
"Population Mean Price: 1560.663453815261\n" 207 | ] 208 | } 209 | ], 210 | "source": [ 211 | "#4Q\n", 212 | "#iii)\n", 213 | "# Filter for April\n", 214 | "april_data = df[df['Month'] == 'Apr']\n", 215 | "april_prices = april_data['Price']\n", 216 | "\n", 217 | "# Calculate sample mean for April\n", 218 | "mean_april_price = statistics.mean(april_prices)\n", 219 | "\n", 220 | "print(f\"Mean Price in April: {mean_april_price}\")\n", 221 | "print(f\"Population Mean Price: {mean_price}\")\n" 222 | ] 223 | }, 224 | { 225 | "cell_type": "code", 226 | "execution_count": 24, 227 | "metadata": {}, 228 | "outputs": [ 229 | { 230 | "name": "stdout", 231 | "output_type": "stream", 232 | "text": [ 233 | "Probability of Making a Loss: 0.4979919678714859\n" 234 | ] 235 | } 236 | ], 237 | "source": [ 238 | "#4Q\n", 239 | "#iV)\n", 240 | "df['Chg%'] = df['Chg%'].astype(float) / 100\n", 241 | "loss_prob = len(df[df['Chg%'] < 0]) / len(df)\n", 242 | "\n", 243 | "print(f\"Probability of Making a Loss: {loss_prob}\")\n" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 25, 249 | "metadata": {}, 250 | "outputs": [ 251 | { 252 | "name": "stdout", 253 | "output_type": "stream", 254 | "text": [ 255 | "Probability of Making a Profit on Wednesdays: 0.42\n" 256 | ] 257 | } 258 | ], 259 | "source": [ 260 | "#4Q\n", 261 | "#V)\n", 262 | "wednesday_chg = wednesdays['Chg%']\n", 263 | "\n", 264 | "profit_prob_wed = len(wednesday_chg[wednesday_chg > 0]) / len(wednesday_chg)\n", 265 | "\n", 266 | "print(f\"Probability of Making a Profit on Wednesdays: {profit_prob_wed}\")\n" 267 | ] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": 26, 272 | "metadata": {}, 273 | "outputs": [ 274 | { 275 | "name": "stdout", 276 | "output_type": "stream", 277 | "text": [ 278 | "Conditional Probability of Making a Profit Given that Today is Wednesday: 0.42\n" 279 | ] 280 | } 281 | ], 282 | "source": [ 283 | "#4Q\n", 284 | "#Vi)\n", 285 | "\n", 286 | "wednesday_data = 
df[df['Day'] == 'Wed']\n", 287 | "\n", 288 | "\n", 289 | "wednesday_count = len(wednesday_data)\n", 290 | "wednesday_profit_count = len(wednesday_data[wednesday_data['Chg%'] > 0])\n", 291 | "\n", 292 | "conditional_prob = wednesday_profit_count / wednesday_count\n", 293 | "\n", 294 | "print(f\"Conditional Probability of Making a Profit Given that Today is Wednesday: {conditional_prob}\")\n" 295 | ] 296 | }, 297 | { 298 | "cell_type": "code", 299 | "execution_count": 27, 300 | "metadata": {}, 301 | "outputs": [ 302 | { 303 | "data": { 304 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAl0AAAHFCAYAAADIX0yYAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABxx0lEQVR4nO3deVxUVf8H8M8Aw44oIgwoIO4S5L7ghmgsmkuZS1mEueRSmZq5ZOb2K9PSqFzIHtestB6XsozAXHIBxT3ULHtwyUBUEAQEBub8/vBhHsdhuaMzl5nx8369eOmc+d57z8yZ5TvnnHuuQgghQEREREQmZVPTFSAiIiJ6FDDpIiIiIpIBky4iIiIiGTDpIiIiIpIBky4iIiIiGTDpIiIiIpIBky4iIiIiGTDpIiIiIpIBky4iIiIiGTDpsmKHDx/G008/DX9/fzg4OMDb2xuhoaF44403THbMQ4cOYe7cubh165befStWrMC6detMduyK9OzZEwqFQvvn5OSEVq1aIS4uDhqNRhs3YsQINGzY8IGOYarHVVJSgnHjxsHHxwe2trZo3bp1tdvs2LED/fv3h7e3N+zt7eHh4YHevXvjyy+/hFqtBgBcvHgRCoUCH374odHqKoTAnDlzUL9+fXh5eWHixIkoLi7WicnNzYWvry/WrFkjeb/3tp2trS3q1KmDVq1aYezYsUhJSXmoOr/33nvYvn37Q+2jOm3btjX6c12dh3ktS/HVV18hLi5Ocnxl74+9e/dCoVDg3//+t/EqV4Hy13v5n1KpRN26ddGhQwdMnjwZZ86cMenxjcGQz4LK2scU7/v7lZWVoXbt2ujTp4/efR999BEUCgWee+45vfsWLFgAhUKB06dPG71OcjxugwiySj/88IOwsbERvXr1El9//bXYu3ev+Prrr8Ubb7wh6tevb7LjfvDBBwKASE9P17vvscceE2FhYSY7dkXCwsJEo0aNRHJyskhOThbfffed6NOnjwAgpk2bpo2LjY0VAQEBD3QMUz2uuLg4AUB8+umn4tChQ+L06dOVxmo0GjFixAgBQPTt21ds3LhR7Nu3T3z//fdi8uTJolatWiIuLk4IIUR6eroAID744AOj1XX9+vXCxcVFrF27VnzzzTfCy8tLLFiwQCdm/PjxIiwsTGg0Gsn7BSAGDx4skpOTxaFDh0RCQoL48MMPxeOPPy4AiIkTJz5wnV1cXERsbOwDb1+dEydOCAACgGjRooXJjnO/CxcuiOPHj5ts/08++aRB75XK3h979uwRAMS3335rvMpVoPz1/tprr4nk5GRx8OBB8eOPP4r/+7//E40aNRK2trZi8eLFJq3DwzLks6Cy9jHF+74i/fr1E66urkKtVuuUDxgwQLi4uAhvb2+9bXr16iXq1q1r0GeDVHI9bqmYdFmpHj16iMaNG+u98IUQoqyszGTH
lTvp0mg0orCwsNL7w8LCxGOPPaZTVlJSIho1aiScnZ1FSUmJEMI8k67Ro0cLJycnSbGLFi0SAMS8efMqvD8jI0Ps379fCGGaD6GhQ4eKl19+WXv73XffFZ06ddLePnTokHBychK///67QfsFIF555RW98tLSUjFy5EgBQKxYseKB6mzqpOuVV14RAMSTTz4pAIiDBw+a7FhystSkq6LXe2FhoYiOjhYAxM6dO01aj4dhyGdBTSddS5YsEQBEcnKytqysrEzUqVNHTJ06VQAQZ8+e1d5XXFwsnJycxDPPPGOS+phb0sXhRSt18+ZNeHp6ws7OTu8+Gxv9Zv/qq68QGhoKV1dXuLq6onXr1li9erX2/qSkJAwcOBANGjSAo6MjmjRpgrFjx+LGjRvamLlz5+LNN98EAAQGBmq78/fu3YuGDRvizJkz2Ldvn7b83iGQvLw8TJ06FYGBgbC3t0f9+vUxadIkFBQU6NRToVDg1VdfRXx8PFq2bAkHBwesX7/eoOdGqVSiXbt2KCwsxPXr1yuNKyoqwsyZM3Xq9Morr+gMnVb3uB50vwqFAv/6179w584d7X4rG8JUq9VYtGgRWrRogdmzZ1cYo1Kp0K1bN73ypUuXIjAwEK6urggNDa1wyO7zzz9Hs2bN4ODggKCgIHz11Vd6Q1hFRUVwcXHR3nZ1dUVRUZG2fi+//DJmzJiB5s2bV/ncSGVra4tly5bB09MTH3zwgU493njjDbRu3Rru7u7w8PBAaGgovvvuO53tFQoFCgoKsH79eu3z27NnTwDA9evXMWHCBAQFBcHV1RVeXl7o1asX9u/fL7l+RUVF+Oqrr9CuXTt89NFHAFDpsOp3332Hxx9/HA4ODmjUqBE+/vhjzJ07FwqFQidu+fLl6NGjB7y8vODi4oKQkBAsXrxYO2xcrqLhxfL3zRdffIGWLVvC2dkZrVq1wg8//KATd/36dbz88svw8/ODg4MD6tWrh65du2LXrl0A7g7X//jjj7h06ZLOkF1lpLw/1Go1Zs2aBV9fX9SqVQtPPPEEzp8/r7evXbt2oXfv3qhVqxacnZ3RtWtX/PLLL5UeWwonJyesXr0aSqVS53Uk5TUghEDTpk0RFRWlt9/8/Hy4u7vjlVdeqfL4xv4skNo+Ut73R48exYABA+Dh4QFHR0e0adMG33zzTZWPBwDCw8MB3B0+Lnfq1Cnk5OTg5Zdfho+PD/bs2aO97/Dhw7hz5452O0OOnZmZibFjx6JBgwawt7dHYGAg5s2bh9LS0irrqFarERsbC1dXV733gMnVdNZHpjF69Ghtl3pKSoq2R6cis2fPFgDEoEGDxLfffisSExPF0qVLxezZs7UxK1euFAsXLhTff/+92Ldvn1i/fr1o1aqVaN68uXbfV65cEa+99poAILZu3aod0svNzRXHjx8XjRo1Em3atNGWlw+BFBQUiNatWwtPT0+xdOlSsWvXLvHxxx8Ld3d30atXL50uZwCifv364vHHHxdfffWV2L17t0hLS6v0sVXU0yWEEG3bthV2dnbaXrL7e7o0Go2IiooSdnZ2Yvbs2SIxMVF8+OGHwsXFRbRp00YUFRUJIUSVj6siUvebnJws+vbtK5ycnLT7zcrKqnCfhw4dEgDE9OnTKz3uvcp/+TVs2FBER0eL7du3i+3bt4uQkBBRp04dcevWLW3sZ599JgCIZ555Rvzwww/iyy+/FM2aNRMBAQE6z9f7778vGjRoINLS0sTFixdFcHCwGD9+vBDibq9XixYtRHFxsaT63QuV9HSVe/bZZwUAceXKFSGEELdu3RIjRowQX3zxhdi9e7dISEgQU6dOFTY2NmL9+vXa7ZKTk4WTk5Po27ev9vk9c+aMEEKI33//XYwfP15s2rRJ7N27V/zwww9i1KhRwsbGRuzZs0dSvb/88ksBQCxfvlwIIUS3bt2Eq6uruH37tk7cTz/9JGxsbETPnj3Ftm3bxLfffis6deokGjZsKO7/eJ48ebJY
uXKlSEhIELt37xYfffSR8PT0FC+99JJOXEW9tuXt3bFjR/HNN9+InTt3ip49ewo7Ozvx119/aeOioqJEvXr1xKpVq8TevXvF9u3bxTvvvCM2bdokhBDizJkzomvXrkKlUmmft3t7NO5X1fujvKerYcOG4vnnnxc//vij+Prrr4W/v79o2rSpKC0t1e7niy++EAqFQjz11FNi69atYseOHaJfv37C1tZW7Nq1q8q2kNLT0blzZ+Hg4KAdGZD6Gvj444+FQqEQf/zxh87+li9fLgBoX1MVMcVnQVXtY8j7fvfu3cLe3l50795dbN68WSQkJGinL6xdu7bK57u8VysyMlJbtmTJEuHj4yOEEGLYsGFiyJAh2vvmzZun81xJPXZGRobw8/MTAQEB4rPPPhO7du0SCxYsEA4ODmLEiBHauPvbPycnR4SHhwuVSiWOHj1a5WMxBSZdVurGjRuiW7du2jklSqVSdOnSRSxcuFDng/8///mPsLW1Fc8//7zkfWs0GqFWq8WlS5cEAPHdd99p73uQ4cWFCxcKGxsbkZqaqlP+73//W6/bH4Bwd3cX2dnZkupannSp1WqhVqvFP//8I2bMmCEA6Lzx7/+iSkhIEAD05nps3rxZABCrVq2q9nFVxJD9xsbGChcXl2r3uWnTJgFAxMfHS6pD+YdQSEiIzhfbkSNHBADx9ddfCyHufniqVCqdYUIhhLh06ZJQKpU6z1dBQYF2mAaA6NSpk7h27Zr4888/hbOzs/j1118l1e1+1SVd06dPFwDE4cOHK7y/tLRUqNVqMWrUKNGmTRud+6QOL5bvo3fv3uLpp5+WVO9evXoJR0dHkZOTI4QQYu3atQKAWL16tU5chw4dhJ+fn05Cevv2bVG3bl29pOteZWVlQq1Wiw0bNghbW1ud90NlSZe3t7fIy8vTlmVmZgobGxuxcOFCbZmrq6uYNGlSlY/N2MOLffv21Sn/5ptvdIanCgoKhIeHh+jfv79OXFlZmWjVqpXo2LFjlceXknQNGzZMABDXrl2r8P7KXgN5eXnCzc1NvP766zrxQUFBIjw8vMp6meKzQIjqhxere98LIUSLFi1EmzZt9Kan9OvXT/j4+FQ7ReWpp54SLi4u2u379+8vnn32WSGEECtWrBD16tXT/pgODw8XXl5eBh977NixwtXVVVy6dEkn7sMPP9RJ4u5t//T0dBEUFCSCgoLExYsXq3wMpsLhRStVt25d7N+/H6mpqXj//fcxcOBA/PHHH5g5cyZCQkK0w4JJSUkoKyurths8KysL48aNg5+fH+zs7KBUKhEQEAAAOHfu3EPV9YcffkBwcDBat26N0tJS7V9UVJR2ePJevXr1Qp06dSTv/8yZM1AqlVAqlfD19cWSJUvw/PPP4/PPP690m927dwO4O1RzryFDhsDFxeWBhzVMtd8H8eSTT8LW1lZ7+/HHHwcAXLp0CQBw/vx5ZGZmYujQoTrb+fv7o2vXrjplzs7O+Omnn/D333/j4sWLSElJgZeXF8aNG4fnn38e3bt3x759+9C+fXvUrl0bYWFhSEtLe+jHIITQK/v222/RtWtXuLq6al+rq1evNuh1Gh8fj7Zt28LR0VG7j19++UXSPtLT07Fnzx4MGjQItWvXBnC3fd3c3HSGGAsKCnD06FE89dRTsLe315a7urqif//+evs9ceIEBgwYgLp168LW1hZKpRIvvvgiysrK8Mcff1Rbr/DwcLi5uWlve3t7w8vLS9veANCxY0esW7cO//d//4eUlBS9oUtTGDBggM7t+1+Hhw4dQnZ2NmJjY3U+HzQaDaKjo5Gamqo3DcFQFb2OpLwG3Nzc8NJLL2HdunXaOuzevRtnz57Fq6++WuUxa+qzoLr3/YULF/D777/j+eefBwCd57xv377IyMiocPj3XuHh4SgoKEBqaio0Gg3279+vHb4PCwvD9evXcebMGRQXFyMlJUU7tGjIsX/44QeEh4fD19dXJ678zMl9+/bp1On4
8ePo3LkzvL29cfDgQe33l9yYdFm59u3bY/r06fj222/xzz//YPLkybh48SIWL14MANo5TQ0aNKh0HxqNBpGRkdi6dSumTZuGX375BUeOHNHOA7hz585D1fHatWs4ffq0NjEq/3Nzc4MQQmfeGAD4+PgYtP/GjRsjNTUVR48eRVpaGm7duoWNGzfC3d290m1u3rwJOzs71KtXT6dcoVBApVLh5s2bBtXBlPv19/cHcPfL3hB169bVue3g4ADgf+1ZXhdvb2+9bSsqA4D69etrP8w2bNiAtLQ0LFq0CDdv3sRTTz2FcePGISMjA927d8fTTz/90F/q5V8Uvr6+AICtW7di6NChqF+/PjZu3Ijk5GSkpqZi5MiR2jlm1Vm6dCnGjx+PTp06YcuWLUhJSUFqaiqio6MlvdbXrFkDIQQGDx6MW7du4datW1Cr1RgwYAAOHjyI33//HQCQk5MDIYSk5/fy5cvo3r07rl69io8//lj7g2r58uUApL0H729v4G6b37vt5s2bERsbi3/9618IDQ2Fh4cHXnzxRWRmZla7/wdV3evw2rVrAIDBgwfrfUYsWrQIQghkZ2c/VB0uXboEBwcHeHh4ADDsNfDaa6/h9u3b+PLLLwEAy5YtQ4MGDTBw4MAqj2mqz5jqSH2+p06dqvd8T5gwAQD0PpPvV55E7dmzBydOnMCtW7cQFhYGAAgKCkK9evWwd+9epKSk6MznMuTY165dw44dO/TiHnvssQrrmJSUhGvXrmH06NHaH0M1QX+WNVktpVKJOXPm4KOPPtL2MpS/4f/++2/4+flVuF1aWhpOnTqFdevWITY2Vlt+4cIFo9TL09MTTk5OlU409vT01Lld1cTdijg6OqJ9+/YGbVO3bl2Ulpbi+vXrOh+KQghkZmaiQ4cOBu3PlPtt3749PDw88N1332HhwoUGPz9V1RX43wfhvar7Er558ybeeOMNfPrpp6hTpw5++OEH2NjYYPTo0QCAadOm4d1338Uff/yh/ZA01J07d7Br1y40btxY+6Nh48aNCAwMxObNm3Weh/vXDKvKxo0b0bNnT6xcuVKn/Pbt29Vuq9FotJOcBw0aVGHMmjVrsHjxYtSpUwcKhULS87t9+3YUFBRg69atOr/QT548WW2dDOHp6Ym4uDjExcXh8uXL+P777zFjxgxkZWUhISHBqMcypE4A8Omnn6Jz584VxlT2I0CKq1ev4tixYwgLC9OeeGTIa6BJkybo06cPli9fjj59+uD777/HvHnzdHqTKmKqz5iHVf58z5w5s9LXcHUnxAQHB2sTq/I1Ilu0aKG9v0ePHtizZ482sSxPugw5tqenJx5//HG8++67FcaV/xAr9+abb+Kvv/7Ciy++iNLSUrz44otVPgZTYU+XlcrIyKiwvLxrvPwFGRkZCVtbW70Pl3uVf3mV/yIq99lnn+nF3v+r6f77Kirv168f/vrrL9StWxft27fX+zPlQo+V6d27N4C7H7732rJlCwoKCrT3A5U/rofdr1RKpRLTp0/H77//jgULFlQYk5WVhYMHDxq03+bNm0OlUumdNXT58mUcOnSoym2nTJmCDh064NlnnwVw94ukuLhYe1ZRfn6+tvxBlJWV4dVXX8XNmzcxffp0bblCoYC9vb1OwpWZmal39iJQebspFAq91/rp06eRnJxcbb1+/vln/P3333jllVewZ88evb/HHnsMGzZsQGlpKVxcXNC+fXts374dJSUl2n3k5+frnVFV0XtQCFHlEPnD8vf3x6uvvoqIiAgcP35cW27I6/1B4u/XtWtX1K5dG2fPnq3w86F9+/Y6w7OGuHPnDkaPHo3S0lJMmzZNW27oa+D111/H6dOnERsbC1tbW4wZM6baY5viswB4+Oe7efPmaNq0KU6dOlXp833vMHVFFAoFwsLCcOjQISQlJWl7ucqFhYVh37592LNnD3x9fdGsWTODj92vXz+kpaWhcePGFcbdn3TZ2Njgs88+w+uvv44RI0ZU
+Z1nSuzpslJRUVFo0KAB+vfvjxYtWkCj0eDkyZNYsmQJXF1d8frrrwO4e0r3W2+9hQULFuDOnTt47rnn4O7ujrNnz+LGjRuYN28eWrRogcaNG2PGjBkQQsDDwwM7duxAUlKS3nFDQkIAAB9//DFiY2OhVCrRvHlzuLm5ISQkBJs2bcLmzZvRqFEjODo6IiQkBJMmTcKWLVvQo0cPTJ48GY8//jg0Gg0uX76MxMREvPHGG+jUqZOsz19ERASioqIwffp05OXloWvXrjh9+jTmzJmDNm3aICYmRucxV/S4Hna/hnjzzTdx7tw5zJkzB0eOHMHw4cPh5+eH3Nxc/Prrr1i1ahXmzZunNxerKjY2Npg3bx7Gjh2LwYMHY+TIkbh16xbmzZsHHx+fCpceAe7OVdmyZYvOnK3Q0FDY2NjglVdewZAhQ/Dpp5+iYcOGkpaQuHbtGlJSUiCEwO3bt5GWloYNGzbg1KlTmDx5ss4XXL9+/bB161ZMmDABgwcPxpUrV7BgwQL4+Pjgzz//1NlvSEgI9u7dix07dsDHxwdubm5o3rw5+vXrhwULFmDOnDkICwvD+fPnMX/+fAQGBlZ7Kvrq1athZ2eHt956S+9DHwDGjh2LiRMn4scff8TAgQMxf/58PPnkk4iKisLrr7+OsrIyfPDBB3B1ddUZMouIiIC9vT2ee+45TJs2DUVFRVi5ciVycnKqff6kys3NRXh4OIYPH44WLVrAzc0NqampSEhI0Ol1CAkJwdatW7Fy5Uq0a9cONjY2VfYkG/L+qIirqys+/fRTxMbGIjs7G4MHD4aXlxeuX7+OU6dO4fr165K+QC9fvoyUlBRoNBrk5ubixIkTWLNmDS5duoQlS5YgMjJSG2voayAiIgJBQUHYs2cPXnjhBXh5eVVbH1N9FhjaPhX57LPP0KdPH0RFRWHEiBGoX78+srOzce7cORw/fhzffvtttfsIDw/Hv//9byQmJmLZsmU694WFheHmzZv49ddfMXz48Ac69vz585GUlIQuXbpg4sSJaN68OYqKinDx4kXs3LkT8fHxFU6bWbJkCdzc3DBhwgTk5+drlzmSTU3M3ifT27x5sxg+fLho2rSpcHV1FUqlUvj7+4uYmBidhenKbdiwQXTo0EE4OjoKV1dX0aZNG53Tc8+ePSsiIiKEm5ubqFOnjhgyZIi4fPmyACDmzJmjs6+ZM2cKX19fYWNjIwBoT7G+ePGiiIyMFG5ubgKAzhk2+fn54u233xbNmzcX9vb2wt3dXYSEhIjJkyeLzMxMbRyqOZvtfpUtGXG/is74unPnjpg+fboICAgQSqVS+Pj4iPHjx2vPSCtX1eOqiNT9GnLGUrnvvvtOPPnkk6JevXrCzs5O1KlTR4SHh4v4+HjtGXJVnc1VUXuuWrVKNGnSRNjb24tmzZqJNWvWiIEDB+qdDVj+2Jo2bVrhvpOSkkRISIhwdnYWnTt3FidOnKj28eC/Z0MCEDY2NqJWrVoiJCREvPzyy5UuVfD++++Lhg0bCgcHB9GyZUvx+eefizlz5uidDXjy5EnRtWtX4ezsLABoz7ArLi4WU6dOFfXr1xeOjo6ibdu2Yvv27dUuoHv9+nVhb28vnnrqqUpjcnJyhJOTk86ZeNu2bRMhISHC3t5e+Pv7i/fff19MnDhR1KlTR2fbHTt2iFatWglHR0dRv3598eabb4qffvpJ5z0mROVnL1b0vgkICNCewVlUVCTGjRsnHn/8cVGrVi3h5OQkmjdvLubMmSMKCgq022RnZ4vBgweL2rVrC4VCUeVZlkJU/v6obHHU8tfn/UsT7Nu3Tzz55JPCw8NDKJVKUb9+ffHkk09Wu7hq+f7K/2xtbUWdOnVEu3btxKRJkypc1uFBXgNz584VAERKSkqV9bmXKT4LKmsfQ9/3p06dEkOHDhVeXl5CqVQKlUolevXqJfks6bNnz2qf8/uX9dFoNMLDw0MAEJ9//rnetlKPff36dTFx4kQRGBgolEql
8PDwEO3atROzZs0S+fn5VT7u8jPt33nnHUmPx1gUQjxg/z4RPZJu3bqFZs2a4amnnsKqVatqujpWR61Wo3Xr1qhfvz4SExNrujokUfv27aFQKJCamlrTVSEzxuFFIqpUZmYm3n33XYSHh6Nu3bq4dOkSPvroI9y+fVs7RE0PZ9SoUYiIiICPjw8yMzMRHx+Pc+fO4eOPP67pqlE18vLykJaWhh9++AHHjh3Dtm3barpKZOaYdBFRpRwcHHDx4kVMmDAB2dnZcHZ2RufOnREfH//AZx2Srtu3b2Pq1Km4fv06lEol2rZti507d+KJJ56o6apRNY4fP679QTJnzhw89dRTNV0lMnMcXiQiIiKSAZeMICIiIpIBky4iIiIiGVhc0rVixQoEBgbC0dER7dq1w/79+6uM37dvH9q1awdHR0c0atQI8fHxejFbtmxBUFAQHBwcEBQUpDcZ8tdff0X//v3h6+sLhUKB7du36+1jxIgRUCgUOn+VrZ5MREREjx6Lmki/efNmTJo0CStWrEDXrl21i6idPXtWe/25e6Wnp6Nv374YM2YMNm7ciIMHD2LChAmoV68ennnmGQBAcnIyhg0bhgULFuDpp5/Gtm3bMHToUBw4cEC7IGdBQQFatWqFl156SbtdRaKjo7F27VrtbUNXSdZoNPjnn3/g5uZmtEu5EBERkWmJ/y7e7OvrW+nC0eWBFqNjx45i3LhxOmUtWrQQM2bMqDB+2rRpokWLFjplY8eOFZ07d9beHjp0qIiOjtaJiYqKEs8++2yF+wQgtm3bplceGxsrBg4cKOFRVO7KlSs6i/jxj3/84x//+Mc/y/m7cuVKld/zFtPTVVJSgmPHjmHGjBk65ZGRkZVeBy45OVnn0g7A3cvjrF69Gmq1GkqlEsnJyZg8ebJeTFxcnMF13Lt3L7y8vFC7dm2EhYXh3XfflXQ5iHLl15S6cuUKatWqZfDxK6NWq5GYmIjIyEgolUqj7Zfkwza0fGxDy8b2s3ymbMO8vDz4+flVe11Ki0m6bty4gbKyMr2ryXt7eyMzM7PCbTIzMyuMLy0txY0bN7SLERqyz8r06dMHQ4YMQUBAANLT0zF79mz06tULx44d07twarni4mIUFxdrb5dfwd7JyQlOTk4GHb8qdnZ2cHZ2hpOTEz8sLBTb0PKxDS0b28/ymbIN1Wo1AFQ7Nchikq5y9z8gIUSVD7Ki+PvLDd1nRYYNG6b9f3BwMNq3b4+AgAD8+OOPOheLvdfChQsxb948vfLExEQ4OzsbdHwpKrpANVkWtqHlYxtaNraf5TNFGxYWFkqKs5iky9PTE7a2tno9UFlZWXo9VeVUKlWF8XZ2dqhbt26VMZXtUyofHx8EBATgzz//rDRm5syZmDJlivZ2efdkZGSk0YcXk5KSEBERwV9oFoptaPnYhpaN7Wf5TNmGeXl5kuIsJumyt7dHu3btkJSUhKefflpbnpSUhIEDB1a4TWhoKHbs2KFTlpiYiPbt22uf8NDQUCQlJenM60pMTESXLl0eqr43b97ElStX4OPjU2mMg4NDhUOPSqXSJG9qU+2X5MM2tHxsQ8vG9rN8pmhDqfuzmKQLAKZMmYKYmBi0b98eoaGhWLVqFS5fvoxx48YBuNtzdPXqVWzYsAEAMG7cOCxbtgxTpkzBmDFjkJycjNWrV+Prr7/W7vP1119Hjx49sGjRIgwcOBDfffcddu3ahQMHDmhj8vPzceHCBe3t9PR0nDx5Eh4eHvD390d+fj7mzp2LZ555Bj4+Prh48SLeeusteHp66iSIRERE9OiyqKRr2LBhuHnzJubPn4+MjAwEBwdj586dCAgIAABkZGTg8uXL2vjAwEDs3LkTkydPxvLly+Hr64tPPvlEZ62tLl26YNOmTXj77bcxe/ZsNG7cGJs3b9au0QUAR48eRXh4uPZ2+ZBgbGws1q1bB1tbW/z222/YsGEDbt26BR8fH4SHh2Pz5s3VnslAREREjwaLSroAYMKE
CZgwYUKF961bt06vLCwsDMePH69yn4MHD8bgwYMrvb9nz57aCfgVcXJyws8//1zlMYiIiOjRZnGXASIiIiKyREy6iIiIiGTApIuIiIhIBky6iIiIyKqVaQSOpGcDAI6kZ6NMU/k8bVNi0kVERERWKyEtA90W7cbI9akAgJHrU9Ft0W4kpGXIXhcmXURERGSVEtIyMH7jcWTkFumUZ+YWYfzG47InXky6iIiIyOqUaQTm7TiLigYSy8vm7Tgr61Ajky4iIiKyOkfSs/V6uO4lAGTkFmnnesmBSRcRERFZnazblSdcDxJnDEy6iIiIyOp4uTkaNc4YmHQRERGR1ekY6AEfd0coKrlfAcDH3REdAz1kqxOTLiIiIrI6tjYKzOkfBAB6iVf57Tn9g2BrU1laZnxMuoiIiMgqRQf7YOULbaFy1x1CVLk7YuULbREd7CNrfexkPRoRERGRjKKDfRARpELKhSzcOJeCNbEd0LmJl6w9XOXY00VERERWzdZGoZ271THQo0YSLoBJFxEREZEsmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHQRERERyYBJFxEREZEMmHRZuTKNwJH0bADAkfRslGlEDdeIiIjo0cSky4olpGWg26LdGLk+FQAwcn0qui3ajYS0jBquGRER0aOHSZeVSkjLwPiNx5GRW6RTnplbhPEbjzPxIiIikpnFJV0rVqxAYGAgHB0d0a5dO+zfv7/K+H379qFdu3ZwdHREo0aNEB8frxezZcsWBAUFwcHBAUFBQdi2bZvO/b/++iv69+8PX19fKBQKbN++XW8fQgjMnTsXvr6+cHJyQs+ePXHmzJmHeqwPqkwjMG/HWVQ0kFheNm/HWQ41EhERyciikq7Nmzdj0qRJmDVrFk6cOIHu3bujT58+uHz5coXx6enp6Nu3L7p3744TJ07grbfewsSJE7FlyxZtTHJyMoYNG4aYmBicOnUKMTExGDp0KA4fPqyNKSgoQKtWrbBs2bJK67Z48WIsXboUy5YtQ2pqKlQqFSIiInD79m3jPQESHUnP1uvhupcAkJFbpJ3rRURERKZnUUnX0qVLMWrUKIwePRotW7ZEXFwc/Pz8sHLlygrj4+Pj4e/vj7i4OLRs2RKjR4/GyJEj8eGHH2pj4uLiEBERgZkzZ6JFixaYOXMmevfujbi4OG1Mnz598H//938YNGhQhccRQiAuLg6zZs3CoEGDEBwcjPXr16OwsBBfffWVUZ8DKbJuV55wPUgcERERPTy7mq6AVCUlJTh27BhmzJihUx4ZGYlDhw5VuE1ycjIiIyN1yqKiorB69Wqo1WoolUokJydj8uTJejH3Jl3VSU9PR2Zmps6xHBwcEBYWhkOHDmHs2LEVbldcXIzi4mLt7by8PACAWq2GWq2WfPz7eTrbwcH2f0OHDjZC59974x7mOCSf8nZie1kutqFlY/tZPlO2odR9WkzSdePGDZSVlcHb21un3NvbG5mZmRVuk5mZWWF8aWkpbty4AR8fn0pjKttnZccp3+7+/Vy6dKnS7RYuXIh58+bplScmJsLZ2Vny8SuyuKN+2YL2Gp3bN86lYOe5hzoMySwpKammq0APiW1o2dh+ls8UbVhYWCgpzmKSrnIKhULnthBCr6y6+PvLDd2nseo2
c+ZMTJkyRXs7Ly8Pfn5+iIyMRK1atQw+/r12nbuGyZtPAgDsbQQWtNdg9lEblGju1uejYa3xREvvKvZA5kStViMpKQkRERFQKpU1XR16AGxDy8b2s3ymbMPykarqWEzS5enpCVtbW70eqKysLL0epnIqlarCeDs7O9StW7fKmMr2WdlxgLs9Xj4+PpL34+DgAAcHB71ypVL50C+IPo83gMLGFvN2nEV2/h0AQLFGAQ9XJ8zpH4ToYJ9q9kDmyBivDapZbEPLxvazfKZoQ6n7s5iJ9Pb29mjXrp1et2BSUhK6dOlS4TahoaF68YmJiWjfvr32CaosprJ9ViQwMBAqlUpnPyUlJdi3b59B+zG26GAfHJjeC2tiOwAA1sR2wIHpvZhwERER1QCL6ekCgClTpiAmJgbt27dHaGgoVq1ahcuXL2PcuHEA7g7XXb16FRs2bAAAjBs3DsuWLcOUKVMwZswYJCcnY/Xq1fj666+1+3z99dfRo0cPLFq0CAMHDsR3332HXbt24cCBA9qY/Px8XLhwQXs7PT0dJ0+ehIeHB/z9/aFQKDBp0iS89957aNq0KZo2bYr33nsPzs7OGD58uEzPTsVsbRToGOiBneeAjoEesLUxfNiUiIiIHp5FJV3Dhg3DzZs3MX/+fGRkZCA4OBg7d+5EQEAAACAjI0Nnza7AwEDs3LkTkydPxvLly+Hr64tPPvkEzzzzjDamS5cu2LRpE95++23Mnj0bjRs3xubNm9GpUydtzNGjRxEeHq69XT4PKzY2FuvWrQMATJs2DXfu3MGECROQk5ODTp06ITExEW5ubqZ8SoiIiMhCKET5zHKqcXl5eXB3d0dubu5DT6S/l1qtxs6dO9G3b1/ORbBQbEPLxza0bGw/y2fKNpT6/W0xc7qIiIiILBmTLiIiIiIZMOmycmUaob3G4pH0bF7kmoiIqIYw6bJiCWkZ6LZoN0auTwUAjFyfim6LdiMhLaOGa0ZERPToYdJlpRLSMjB+43Fk5Ope1DoztwjjNx5n4kVERCQzJl1WqEwjMG/HWVQ0kFheNm/HWQ41EhERyYhJlxU6kp6t18N1LwEgI7dIO9eLiIiITI9JlxXKul15wvUgcURERPTwmHRZIS83R6PGERER0cNj0mWFOgZ6wMfdEZVdZVEBwMfdER0DPeSsFhER0SONSZcVsrVRYE7/IADQS7zKb8/pH8SLXxMREcmISZeVig72wcoX2kLlrjuEqHJ3xMoX2iI62KeGakZERPRosqvpCpDpRAf7ICJIhZQLWbhxLgVrYjugcxMv9nARERHVAPZ0WTlbG4V27lbHQA8mXERERDWESRcRERGRDJh0EREREcmASRcRERGRDJh0EREREcmASRcRERGRDJh0EREREcmASZeVK9MIHEnPBgAcSc9GmUbUcI2IiIgeTUy6rFhCWga6LdqNketTAQAj16ei26LdSEjLqOGaERERPXqYdFmphLQMjN94HBm5RTrlmblFGL/xOBMvIiIimTHpskJlGoF5O86iooHE8rJ5O85yqJGIiEhGTLqs0JH0bL0ernsJABm5Rdq5XkRERGR6TLqsUNbtyhOuB4kjIiKih8ekywp5uTkaNY6IiIgeHpMuK9Qx0AM+7o5QVHK/AoCPuyM6BnrIWS0iIqJHGpMuK2Rro8Cc/kEAoJd4ld+e0z8ItjaVpWVERERkbEy6rFR0sA9WvtAWKnfdIUSVuyNWvtAW0cE+NVQzIiKiR5NdTVeATCc62AcRQSqkXMjCjXMpWBPbAZ2beLGHi4iIHin3X52lpr4L2dNl5WxtFNq5Wx0DPZhwERHRI8Wcrs7CpIuIiIiskrldnYVJFxEREVkdc7w6C5MuIiIisjrmeHUWJl1ERERkdczx6ixMuoiIiMjqmOPVWZh0ERERkdUxx6uzMOkiIiIiq2OOV2dh0kVERERWydyuzsIV6YmIiMhqmdPVWdjTRURERFbNXK7OwqSLiIiISAZMuoiIiIhkwKSL
iIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiMiqlWkEjqRnAwCOpGejTCNqpB4Wl3StWLECgYGBcHR0RLt27bB///4q4/ft24d27drB0dERjRo1Qnx8vF7Mli1bEBQUBAcHBwQFBWHbtm0GH3fEiBFQKBQ6f507d364B0tEREQPJSEtA90W7cbI9akAgJHrU9Ft0W4kpGXIXheLSro2b96MSZMmYdasWThx4gS6d++OPn364PLlyxXGp6eno2/fvujevTtOnDiBt956CxMnTsSWLVu0McnJyRg2bBhiYmJw6tQpxMTEYOjQoTh8+LDBx42OjkZGRob2b+fOnaZ5IoiIiKhaCWkZGL/xODJyi3TKM3OLMH7jcdkTL4tKupYuXYpRo0Zh9OjRaNmyJeLi4uDn54eVK1dWGB8fHw9/f3/ExcWhZcuWGD16NEaOHIkPP/xQGxMXF4eIiAjMnDkTLVq0wMyZM9G7d2/ExcUZfFwHBweoVCrtn4eHh0meByIiIqpamUZg3o6zqGggsbxs3o6zsg41WkzSVVJSgmPHjiEyMlKnPDIyEocOHapwm+TkZL34qKgoHD16FGq1usqY8n0acty9e/fCy8sLzZo1w5gxY5CVlWX4AyUiIqKHdiQ9W6+H614CQEZukXaulxzsZDvSQ7px4wbKysrg7e2tU+7t7Y3MzMwKt8nMzKwwvrS0FDdu3ICPj0+lMeX7lHrcPn36YMiQIQgICEB6ejpmz56NXr164dixY3BwcKiwfsXFxSguLtbezsvLAwCo1WptUmgM5fsy5j5JPmUagdT/XAcApFzIQodG9WBro6jhWpGh+D60bGw/y5OVWwAH2//1YjnYCJ1/741Tq2s91LGkvi4sJukqp1DoftkIIfTKqou/v1zKPquLGTZsmPb/wcHBaN++PQICAvDjjz9i0KBBFdZt4cKFmDdvnl55YmIinJ2dK31MDyopKcno+yR5Zf9xFD//UdO1oIfB96FlY/tZlsUd9csWtNfoFlw5gZ1XTjzUcQoLCyXFWUzS5enpCVtbW71eraysLL1eqHIqlarCeDs7O9StW7fKmPJ9PshxAcDHxwcBAQH4888/K42ZOXMmpkyZor2dl5cHPz8/REZGolath8u676VWq5GUlISIiAgolUqj7ZdMa9e5a5i8+SQE7v4yW9Beg9lHbVCiuZvsfzSsNZ5oWflrkMwL34eWje1neco0AlFxv+JaXpHe52ixRgEFAO9ajvh5Uo+HHj0oH6mqjsUkXfb29mjXrh2SkpLw9NNPa8uTkpIwcODACrcJDQ3Fjh07dMoSExPRvn177ZsmNDQUSUlJmDx5sk5Mly5dHvi4AHDz5k1cuXIFPj4+lcY4ODhUOPSoVCpN8qY21X7J+Mo0AvN/PI+iMt0PgmKNAsVldz8s5v94HpHB9TnUaGH4PrRsbD/LoQQw88nHMH7jcZ3yYo0CJf/9bJ355GNwdLB/+GNJfE1YzER6AJgyZQr+9a9/Yc2aNTh37hwmT56My5cvY9y4cQDu9hy9+OKL2vhx48bh0qVLmDJlCs6dO4c1a9Zg9erVmDp1qjbm9ddfR2JiIhYtWoTff/8dixYtwq5duzBp0iTJx83Pz8fUqVORnJyMixcvYu/evejfvz88PT11EjUiqcxxAigRkaWJDvbByhfaQuXuqFOucnfEyhfaIjq48o4RU7CYni7g7rypmzdvYv78+cjIyEBwcDB27tyJgIAAAEBGRobO2lmBgYHYuXMnJk+ejOXLl8PX1xeffPIJnnnmGW1Mly5dsGnTJrz99tuYPXs2GjdujM2bN6NTp06Sj2tra4vffvsNGzZswK1bt+Dj44Pw8HBs3rwZbm5uMj07ZE2ybleecD1IHBHRoyo62AcRQSqkXMjCjXMpWBPb
AZ2beNXIKIFFJV0AMGHCBEyYMKHC+9atW6dXFhYWhuPHj+sH32Pw4MEYPHjwAx/XyckJP//8c5XbExnCy82x+iAD4oiIHmW2Ngp0DPTAznNAx0CPGpuWYVHDi0SPio6BHvBxd0RlHwsKAD7ujugYyAV4iYgsBZMuK1dSqsEXyRcBAF8kX0RJqabqDcgs2NooMKd/EADoJV7lt+f0D+IkeiIiC8Kky4ot3HkWLWb/hEU/nwcALPr5PFrM/gkLd56t4ZqRFNHBPni5RyDuX4ZOoQBe7hEo+wRQIiJ6OEy6rNTCnWfx2a/puP+SUhoBfPZrOhMvC5CQloFVlbThql/TZb9QK9GjqkwjtGcKH0nPlvVafWRdmHRZoZJSDT7fn15lzOf70znUaMaqulBrObkv1Er0KEpIy0C3Rbsxcn0qAGDk+lR0W7SbP3rogTDpskJfJF/U6x25n0ZAO9eLzA/X6SKqeQlpGRi/8bjeezEztwjjNx5n4kUGY9JlhS5lS7sGlNQ4kh/X6SKqWVX1NpeXsbeZDMWkywoFeEi7WLbUOJIf1+kiqlnsbSZTYNJlhYZ3CjBqHMmvfJ2uqnCdLiLTYW8zmQKTLit08soto8aR/GxtFBjQquolIQa08uE6XUQmwt5mMgUmXVaIv9AsX5lGYPPRv6uM+ebo35xPQmQiHQM9UNtZWWVMbWcle5vJIEy6rJCni4NR40h+Kf+5iVuF6ipjcgrVSPnPTZlqRET3Yz8zGYpJlzWS+knATwyzlfyXtGRKahwRGeZIerakHz6cSE+GYNJlhW7kFxs1jmqC1GFDDi8SmQKnaZApMOmyQpwAavlCG3kaNY6IDOPpKnGahsQ4IoBJl1XqGOgBF3vbKmNcHGw5AdSMdW5cV9Ik3s6N68pUI6JHDDubyQSYdFmhMo1AYUlZlTGFxWU8882M2doo8P6gkCpj3h8UwiUjiEzkRoHEaRoS44gAJl1W6Yvki9X++BLgtRfNXXSwD+JfaAtVLd3hC1UtB8S/0BbRwVWv40VED47TNMgU7Gq6AmR8vPai9YgO9kFEkAopF7Jw41wK1sR2QOcmXuzhIjKxdgF1YKMAqhoQsFHcjSOS6qF6ukpLS7F8+XIMGTIEgwYNwpIlS1BUxDM5ahqvvWhdbG0U2vl3HQM9mHARyeDYpZwqEy7gbkJ27FKOPBUiq/BQSdfEiROxbds2hIeHIywsDF999RVeeuklY9WNHlBMaENU971so7gbR0RE+rhkBJmCQcOL27Ztw9NPP629nZiYiPPnz8PW9u6ZclFRUejcubNxa0gGs7ezwZjugfjs1/RKY8Z0D4S9Haf0ERFVhHO6yBQM+tZdvXo1nnrqKVy9ehUA0LZtW4wbNw4JCQnYsWMHpk2bhg4dOpikomSYmX2DMLZHoF6Pl40CGNsjEDP7BtVMxYgeQWUaoV25/Eh6Ns8ctgAdAz3g4+5Y6YU7FAB83B259A4ZxKCk64cffsCzzz6Lnj174tNPP8WqVatQq1YtzJo1C7Nnz4afnx+++uorU9WVDDSzbxB+X9AH06OaAwCmRzXH7wv6MOGyMPzCtmwJaRnotmg3Rq5PBQCMXJ+Kbot2IyEto4ZrRlWxtVFgTv+7n5X3J17lt+f0D+IcSwthLp+jBo8vPfvss0hNTcXp06cRFRWFmJgYHDt2DCdPnsTy5ctRr149U9STHpC9nY127lZMaEMOKVoYfmFbtoS0DIzfeBwZubrzfjJzizB+43G2o5mLDvbByhfaQuWuO4SocnfESi7bYjHM6XP0gb6Ba9eujc8//xwffPABYmJi8Oabb+LOnTvGrhvRI41f2JatTCMwb8fZCtfMKy+bt+Msey7NXHSwDw5M74U1sXenzqyJ7YAD03sx4bIQ5vY5alDSdeXKFQwbNgwhISF4/vnn0bRpUxw7dgxOTk5o3bo1fvrpJ1PVk+iRUt0XtgC/sM3dkfRsvQ/6ewkAGblF2iEPMl9ctsUymePnqEFJ14sv
vgiFQoEPPvgAXl5eGDt2LOzt7TF//nxs374dCxcuxNChQ01VV6JHRnVf2AC/sM0dlxwgqlnm+Dlq0JIRR48excmTJ9G4cWNERUUhMDBQe1/Lli3x66+/YtWqVUavJNGjJjNP2hex1DiSH5ccIKpZ5vg5alBPV9u2bfHOO+8gMTER06dPR0iI/gV5X375ZaNVjuhRlZ0v7SK6UuNIflxywHqYy5lvZJgbtyVetFxinDEYlHRt2LABxcXFmDx5Mq5evYrPPvvMVPUieqR5uNgbNY7kd++SA5XhkgPmLyEtA13f1z3zrev7PIPYEuQUlhg1zhgMGl4MCAjAv//9b1PVhYj+S+XuZNQ4qhnRwT54uUcgPt+ve3UIG8Xdq0LwDDjzlpCWgXEbjwMAHGz/V56ZV4RxG48jnstGmDWpv2fk/N3zQEtG5OXlVfh3+/ZtlJTIlzESWavyoamqcGjK/CWkZWDVr+l6F04WAlj1azp7S8xYmUZgxtbfqoyZufU3DjWasdBGnkaNM4YHXqerTp06en+1a9eGk5MTAgICMGfOHGg0GmPXl+iRwKEpy8d1uixbyn9u4lahusqYnEI1Uv5zU6YakaE6N66L2s7KKmNqOyvRuXFdmWr0gEnXunXr4Ovri7feegvbt2/Htm3b8NZbb6F+/fpYuXIlXn75ZXzyySd4//33jV1fokfGics5D3U/1Syu02XZkv+SlkxJjSP52dooMKx9gypjhrVvIOuPV4PmdJVbv349lixZorMm14ABAxASEoLPPvsMv/zyC/z9/fHuu+/irbfeMlpliR4VJaUavXlA9/t8fzreiGzBSzuZKa7TZemk9kCyp9JclWkEvj9V9RD+96cyMC26pWyJ1wN9WicnJ6NNmzZ65W3atEFycjIAoFu3brh8+fLD1Y7oEfVF8kW9eUD304i7cWSePF0djBpH8uoUKG3ISWocyc8cF0d9oKSrQYMGWL16tV756tWr4efnBwC4efMm6tSp83C1I3pEXbxZaNQ4kp+mTFoPiNQ4IjKMOfY2P9Dw4ocffoghQ4bgp59+QocOHaBQKJCamorff/9du6REamoqhg0bZtTKEj0qhJD2RSw1juR3+KK0uT6HL95E9+b1TFwbMlSyxAnyyf+5ie7N2H7myByvCvFASdeAAQNw/vx5xMfH448//oAQAn369MH27dvRsGFDAMD48eONWU+iR4qro7S3ptQ4qglS54jwDFRz9M+tO0aNI/mVL71T1RCj3EvvPPAndsOGDXl2IpGJ2Emc1Ck1juQX2rgulu25ICmOzE/92tIWHpYaR/KztVFgQCsffPZr5SclDWjlY/5nLwLArVu3cOTIEWRlZemtx/Xiiy8+dMXIOO6/ZljnJl5c28kCdPD3APCXxDgyRx0aekChuLsQamUUirtxZH66NPHE8r3Vvwe7NJFvYU0yjDmevfhASdeOHTvw/PPPo6CgAG5ublAo/ldZhULBpMtMJKRlYN6Os8jOv4PFHe9eM8zD1Qlz+gfx0hVm7ves25Ljwlp6mbg29CCOXcqpMuEC7iZkxy7lsLfLDHVuVBcu9rYoKCmrNMbFwRadG7HtzJUhZy/K9R58oLMX33jjDYwcORK3b9/GrVu3kJOTo/3LzuZCf+YgIS0D4zce13vBZeQWYfzG47z8iJk7dknawqdS40h+mbnS5vpIjSP5KatZA09pyzXyzJk5nr34QK+Yq1evYuLEiXB2djZ2fcgIqrr8CHB3KT9efsS8OdvbVh9kQBzJ71qetA9yqXEkryPp2dVeBuhWoZpXFDBj5rhW3gMlXVFRUTh69Kix60JGYo4LwpFhWni7GTWO5Lf/z+tGjSN5mWMvCRnGHNfKkzyn6/vvv9f+/8knn8Sbb76Js2fPIiQkBEql7gUlBwwYYLwaksH+zi6QHse5JGYp907Vv7ANjSP5sQ0tmzmu8USGMce18iQnXU899ZRe2fz58/XKFAoFysoqn3hIppd49prkuCEd/E1c
G3oQGRLn+UiNI/mp3B2R9k/1J0So3PmlbY7aBdSBAlVfWVHx3zgyT1LXjpZzjWnJw4sajUbSHxOumndHrak+yIA4qgEKiacvS40j2UU/Ju0MYalxJK/U9OxqL2Ut/htH5qm2s71R44zBoDldu3fvRlBQEPLy8vTuy83NxWOPPYb9+/cbrXL0YAI9pZ3gIDWO5MeFGS2fdy1pPVhS40heyf+5YdQ4kp+nq7RkSmqcMRiUdMXFxWHMmDGoVauW3n3u7u4YO3Ysli5darTK0YOZHt3SqHEkP6kLLnJhRvP1e6b+j9OHiSN5mePQFBlG5S7tR6nUOGMwKOk6deoUoqOjK70/MjISx44de+hK0cP57WquUeNIfuWrmVeFq5mbtys50ubbSY0jebk5SJvyLDWO5Fd+7cWqyH3tRYOSrmvXrumdqXgvOzs7XL/O059rGhdltHyGrGZO5smvjrThe6lxJK+zGdJ+lEqNI/mVX3uxKnJfe9GgpKt+/fr47bffKr3/9OnT8PHhpNCall1QYtQ4kl+mxAUzpcaR/Jp4uhg1juT1T26xUeNIfmUagS9SLlcZszHlsqwLhRuUdPXt2xfvvPMOior0P+jv3LmDOXPmoF+/fkarHD0YD4mr60qNI/ndkLjgotQ4kt/2U1eNGkfyalBH2jwfqXEkv0MXbqCwimtnAkBBSRkOXZDvZAiDkq63334b2dnZaNasGRYvXozvvvsO33//PRYtWoTmzZsjOzsbs2bNMlVdSSJPF4lnbEiMI/ndzJfWCyk1juR39Za0hFhqHMnrmbYNjBpH8tty/G+jxhmDQTMAvb29cejQIYwfPx4zZ86E+O+kE4VCgaioKKxYsQLe3t4mqShJl/aPtDkGaf/konszeVbhJcOc+vuWUeNIfg3qOOGohDl37CkxT50a1ZW0OGqnRryqh7nKLy41apwxGHztxYCAAOzcuRM3btzA4cOHkZKSghs3bmDnzp1o2LChCapIhtp1NtOocSS/SzelXcpJahzJ7+nW9Y0aR/I6dilH0uKoPJnFnEmdq2WG1168X506ddChQwdj1oWM5HaRtKsCSI0j+d0pltY2UuNIfnZ20n7TSo0jef2dU2hAHHu7zFE9ifOWpcYZg8W921esWIHAwEA4OjqiXbt21a6Av2/fPrRr1w6Ojo5o1KgR4uPj9WK2bNmCoKAgODg4ICgoCNu2bTP4uEIIzJ07F76+vnByckLPnj1x5syZh3uwD6iuxNV1pcaR/EqFtGRKahzJ71qutLlaUuNIXj/9lmHUOJKf1KUgzHbJiJq2efNmTJo0CbNmzcKJEyfQvXt39OnTB5cvV3xKaHp6Ovr27Yvu3bvjxIkTeOuttzBx4kRs2bJFG5OcnIxhw4YhJiYGp06dQkxMDIYOHYrDhw8bdNzFixdj6dKlWLZsGVJTU6FSqRAREYHbt6u/4K2xeblJy9qlxpH8bCHxw0JiHMnv6CVp1+STGkfyOiNxbqzUOJKfs720wTypccZgUUnX0qVLMWrUKIwePRotW7ZEXFwc/Pz8sHLlygrj4+Pj4e/vj7i4OLRs2RKjR4/GyJEj8eGHH2pj4uLiEBERgZkzZ6JFixaYOXMmevfujbi4OMnHFUIgLi4Os2bNwqBBgxAcHIz169ejsLAQX331lUmfk4rUl7jYotQ4kp+zxFWupcaR/M5LvLyP1DiSV0E1Sw0YGkfyOy3xRCOpccZgMZ/YJSUlOHbsGGbMmKFTHhkZiUOHDlW4TXJyMiIjI3XKoqKisHr1aqjVaiiVSiQnJ2Py5Ml6MeVJl5TjpqenIzMzU+dYDg4OCAsLw6FDhzB27NgK61dcXIzi4v8trFd+IXG1Wg21Wl3ZU1GtzgHuWG37v4mBDjZC59974x7mOGQ69VztcbPgf6+Nytqwnqs929BM2UIDBwnvQ1to2IZmqJ6zLdSl/zurrdL3oLMt289MXbyeJ+k9ePF63kO3odTtLSbpunHjBsrKyvSWpPD29kZm
ZsVn4WVmZlYYX1paihs3bsDHx6fSmPJ9Sjlu+b8VxVy6dKnSx7Rw4ULMmzdPrzwxMRHOzg/XC7W4o37ZgvYands5fxzBzj8e6jBkIiMDAATol9/fhsAt7Ny5U44qkYGer3/37376bZjDNjRDk1pUXK7ffoVsPzM1M6Ticv02LHvoNiwslHbihcUkXeUU910FWAihV1Zd/P3lUvZprJh7zZw5E1OmTNHezsvLg5+fHyIjI1GrVq1Kt6vO9yeu4q3v0rS3HWwEFrTXYPZRGxRr/lef9wYGY0Abnq5ujqZ9exI7z1zT3q6sDfs+5o3FQ1rXQA2pOi+tSUbq5f8NHVbWhh38a2HtyNCaqCJVYWj8QZzNzNferqz9glSu+GZc15qoIlWj87tJyFf/L8GqrA1dlTZImRXxUMcqH6mqjsUkXZ6enrC1tdXr1crKyqp0QVaVSlVhvJ2dHerWrVtlTPk+pRxXpVIBuNvjde+1J6uqG3B3CNLBQX8yu1KprPLC4tU5cuUWisv0k71ijUKn/MiVW3imY8MHPg6ZTsv6Hth2Okuv/P42bFnf46FeK2Q6F3PUkt6HF3PUbEMzdPqfApSJ6tvv9D8FbD8z5eLkgJsVXLbw/jb0ruXw0G0odXuLmUhvb2+Pdu3aISkpSac8KSkJXbp0qXCb0NBQvfjExES0b99e+wRVFlO+TynHDQwMhEql0okpKSnBvn37Kq2bKZ24JO1sGqlxJL96tRyNGkfyc1TaGjWO5FUmcb1MqXEkPw9naYmQ1DhjsJieLgCYMmUKYmJi0L59e4SGhmLVqlW4fPkyxo0bB+DucN3Vq1exYcMGAMC4ceOwbNkyTJkyBWPGjEFycjJWr16Nr7/+WrvP119/HT169MCiRYswcOBAfPfdd9i1axcOHDgg+bgKhQKTJk3Ce++9h6ZNm6Jp06Z477334OzsjOHDh8v4DN2l1tw/Xv1wcSQ/Xj/T8tVykvbxKjWO5FXdJYDujSPzVCoxIZYaZwwW9W4fNmwYbt68ifnz5yMjIwPBwcHYuXMnAgLuzjjOyMjQWTsrMDAQO3fuxOTJk7F8+XL4+vrik08+wTPPPKON6dKlCzZt2oS3334bs2fPRuPGjbF582Z06tRJ8nEBYNq0abhz5w4mTJiAnJwcdOrUCYmJiXBzc5PhmdHVuK4L0m9UP6mvcV0XGWpDD+JshrT5AWcz8nj9TDPVyt8dxy9X35vcyt9dhtqQoRp5OuGvG3ckxZF5sreVNpgnNc4YLCrpAoAJEyZgwoQJFd63bt06vbKwsDAcP368yn0OHjwYgwcPfuDjAnd7u+bOnYu5c+dWuR85hLesh13nr0uKI/N0TOKCmXfjGpu2MvRA/siQdl1MqXEkryHt/fF+wnlJcWSenCQueio1zhgsZk4XSbf3/A2jxpH8nJQSPywkxpH87khcNFNqHMkrtkugUeNIfnUlXlNRapwxMOmyQoUSP8SlxpH8gnylLRkiNY7kV1Imbc6k1DiS18krt4waR/LzqyNt6FdqnDEw6bJCzvbSzoaSGkfy85J4VqLUOJKf0kba7FypcSSvrNvSLkQuNY7k16Wxp1HjjIFJlxWKDKp8bbAHiSP58exFy+fsIO00dKlxJC9PF2lDTlLjSH6dG9dF7WqWg6jtrETnxnVlqhGTLqvk5Saxl0RiHMnv98zbRo0j+bVqUMeocSQzqWtBcM0Is2Vro0CHhlW/vzo0rANbG/kakUmXFfr5bMXXonzQOJLflRxp1/GSGkfy69pU2pCF1DiS14384uqDDIgj+ZWUavDLOf0re9zrl3NZKCmVb14lky4rdPpvaSvNS40j+QV4SLvgudQ4kl+Hhh7VdoIo/htH5sdT4hltUuNIfl8kX4SmmimTGnE3Ti5MuojMUExoQ1TX422juBtH5ik1PbvaFc3Ff+PIDEk9v4HnQZit/1zPrz7IgDhjYNJlhTo3kvbLWWocyc/ezgZjule9/s+Y7oGwt+Nb2Fwd+Kv6
BYoNiSN53SiQOLwoMY7kd+22tLaRGmcMXFnRChWppa2/JTWOasbMvkEAgM/3p+uU2yjuJlzl95N5On1F4jC/xDiSF09Isgbm113JpMsKCYmn00iNo5ozs28Q3ohsgY2H/gJyzmJ6VHO80KUxe7gsAH/8WLaOgR7wcXdEZm5RhV/JCgAqd0d0DOSIgblylbgci9Q4Y+AntzUSErN2qXFUo+ztbLRzt2JCGzLhshD1Ja5yLTWO5GVro8Cc/nd7k+//eVp+e07/IFmXGyDDPNOmgVHjjIGf3lYov7jUqHFEZLjHJF6iSWocyS862AcrX2gLlbvuEKLK3RErX2iL6GCfGqoZSdEmQNoaeFLjjIHDi1bojlramiNS44jIcJwTZB2ig30QEaRCyoUs3DiXgjWxHdC5iRd7uCzA+z+dkxz3f0+HmLg2d7Gnywq1C6ht1DgiMpzKXdqwodQ4IjLMyb9vGTXOGJh0WaEgH3ejxhGR4conYlfFhxOxzV5CWga6LdqNketTAQAj16ei26LdSEjLqOGaUbXM7+RFJl3WKLuwxKhxRGQ4WxsFBrSqes7PgFY+HKYyYwlpGRi/8Tgycot0yjNzizB+43EmXmauSxNpP2ikxhkDky4rxLkk1qVMI3Dkv6uWH0nPRll117Ugs1CmEfj+VNVfyt+fymB7mqkyjcC8HWcr7AQpL5u34yzbz4x1bVTPqHHGwKTLCnUM9EBt56rXHanjrOSwhgXg0IblOpKerddDcr+M3CJtQk3mpbr2E2D7mbs/sqRd3kdqnDEw6XpE8beZ+ePQhmXLul11wmVoHMmL7Wf5ruQUGjXOGJh0WaEj6dm4VaiuMuZWoZq/0MxYdUMbAhzaMHcc5rdsbD/LF+DhbNQ4Y2DSZYX4C83ycWjK8kkZ5q/NYX6zxfazfDGhDau92J3iv3FyYdJlhfgLzfJl5klLiKXGkXnieYuWje1n3mxtFHC2t60yxtnBVtYziJl0WaHy9YEqexkpwPWBzN0Nib2QUuNIflKG+XM4zG+22H6W70h6NgpKqr6gfEFxmaxtyKTLCvFCrZavug97Q+NIfhzmt2xsP8tnjm3IpMtK8UKtlk2hkJYQS40j+XGY37Kx/SyfObYhL3htxXihVsvVQeJV76XGkfzKJ2JX1RvJ9fLMV7uAOrBRAFWdIGyjuBtH5ql8qk1mblGFZ4IrcLcjQs73IHu6iMzQucw8o8ZRzSgp1VR5f3E191PNOXYpp8qEC7ibkB27lCNPhchg5jjVhkmXFeNq5pZr17lrRo0j+aX8dROF1UziLSwpQ8pfN2WqERnCHOcDkeHMbaoNhxetVPlq5gKAwz1nzJavZs55XeZO6i8vDhWbq+T/3JAc17Wpp4lrQ4Yyx/lA9GDMaaoNe7qsEC/UavkigryMGkc1gYmzJePSO9bF1kahbauOgR41NreZSZcV4oVaLd9LXRsZNY7kF9q4rlHjSF7mOB+ILB+TLivEuQiWz97OptperIggL9jb8S1srjo3qlvtZWTqOCvRuRGTLnNVPh/Iu5Z5zAciy8dPbCvEuQiWr0wjkHa16jMT067mcYjYjNnaKPD+oJAqYxYOCmFPiUXQfZ8JwfcdPRgmXVaIcxEsHy94bR2ig30Q/0JbqO7rKfFxd0Q8e0rMXvkJSZl5xTrl1/KKMX7jcZ4JTgbj2YtWqHwuwviNxzkXwUJxiNh6mNOZUyRddSckKXD3hKSIIBXbkiRjT5eVMre1ScgwHCImqlk8IYlMgT1dViw62Ae9Wnhj46G/gJyzmB7VHC90aczJ1xbAHC9fQQ8mIS0D83acRXb+HSzueHeRYg9XJ8zpH8QfP2aMvc1kCvz2tWIJaRnosXg3Fv18HgCw6Ofz6LGYK9JbAp6ubh3K5wTd32NSvkgx34vmi73NZApMuqxUQloGxlUwATQzrxjj+GFvEThEbNm4SLFl4wlJZApMuqxQmUZgxtbfqoyZsfU3fthbgOhgHxyY3gtrYjsAANbE
dsCB6b2YcFkAzgmybOxtJlNg0mWFUv66iVuF6ipjbhWqeaFdC2Eul68gw3BOkOVjbzMZGyfSWyFeaJeo5nFOkHXgkh9kTOzpskq80C5RTeOcIOvB3mYyFiZdVogX2iWqeZwTRET3Y9JlhXihXSLzwDlBRHQvzumyQuUX2h238XilMbzQLpE8OCeIiMqxp8tK8UK7RERE5oU9XVaMv7CJah4vA0RE5djTZeV41g1RzeFlgIjoXky6iIhMgJcBIqL7MekiIjIBXgaIiO7HpIuIyAR4GSAiuh+TLiIiE/B0dTBqHBFZPiZdRESmIHWqFqd0ET0ymHQREZnAjYJio8YRkeVj0kVEZAJebo7VBxkQR0SWj0kXEZEJdAz0gI+7o97FrsspcPcKEeXr6BGR9bOYpCsnJwcxMTFwd3eHu7s7YmJicOvWrSq3EUJg7ty58PX1hZOTE3r27IkzZ87oxBQXF+O1116Dp6cnXFxcMGDAAPz9998GH1uhUOj9xcfHG+OhE5EFsrVRYE7/IADQS7zKb8/pH8QFi4keIRaTdA0fPhwnT55EQkICEhIScPLkScTExFS5zeLFi7F06VIsW7YMqampUKlUiIiIwO3bt7UxkyZNwrZt27Bp0yYcOHAA+fn56NevH8rKygw+9tq1a5GRkaH9i42NNd4TQEQWJzrYBytfaAuVu+4QosrdESt5DVSiR4+wAGfPnhUAREpKirYsOTlZABC///57hdtoNBqhUqnE+++/ry0rKioS7u7uIj4+XgghxK1bt4RSqRSbNm3Sxly9elXY2NiIhIQEg44NQGzbtu2hHmdubq4AIHJzcx9qP/crKSkR27dvFyUlJUbdL8mHbWjZSss04sD5TLF9+3Zx4HymKC3T1HSVyEB8D1o+U7ah1O9vi+jpSk5Ohru7Ozp16qQt69y5M9zd3XHo0KEKt0lPT0dmZiYiIyO1ZQ4ODggLC9Nuc+zYMajVap0YX19fBAcHa2MMOfarr74KT09PdOjQAfHx8dBoNA//4InI4vEaqEQEAHY1XQEpMjMz4eXlpVfu5eWFzMzMSrcBAG9vb51yb29vXLp0SRtjb2+POnXq6MWUby/12AsWLEDv3r3h5OSEX375BW+88QZu3LiBt99+u9LHVVxcjOLi/50unpeXBwBQq9VQq9WVbmeo8n0Zc58kL7ah5WMbWja2n+UzZRtK3WeNJl1z587FvHnzqoxJTU0FcHei+v2EEBWW3+v++6Vsc3+MlGPfm1y1bt0aADB//vwqk66FCxdW+PgTExPh7OxcZR0fRFJSktH3SfJiG1o+tqFlY/tZPlO0YWFhoaS4Gk26Xn31VTz77LNVxjRs2BCnT5/GtWvX9O67fv26Xk9WOZVKBeBuT5WPz/8mq2ZlZWm3UalUKCkpQU5Ojk5vV1ZWFrp06aKNMfTYwN0hyLy8PFy7dq3SuJkzZ2LKlCna23l5efDz80NkZCRq1apV6b4NpVarkZSUhIiICCiVSqPtl+TDNrR8bEPLxvazfKZsw/KRqurUaNLl6ekJT0/PauNCQ0ORm5uLI0eOoGPHjgCAw4cPIzc3V5sc3S8wMBAqlQpJSUlo06YNAKCkpAT79u3DokWLAADt2rWDUqlEUlIShg4dCgDIyMhAWloaFi9e/MDHBoATJ07A0dERtWvXrjTGwcEBDg76111TKpUmeVObar8kH7ah5WMbWja2n+UzRRtK3Z9FzOlq2bIloqOjMWbMGHz22WcAgJdffhn9+vVD8+bNtXEtWrTAwoUL8fTTT0OhUGDSpEl477330LRpUzRt2hTvvfcenJ2dMXz4cACAu7s7Ro0ahTfeeAN169aFh4cHpk6dipCQEDzxxBOSj71jxw5kZmYiNDQUTk5O2LNnD2bNmoWXX365wqSKiIiIHj0WkXQBwJdffomJEydqzzQcMGAAli1bphNz/vx55Obmam9PmzYNd+7cwYQJE5CTk4NOnTohMTERbm5u2piPPvoIdnZ2GDp0
KO7cuYPevXtj3bp1sLW1lXxspVKJFStWYMqUKdBoNGjUqBHmz5+PV155xSTPBREREVkei0m6PDw8sHHjxipjhBA6txUKBebOnYu5c+dWuo2joyM+/fRTfPrppw987OjoaERHR1dZNyIiInq0WcQ6XURERESWjkkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEkXERERkQyYdBERERHJgEmXlSvTCBxJzwYAHEnPRplG1HCNiIiIHk1MuqxYQloGui3ajZHrUwEAI9enotui3UhIy6jhmhERET16mHRZqYS0DIzfeBwZuUU65Zm5RRi/8TgTLyIiIpkx6bJCZRqBeTvOoqKBxPKyeTvOcqiRiIhIRky6rNCR9Gy9Hq57CQAZuUXauV5ERERkeky6rFDW7coTrgeJIyIioofHpMsKebk5GjWOiIiIHh6TLivUMdADPu6OUFRyvwKAj7sjOgZ6yFktIiKiRxqTLitka6PAnP5BAKCXeJXfntM/CLY2laVlREREZGxMuqxUdLAPVr7QFip33SFElbsjVr7QFtHBPjVUMyIiokeTXU1XgEwnOtgHEUEqpFzIwo1zKVgT2wGdm3ixh4uIiKgGsKfLytnaKLRztzoGejDhIiIiqiFMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAZMuoiIiIhkwKSLiIiISAYWk3Tl5OQgJiYG7u7ucHd3R0xMDG7dulXlNkIIzJ07F76+vnByckLPnj1x5swZnZji4mK89tpr8PT0hIuLCwYMGIC///5bJ+bdd99Fly5d4OzsjNq1a1d4rMuXL6N///5wcXGBp6cnJk6ciJKSkod5yERERGRFLCbpGj58OE6ePImEhAQkJCTg5MmTiImJqXKbxYsXY+nSpVi2bBlSU1OhUqkQERGB27dva2MmTZqEbdu2YdOmTThw4ADy8/PRr18/lJWVaWNKSkowZMgQjB8/vsLjlJWV4cknn0RBQQEOHDiATZs2YcuWLXjjjTeM8+CJiIjI8gkLcPbsWQFApKSkaMuSk5MFAPH7779XuI1GoxEqlUq8//772rKioiLh7u4u4uPjhRBC3Lp1SyiVSrFp0yZtzNWrV4WNjY1ISEjQ2+fatWuFu7u7XvnOnTuFjY2NuHr1qrbs66+/Fg4ODiI3N1fy48zNzRUADNpGipKSErF9+3ZRUlJi1P2SfNiGlo9taLlKyzTiwPlMsX37dnHgfKYoLdPUdJXoAZjyPSj1+9siLnidnJwMd3d3dOrUSVvWuXNnuLu749ChQ2jevLneNunp6cjMzERkZKS2zMHBAWFhYTh06BDGjh2LY8eOQa1W68T4+voiODgYhw4dQlRUlOT6BQcHw9fXV1sWFRWF4uJiHDt2DOHh4RVuV1xcjOLiYu3tvLw8AIBarYZarZZ0bCnK92XMfZK82IaWj21omXadu4b3f/odOfl3sKA9MP6LI6jj6oQZfVrgiZbeNV09MoAp34NS92kRSVdmZia8vLz0yr28vJCZmVnpNgDg7a37pvD29salS5e0Mfb29qhTp45eTGX7rexY9x+nTp06sLe3r3I/CxcuxLx58/TKExMT4ezsLPn4UiUlJRl9nyQvtqHlYxtanikt/vf/Be01AApQkn4MO9NrrEr0EEzxHiws
LJQUV6NJ19y5cytMOu6VmpoKAFAoFHr3CSEqLL/X/fdL2UZKTHXHkbKfmTNnYsqUKdrbeXl58PPzQ2RkJGrVqmXQ8auiVquRlJSEiIgIKJVKo+2X5MM2tHxsQ8tSphGIivsVmXlFAAAHG4EF7TWYfdQGxRoFFAC8azni50k9YGtj2PcF1QxTvgfLR6qqU6NJ16uvvopnn322ypiGDRvi9OnTuHbtmt59169f1+thKqdSqQDc7YXy8fHRlmdlZWm3UalUKCkpQU5Ojk5vV1ZWFrp06SL5cahUKhw+fFinLCcnB2q1utL6AXeHOx0cHPTKlUqlST6UTbVfkg/b0PKxDS3D0b9u4lJOMQDdhKpYo0Bx2d2ySznFOPH3bYQ2rlsDNaQHZYr3oNT91ejZi56enmjRokWVf46OjggNDUVubi6OHDmi3fbw4cPIzc2tNDkKDAyESqXS6UYsKSnBvn37tNu0a9cOSqVSJyYjIwNpaWkGJV2hoaFIS0tDRkaGtiwxMREODg5o166d5P0QEZF5yLpdZNQ4IsBCloxo2bIloqOjMWbMGKSkpCAlJQVjxoxBv379dCbRt2jRAtu2bQNwd7hv0qRJeO+997Bt2zakpaVhxIgRcHZ2xvDhwwEA7u7uGDVqFN544w388ssvOHHiBF544QWEhITgiSee0O738uXLOHnyJC5fvoyysjKcPHkSJ0+eRH5+PgAgMjISQUFBiImJwYkTJ/DLL79g6tSpGDNmjFGHCYmISB5ebo5GjSMCLGQiPQB8+eWXmDhxovZMwwEDBmDZsmU6MefPn0dubq729rRp03Dnzh1MmDABOTk56NSpExITE+Hm5qaN+eijj2BnZ4ehQ4fizp076N27N9atWwdbW1ttzDvvvIP169drb7dp0wYAsGfPHvTs2RO2trb48ccfMWHCBHTt2hVOTk4YPnw4PvzwQ5M8F0REZFodAz3g4+6IzNwiiAruVwBQuTuiY6CH3FUjC6YQQlT0eqIakJeXB3d3d+Tm5hp9Iv3OnTvRt29fziWxUGxDy8c2tDwJaRkYv/E4AMDeVmBxxzJMO2KLkv/O6Vr5QltEB/tUtQsyI6Z8D0r9/raI4UUiIiK5RQf7YOULbaFy1x1CVLk7MuGiB2Ixw4tERERyiw72QUSQCikXsnDjXArWxHZA5yZeXCaCHgh7uoiIiKpga6PQzt3qGOjBhIseGJMuIiIiIhkw6SIiIiKSAZMuIiIiIhkw6SIiIiKSAZMuIiIiIhkw6SIiIiKSAZMuIiIiIhkw6SIiIiKSAZMuIiIiIhnwMkBmpPza43l5eUbdr1qtRmFhIfLy8nihXQvFNrR8bEPLxvazfKZsw/Lv7fLv8cow6TIjt2/fBgD4+fnVcE2IiIjIULdv34a7u3ul9ytEdWkZyUaj0eCff/6Bm5sbFArjXdsrLy8Pfn5+uHLlCmrVqmW0/ZJ82IaWj21o2dh+ls+UbSiEwO3bt+Hr6wsbm8pnbrGny4zY2NigQYMGJtt/rVq1+GFh4diGlo9taNnYfpbPVG1YVQ9XOU6kJyIiIpIBky4iIiIiGTDpegQ4ODhgzpw5cHBwqOmq0ANiG1o+tqFlY/tZPnNoQ06kJyIiIpIBe7qIiIiIZMCki4iIiEgGTLqIiIiIZMCki4jIDK1btw61a9eu6Wo80i5evAiFQoGTJ0/WdFXIhBQKBbZv3y7LsZh0WaARI0ZAoVBg3LhxevdNmDABCoUCI0aMkL9iZBCFQlHlH9vQPMXHx8PNzQ2lpaXasvz8fCiVSnTv3l0ndv/+/VAoFPjjjz/kriZVg+8/61X+HXn/34ULFyqMz8jIQJ8+fWSpG1ekt1B+fn7YtGkTPvroIzg5OQEAioqK8PXXX8Pf37+Ga0dSZGRkaP+/efNmvPPOOzh//ry2rLxdybyEh4cjPz8fR48eRefOnQHcTa5UKhVSU1NRWFgIZ2dnAMDevXvh6+uLZs2a1WSVqQJS3n85OTk1UTUygujo
aKxdu1anrF69ejq3S0pKYG9vD5VKJVu92NNlodq2bQt/f39s3bpVW7Z161b4+fmhTZs22rLi4mJMnDgRXl5ecHR0RLdu3ZCamqq9f+/evVAoFPjll1/Qvn17ODs7o0uXLjofPmQaKpVK++fu7g6FQqG9nZCQgICAAJ347du3612Tc8eOHWjXrh0cHR3RqFEjzJs3T6cHhoyvefPm8PX1xd69e7Vle/fuxcCBA9G4cWMcOnRIpzw8PBwlJSWYNm0a6tevDxcXF3Tq1Elne+DucKK/vz+cnZ3x9NNP4+bNmzI9okdTVe+/8rJy//nPfxAeHg5nZ2e0atUKycnJ2vvmzp2L1q1b6+w7Li4ODRs2lOmRUEUcHBx02lOlUqF379549dVXMWXKFHh6eiIiIgIAhxdJopdeekknk1+zZg1GjhypEzNt2jRs2bIF69evx/Hjx9GkSRNERUUhOztbJ27WrFlYsmQJjh49Cjs7O739kPn5+eef8cILL2DixIk4e/YsPvvsM6xbtw7vvvtuTVfN6vXs2RN79uzR3t6zZw969uyJsLAwbXlJSQmSk5MRHh6Ol156CQcPHsSmTZtw+vRpDBkyBNHR0fjzzz8BAIcPH8bIkSMxYcIEnDx5EuHh4fi///u/GnlspG/WrFmYOnUqTp48iWbNmuG5557jjxsLtX79etjZ2eHgwYP47LPP5K+AIIsTGxsrBg4cKK5fvy4cHBxEenq6uHjxonB0dBTXr18XAwcOFLGxsSI/P18olUrx5ZdfarctKSkRvr6+YvHixUIIIfbs2SMAiF27dmljfvzxRwFA3LlzR/bH9qhau3atcHd3r/S2EEJs27ZN3PuW7d69u3jvvfd0Yr744gvh4+NjyqqSEGLVqlXCxcVFqNVqkZeXJ+zs7MS1a9fEpk2bRJcuXYQQQuzbt08AEBcuXBAKhUJcvXpVZx+9e/cWM2fOFEII8dxzz4no6Gid+4cNG6b3GiDTqOj9JoQQ6enpAoD417/+pS07c+aMACDOnTsnhBBizpw5olWrVjrbffTRRyIgIMCENaaqxMbGCltbW+Hi4qL9Gzx4sAgLCxOtW7fWiwcgtm3bJkvdOKfLgnl6euLJJ5/E+vXrIYTAk08+CU9PT+39f/31F9RqNbp27aotUyqV6NixI86dO6ezr8cff1z7fx8fHwBAVlYW54eZsWPHjiE1NVWnZ6usrAxFRUU684rI+MLDw1FQUIDU1FTk5OSgWbNm8PLyQlhYGGJiYlBQUIC9e/fC398fx48fhxBCb15XcXEx6tatCwA4d+4cnn76aZ37Q0NDkZCQINtjospV9vnYokWLmqoSVSM8PBwrV67U3nZxccFzzz2H9u3b12CtOJHe4o0cORKvvvoqAGD58uU694n/XuHp/nlAQgi9MqVSqf1/+X0ajcbo9SVpbGxstO1XTq1W69zWaDSYN28eBg0apLe9o6OjSev3qGvSpAkaNGiAPXv2ICcnB2FhYQDuzhMKDAzEwYMHsWfPHvTq1QsajQa2trY4duwYbG1tdfbj6uoKAHptTealqs9HKe9Vkp+LiwuaNGlSYXlNYtJl4aKjo1FSUgIAiIqK0rmvSZMmsLe3x4EDBzB8+HAAdz8Mjh49ikmTJsldVTJAvXr1cPv2bRQUFGg/JO5fK6ht27Y4f/58hR8sZHrh4eHYu3cvcnJy8Oabb2rLw8LC8PPPPyMlJQUvvfQS2rRpg7KyMmRlZektKVEuKCgIKSkpOmX33ybzVK9ePWRmZur8mOW6XlQZJl0WztbWVjtUeP+vaBcXF4wfPx5vvvkmPDw84O/vj8WLF6OwsBCjRo2qieqSRJ06dYKzszPeeustvPbaazhy5AjWrVunE/POO++gX79+8PPzw5AhQ2BjY4PTp0/jt99+4yRsGYSHh+OVV16BWq3W9nQBd5Ou8ePHo6ioCOHh4fDz88Pzzz+PF198EUuWLEGbNm1w48YN7N69GyEhIejbty8mTpyILl26
YPHixXjqqaeQmJjIoUUL0bNnT1y/fh2LFy/G4MGDkZCQgJ9++gm1atWq6aqRGeLZi1agVq1alb7B33//fTzzzDOIiYlB27ZtceHCBfz888+oU6eOzLUkQ3h4eGDjxo3YuXMnQkJC8PXXX2Pu3Lk6MVFRUfjhhx+QlJSEDh06oHPnzli6dKneUhNkGuHh4bhz5w6aNGkCb29vbXlYWBhu376Nxo0bw8/PDwCwdu1avPjii3jjjTfQvHlzDBgwAIcPH9be37lzZ/zrX//Cp59+itatWyMxMRFvv/12jTwuMkzLli2xYsUKLF++HK1atcKRI0cwderUmq4WmSmF4GQCIiIiIpNjTxcRERGRDJh0EREREcmASRcRERGRDJh0EREREcmASRcRERGRDJh0EREREcmASRcRERGRDJh0ERFJdPDgQYSEhECpVOKpp56SvN26detQu3Ztk9VLbg0bNkRcXFxNV4PI4jDpIiKzMGLECCgUCigUCiiVSnh7eyMiIgJr1qwxm4uvT5kyBa1bt0Z6erreZZnKyZWQxMfHw83NDaWlpdqy/Px8KJVKvWs87t+/HwqFAn/88YfJ60VElWPSRURmIzo6GhkZGbh48SJ++uknhIeH4/XXX0e/fv10koua8tdff6FXr15o0KBBjfdchYeHIz8/H0ePHtWW7d+/HyqVCqmpqSgsLNSW7927F76+vmjWrFlNVJWI/otJFxGZDQcHB6hUKtSvXx9t27bFW2+9he+++w4//fSTTs/S0qVLERISAhcXF/j5+WHChAnIz88HABQUFKBWrVr497//rbPvHTt2wMXFBbdv367w2MXFxZg4cSK8vLzg6OiIbt26ITU1FQBw8eJFKBQK3Lx5EyNHjoRCoaiwp6tnz564dOkSJk+erO21u9fPP/+Mli1bwtXVVZtg3mvt2rVo2bIlHB0d0aJFC6xYsaLS56p58+bw9fXF3r17tWV79+7FwIED0bhxYxw6dEinPDw8HABQUlKCadOmoX79+nBxcUGnTp109gEAhw4dQo8ePeDk5AQ/Pz9MnDgRBQUFldZl7dq1cHd3R1JSUqUxRMSki4jMXK9evdCqVSts3bpVW2ZjY4NPPvkEaWlpWL9+PXbv3o1p06YBAFxcXPDss89i7dq1OvtZu3YtBg8eDDc3twqPM23aNGzZsgXr16/H8ePH0aRJE0RFRSE7Oxt+fn7IyMhArVq1EBcXh4yMDAwbNkxvH1u3bkWDBg0wf/58ZGRk6CRVhYWF+PDDD/HFF1/g119/xeXLl3UujPz5559j1qxZePfdd3Hu3Dm89957mD17NtavX1/pc9OzZ0/s2bNHe3vPnj3o2bMnwsLCtOUlJSVITk7WJl0vvfQSDh48iE2bNuH06dMYMmQIoqOj8eeffwIAfvvtN0RFRWHQoEE4ffo0Nm/ejAMHDuDVV1+tsA4ffvghpk6dip9//hkRERGV1pWIAAgiIjMQGxsrBg4cWOF9w4YNEy1btqx022+++UbUrVtXe/vw4cPC1tZWXL16VQghxPXr14VSqRR79+6tcPv8/HyhVCrFl19+qS0rKSkRvr6+YvHixdoyd3d3sXbt2iofR0BAgPjoo490ytauXSsAiAsXLmjLli9fLry9vbW3/fz8xFdffaWz3YIFC0RoaGilx1q1apVwcXERarVa5OXlCTs7O3Ht2jWxadMm0aVLFyGEEPv27RMAxF9//SUuXLggFAqF9nkp17t3bzFz5kwhhBAxMTHi5Zdf1rl///79wsbGRty5c0fnMc6YMUP4+PiI06dPV/mcENFddjWd9BERVUcIoTNUt2fPHrz33ns4e/Ys8vLyUFpaiqKiIhQUFMDFxQUdO3bEY489hg0bNmDGjBn44osv4O/vjx49elS4/7/++gtqtRpdu3bVlimVSnTs2BHnzp0zymNwdnZG48aNtbd9fHyQlZUFALh+/TquXLmCUaNGYcyYMdqY0tJSuLu7V7rP8PBwFBQUIDU1FTk5
OWjWrBm8vLwQFhaGmJgYFBQUYO/evfD390ejRo3w7bffQgihN7eruLgYdevWBQAcO3YMFy5cwJdffqm9XwgBjUaD9PR0tGzZEgCwZMkSFBQU4OjRo2jUqNHDP0FEjwAmXURk9s6dO4fAwEAAwKVLl9C3b1+MGzcOCxYsgIeHBw4cOIBRo0ZBrVZrtxk9ejSWLVuGGTNmYO3atXjppZf05liVE0IAgN799yd7D0OpVOrcVigU2uOWn535+eefo1OnTjpxtra2le6zSZMmaNCgAfbs2YOcnByEhYUBAFQqFQIDA3Hw4EHs2bMHvXr10h7H1tYWx44d09uvq6urNmbs2LGYOHGi3vH8/f21/+/evTt+/PFHfPPNN5gxY4ak54DoUceki4jM2u7du/Hbb79h8uTJAICjR4+itLQUS5YsgY3N3Wmp33zzjd52L7zwAqZNm4ZPPvkEZ86cQWxsbKXHaNKkCezt7XHgwAEMHz4cAKBWq3H06FFMmjTJoPra29ujrKzMoG28vb1Rv359/Oc//8Hzzz9v0Lbh4eHYu3cvcnJy8Oabb2rLw8LC8PPPPyMlJQUvvfQSAKBNmzYoKytDVlaW3rIS5dq2bYszZ86gSZMmVR63Y8eOeO211xAVFQVbW1udYxNRxZh0EZHZKC4uRmZmJsrKynDt2jUkJCRg4cKF6NevH1588UUAQOPGjVFaWopPP/0U/fv3x8GDBxEfH6+3rzp16mDQoEF48803ERkZiQYNGlR6XBcXF4wfPx5vvvkmPDw84O/vj8WLF6OwsBCjRo0y6DE0bNgQv/76K5599lk4ODjA09NT0nZz587FxIkTUatWLfTp0wfFxcU4evQocnJyMGXKlEq3Cw8PxyuvvAK1Wq3t6QLuJl3jx49HUVGRdhJ9s2bN8Pzzz+PFF1/EkiVL0KZNG9y4cQO7d+9GSEgI+vbti+nTp6Nz58545ZVXMGbMGLi4uODcuXNISkrCp59+qnPs0NBQ/PTTT4iOjoadnZ02MSaiStTkhDIionKxsbECgAAg7OzsRL169cQTTzwh1qxZI8rKynRily5dKnx8fISTk5OIiooSGzZsEABETk6OTtwvv/wiAIhvvvmm2uPfuXNHvPbaa8LT01M4ODiIrl27iiNHjujESJlIn5ycLB5//HHh4OAgyj9i165dK9zd3XXitm3bJu7/CP7yyy9F69athb29vahTp47o0aOH2Lp1a5XHS09PFwBEixYtdMqvXLkiAIjGjRvrlJeUlIh33nlHNGzYUCiVSqFSqcTTTz+tMxn+yJEjIiIiQri6ugoXFxfx+OOPi3fffVd7//0nC+zbt0+4uLiIjz/+uMq6Ej3qFEL8d1IBEZGV+fLLL/H666/jn3/+gb29fU1Xh4gecRxeJCKrU1hYiPT0dCxcuBBjx45lwkVEZoGLoxKR1Vm8eDFat24Nb29vzJw5s6arQ0QEAODwIhEREZEM2NNFREREJAMmXUREREQyYNJFREREJAMmXUREREQyYNJFREREJAMmXUREREQyYNJFREREJAMmXUREREQyYNJFREREJIP/BwJ+aypfoHwzAAAAAElFTkSuQmCC", 305 | "text/plain": [ 306 | "
" 307 | ] 308 | }, 309 | "metadata": {}, 310 | "output_type": "display_data" 311 | } 312 | ], 313 | "source": [ 314 | "#4Q\n", 315 | "#vii)\n", 316 | "import matplotlib.pyplot as plt\n", 317 | "\n", 318 | "day_mapping = {'Mon': 0, 'Tue': 1, 'Wed': 2, 'Thu': 3, 'Fri': 4}\n", 319 | "df['Day_num'] = df['Day'].map(day_mapping)\n", 320 | "\n", 321 | "plt.scatter(df['Day_num'], df['Chg%'])\n", 322 | "plt.xlabel('Day of the Week')\n", 323 | "plt.ylabel('Chg%')\n", 324 | "plt.title('Scatter Plot of Chg% Data Against the Day of the Week')\n", 325 | "plt.xticks(ticks=list(day_mapping.values()), labels=list(day_mapping.keys()))\n", 326 | "plt.grid(True)\n", 327 | "plt.show()\n" 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": null, 333 | "metadata": {}, 334 | "outputs": [], 335 | "source": [] 336 | } 337 | ], 338 | "metadata": { 339 | "colab": { 340 | "provenance": [] 341 | }, 342 | "kernelspec": { 343 | "display_name": "Python 3 (ipykernel)", 344 | "language": "python", 345 | "name": "python3" 346 | }, 347 | "language_info": { 348 | "codemirror_mode": { 349 | "name": "ipython", 350 | "version": 3 351 | }, 352 | "file_extension": ".py", 353 | "mimetype": "text/x-python", 354 | "name": "python", 355 | "nbconvert_exporter": "python", 356 | "pygments_lexer": "ipython3", 357 | "version": "3.11.7" 358 | } 359 | }, 360 | "nbformat": 4, 361 | "nbformat_minor": 4 362 | } 363 | -------------------------------------------------------------------------------- /Lab-3/Lab03: -------------------------------------------------------------------------------- 1 | Subject: 19CSE305 2 | Lab Session: 03 3 | Notes: 4 | 1. Please read the assignment notes carefully and comply to the guidelines provided. 5 | 2. Code should be checked into the GitHub. These details shall be provided in the Lab. 6 | 3. If you have not completed the prerequisite assignments, please complete them before the 7 | next lab session. 8 | Coding Instructions: 9 | 1. 
The code should be modularized; The asked functionality should be available as a function. 10 | Please create multiple functions if needed. However, all functions should be present within a 11 | single code block, if you are using Jupyter or Colab notebooks. 12 | 2. There should be no print statement within the function. All print statements should be in the 13 | main program. 14 | 3. Please use proper naming of variables. 15 | 4. For lists, strings and matrices, you may use your input values as appropriate. 16 | 5. Please make inline documentation / comments as needed within the code blocks. 17 | Main Section (Mandatory): 18 | Please use the data associated with your own project. This assignment deals with classification 19 | models. 20 | For dot product → use numpy.dot() 21 | For Vector length → use numpy.linalg.norm() 22 | Refer to lecture portions on k-NN. Also refer: 23 | https://scikit-learn.org/stable/modules/generated/sklearn.neighbors.KNeighborsClassifier.html 24 | Please use help manuals of sklearn package to gain understanding of the model behaviors as well as 25 | ways to use various package functionalities. 26 | A1. Evaluate the intraclass spread and interclass distances between the classes in your dataset. If 27 | your data deals with multiple classes, you can take any two classes. Steps below (refer below 28 | diagram for understanding): 29 | • Calculate the mean for each class (also called as class centroid) 30 | (Suggestion: You may use numpy.mean() function for finding the average vector for all 31 | vectors in a given class. Please define the axis property appropriately to use this function. EX: 32 | feat_vecs.mean(axis=0)) 33 | • Calculate spread (standard deviation) for each class 34 | (Suggestion: You may use numpy.std() function for finding the standard deviation vector 35 | for all vectors in a given class. Please define the axis property appropriately to use this 36 | function.) 
37 | • Calculate the distance between mean vectors between classes 38 | (Suggestion: numpy.linalg.norm(centroid1 – centroid2) gives the Euclidean 39 | distance between two centroids.) 40 | A2. Take any feature from your dataset. Observe the density pattern for that feature by plotting the 41 | histogram. Use buckets (data in ranges) for histogram generation and study. Calculate the mean and 42 | variance from the available data. 43 | (Suggestion: numpy.histogram() gives the histogram data. Plot of histogram may be 44 | achieved with matplotlib.pyplot.hist()) 45 | A3. Take any two feature vectors from your dataset. Calculate the Minkowski distance with r from 1 46 | to 10. Make a plot of the distance and observe the nature of this graph. 47 | A4. Divide dataset in your project into two parts – train & test set. To accomplish this, use the train_test_split() function available in SciKit. See below sample code for help: 48 | >>> import numpy as np 49 | >>> from sklearn.model_selection import train_test_split 50 | >>> X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3) 51 | X is the feature vector set for your project and y is the class labels for vectors present in X. 52 | Note: Before set split, make sure you have only two classes. If your project deals with a multi-class 53 | problem, take any two classes from them. 54 | A5. Train a kNN classifier (k = 3) using the training set obtained from above exercise. Following code 55 | for help: 56 | >>> import numpy as np 57 | >>> from sklearn.neighbors import KNeighborsClassifier 58 | >>> neigh = KNeighborsClassifier(n_neighbors=3) 59 | >>> neigh.fit(X, y) 60 | A6. Test the accuracy of the kNN using the test set obtained from above exercise. Following code for 61 | help. 62 | >>> neigh.score(X_test, y_test) 63 | This code shall generate an accuracy report for you. Please study the report and understand it. 64 | A7. 
Use the predict() function to study the prediction behavior of the classifier for test vectors. 65 | >>> neigh.predict(X_test) 66 | Perform classification for a given vector using neigh.predict(<>). This shall produce the 67 | class of the test vector (test_vect is any feature vector from your test set). 68 | A8. Make k = 1 to implement NN classifier and compare the results with kNN (k = 3). Vary k from 1 to 69 | 11 and make an accuracy plot. 70 | A9. Please evaluate confusion matrix for your classification problem. From confusion matrix, the 71 | other performance metrics such as precision, recall and F1-Score measures for both training and test 72 | data. Based on your observations, infer the models learning outcome (underfit / regularfit / overfit). 73 | Optional Section: 74 | O1. Create a normal distribution data, plot the graph and compare the normal distribution plot 75 | against the histogram plot. 76 | O2. Use different distance metric for kNN classifier by tuning the metric parameters of 77 | KNeighborsClassifier(). Observe the behaviour with change in the distance for classification. 78 | O3. Make an AUROC plot for your project for kNN classifier. Compare the results with the area 79 | obtained and infer. 80 | O4. Compare the performance of your developed kNN classifier (during 2nd Lab exercise) with that 81 | of the package provided model. 82 | Report Assignment: 83 | 1. Update your understanding of your project in the introduction section of the report. 84 | 2. Study the downloaded papers & update the literature survey section of your report. 85 | 3. Expand the methodology and results sections with outcomes of this experiments & results 86 | obtained. Please discuss your observations, inferences in results & discussion section. Please 87 | conclude the report appropriately with these experiments. Consider following points for 88 | observation analysis & inferences. 89 | • Do you think the classes you have in your dataset are well separated? 
Justify your 90 | answer. 91 | • Explain the behavior of the kNN classifier with increase in value of k. Explain the 92 | scenarios of over-fitting and under-fitting in kNN classifier. 93 | • Do you think the kNN classifier is a good classifier based on the results obtained on 94 | various metrics? 95 | • Do you think the model has regular fit situation? Use train and test set performances to 96 | arrive at this inference. 97 | • When do you think a situation of overfit happens for kNN classifier 98 | -------------------------------------------------------------------------------- /Lab-4/ss: -------------------------------------------------------------------------------- 1 | lol 2 | -------------------------------------------------------------------------------- /Lab-5/Lab-5.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 7, 6 | "id": "4cedc66f-fcf4-4fc5-8274-6396ef3fe77f", 7 | "metadata": {}, 8 | "outputs": [ 9 | { 10 | "name": "stdout", 11 | "output_type": "stream", 12 | "text": [ 13 | " Header_and_Main_Declaration Incomprehensible_Code \\\n", 14 | "0 2 2.0 \n", 15 | "1 2 2.0 \n", 16 | "2 2 2.0 \n", 17 | "3 2 1.0 \n", 18 | "4 2 2.0 \n", 19 | "\n", 20 | " Comprehensible_Code_with_logical_errors \\\n", 21 | "0 2 \n", 22 | "1 2 \n", 23 | "2 1 \n", 24 | "3 1 \n", 25 | "4 1 \n", 26 | "\n", 27 | " Comprehensible_code_with_syntax_errors Correct_code_and_output \\\n", 28 | "0 2.0 2 \n", 29 | "1 2.0 0 \n", 30 | "2 1.0 1 \n", 31 | "3 1.0 0 \n", 32 | "4 1.0 0 \n", 33 | "\n", 34 | " Final_Marks \n", 35 | "0 10 \n", 36 | "1 8 \n", 37 | "2 7 \n", 38 | "3 5 \n", 39 | "4 6 \n" 40 | ] 41 | } 42 | ], 43 | "source": [ 44 | "#collecting and loading data\n", 45 | "import pandas as pd\n", 46 | "import numpy as np\n", 47 | "\n", 48 | "def read_data(file_name):\n", 49 | " df = pd.read_excel(file_name)\n", 50 | " df = df.fillna(df.mean(numeric_only=True)) # Fill missing 
values with column mean for numeric columns\n", 51 | " df = df.iloc[:, 4:10] # Select columns 4 to 9\n", 52 | " return df\n", 53 | "\n", 54 | "df = read_data('ml-data.xlsx')\n", 55 | "\n", 56 | "print(df.head())\n" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 12, 62 | "id": "96218e3f-8c15-444a-84f4-dbfd81325108", 63 | "metadata": {}, 64 | "outputs": [ 65 | { 66 | "data": { 67 | "text/html": [ 68 | "
LinearRegression()
In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
" 69 | ], 70 | "text/plain": [ 71 | "LinearRegression()" 72 | ] 73 | }, 74 | "execution_count": 12, 75 | "metadata": {}, 76 | "output_type": "execute_result" 77 | } 78 | ], 79 | "source": [ 80 | "#1q)\n", 81 | "from sklearn.model_selection import train_test_split\n", 82 | "from sklearn.linear_model import LinearRegression\n", 83 | "\n", 84 | "def train_test_splits(X, y):\n", 85 | " X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)\n", 86 | " return X_train, X_test, y_train, y_test\n", 87 | "\n", 88 | "def train_model(X_train, y_train):\n", 89 | " reg = LinearRegression().fit(X_train, y_train)\n", 90 | " y_train_pred = reg.predict(X_train)\n", 91 | " return reg\n", 92 | "\n", 93 | "# Split the data into train and test\n", 94 | "X_train, X_test, y_train, y_test = train_test_splits(df[['Correct_code_and_output']], df['Final_Marks'])\n", 95 | "\n", 96 | "# Train the model\n", 97 | "model = train_model(X_train, y_train)\n", 98 | "model" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 13, 104 | "id": "7e2a7767-3016-404e-93ff-50b536f8f017", 105 | "metadata": {}, 106 | "outputs": [ 107 | { 108 | "name": "stdout", 109 | "output_type": "stream", 110 | "text": [ 111 | "Training Set Metrics:\n", 112 | "MSE: 2.64, RMSE: 1.63, MAPE: 11096952697985274.00%, R²: 0.43\n", 113 | "\n", 114 | "Testing Set Metrics:\n", 115 | "MSE: 2.74, RMSE: 1.65, MAPE: 17679890739162960.00%, R²: 0.36\n" 116 | ] 117 | } 118 | ], 119 | "source": [ 120 | "#2q)\n", 121 | "\n", 122 | "from sklearn.metrics import mean_squared_error, mean_absolute_percentage_error, r2_score\n", 123 | "import numpy as np\n", 124 | "\n", 125 | "# Function to evaluate the model's performance\n", 126 | "def evaluate_model(y_true, y_pred):\n", 127 | " mse = mean_squared_error(y_true, y_pred)\n", 128 | " rmse = np.sqrt(mse)\n", 129 | " mape = mean_absolute_percentage_error(y_true, y_pred)\n", 130 | " r2 = r2_score(y_true, y_pred)\n", 131 | " return mse, rmse, 
mape, r2\n", 132 | "\n", 133 | "\n", 134 | "y_train_pred = model.predict(X_train)\n", 135 | "y_test_pred = model.predict(X_test)\n", 136 | "\n", 137 | "# for the training set\n", 138 | "train_mse, train_rmse, train_mape, train_r2 = evaluate_model(y_train, y_train_pred)\n", 139 | "\n", 140 | "# for the testing set\n", 141 | "test_mse, test_rmse, test_mape, test_r2 = evaluate_model(y_test, y_test_pred)\n", 142 | "\n", 143 | "print(\"Training Set Metrics:\")\n", 144 | "print(f\"MSE: {train_mse:.2f}, RMSE: {train_rmse:.2f}, MAPE: {train_mape:.2%}, R²: {train_r2:.2f}\")\n", 145 | "\n", 146 | "print(\"\\nTesting Set Metrics:\")\n", 147 | "print(f\"MSE: {test_mse:.2f}, RMSE: {test_rmse:.2f}, MAPE: {test_mape:.2%}, R²: {test_r2:.2f}\")\n" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 14, 153 | "id": "1cb0afc5-5562-4a55-a870-48c32a6d75b4", 154 | "metadata": {}, 155 | "outputs": [ 156 | { 157 | "name": "stdout", 158 | "output_type": "stream", 159 | "text": [ 160 | " Header_and_Main_Declaration Incomprehensible_Code \\\n", 161 | "0 2 2.0 \n", 162 | "1 2 2.0 \n", 163 | "2 2 2.0 \n", 164 | "3 2 1.0 \n", 165 | "4 2 2.0 \n", 166 | "\n", 167 | " Comprehensible_Code_with_logical_errors \\\n", 168 | "0 2 \n", 169 | "1 2 \n", 170 | "2 1 \n", 171 | "3 1 \n", 172 | "4 1 \n", 173 | "\n", 174 | " Comprehensible_code_with_syntax_errors Correct_code_and_output \\\n", 175 | "0 2.0 2 \n", 176 | "1 2.0 0 \n", 177 | "2 1.0 1 \n", 178 | "3 1.0 0 \n", 179 | "4 1.0 0 \n", 180 | "\n", 181 | " Final_Marks \n", 182 | "0 10 \n", 183 | "1 8 \n", 184 | "2 7 \n", 185 | "3 5 \n", 186 | "4 6 \n" 187 | ] 188 | } 189 | ], 190 | "source": [ 191 | "# reloading the data\n", 192 | "\n", 193 | "df = read_data('ml-data.xlsx')\n", 194 | "\n", 195 | "print(df.head())" 196 | ] 197 | }, 198 | { 199 | "cell_type": "code", 200 | "execution_count": 21, 201 | "id": "83f1c4fd-4699-47bc-879c-9e89b062cb77", 202 | "metadata": {}, 203 | "outputs": [ 204 | { 205 | "name": "stdout", 206 | 
"output_type": "stream", 207 | "text": [ 208 | "Training Set Metrics:\n", 209 | "MSE: 0.00, RMSE: 0.06, MAPE: 935317922102.33%, R²: 1.00\n", 210 | "\n", 211 | "Testing Set Metrics:\n", 212 | "MSE: 0.00, RMSE: 0.00, MAPE: 1490167536908.57%, R²: 1.00\n" 213 | ] 214 | } 215 | ], 216 | "source": [ 217 | "#3q)\n", 218 | "\n", 219 | "# Split the data into train and test\n", 220 | "y = df.iloc[:,-1]\n", 221 | "X = df.drop('Final_Marks',axis = 1)\n", 222 | "X_train, X_test, y_train, y_test = train_test_splits(X, y)\n", 223 | "\n", 224 | "# Train the model\n", 225 | "model = train_model(X_train, y_train)\n", 226 | "y_train_pred = model.predict(X_train)\n", 227 | "y_test_pred = model.predict(X_test)\n", 228 | "\n", 229 | "# for the training set\n", 230 | "train_mse, train_rmse, train_mape, train_r2 = evaluate_model(y_train, y_train_pred)\n", 231 | "\n", 232 | "# for the testing set\n", 233 | "test_mse, test_rmse, test_mape, test_r2 = evaluate_model(y_test, y_test_pred)\n", 234 | "\n", 235 | "print(\"Training Set Metrics:\")\n", 236 | "print(f\"MSE: {train_mse:.2f}, RMSE: {train_rmse:.2f}, MAPE: {train_mape:.2%}, R²: {train_r2:.2f}\")\n", 237 | "\n", 238 | "print(\"\\nTesting Set Metrics:\")\n", 239 | "print(f\"MSE: {test_mse:.2f}, RMSE: {test_rmse:.2f}, MAPE: {test_mape:.2%}, R²: {test_r2:.2f}\")\n" 240 | ] 241 | }, 242 | { 243 | "cell_type": "code", 244 | "execution_count": 26, 245 | "id": "b6c53046-57da-4ddd-a06c-fe57e93cfce1", 246 | "metadata": {}, 247 | "outputs": [ 248 | { 249 | "name": "stderr", 250 | "output_type": "stream", 251 | "text": [ 252 | "C:\\Users\\mvenk\\anaconda3\\Lib\\site-packages\\sklearn\\cluster\\_kmeans.py:870: FutureWarning: The default value of `n_init` will change from 10 to 'auto' in 1.4. 
Set the value of `n_init` explicitly to suppress the warning\n", 253 | " warnings.warn(\n", 254 | "C:\\Users\\mvenk\\anaconda3\\Lib\\site-packages\\sklearn\\cluster\\_kmeans.py:1382: UserWarning: KMeans is known to have a memory leak on Windows with MKL, when there are less chunks than available threads. You can avoid it by setting the environment variable OMP_NUM_THREADS=5.\n", 255 | " warnings.warn(\n" 256 | ] 257 | }, 258 | { 259 | "name": "stdout", 260 | "output_type": "stream", 261 | "text": [ 262 | " Header_and_Main_Declaration Incomprehensible_Code \\\n", 263 | "0 2 2.0 \n", 264 | "1 2 2.0 \n", 265 | "2 2 2.0 \n", 266 | "3 2 1.0 \n", 267 | "4 2 2.0 \n", 268 | "\n", 269 | " Comprehensible_Code_with_logical_errors \\\n", 270 | "0 2 \n", 271 | "1 2 \n", 272 | "2 1 \n", 273 | "3 1 \n", 274 | "4 1 \n", 275 | "\n", 276 | " Comprehensible_code_with_syntax_errors Correct_code_and_output \\\n", 277 | "0 2.0 2 \n", 278 | "1 2.0 0 \n", 279 | "2 1.0 1 \n", 280 | "3 1.0 0 \n", 281 | "4 1.0 0 \n", 282 | "\n", 283 | " Final_Marks Cluster Target \n", 284 | "0 10 1 1 \n", 285 | "1 8 1 1 \n", 286 | "2 7 1 1 \n", 287 | "3 5 0 1 \n", 288 | "4 6 1 1 \n", 289 | "Cluster Centers:\n", 290 | " [[ 1.66796117e+00 1.74757282e-01 8.71844660e-01 9.69037892e-01\n", 291 | " 2.27184466e-01 -1.11022302e-16 3.14563107e-01]\n", 292 | " [ 1.84720121e+00 1.93065310e+00 1.19667171e+00 1.12111122e+00\n", 293 | " 6.52042360e-01 1.00000000e+00 9.42511346e-01]]\n" 294 | ] 295 | } 296 | ], 297 | "source": [ 298 | "#4q)\n", 299 | "from sklearn.cluster import KMeans\n", 300 | "import pandas as pd\n", 301 | "\n", 302 | "# prepare data by removing the target variable\n", 303 | "def prepare_data(df, target_column):\n", 304 | " X = df.drop(columns=[target_column])\n", 305 | " return X\n", 306 | "\n", 307 | "# perform K-means clustering\n", 308 | "def perform_kmeans(X, n_clusters=3):\n", 309 | " kmeans = KMeans(n_clusters=n_clusters, random_state=42)\n", 310 | " kmeans.fit(X)\n", 311 | " return kmeans\n", 
312 | "\n", 313 | "def add_cluster_labels(df, kmeans):\n", 314 | " df['Cluster'] = kmeans.labels_\n", 315 | " return df\n", 316 | "\n", 317 | "def main(df, target_column, n_clusters=3):\n", 318 | " X = prepare_data(df, target_column)\n", 319 | " kmeans = perform_kmeans(X, n_clusters)\n", 320 | " df_with_clusters = add_cluster_labels(df, kmeans)\n", 321 | " return df_with_clusters, kmeans\n", 322 | "\n", 323 | "df_with_clusters, kmeans_model = main(df, target_column='Final_Marks', n_clusters=2)\n", 324 | "\n", 325 | "print(df_with_clusters.head())\n", 326 | "\n", 327 | "# Optionally, analyze the cluster centers\n", 328 | "print(\"Cluster Centers:\\n\", kmeans_model.cluster_centers_)\n", 329 | "\n" 330 | ] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": null, 335 | "id": "e415b9b8-4797-4264-a857-56941476e413", 336 | "metadata": {}, 337 | "outputs": [], 338 | "source": [ 339 | "#5q)\n" 340 | ] 341 | } 342 | ], 343 | "metadata": { 344 | "kernelspec": { 345 | "display_name": "Python 3 (ipykernel)", 346 | "language": "python", 347 | "name": "python3" 348 | }, 349 | "language_info": { 350 | "codemirror_mode": { 351 | "name": "ipython", 352 | "version": 3 353 | }, 354 | "file_extension": ".py", 355 | "mimetype": "text/x-python", 356 | "name": "python", 357 | "nbconvert_exporter": "python", 358 | "pygments_lexer": "ipython3", 359 | "version": "3.11.7" 360 | } 361 | }, 362 | "nbformat": 4, 363 | "nbformat_minor": 5 364 | } 365 | -------------------------------------------------------------------------------- /Lab-5/sss: -------------------------------------------------------------------------------- 1 | ss 2 | -------------------------------------------------------------------------------- /Lab-7/Lab-7.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 6, 6 | "id": "bee9a245-e15d-4b07-b4a2-6260659bb336", 7 | "metadata": {}, 8 | "outputs": [ 9 | 
{ 10 | "name": "stderr", 11 | "output_type": "stream", 12 | "text": [ 13 | "C:\\Users\\mvenk\\AppData\\Local\\Temp\\ipykernel_16564\\446592067.py:15: SettingWithCopyWarning: \n", 14 | "A value is trying to be set on a copy of a slice from a DataFrame\n", 15 | "\n", 16 | "See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n", 17 | " X.fillna(0, inplace=True)\n" 18 | ] 19 | }, 20 | { 21 | "name": "stdout", 22 | "output_type": "stream", 23 | "text": [ 24 | "Best Perceptron Hyperparameters: {'tol': 1e-05, 'penalty': 'l2', 'max_iter': 1500, 'alpha': 4.641588833612782e-05}\n", 25 | "Best MLP Hyperparameters: {'solver': 'adam', 'max_iter': 300, 'learning_rate': 'constant', 'hidden_layer_sizes': (150, 100, 50), 'alpha': 4.641588833612782e-05, 'activation': 'tanh'}\n", 26 | "Perceptron Accuracy: 0.3342776203966006\n", 27 | "MLP Accuracy: 1.0\n" 28 | ] 29 | } 30 | ], 31 | "source": [ 32 | "import pandas as pd\n", 33 | "import numpy as np\n", 34 | "\n", 35 | "data = pd.read_excel('ml-data.xlsx')\n", 36 | "\n", 37 | "# Extract the relevant columns (E to J)\n", 38 | "X = data[['Header_and_Main_Declaration', 'Incomprehensible_Code', \n", 39 | " 'Comprehensible_Code_with_logical_errors', \n", 40 | " 'Comprehensible_code_with_syntax_errors', \n", 41 | " 'Correct_code_and_output']]\n", 42 | "\n", 43 | "y = data['Final_Marks']\n", 44 | "\n", 45 | "# Handle any missing values if they exist\n", 46 | "X.fillna(0, inplace=True)\n", 47 | "\n", 48 | "# Split the data into training and testing sets\n", 49 | "from sklearn.model_selection import train_test_split\n", 50 | "\n", 51 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)\n", 52 | "\n", 53 | "# Define the models: Perceptron and MLPClassifier\n", 54 | "from sklearn.linear_model import Perceptron\n", 55 | "from sklearn.neural_network import MLPClassifier\n", 56 | "from sklearn.model_selection import 
RandomizedSearchCV\n", 57 | "import numpy as np\n", 58 | "\n", 59 | "# Define Perceptron model and hyperparameters\n", 60 | "perceptron = Perceptron()\n", 61 | "param_dist_perceptron = {\n", 62 | " 'penalty': ['l2', 'l1', 'elasticnet'],\n", 63 | " 'alpha': np.logspace(-5, 1, 10),\n", 64 | " 'max_iter': [500, 1000, 1500, 2000],\n", 65 | " 'tol': [1e-3, 1e-4, 1e-5],\n", 66 | "}\n", 67 | "\n", 68 | "# Perform RandomizedSearchCV on Perceptron\n", 69 | "random_search_perceptron = RandomizedSearchCV(perceptron, param_distributions=param_dist_perceptron,\n", 70 | " n_iter=10, cv=5, random_state=42, n_jobs=-1)\n", 71 | "random_search_perceptron.fit(X_train, y_train)\n", 72 | "\n", 73 | "# Best hyperparameters for Perceptron\n", 74 | "print(\"Best Perceptron Hyperparameters:\", random_search_perceptron.best_params_)\n", 75 | "\n", 76 | "# Define MLPClassifier model and hyperparameters\n", 77 | "mlp = MLPClassifier()\n", 78 | "param_dist_mlp = {\n", 79 | " 'hidden_layer_sizes': [(50,), (100,), (100, 50), (150, 100, 50)],\n", 80 | " 'activation': ['tanh', 'relu'],\n", 81 | " 'solver': ['adam', 'sgd'],\n", 82 | " 'alpha': np.logspace(-5, 1, 10),\n", 83 | " 'learning_rate': ['constant', 'adaptive'],\n", 84 | " 'max_iter': [200, 300, 500]\n", 85 | "}\n", 86 | "\n", 87 | "# Perform RandomizedSearchCV on MLPClassifier\n", 88 | "random_search_mlp = RandomizedSearchCV(mlp, param_distributions=param_dist_mlp, \n", 89 | " n_iter=10, cv=5, random_state=42, n_jobs=-1)\n", 90 | "random_search_mlp.fit(X_train, y_train)\n", 91 | "\n", 92 | "# Best hyperparameters for MLP\n", 93 | "print(\"Best MLP Hyperparameters:\", random_search_mlp.best_params_)\n", 94 | "\n", 95 | "# Evaluate models on test set\n", 96 | "from sklearn.metrics import accuracy_score\n", 97 | "\n", 98 | "y_pred_perceptron = random_search_perceptron.predict(X_test)\n", 99 | "y_pred_mlp = random_search_mlp.predict(X_test)\n", 100 | "\n", 101 | "# Print accuracy\n", 102 | "print(\"Perceptron Accuracy:\", 
accuracy_score(y_test, y_pred_perceptron))\n", 103 | "print(\"MLP Accuracy:\", accuracy_score(y_test, y_pred_mlp))\n" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 9, 109 | "id": "12c0a923-054d-4ae9-9832-a0fc271d0017", 110 | "metadata": {}, 111 | "outputs": [ 112 | { 113 | "name": "stderr", 114 | "output_type": "stream", 115 | "text": [ 116 | "C:\\Users\\mvenk\\AppData\\Local\\Temp\\ipykernel_16564\\3922117403.py:62: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.\n", 117 | " results = pd.concat([results, temp_result], ignore_index=True)\n", 118 | "C:\\Users\\mvenk\\anaconda3\\Lib\\site-packages\\xgboost\\core.py:158: UserWarning: [15:34:40] WARNING: C:\\buildkite-agent\\builds\\buildkite-windows-cpu-autoscaling-group-i-06abd128ca6c1688d-1\\xgboost\\xgboost-ci-windows\\src\\learner.cc:740: \n", 119 | "Parameters: { \"use_label_encoder\" } are not used.\n", 120 | "\n", 121 | " warnings.warn(smsg, UserWarning)\n" 122 | ] 123 | }, 124 | { 125 | "data": { 126 | "text/html": [ 127 | "
\n", 128 | "\n", 141 | "\n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | " \n", 197 | " \n", 198 | " \n", 199 | " \n", 200 | " \n", 201 | " \n", 202 | "
ClassifierAccuracyPrecisionRecallF1-Score
0Support Vector Machine0.9688390.9645080.9688390.966334
1Decision Tree0.9291780.9298240.9291780.929137
2Random Forest0.9291780.9303040.9291780.929217
3AdaBoost0.2974500.2893780.2974500.230668
4XGBoost0.9291780.9298160.9291780.929155
5Naive Bayes0.4702550.4105750.4702550.429052
\n", 203 | "
" 204 | ], 205 | "text/plain": [ 206 | " Classifier Accuracy Precision Recall F1-Score\n", 207 | "0 Support Vector Machine 0.968839 0.964508 0.968839 0.966334\n", 208 | "1 Decision Tree 0.929178 0.929824 0.929178 0.929137\n", 209 | "2 Random Forest 0.929178 0.930304 0.929178 0.929217\n", 210 | "3 AdaBoost 0.297450 0.289378 0.297450 0.230668\n", 211 | "4 XGBoost 0.929178 0.929816 0.929178 0.929155\n", 212 | "5 Naive Bayes 0.470255 0.410575 0.470255 0.429052" 213 | ] 214 | }, 215 | "execution_count": 9, 216 | "metadata": {}, 217 | "output_type": "execute_result" 218 | } 219 | ], 220 | "source": [ 221 | "# Import necessary libraries\n", 222 | "from sklearn.svm import SVC\n", 223 | "from sklearn.tree import DecisionTreeClassifier\n", 224 | "from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n", 225 | "from xgboost import XGBClassifier # Ensure xgboost is installed\n", 226 | "from sklearn.naive_bayes import GaussianNB\n", 227 | "from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score\n", 228 | "from sklearn.model_selection import train_test_split\n", 229 | "import pandas as pd\n", 230 | "\n", 231 | "# Extract features (E to I) and target (J column)\n", 232 | "X = data[['Header_and_Main_Declaration', 'Incomprehensible_Code', \n", 233 | " 'Comprehensible_Code_with_logical_errors', \n", 234 | " 'Comprehensible_code_with_syntax_errors', \n", 235 | " 'Correct_code_and_output']].copy() # Use .copy() to avoid SettingWithCopyWarning\n", 236 | "\n", 237 | "y = data['Final_Marks']\n", 238 | "\n", 239 | "# Handle any missing values by reassigning X\n", 240 | "X = X.fillna(0)\n", 241 | "\n", 242 | "# Split the data into training and testing sets\n", 243 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)\n", 244 | "\n", 245 | "# Define the classifiers (Removed CatBoostClassifier)\n", 246 | "classifiers = {\n", 247 | " 'Support Vector Machine': SVC(),\n", 248 | " 'Decision Tree': 
DecisionTreeClassifier(),\n", 249 | " 'Random Forest': RandomForestClassifier(),\n", 250 | " 'AdaBoost': AdaBoostClassifier(),\n", 251 | " 'XGBoost': XGBClassifier(eval_metric='mlogloss'),\n", 252 | " 'Naive Bayes': GaussianNB()\n", 253 | "}\n", 254 | "\n", 255 | "# Initialize a dataframe to store results\n", 256 | "results = pd.DataFrame(columns=['Classifier', 'Accuracy', 'Precision', 'Recall', 'F1-Score'])\n", 257 | "\n", 258 | "# Loop over classifiers, fit and predict, and store results\n", 259 | "for classifier_name, classifier in classifiers.items():\n", 260 | "    # Train the classifier\n", 261 | "    classifier.fit(X_train, y_train)\n", 262 | "    \n", 263 | "    # Predict on the test set\n", 264 | "    y_pred = classifier.predict(X_test)\n", 265 | "    \n", 266 | "    # Calculate performance metrics\n", 267 | "    accuracy = accuracy_score(y_test, y_pred)\n", 268 | "    precision = precision_score(y_test, y_pred, average='weighted', zero_division=0)\n", 269 | "    recall = recall_score(y_test, y_pred, average='weighted', zero_division=0)\n", 270 | "    f1 = f1_score(y_test, y_pred, average='weighted', zero_division=0)\n", 271 | "    \n", 272 | "    # Create a temporary DataFrame for this result\n", 273 | "    temp_result = pd.DataFrame({\n", 274 | "        'Classifier': [classifier_name],\n", 275 | "        'Accuracy': [accuracy],\n", 276 | "        'Precision': [precision],\n", 277 | "        'Recall': [recall],\n", 278 | "        'F1-Score': [f1]\n", 279 | "    })\n", 280 | "    \n", 281 | "    # Append the result to the results DataFrame using pd.concat\n", 282 | "    results = pd.concat([results, temp_result], ignore_index=True)\n", 283 | "\n", 284 | "# Display the results\n", 285 | "results\n" 286 | ] 287 | }, 288 | { 289 | "cell_type": "code", 290 | "execution_count": 10, 291 | "id": "de50ad46-59f4-459c-9bb4-9d28c350af94", 292 | "metadata": {}, 293 | "outputs": [ 294 | { 295 | "name": "stderr", 296 | "output_type": "stream", 297 | "text": [ 298 | 
"C:\\Users\\mvenk\\AppData\\Local\\Temp\\ipykernel_16564\\3418198608.py:71: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.\n", 299 | " results = pd.concat([results, temp_result], ignore_index=True)\n", 300 | "C:\\Users\\mvenk\\anaconda3\\Lib\\site-packages\\xgboost\\core.py:158: UserWarning: [16:26:59] WARNING: C:\\buildkite-agent\\builds\\buildkite-windows-cpu-autoscaling-group-i-06abd128ca6c1688d-1\\xgboost\\xgboost-ci-windows\\src\\learner.cc:740: \n", 301 | "Parameters: { \"use_label_encoder\" } are not used.\n", 302 | "\n", 303 | " warnings.warn(smsg, UserWarning)\n" 304 | ] 305 | }, 306 | { 307 | "data": { 308 | "text/html": [ 309 | "
\n", 310 | "\n", 323 | "\n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | " \n", 351 | " \n", 352 | " \n", 353 | " \n", 354 | " \n", 355 | " \n", 356 | " \n", 357 | " \n", 358 | " \n", 359 | " \n", 360 | " \n", 361 | " \n", 362 | " \n", 363 | " \n", 364 | " \n", 365 | " \n", 366 | " \n", 367 | " \n", 368 | " \n", 369 | " \n", 370 | " \n", 371 | " \n", 372 | " \n", 373 | " \n", 374 | " \n", 375 | " \n", 376 | " \n", 377 | " \n", 378 | " \n", 379 | " \n", 380 | " \n", 381 | " \n", 382 | " \n", 383 | " \n", 384 | "
ClassifierAccuracyPrecisionRecallF1-Score
0Support Vector Machine0.9688390.9645080.9688390.966334
1Decision Tree0.9291780.9295380.9291780.929100
2Random Forest0.9291780.9304000.9291780.929301
3AdaBoost0.2974500.2893780.2974500.230668
4XGBoost0.9291780.9298160.9291780.929155
5Naive Bayes0.4702550.4105750.4702550.429052
\n", 385 | "
" 386 | ], 387 | "text/plain": [ 388 | " Classifier Accuracy Precision Recall F1-Score\n", 389 | "0 Support Vector Machine 0.968839 0.964508 0.968839 0.966334\n", 390 | "1 Decision Tree 0.929178 0.929538 0.929178 0.929100\n", 391 | "2 Random Forest 0.929178 0.930400 0.929178 0.929301\n", 392 | "3 AdaBoost 0.297450 0.289378 0.297450 0.230668\n", 393 | "4 XGBoost 0.929178 0.929816 0.929178 0.929155\n", 394 | "5 Naive Bayes 0.470255 0.410575 0.470255 0.429052" 395 | ] 396 | }, 397 | "execution_count": 10, 398 | "metadata": {}, 399 | "output_type": "execute_result" 400 | } 401 | ], 402 | "source": [ 403 | "# Import necessary libraries\n", 404 | "from sklearn.svm import SVC\n", 405 | "from sklearn.tree import DecisionTreeClassifier\n", 406 | "from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n", 407 | "from xgboost import XGBClassifier # Ensure xgboost is installed\n", 408 | "from sklearn.naive_bayes import GaussianNB\n", 409 | "from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score\n", 410 | "from sklearn.model_selection import train_test_split\n", 411 | "import pandas as pd\n", 412 | "\n", 413 | "# Import CatBoost (optional, you can comment this if it's not installed)\n", 414 | "try:\n", 415 | " from catboost import CatBoostClassifier\n", 416 | "except ImportError:\n", 417 | " CatBoostClassifier = None\n", 418 | "\n", 419 | "# Extract features (E to I) and target (J column)\n", 420 | "X = data[['Header_and_Main_Declaration', 'Incomprehensible_Code', \n", 421 | " 'Comprehensible_Code_with_logical_errors', \n", 422 | " 'Comprehensible_code_with_syntax_errors', \n", 423 | " 'Correct_code_and_output']].copy() # Use .copy() to avoid SettingWithCopyWarning\n", 424 | "\n", 425 | "y = data['Final_Marks']\n", 426 | "\n", 427 | "# Handle any missing values\n", 428 | "X = X.fillna(0)\n", 429 | "\n", 430 | "# Split the data into training and testing sets\n", 431 | "X_train, X_test, y_train, y_test = train_test_split(X, y, 
test_size=0.3, random_state=42)\n", 432 | "\n", 433 | "# Define the classifiers (with CatBoost if available)\n", 434 | "classifiers = {\n", 435 | "    'Support Vector Machine': SVC(),\n", 436 | "    'Decision Tree': DecisionTreeClassifier(),\n", 437 | "    'Random Forest': RandomForestClassifier(),\n", 438 | "    'AdaBoost': AdaBoostClassifier(),\n", 439 | "    'XGBoost': XGBClassifier(eval_metric='mlogloss'),\n", 440 | "    'Naive Bayes': GaussianNB()\n", 441 | "}\n", 442 | "\n", 443 | "if CatBoostClassifier:\n", 444 | "    classifiers['CatBoost'] = CatBoostClassifier(silent=True)\n", 445 | "\n", 446 | "# Initialize a dataframe to store results\n", 447 | "results = pd.DataFrame(columns=['Classifier', 'Accuracy', 'Precision', 'Recall', 'F1-Score'])\n", 448 | "\n", 449 | "# Loop over classifiers, fit and predict, and store results\n", 450 | "for classifier_name, classifier in classifiers.items():\n", 451 | "    # Train the classifier\n", 452 | "    classifier.fit(X_train, y_train)\n", 453 | "    \n", 454 | "    # Predict on the test set\n", 455 | "    y_pred = classifier.predict(X_test)\n", 456 | "    \n", 457 | "    # Calculate performance metrics\n", 458 | "    accuracy = accuracy_score(y_test, y_pred)\n", 459 | "    precision = precision_score(y_test, y_pred, average='weighted', zero_division=0)\n", 460 | "    recall = recall_score(y_test, y_pred, average='weighted', zero_division=0)\n", 461 | "    f1 = f1_score(y_test, y_pred, average='weighted', zero_division=0)\n", 462 | "    \n", 463 | "    # Create a temporary DataFrame for this result\n", 464 | "    temp_result = pd.DataFrame({\n", 465 | "        'Classifier': [classifier_name],\n", 466 | "        'Accuracy': [accuracy],\n", 467 | "        'Precision': [precision],\n", 468 | "        'Recall': [recall],\n", 469 | "        'F1-Score': [f1]\n", 470 | "    })\n", 471 | "    \n", 472 | "    # Append the result to the results DataFrame using pd.concat\n", 473 | "    results = pd.concat([results, temp_result], ignore_index=True)\n", 474 | "\n", 475 | "# Display the results in a tabular form\n", 
476 | "results\n" 477 | ] 478 | }, 479 | { 480 | "cell_type": "code", 481 | "execution_count": 11, 482 | "id": "d454c021-2d21-4b6e-88a0-1a74f0411123", 483 | "metadata": {}, 484 | "outputs": [ 485 | { 486 | "name": "stdout", 487 | "output_type": "stream", 488 | "text": [ 489 | "Collecting catboost\n", 490 | " Downloading catboost-1.2.7-cp311-cp311-win_amd64.whl.metadata (1.2 kB)\n", 491 | "Collecting graphviz (from catboost)\n", 492 | " Downloading graphviz-0.20.3-py3-none-any.whl.metadata (12 kB)\n", 493 | "Requirement already satisfied: matplotlib in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (3.8.0)\n", 494 | "Requirement already satisfied: numpy<2.0,>=1.16.0 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (1.26.4)\n", 495 | "Requirement already satisfied: pandas>=0.24 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (2.1.4)\n", 496 | "Requirement already satisfied: scipy in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (1.11.4)\n", 497 | "Requirement already satisfied: plotly in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (5.9.0)\n", 498 | "Requirement already satisfied: six in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from catboost) (1.16.0)\n", 499 | "Collecting python-dateutil>=2.8.2 (from pandas>=0.24->catboost)\n", 500 | " Downloading python_dateutil-2.9.0.post0-py2.py3-none-any.whl.metadata (8.4 kB)\n", 501 | "Requirement already satisfied: pytz>=2020.1 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from pandas>=0.24->catboost) (2023.3.post1)\n", 502 | "Requirement already satisfied: tzdata>=2022.1 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from pandas>=0.24->catboost) (2023.3)\n", 503 | "Requirement already satisfied: contourpy>=1.0.1 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (1.2.0)\n", 504 | "Requirement already satisfied: cycler>=0.10 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from 
matplotlib->catboost) (0.11.0)\n", 505 | "Requirement already satisfied: fonttools>=4.22.0 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (4.25.0)\n", 506 | "Requirement already satisfied: kiwisolver>=1.0.1 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (1.4.4)\n", 507 | "Requirement already satisfied: packaging>=20.0 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (23.1)\n", 508 | "Requirement already satisfied: pillow>=6.2.0 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (10.2.0)\n", 509 | "Requirement already satisfied: pyparsing>=2.3.1 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from matplotlib->catboost) (3.0.9)\n", 510 | "Requirement already satisfied: tenacity>=6.2.0 in c:\\users\\mvenk\\anaconda3\\lib\\site-packages (from plotly->catboost) (8.2.2)\n", 511 | "Downloading catboost-1.2.7-cp311-cp311-win_amd64.whl (101.7 MB)\n", 512 | " ---------------------------------------- 0.0/101.7 MB ? 
eta -:--:--\n", 513 | " ---------------------------------------- 0.0/101.7 MB 640.0 kB/s eta 0:02:39\n", 514 | " ---------------------------------------- 0.1/101.7 MB 919.0 kB/s eta 0:01:51\n", 515 | " ---------------------------------------- 0.1/101.7 MB 1.2 MB/s eta 0:01:27\n", 516 | " ---------------------------------------- 0.1/101.7 MB 1.2 MB/s eta 0:01:27\n", 517 | " ---------------------------------------- 0.1/101.7 MB 1.2 MB/s eta 0:01:27\n", 518 | " ---------------------------------------- 0.1/101.7 MB 1.2 MB/s eta 0:01:27\n", 519 | " ---------------------------------------- 0.1/101.7 MB 359.9 kB/s eta 0:04:43\n", 520 | " ---------------------------------------- 0.2/101.7 MB 483.6 kB/s eta 0:03:31\n", 521 | " ---------------------------------------- 0.3/101.7 MB 704.5 kB/s eta 0:02:25\n", 522 | " ---------------------------------------- 0.5/101.7 MB 1.2 MB/s eta 0:01:28\n", 523 | " ---------------------------------------- 0.6/101.7 MB 1.3 MB/s eta 0:01:20\n", 524 | " ---------------------------------------- 0.8/101.7 MB 1.5 MB/s eta 0:01:10\n", 525 | " ---------------------------------------- 1.1/101.7 MB 1.8 MB/s eta 0:00:57\n", 526 | " ---------------------------------------- 1.3/101.7 MB 2.0 MB/s eta 0:00:52\n", 527 | " --------------------------------------- 1.5/101.7 MB 2.1 MB/s eta 0:00:48\n", 528 | " --------------------------------------- 1.7/101.7 MB 2.3 MB/s eta 0:00:43\n", 529 | " --------------------------------------- 2.0/101.7 MB 2.5 MB/s eta 0:00:40\n", 530 | " --------------------------------------- 2.2/101.7 MB 2.6 MB/s eta 0:00:38\n", 531 | " --------------------------------------- 2.5/101.7 MB 2.8 MB/s eta 0:00:36\n", 532 | " - -------------------------------------- 2.7/101.7 MB 2.9 MB/s eta 0:00:34\n", 533 | " - -------------------------------------- 2.9/101.7 MB 3.0 MB/s eta 0:00:34\n", 534 | " - -------------------------------------- 3.2/101.7 MB 3.1 MB/s eta 0:00:32\n", 535 | " - -------------------------------------- 3.2/101.7 MB 
3.1 MB/s eta 0:00:32\n", 536 | " - -------------------------------------- 3.5/101.7 MB 3.1 MB/s eta 0:00:32\n", 537 | " - -------------------------------------- 3.7/101.7 MB 3.2 MB/s eta 0:00:32\n", 538 | " - -------------------------------------- 4.0/101.7 MB 3.3 MB/s eta 0:00:30\n", 539 | " - -------------------------------------- 4.2/101.7 MB 3.4 MB/s eta 0:00:29\n", 540 | " - -------------------------------------- 4.5/101.7 MB 3.5 MB/s eta 0:00:28\n", 541 | " - -------------------------------------- 4.8/101.7 MB 3.6 MB/s eta 0:00:27\n", 542 | " -- ------------------------------------- 5.2/101.7 MB 3.8 MB/s eta 0:00:26\n", 543 | " -- ------------------------------------- 5.5/101.7 MB 3.8 MB/s eta 0:00:26\n", 544 | " -- ------------------------------------- 5.8/101.7 MB 3.9 MB/s eta 0:00:25\n", 545 | " -- ------------------------------------- 6.0/101.7 MB 4.0 MB/s eta 0:00:25\n", 546 | " -- ------------------------------------- 6.1/101.7 MB 3.9 MB/s eta 0:00:25\n", 547 | " -- ------------------------------------- 6.3/101.7 MB 3.9 MB/s eta 0:00:25\n", 548 | " -- ------------------------------------- 6.5/101.7 MB 3.9 MB/s eta 0:00:25\n", 549 | " -- ------------------------------------- 6.7/101.7 MB 3.9 MB/s eta 0:00:25\n", 550 | " -- ------------------------------------- 6.9/101.7 MB 3.9 MB/s eta 0:00:25\n", 551 | " -- ------------------------------------- 6.9/101.7 MB 3.9 MB/s eta 0:00:25\n", 552 | " -- ------------------------------------- 7.0/101.7 MB 3.8 MB/s eta 0:00:25\n", 553 | " -- ------------------------------------- 7.2/101.7 MB 3.9 MB/s eta 0:00:25\n", 554 | " -- ------------------------------------- 7.5/101.7 MB 3.9 MB/s eta 0:00:25\n", 555 | " --- ------------------------------------ 7.8/101.7 MB 3.9 MB/s eta 0:00:24\n", 556 | " --- ------------------------------------ 8.2/101.7 MB 4.0 MB/s eta 0:00:24\n", 557 | " --- ------------------------------------ 8.4/101.7 MB 4.1 MB/s eta 0:00:24\n", 558 | " --- ------------------------------------ 8.7/101.7 
MB 4.1 MB/s eta 0:00:23\n", 559 | " --- ------------------------------------ 9.0/101.7 MB 4.2 MB/s eta 0:00:23\n", 560 | " --- ------------------------------------ 9.4/101.7 MB 4.2 MB/s eta 0:00:22\n", 561 | " --- ------------------------------------ 9.5/101.7 MB 4.2 MB/s eta 0:00:22\n", 562 | " --- ------------------------------------ 9.6/101.7 MB 4.2 MB/s eta 0:00:23\n", 563 | " --- ------------------------------------ 10.0/101.7 MB 4.2 MB/s eta 0:00:22\n", 564 | " ---- ----------------------------------- 10.3/101.7 MB 4.4 MB/s eta 0:00:21\n", 565 | " ---- ----------------------------------- 10.5/101.7 MB 5.0 MB/s eta 0:00:19\n", 566 | " ---- ----------------------------------- 10.6/101.7 MB 4.9 MB/s eta 0:00:19\n", 567 | " ---- ----------------------------------- 10.9/101.7 MB 5.0 MB/s eta 0:00:19\n", 568 | " ---- ----------------------------------- 11.2/101.7 MB 5.1 MB/s eta 0:00:18\n", 569 | " ---- ----------------------------------- 11.4/101.7 MB 5.1 MB/s eta 0:00:18\n", 570 | " ---- ----------------------------------- 11.7/101.7 MB 5.1 MB/s eta 0:00:18\n", 571 | " ---- ----------------------------------- 11.9/101.7 MB 5.1 MB/s eta 0:00:18\n", 572 | " ---- ----------------------------------- 12.1/101.7 MB 5.1 MB/s eta 0:00:18\n", 573 | " ---- ----------------------------------- 12.4/101.7 MB 5.1 MB/s eta 0:00:18\n", 574 | " ---- ----------------------------------- 12.7/101.7 MB 5.1 MB/s eta 0:00:18\n", 575 | " ----- ---------------------------------- 13.0/101.7 MB 5.2 MB/s eta 0:00:18\n", 576 | " ----- ---------------------------------- 13.2/101.7 MB 5.1 MB/s eta 0:00:18\n", 577 | " ----- ---------------------------------- 13.5/101.7 MB 5.2 MB/s eta 0:00:17\n", 578 | " ----- ---------------------------------- 13.8/101.7 MB 5.3 MB/s eta 0:00:17\n", 579 | " ----- ---------------------------------- 13.8/101.7 MB 5.3 MB/s eta 0:00:17\n", 580 | " ----- ---------------------------------- 14.1/101.7 MB 5.2 MB/s eta 0:00:17\n", 581 | " ----- 
---------------------------------- 14.3/101.7 MB 5.2 MB/s eta 0:00:17\n", 582 | " ----- ---------------------------------- 14.4/101.7 MB 5.2 MB/s eta 0:00:17\n", 583 | " ----- ---------------------------------- 14.7/101.7 MB 5.1 MB/s eta 0:00:18\n", 584 | " ----- ---------------------------------- 15.0/101.7 MB 5.1 MB/s eta 0:00:17\n", 585 | " ----- ---------------------------------- 15.2/101.7 MB 5.0 MB/s eta 0:00:18\n", 586 | " ------ --------------------------------- 15.4/101.7 MB 5.0 MB/s eta 0:00:18\n", 587 | " ------ --------------------------------- 15.6/101.7 MB 5.0 MB/s eta 0:00:18\n", 588 | " ------ --------------------------------- 15.7/101.7 MB 5.0 MB/s eta 0:00:18\n", 589 | " ------ --------------------------------- 15.9/101.7 MB 4.8 MB/s eta 0:00:18\n", 590 | " ------ --------------------------------- 16.0/101.7 MB 4.8 MB/s eta 0:00:18\n", 591 | " ------ --------------------------------- 16.3/101.7 MB 4.9 MB/s eta 0:00:18\n", 592 | " ------ --------------------------------- 16.4/101.7 MB 4.9 MB/s eta 0:00:18\n", 593 | " ------ --------------------------------- 16.5/101.7 MB 4.8 MB/s eta 0:00:18\n", 594 | " ------ --------------------------------- 16.8/101.7 MB 4.8 MB/s eta 0:00:18\n", 595 | " ------ --------------------------------- 17.0/101.7 MB 4.9 MB/s eta 0:00:18\n", 596 | " ------ --------------------------------- 17.2/101.7 MB 5.0 MB/s eta 0:00:17\n", 597 | " ------ --------------------------------- 17.4/101.7 MB 5.0 MB/s eta 0:00:18\n", 598 | " ------ --------------------------------- 17.6/101.7 MB 4.9 MB/s eta 0:00:18\n", 599 | " ------ --------------------------------- 17.7/101.7 MB 5.0 MB/s eta 0:00:17\n", 600 | " ------- -------------------------------- 17.9/101.7 MB 4.9 MB/s eta 0:00:18\n", 601 | " ------- -------------------------------- 18.0/101.7 MB 4.8 MB/s eta 0:00:18\n", 602 | " ------- -------------------------------- 18.4/101.7 MB 4.8 MB/s eta 0:00:18\n", 603 | " ------- -------------------------------- 18.6/101.7 MB 4.7 MB/s eta 
0:00:18\n", 604 | " ------- -------------------------------- 18.8/101.7 MB 4.7 MB/s eta 0:00:18\n", 605 | " ------- -------------------------------- 19.1/101.7 MB 4.7 MB/s eta 0:00:18\n", 606 | " ------- -------------------------------- 19.4/101.7 MB 4.7 MB/s eta 0:00:18\n", 607 | " ------- -------------------------------- 19.6/101.7 MB 4.7 MB/s eta 0:00:18\n", 608 | " ------- -------------------------------- 19.8/101.7 MB 4.8 MB/s eta 0:00:18\n", 609 | " ------- -------------------------------- 20.2/101.7 MB 4.8 MB/s eta 0:00:18\n", 610 | " -------- ------------------------------- 20.5/101.7 MB 4.8 MB/s eta 0:00:18\n", 611 | " -------- ------------------------------- 20.8/101.7 MB 4.9 MB/s eta 0:00:17\n", 612 | " -------- ------------------------------- 21.0/101.7 MB 4.9 MB/s eta 0:00:17\n", 613 | " -------- ------------------------------- 21.1/101.7 MB 4.8 MB/s eta 0:00:17\n", 614 | " -------- ------------------------------- 21.4/101.7 MB 4.8 MB/s eta 0:00:17\n", 615 | " -------- ------------------------------- 21.5/101.7 MB 4.7 MB/s eta 0:00:18\n", 616 | " -------- ------------------------------- 21.5/101.7 MB 4.6 MB/s eta 0:00:18\n", 617 | " -------- ------------------------------- 21.8/101.7 MB 4.6 MB/s eta 0:00:18\n", 618 | " -------- ------------------------------- 21.9/101.7 MB 4.6 MB/s eta 0:00:18\n", 619 | " -------- ------------------------------- 22.1/101.7 MB 4.5 MB/s eta 0:00:18\n", 620 | " -------- ------------------------------- 22.4/101.7 MB 4.6 MB/s eta 0:00:18\n", 621 | " -------- ------------------------------- 22.5/101.7 MB 4.5 MB/s eta 0:00:18\n", 622 | " -------- ------------------------------- 22.7/101.7 MB 4.5 MB/s eta 0:00:18\n", 623 | " --------- ------------------------------ 23.1/101.7 MB 4.5 MB/s eta 0:00:18\n", 624 | " --------- ------------------------------ 23.4/101.7 MB 4.6 MB/s eta 0:00:18\n", 625 | " --------- ------------------------------ 23.7/101.7 MB 4.6 MB/s eta 0:00:18\n", 626 | " --------- ------------------------------ 
24.0/101.7 MB 4.6 MB/s eta 0:00:17\n", 627 | " --------- ------------------------------ 24.3/101.7 MB 4.6 MB/s eta 0:00:17\n", 628 | " --------- ------------------------------ 24.5/101.7 MB 4.7 MB/s eta 0:00:17\n", 629 | " --------- ------------------------------ 24.8/101.7 MB 4.7 MB/s eta 0:00:17\n", 630 | " --------- ------------------------------ 25.0/101.7 MB 4.7 MB/s eta 0:00:17\n", 631 | " --------- ------------------------------ 25.3/101.7 MB 4.7 MB/s eta 0:00:17\n", 632 | " ---------- ----------------------------- 25.6/101.7 MB 4.7 MB/s eta 0:00:17\n", 633 | " ---------- ----------------------------- 25.9/101.7 MB 4.8 MB/s eta 0:00:16\n", 634 | " ---------- ----------------------------- 26.1/101.7 MB 4.9 MB/s eta 0:00:16\n", 635 | " ---------- ----------------------------- 26.2/101.7 MB 4.9 MB/s eta 0:00:16\n", 636 | " ---------- ----------------------------- 26.6/101.7 MB 4.9 MB/s eta 0:00:16\n", 637 | " ---------- ----------------------------- 26.9/101.7 MB 5.0 MB/s eta 0:00:15\n", 638 | " ---------- ----------------------------- 27.0/101.7 MB 5.1 MB/s eta 0:00:15\n", 639 | " ---------- ----------------------------- 27.1/101.7 MB 5.0 MB/s eta 0:00:16\n", 640 | " ---------- ----------------------------- 27.4/101.7 MB 4.9 MB/s eta 0:00:16\n", 641 | " ---------- ----------------------------- 27.7/101.7 MB 5.0 MB/s eta 0:00:15\n", 642 | " ---------- ----------------------------- 27.9/101.7 MB 5.1 MB/s eta 0:00:15\n", 643 | " ----------- ---------------------------- 28.2/101.7 MB 5.2 MB/s eta 0:00:15\n", 644 | " ----------- ---------------------------- 28.3/101.7 MB 5.1 MB/s eta 0:00:15\n", 645 | " ----------- ---------------------------- 28.6/101.7 MB 5.2 MB/s eta 0:00:15\n", 646 | " ----------- ---------------------------- 29.0/101.7 MB 5.2 MB/s eta 0:00:15\n", 647 | " ----------- ---------------------------- 29.2/101.7 MB 5.2 MB/s eta 0:00:15\n", 648 | " ----------- ---------------------------- 29.5/101.7 MB 5.2 MB/s eta 0:00:15\n", 649 | " ----------- 
---------------------------- 29.9/101.7 MB 5.2 MB/s eta 0:00:14\n", 650 | " ----------- ---------------------------- 30.2/101.7 MB 5.2 MB/s eta 0:00:14\n", 651 | " ----------- ---------------------------- 30.4/101.7 MB 5.2 MB/s eta 0:00:14\n", 652 | " ------------ --------------------------- 30.7/101.7 MB 5.2 MB/s eta 0:00:14\n", 653 | " ------------ --------------------------- 31.0/101.7 MB 5.2 MB/s eta 0:00:14\n", 654 | " ------------ --------------------------- 31.3/101.7 MB 5.3 MB/s eta 0:00:14\n", 655 | " ------------ --------------------------- 31.5/101.7 MB 5.3 MB/s eta 0:00:14\n", 656 | " ------------ --------------------------- 31.7/101.7 MB 5.4 MB/s eta 0:00:14\n", 657 | " ------------ --------------------------- 31.9/101.7 MB 5.4 MB/s eta 0:00:13\n", 658 | " ------------ --------------------------- 32.2/101.7 MB 5.6 MB/s eta 0:00:13\n", 659 | " ------------ --------------------------- 32.5/101.7 MB 5.5 MB/s eta 0:00:13\n", 660 | " ------------ --------------------------- 32.7/101.7 MB 5.5 MB/s eta 0:00:13\n", 661 | " ------------ --------------------------- 33.0/101.7 MB 5.6 MB/s eta 0:00:13\n", 662 | " ------------ --------------------------- 33.0/101.7 MB 5.5 MB/s eta 0:00:13\n", 663 | " ------------- -------------------------- 33.1/101.7 MB 5.4 MB/s eta 0:00:13\n", 664 | " ------------- -------------------------- 33.3/101.7 MB 5.4 MB/s eta 0:00:13\n", 665 | " ------------- -------------------------- 33.5/101.7 MB 5.3 MB/s eta 0:00:13\n", 666 | " ------------- -------------------------- 33.6/101.7 MB 5.3 MB/s eta 0:00:13\n", 667 | " ------------- -------------------------- 33.7/101.7 MB 5.2 MB/s eta 0:00:14\n", 668 | " ------------- -------------------------- 33.8/101.7 MB 5.0 MB/s eta 0:00:14\n", 669 | " ------------- -------------------------- 34.0/101.7 MB 5.0 MB/s eta 0:00:14\n", 670 | " ------------- -------------------------- 34.2/101.7 MB 5.0 MB/s eta 0:00:14\n", 671 | " ------------- -------------------------- 34.3/101.7 MB 4.9 MB/s eta 
0:00:14\n", 672 | " ------------- -------------------------- 34.6/101.7 MB 4.9 MB/s eta 0:00:14\n", 673 | " ------------- -------------------------- 34.7/101.7 MB 4.9 MB/s eta 0:00:14\n", 674 | " ------------- -------------------------- 34.9/101.7 MB 4.8 MB/s eta 0:00:14\n", 675 | " ------------- -------------------------- 35.2/101.7 MB 4.9 MB/s eta 0:00:14\n", 676 | " ------------- -------------------------- 35.3/101.7 MB 4.7 MB/s eta 0:00:15\n", 677 | " ------------- -------------------------- 35.4/101.7 MB 4.7 MB/s eta 0:00:15\n", 678 | " ------------- -------------------------- 35.6/101.7 MB 4.7 MB/s eta 0:00:15\n", 679 | " ------------- -------------------------- 35.6/101.7 MB 4.6 MB/s eta 0:00:15\n", 680 | " -------------- ------------------------- 35.9/101.7 MB 4.6 MB/s eta 0:00:15\n", 681 | " -------------- ------------------------- 36.1/101.7 MB 4.5 MB/s eta 0:00:15\n", 682 | " -------------- ------------------------- 36.4/101.7 MB 4.6 MB/s eta 0:00:15\n", 683 | " -------------- ------------------------- 36.6/101.7 MB 4.6 MB/s eta 0:00:15\n", 684 | " -------------- ------------------------- 36.9/101.7 MB 4.6 MB/s eta 0:00:15\n", 685 | " -------------- ------------------------- 37.2/101.7 MB 4.7 MB/s eta 0:00:14\n", 686 | " -------------- ------------------------- 37.3/101.7 MB 4.7 MB/s eta 0:00:14\n", 687 | " -------------- ------------------------- 37.4/101.7 MB 4.6 MB/s eta 0:00:15\n", 688 | " -------------- ------------------------- 37.7/101.7 MB 4.6 MB/s eta 0:00:14\n", 689 | " -------------- ------------------------- 38.1/101.7 MB 4.6 MB/s eta 0:00:14\n", 690 | " --------------- ------------------------ 38.3/101.7 MB 4.6 MB/s eta 0:00:14\n", 691 | " --------------- ------------------------ 38.4/101.7 MB 4.5 MB/s eta 0:00:14\n", 692 | " --------------- ------------------------ 38.6/101.7 MB 4.6 MB/s eta 0:00:14\n", 693 | " --------------- ------------------------ 38.8/101.7 MB 4.6 MB/s eta 0:00:14\n", 694 | " --------------- ------------------------ 
38.8/101.7 MB 4.6 MB/s eta 0:00:14\n", 695 | " --------------- ------------------------ 38.8/101.7 MB 4.6 MB/s eta 0:00:14\n", 696 | " --------------- ------------------------ 39.0/101.7 MB 4.3 MB/s eta 0:00:15\n", 697 | " --------------- ------------------------ 39.3/101.7 MB 4.3 MB/s eta 0:00:15\n", 698 | " --------------- ------------------------ 39.3/101.7 MB 4.3 MB/s eta 0:00:15\n", 699 | " --------------- ------------------------ 39.5/101.7 MB 4.3 MB/s eta 0:00:15\n", 700 | " --------------- ------------------------ 39.7/101.7 MB 4.2 MB/s eta 0:00:15\n", 701 | " --------------- ------------------------ 39.9/101.7 MB 4.2 MB/s eta 0:00:15\n", 702 | " --------------- ------------------------ 40.2/101.7 MB 4.2 MB/s eta 0:00:15\n", 703 | " --------------- ------------------------ 40.6/101.7 MB 4.2 MB/s eta 0:00:15\n", 704 | " ---------------- ----------------------- 40.8/101.7 MB 4.2 MB/s eta 0:00:15\n", 705 | " ---------------- ----------------------- 41.1/101.7 MB 4.2 MB/s eta 0:00:15\n", 706 | " ---------------- ----------------------- 41.4/101.7 MB 4.2 MB/s eta 0:00:15\n", 707 | " ---------------- ----------------------- 41.7/101.7 MB 4.2 MB/s eta 0:00:15\n", 708 | " ---------------- ----------------------- 41.8/101.7 MB 4.2 MB/s eta 0:00:15\n", 709 | " ---------------- ----------------------- 42.0/101.7 MB 4.1 MB/s eta 0:00:15\n", 710 | " ---------------- ----------------------- 42.0/101.7 MB 4.1 MB/s eta 0:00:15\n", 711 | " ---------------- ----------------------- 42.2/101.7 MB 4.1 MB/s eta 0:00:15\n", 712 | " ---------------- ----------------------- 42.4/101.7 MB 4.0 MB/s eta 0:00:15\n", 713 | " ---------------- ----------------------- 42.6/101.7 MB 4.0 MB/s eta 0:00:15\n", 714 | " ---------------- ----------------------- 42.7/101.7 MB 4.0 MB/s eta 0:00:15\n", 715 | " ---------------- ----------------------- 42.9/101.7 MB 4.0 MB/s eta 0:00:15\n", 716 | " ---------------- ----------------------- 42.9/101.7 MB 4.0 MB/s eta 0:00:15\n", 717 | " ---------------- 
----------------------- 43.0/101.7 MB 3.9 MB/s eta 0:00:16\n", 718 | " ---------------- ----------------------- 43.2/101.7 MB 3.9 MB/s eta 0:00:16\n", 719 | " ----------------- ---------------------- 43.3/101.7 MB 3.9 MB/s eta 0:00:15\n", 720 | " ----------------- ---------------------- 43.5/101.7 MB 3.9 MB/s eta 0:00:16\n", 721 | " ----------------- ---------------------- 43.5/101.7 MB 3.9 MB/s eta 0:00:15\n", 722 | " ----------------- ---------------------- 43.6/101.7 MB 3.8 MB/s eta 0:00:16\n", 723 | " ----------------- ---------------------- 43.7/101.7 MB 3.8 MB/s eta 0:00:16\n", 724 | " ----------------- ---------------------- 43.8/101.7 MB 3.8 MB/s eta 0:00:16\n", 725 | " ----------------- ---------------------- 44.0/101.7 MB 3.8 MB/s eta 0:00:16\n", 726 | " ----------------- ---------------------- 44.1/101.7 MB 3.8 MB/s eta 0:00:16\n", 727 | " ----------------- ---------------------- 44.4/101.7 MB 3.8 MB/s eta 0:00:15\n", 728 | " ----------------- ---------------------- 44.6/101.7 MB 3.9 MB/s eta 0:00:15\n", 729 | " ----------------- ---------------------- 44.7/101.7 MB 3.9 MB/s eta 0:00:15\n", 730 | " ----------------- ---------------------- 45.1/101.7 MB 3.9 MB/s eta 0:00:15\n", 731 | " ----------------- ---------------------- 45.3/101.7 MB 3.9 MB/s eta 0:00:15\n", 732 | " ----------------- ---------------------- 45.5/101.7 MB 3.9 MB/s eta 0:00:15\n", 733 | " ----------------- ---------------------- 45.7/101.7 MB 3.9 MB/s eta 0:00:15\n", 734 | " ------------------ --------------------- 45.9/101.7 MB 4.0 MB/s eta 0:00:14\n", 735 | " ------------------ --------------------- 46.2/101.7 MB 4.0 MB/s eta 0:00:14\n", 736 | " ------------------ --------------------- 46.2/101.7 MB 4.0 MB/s eta 0:00:14\n", 737 | " ------------------ --------------------- 46.2/101.7 MB 3.9 MB/s eta 0:00:15\n", 738 | " ------------------ --------------------- 46.4/101.7 MB 3.9 MB/s eta 0:00:15\n", 739 | " ------------------ --------------------- 46.7/101.7 MB 3.9 MB/s eta 0:00:15\n", 
740 | " ------------------ --------------------- 46.9/101.7 MB 3.9 MB/s eta 0:00:15\n", 741 | " ------------------ --------------------- 47.0/101.7 MB 3.8 MB/s eta 0:00:15\n", 742 | " ------------------ --------------------- 47.2/101.7 MB 3.8 MB/s eta 0:00:15\n", 743 | " ------------------ --------------------- 47.4/101.7 MB 3.8 MB/s eta 0:00:15\n", 744 | " ------------------ --------------------- 47.6/101.7 MB 3.8 MB/s eta 0:00:15\n", 745 | " ------------------ --------------------- 47.8/101.7 MB 3.8 MB/s eta 0:00:15\n", 746 | " ------------------ --------------------- 47.9/101.7 MB 3.8 MB/s eta 0:00:15\n", 747 | " ------------------ --------------------- 48.2/101.7 MB 3.7 MB/s eta 0:00:15\n", 748 | " ------------------- -------------------- 48.4/101.7 MB 3.7 MB/s eta 0:00:15\n", 749 | " ------------------- -------------------- 48.7/101.7 MB 3.8 MB/s eta 0:00:15\n", 750 | " ------------------- -------------------- 48.9/101.7 MB 3.8 MB/s eta 0:00:15\n", 751 | " ------------------- -------------------- 49.1/101.7 MB 4.0 MB/s eta 0:00:14\n", 752 | " ------------------- -------------------- 49.3/101.7 MB 3.9 MB/s eta 0:00:14\n", 753 | " ------------------- -------------------- 49.6/101.7 MB 4.0 MB/s eta 0:00:14\n", 754 | " ------------------- -------------------- 49.8/101.7 MB 4.0 MB/s eta 0:00:13\n", 755 | " ------------------- -------------------- 50.1/101.7 MB 4.0 MB/s eta 0:00:13\n", 756 | " ------------------- -------------------- 50.3/101.7 MB 4.0 MB/s eta 0:00:13\n", 757 | " ------------------- -------------------- 50.6/101.7 MB 4.0 MB/s eta 0:00:13\n", 758 | " ------------------- -------------------- 50.8/101.7 MB 4.0 MB/s eta 0:00:13\n", 759 | " -------------------- ------------------- 50.9/101.7 MB 3.9 MB/s eta 0:00:13\n", 760 | " -------------------- ------------------- 51.1/101.7 MB 3.9 MB/s eta 0:00:14\n", 761 | " -------------------- ------------------- 51.2/101.7 MB 3.9 MB/s eta 0:00:14\n", 762 | " -------------------- ------------------- 51.3/101.7 MB 
3.9 MB/s eta 0:00:14\n", 763 | " -------------------- ------------------- 51.3/101.7 MB 3.9 MB/s eta 0:00:14\n", 764 | " -------------------- ------------------- 51.4/101.7 MB 3.8 MB/s eta 0:00:14\n", 765 | " -------------------- ------------------- 51.4/101.7 MB 3.8 MB/s eta 0:00:14\n", 766 | " -------------------- ------------------- 51.4/101.7 MB 3.8 MB/s eta 0:00:14\n", 767 | " -------------------- ------------------- 51.7/101.7 MB 3.6 MB/s eta 0:00:14\n", 768 | " -------------------- ------------------- 52.0/101.7 MB 3.6 MB/s eta 0:00:14\n", 769 | " -------------------- ------------------- 52.2/101.7 MB 3.6 MB/s eta 0:00:14\n", 770 | " -------------------- ------------------- 52.4/101.7 MB 3.7 MB/s eta 0:00:14\n", 771 | " -------------------- ------------------- 52.6/101.7 MB 3.7 MB/s eta 0:00:14\n", 772 | " -------------------- ------------------- 52.9/101.7 MB 3.7 MB/s eta 0:00:14\n", 773 | " -------------------- ------------------- 53.2/101.7 MB 3.9 MB/s eta 0:00:13\n", 774 | " -------------------- ------------------- 53.3/101.7 MB 3.9 MB/s eta 0:00:13\n", 775 | " --------------------- ------------------ 53.6/101.7 MB 3.9 MB/s eta 0:00:13\n", 776 | " --------------------- ------------------ 53.9/101.7 MB 4.1 MB/s eta 0:00:12\n", 777 | " --------------------- ------------------ 54.1/101.7 MB 4.2 MB/s eta 0:00:12\n", 778 | " --------------------- ------------------ 54.2/101.7 MB 4.1 MB/s eta 0:00:12\n", 779 | " --------------------- ------------------ 54.5/101.7 MB 4.2 MB/s eta 0:00:12\n", 780 | " --------------------- ------------------ 54.5/101.7 MB 4.2 MB/s eta 0:00:12\n", 781 | " --------------------- ------------------ 54.6/101.7 MB 4.1 MB/s eta 0:00:12\n", 782 | " --------------------- ------------------ 54.8/101.7 MB 4.0 MB/s eta 0:00:12\n", 783 | " --------------------- ------------------ 54.9/101.7 MB 4.0 MB/s eta 0:00:12\n", 784 | " --------------------- ------------------ 55.1/101.7 MB 4.0 MB/s eta 0:00:12\n", 785 | " --------------------- 
------------------ 55.3/101.7 MB 4.0 MB/s eta 0:00:12\n", 786 | " --------------------- ------------------ 55.6/101.7 MB 4.0 MB/s eta 0:00:12\n", 787 | " ---------------------- ----------------- 56.0/101.7 MB 4.1 MB/s eta 0:00:12\n", 788 | " ---------------------- ----------------- 56.2/101.7 MB 4.1 MB/s eta 0:00:12\n", 789 | " ---------------------- ----------------- 56.5/101.7 MB 4.2 MB/s eta 0:00:11\n", 790 | " ---------------------- ----------------- 56.8/101.7 MB 4.3 MB/s eta 0:00:11\n", 791 | " ---------------------- ----------------- 56.9/101.7 MB 4.2 MB/s eta 0:00:11\n", 792 | " ---------------------- ----------------- 57.2/101.7 MB 4.3 MB/s eta 0:00:11\n", 793 | " ---------------------- ----------------- 57.4/101.7 MB 4.3 MB/s eta 0:00:11\n", 794 | " ---------------------- ----------------- 57.6/101.7 MB 4.3 MB/s eta 0:00:11\n", 795 | " ---------------------- ----------------- 57.9/101.7 MB 4.3 MB/s eta 0:00:11\n", 796 | " ---------------------- ----------------- 58.1/101.7 MB 4.4 MB/s eta 0:00:10\n", 797 | " ---------------------- ----------------- 58.4/101.7 MB 4.4 MB/s eta 0:00:10\n", 798 | " ----------------------- ---------------- 58.7/101.7 MB 4.4 MB/s eta 0:00:10\n", 799 | " ----------------------- ---------------- 59.0/101.7 MB 4.5 MB/s eta 0:00:10\n", 800 | " ----------------------- ---------------- 59.2/101.7 MB 4.4 MB/s eta 0:00:10\n", 801 | " ----------------------- ---------------- 59.4/101.7 MB 4.4 MB/s eta 0:00:10\n", 802 | " ----------------------- ---------------- 59.7/101.7 MB 4.5 MB/s eta 0:00:10\n", 803 | " ----------------------- ---------------- 60.0/101.7 MB 4.5 MB/s eta 0:00:10\n", 804 | " ----------------------- ---------------- 60.3/101.7 MB 4.5 MB/s eta 0:00:10\n", 805 | " ----------------------- ---------------- 60.5/101.7 MB 4.5 MB/s eta 0:00:10\n", 806 | " ----------------------- ---------------- 60.7/101.7 MB 4.5 MB/s eta 0:00:10\n", 807 | " ----------------------- ---------------- 61.0/101.7 MB 4.5 MB/s eta 0:00:10\n", 808 | 
" ------------------------ --------------- 61.3/101.7 MB 4.6 MB/s eta 0:00:09\n", 809 | " ------------------------ --------------- 61.5/101.7 MB 4.6 MB/s eta 0:00:09\n", 810 | " ------------------------ --------------- 61.6/101.7 MB 4.7 MB/s eta 0:00:09\n", 811 | " ------------------------ --------------- 61.8/101.7 MB 5.0 MB/s eta 0:00:09\n", 812 | " ------------------------ --------------- 62.0/101.7 MB 4.9 MB/s eta 0:00:09\n", 813 | " ------------------------ --------------- 62.2/101.7 MB 4.9 MB/s eta 0:00:09\n", 814 | " ------------------------ --------------- 62.4/101.7 MB 4.9 MB/s eta 0:00:09\n", 815 | " ------------------------ --------------- 62.5/101.7 MB 4.9 MB/s eta 0:00:09\n", 816 | " ------------------------ --------------- 62.7/101.7 MB 4.8 MB/s eta 0:00:09\n", 817 | " ------------------------ --------------- 62.8/101.7 MB 4.7 MB/s eta 0:00:09\n", 818 | " ------------------------ --------------- 63.0/101.7 MB 4.7 MB/s eta 0:00:09\n", 819 | " ------------------------ --------------- 63.3/101.7 MB 4.7 MB/s eta 0:00:09\n", 820 | " ------------------------ --------------- 63.6/101.7 MB 4.7 MB/s eta 0:00:09\n", 821 | " ------------------------- -------------- 63.9/101.7 MB 4.8 MB/s eta 0:00:08\n", 822 | " ------------------------- -------------- 64.1/101.7 MB 4.8 MB/s eta 0:00:08\n", 823 | " ------------------------- -------------- 64.3/101.7 MB 4.8 MB/s eta 0:00:08\n", 824 | " ------------------------- -------------- 64.5/101.7 MB 4.8 MB/s eta 0:00:08\n", 825 | " ------------------------- -------------- 64.7/101.7 MB 4.7 MB/s eta 0:00:08\n", 826 | " ------------------------- -------------- 64.9/101.7 MB 4.9 MB/s eta 0:00:08\n", 827 | " ------------------------- -------------- 65.1/101.7 MB 5.0 MB/s eta 0:00:08\n", 828 | " ------------------------- -------------- 65.1/101.7 MB 5.0 MB/s eta 0:00:08\n", 829 | " ------------------------- -------------- 65.1/101.7 MB 5.0 MB/s eta 0:00:08\n", 830 | " ------------------------- -------------- 65.1/101.7 MB 5.0 
MB/s eta 0:00:08\n", 831 | " ------------------------- -------------- 65.2/101.7 MB 4.6 MB/s eta 0:00:08\n", 832 | " ------------------------- -------------- 65.5/101.7 MB 4.7 MB/s eta 0:00:08\n", 833 | " ------------------------- -------------- 65.6/101.7 MB 4.6 MB/s eta 0:00:08\n", 834 | " ------------------------- -------------- 65.8/101.7 MB 4.5 MB/s eta 0:00:08\n", 835 | " ------------------------- -------------- 66.1/101.7 MB 4.5 MB/s eta 0:00:08\n", 836 | " -------------------------- ------------- 66.4/101.7 MB 4.5 MB/s eta 0:00:08\n", 837 | " -------------------------- ------------- 66.7/101.7 MB 4.5 MB/s eta 0:00:08\n", 838 | " -------------------------- ------------- 67.0/101.7 MB 4.5 MB/s eta 0:00:08\n", 839 | " -------------------------- ------------- 67.3/101.7 MB 4.6 MB/s eta 0:00:08\n", 840 | " -------------------------- ------------- 67.6/101.7 MB 4.6 MB/s eta 0:00:08\n", 841 | " -------------------------- ------------- 67.9/101.7 MB 4.7 MB/s eta 0:00:08\n", 842 | " -------------------------- ------------- 68.0/101.7 MB 4.7 MB/s eta 0:00:08\n", 843 | " -------------------------- ------------- 68.2/101.7 MB 4.6 MB/s eta 0:00:08\n", 844 | " -------------------------- ------------- 68.3/101.7 MB 4.5 MB/s eta 0:00:08\n", 845 | " -------------------------- ------------- 68.3/101.7 MB 4.5 MB/s eta 0:00:08\n", 846 | " -------------------------- ------------- 68.3/101.7 MB 4.5 MB/s eta 0:00:08\n", 847 | " -------------------------- ------------- 68.3/101.7 MB 4.5 MB/s eta 0:00:08\n", 848 | " -------------------------- ------------- 68.4/101.7 MB 4.2 MB/s eta 0:00:08\n", 849 | " -------------------------- ------------- 68.5/101.7 MB 4.2 MB/s eta 0:00:08\n", 850 | " -------------------------- ------------- 68.6/101.7 MB 4.1 MB/s eta 0:00:08\n", 851 | " --------------------------- ------------ 68.9/101.7 MB 4.1 MB/s eta 0:00:08\n", 852 | " --------------------------- ------------ 69.0/101.7 MB 4.1 MB/s eta 0:00:08\n", 853 | " --------------------------- 
------------ 69.2/101.7 MB 4.0 MB/s eta 0:00:09\n", 854 | " --------------------------- ------------ 69.5/101.7 MB 4.1 MB/s eta 0:00:08\n", 855 | " --------------------------- ------------ 69.8/101.7 MB 4.1 MB/s eta 0:00:08\n", 856 | " --------------------------- ------------ 69.9/101.7 MB 4.1 MB/s eta 0:00:08\n", 857 | " --------------------------- ------------ 70.1/101.7 MB 4.0 MB/s eta 0:00:08\n", 858 | " --------------------------- ------------ 70.2/101.7 MB 4.0 MB/s eta 0:00:08\n", 859 | " --------------------------- ------------ 70.4/101.7 MB 3.9 MB/s eta 0:00:08\n", 860 | " --------------------------- ------------ 70.6/101.7 MB 3.9 MB/s eta 0:00:08\n", 861 | " --------------------------- ------------ 70.8/101.7 MB 3.9 MB/s eta 0:00:08\n", 862 | " --------------------------- ------------ 71.0/101.7 MB 3.9 MB/s eta 0:00:08\n", 863 | " --------------------------- ------------ 71.1/101.7 MB 3.9 MB/s eta 0:00:08\n", 864 | " ---------------------------- ----------- 71.3/101.7 MB 3.9 MB/s eta 0:00:08\n", 865 | " ---------------------------- ----------- 71.5/101.7 MB 3.8 MB/s eta 0:00:08\n", 866 | " ---------------------------- ----------- 71.7/101.7 MB 3.9 MB/s eta 0:00:08\n", 867 | " ---------------------------- ----------- 71.9/101.7 MB 3.9 MB/s eta 0:00:08\n", 868 | " ---------------------------- ----------- 72.1/101.7 MB 3.9 MB/s eta 0:00:08\n", 869 | " ---------------------------- ----------- 72.3/101.7 MB 3.9 MB/s eta 0:00:08\n", 870 | " ---------------------------- ----------- 72.5/101.7 MB 3.9 MB/s eta 0:00:08\n", 871 | " ---------------------------- ----------- 72.7/101.7 MB 3.9 MB/s eta 0:00:08\n", 872 | " ---------------------------- ----------- 73.0/101.7 MB 4.0 MB/s eta 0:00:08\n", 873 | " ---------------------------- ----------- 73.0/101.7 MB 4.0 MB/s eta 0:00:08\n", 874 | " ---------------------------- ----------- 73.1/101.7 MB 3.9 MB/s eta 0:00:08\n", 875 | " ---------------------------- ----------- 73.3/101.7 MB 3.9 MB/s eta 0:00:08\n", 876 | " 
---------------------------- ----------- 73.4/101.7 MB 3.9 MB/s eta 0:00:08\n", 877 | " ---------------------------- ----------- 73.6/101.7 MB 3.8 MB/s eta 0:00:08\n", 878 | " ----------------------------- ---------- 73.9/101.7 MB 3.8 MB/s eta 0:00:08\n", 879 | " ----------------------------- ---------- 74.0/101.7 MB 3.8 MB/s eta 0:00:08\n", 880 | " ----------------------------- ---------- 74.0/101.7 MB 3.8 MB/s eta 0:00:08\n", 881 | " ----------------------------- ---------- 74.2/101.7 MB 3.7 MB/s eta 0:00:08\n", 882 | " ----------------------------- ---------- 74.3/101.7 MB 3.7 MB/s eta 0:00:08\n", 883 | " ----------------------------- ---------- 74.3/101.7 MB 3.7 MB/s eta 0:00:08\n", 884 | " ----------------------------- ---------- 74.5/101.7 MB 3.6 MB/s eta 0:00:08\n", 885 | " ----------------------------- ---------- 74.8/101.7 MB 3.6 MB/s eta 0:00:08\n", 886 | " ----------------------------- ---------- 75.0/101.7 MB 3.6 MB/s eta 0:00:08\n", 887 | " ----------------------------- ---------- 75.3/101.7 MB 3.6 MB/s eta 0:00:08\n", 888 | " ----------------------------- ---------- 75.5/101.7 MB 3.9 MB/s eta 0:00:07\n", 889 | " ----------------------------- ---------- 75.9/101.7 MB 4.0 MB/s eta 0:00:07\n", 890 | " ----------------------------- ---------- 76.2/101.7 MB 4.0 MB/s eta 0:00:07\n", 891 | " ------------------------------ --------- 76.4/101.7 MB 4.0 MB/s eta 0:00:07\n", 892 | " ------------------------------ --------- 76.5/101.7 MB 3.9 MB/s eta 0:00:07\n", 893 | " ------------------------------ --------- 76.6/101.7 MB 3.9 MB/s eta 0:00:07\n", 894 | " ------------------------------ --------- 77.0/101.7 MB 3.9 MB/s eta 0:00:07\n", 895 | " ------------------------------ --------- 77.3/101.7 MB 3.9 MB/s eta 0:00:07\n", 896 | " ------------------------------ --------- 77.5/101.7 MB 3.9 MB/s eta 0:00:07\n", 897 | " ------------------------------ --------- 77.6/101.7 MB 3.8 MB/s eta 0:00:07\n", 898 | " ------------------------------ --------- 77.8/101.7 MB 3.8 MB/s 
eta 0:00:07\n", 899 | " ------------------------------ --------- 78.0/101.7 MB 3.7 MB/s eta 0:00:07\n", 900 | " ------------------------------ --------- 78.0/101.7 MB 3.7 MB/s eta 0:00:07\n", 901 | " ------------------------------ --------- 78.1/101.7 MB 3.7 MB/s eta 0:00:07\n", 902 | " ------------------------------ --------- 78.2/101.7 MB 3.6 MB/s eta 0:00:07\n", 903 | " ------------------------------ --------- 78.4/101.7 MB 3.7 MB/s eta 0:00:07\n", 904 | " ------------------------------ --------- 78.5/101.7 MB 3.7 MB/s eta 0:00:07\n", 905 | " ------------------------------ --------- 78.7/101.7 MB 3.9 MB/s eta 0:00:06\n", 906 | " ------------------------------- -------- 78.9/101.7 MB 3.9 MB/s eta 0:00:06\n", 907 | " ------------------------------- -------- 79.2/101.7 MB 3.9 MB/s eta 0:00:06\n", 908 | " ------------------------------- -------- 79.3/101.7 MB 3.9 MB/s eta 0:00:06\n", 909 | " ------------------------------- -------- 79.7/101.7 MB 3.9 MB/s eta 0:00:06\n", 910 | " ------------------------------- -------- 79.9/101.7 MB 3.9 MB/s eta 0:00:06\n", 911 | " ------------------------------- -------- 80.2/101.7 MB 4.0 MB/s eta 0:00:06\n", 912 | " ------------------------------- -------- 80.5/101.7 MB 4.1 MB/s eta 0:00:06\n", 913 | " ------------------------------- -------- 80.6/101.7 MB 4.0 MB/s eta 0:00:06\n", 914 | " ------------------------------- -------- 80.7/101.7 MB 4.0 MB/s eta 0:00:06\n", 915 | " ------------------------------- -------- 80.8/101.7 MB 4.0 MB/s eta 0:00:06\n", 916 | " ------------------------------- -------- 81.0/101.7 MB 4.0 MB/s eta 0:00:06\n", 917 | " ------------------------------- -------- 81.3/101.7 MB 4.0 MB/s eta 0:00:06\n", 918 | " ------------------------------- -------- 81.4/101.7 MB 4.0 MB/s eta 0:00:06\n", 919 | " -------------------------------- ------- 81.6/101.7 MB 4.0 MB/s eta 0:00:06\n", 920 | " -------------------------------- ------- 81.7/101.7 MB 3.9 MB/s eta 0:00:06\n", 921 | " -------------------------------- 
------- 81.9/101.7 MB 3.9 MB/s eta 0:00:06\n", 922 | " -------------------------------- ------- 82.1/101.7 MB 4.0 MB/s eta 0:00:05\n", 923 | " -------------------------------- ------- 82.4/101.7 MB 4.0 MB/s eta 0:00:05\n", 924 | " -------------------------------- ------- 82.5/101.7 MB 4.0 MB/s eta 0:00:05\n", 925 | " -------------------------------- ------- 82.7/101.7 MB 4.0 MB/s eta 0:00:05\n", 926 | " -------------------------------- ------- 82.9/101.7 MB 3.9 MB/s eta 0:00:05\n", 927 | " -------------------------------- ------- 82.9/101.7 MB 3.9 MB/s eta 0:00:05\n", 928 | " -------------------------------- ------- 83.1/101.7 MB 3.9 MB/s eta 0:00:05\n", 929 | " -------------------------------- ------- 83.3/101.7 MB 4.0 MB/s eta 0:00:05\n", 930 | " -------------------------------- ------- 83.4/101.7 MB 3.9 MB/s eta 0:00:05\n", 931 | " -------------------------------- ------- 83.5/101.7 MB 3.9 MB/s eta 0:00:05\n", 932 | " -------------------------------- ------- 83.5/101.7 MB 3.9 MB/s eta 0:00:05\n", 933 | " -------------------------------- ------- 83.7/101.7 MB 3.9 MB/s eta 0:00:05\n", 934 | " -------------------------------- ------- 83.8/101.7 MB 3.8 MB/s eta 0:00:05\n", 935 | " --------------------------------- ------ 84.0/101.7 MB 3.8 MB/s eta 0:00:05\n", 936 | " --------------------------------- ------ 84.2/101.7 MB 3.8 MB/s eta 0:00:05\n", 937 | " --------------------------------- ------ 84.3/101.7 MB 3.9 MB/s eta 0:00:05\n", 938 | " --------------------------------- ------ 84.5/101.7 MB 3.9 MB/s eta 0:00:05\n", 939 | " --------------------------------- ------ 84.5/101.7 MB 3.8 MB/s eta 0:00:05\n", 940 | " --------------------------------- ------ 84.6/101.7 MB 3.9 MB/s eta 0:00:05\n", 941 | " --------------------------------- ------ 84.7/101.7 MB 3.8 MB/s eta 0:00:05\n", 942 | " --------------------------------- ------ 84.9/101.7 MB 3.8 MB/s eta 0:00:05\n", 943 | " --------------------------------- ------ 85.1/101.7 MB 3.8 MB/s eta 0:00:05\n", 944 | " 
--------------------------------- ------ 85.3/101.7 MB 3.8 MB/s eta 0:00:05\n", 945 | " --------------------------------- ------ 85.5/101.7 MB 3.8 MB/s eta 0:00:05\n", 946 | " --------------------------------- ------ 85.6/101.7 MB 3.7 MB/s eta 0:00:05\n", 947 | " --------------------------------- ------ 85.6/101.7 MB 3.7 MB/s eta 0:00:05\n", 948 | " --------------------------------- ------ 85.6/101.7 MB 3.7 MB/s eta 0:00:05\n", 949 | " --------------------------------- ------ 85.6/101.7 MB 3.7 MB/s eta 0:00:05\n", 950 | " --------------------------------- ------ 85.7/101.7 MB 3.5 MB/s eta 0:00:05\n", 951 | " --------------------------------- ------ 86.0/101.7 MB 3.5 MB/s eta 0:00:05\n", 952 | " --------------------------------- ------ 86.4/101.7 MB 3.5 MB/s eta 0:00:05\n", 953 | " ---------------------------------- ----- 86.5/101.7 MB 3.5 MB/s eta 0:00:05\n", 954 | " ---------------------------------- ----- 86.8/101.7 MB 3.6 MB/s eta 0:00:05\n", 955 | " ---------------------------------- ----- 87.0/101.7 MB 3.6 MB/s eta 0:00:05\n", 956 | " ---------------------------------- ----- 87.3/101.7 MB 3.6 MB/s eta 0:00:05\n", 957 | " ---------------------------------- ----- 87.5/101.7 MB 3.6 MB/s eta 0:00:04\n", 958 | " ---------------------------------- ----- 87.6/101.7 MB 3.5 MB/s eta 0:00:04\n", 959 | " ---------------------------------- ----- 87.9/101.7 MB 3.6 MB/s eta 0:00:04\n", 960 | " ---------------------------------- ----- 88.0/101.7 MB 3.5 MB/s eta 0:00:04\n", 961 | " ---------------------------------- ----- 88.2/101.7 MB 3.5 MB/s eta 0:00:04\n", 962 | " ---------------------------------- ----- 88.4/101.7 MB 3.7 MB/s eta 0:00:04\n", 963 | " ---------------------------------- ----- 88.6/101.7 MB 3.7 MB/s eta 0:00:04\n", 964 | " ---------------------------------- ----- 88.7/101.7 MB 3.6 MB/s eta 0:00:04\n", 965 | " ---------------------------------- ----- 88.8/101.7 MB 3.6 MB/s eta 0:00:04\n", 966 | " ---------------------------------- ----- 88.9/101.7 MB 3.6 MB/s 
eta 0:00:04\n", 967 | " ----------------------------------- ---- 89.1/101.7 MB 3.6 MB/s eta 0:00:04\n", 968 | " ----------------------------------- ---- 89.3/101.7 MB 3.6 MB/s eta 0:00:04\n", 969 | " ----------------------------------- ---- 89.5/101.7 MB 3.6 MB/s eta 0:00:04\n", 970 | " ----------------------------------- ---- 89.8/101.7 MB 3.6 MB/s eta 0:00:04\n", 971 | " ----------------------------------- ---- 90.0/101.7 MB 3.6 MB/s eta 0:00:04\n", 972 | " ----------------------------------- ---- 90.3/101.7 MB 3.6 MB/s eta 0:00:04\n", 973 | " ----------------------------------- ---- 90.5/101.7 MB 3.5 MB/s eta 0:00:04\n", 974 | " ----------------------------------- ---- 90.6/101.7 MB 3.5 MB/s eta 0:00:04\n", 975 | " ----------------------------------- ---- 90.9/101.7 MB 3.6 MB/s eta 0:00:04\n", 976 | " ----------------------------------- ---- 91.1/101.7 MB 3.6 MB/s eta 0:00:03\n", 977 | " ----------------------------------- ---- 91.2/101.7 MB 3.6 MB/s eta 0:00:03\n", 978 | " ----------------------------------- ---- 91.4/101.7 MB 3.6 MB/s eta 0:00:03\n", 979 | " ----------------------------------- ---- 91.5/101.7 MB 3.5 MB/s eta 0:00:03\n", 980 | " ------------------------------------ --- 91.8/101.7 MB 3.6 MB/s eta 0:00:03\n", 981 | " ------------------------------------ --- 91.9/101.7 MB 3.6 MB/s eta 0:00:03\n", 982 | " ------------------------------------ --- 92.0/101.7 MB 3.6 MB/s eta 0:00:03\n", 983 | " ------------------------------------ --- 92.1/101.7 MB 3.6 MB/s eta 0:00:03\n", 984 | " ------------------------------------ --- 92.1/101.7 MB 3.5 MB/s eta 0:00:03\n", 985 | " ------------------------------------ --- 92.4/101.7 MB 3.5 MB/s eta 0:00:03\n", 986 | " ------------------------------------ --- 92.6/101.7 MB 3.5 MB/s eta 0:00:03\n", 987 | " ------------------------------------ --- 92.8/101.7 MB 3.5 MB/s eta 0:00:03\n", 988 | " ------------------------------------ --- 93.1/101.7 MB 3.5 MB/s eta 0:00:03\n", 989 | " ------------------------------------ 
--- 93.4/101.7 MB 3.6 MB/s eta 0:00:03\n", 990 | " ------------------------------------ --- 93.6/101.7 MB 3.7 MB/s eta 0:00:03\n", 991 | " ------------------------------------ --- 93.7/101.7 MB 3.6 MB/s eta 0:00:03\n", 992 | " ------------------------------------ --- 94.0/101.7 MB 3.7 MB/s eta 0:00:03\n", 993 | " ------------------------------------- -- 94.1/101.7 MB 3.7 MB/s eta 0:00:03\n", 994 | " ------------------------------------- -- 94.4/101.7 MB 3.8 MB/s eta 0:00:02\n", 995 | " ------------------------------------- -- 94.6/101.7 MB 3.8 MB/s eta 0:00:02\n", 996 | " ------------------------------------- -- 94.8/101.7 MB 3.9 MB/s eta 0:00:02\n", 997 | " ------------------------------------- -- 95.0/101.7 MB 3.9 MB/s eta 0:00:02\n", 998 | " ------------------------------------- -- 95.3/101.7 MB 4.0 MB/s eta 0:00:02\n", 999 | " ------------------------------------- -- 95.5/101.7 MB 4.0 MB/s eta 0:00:02\n", 1000 | " ------------------------------------- -- 95.7/101.7 MB 4.0 MB/s eta 0:00:02\n", 1001 | " ------------------------------------- -- 95.9/101.7 MB 4.3 MB/s eta 0:00:02\n", 1002 | " ------------------------------------- -- 96.2/101.7 MB 4.3 MB/s eta 0:00:02\n", 1003 | " ------------------------------------- -- 96.5/101.7 MB 4.3 MB/s eta 0:00:02\n", 1004 | " -------------------------------------- - 96.7/101.7 MB 4.3 MB/s eta 0:00:02\n", 1005 | " -------------------------------------- - 97.1/101.7 MB 4.3 MB/s eta 0:00:02\n", 1006 | " -------------------------------------- - 97.4/101.7 MB 4.3 MB/s eta 0:00:02\n", 1007 | " -------------------------------------- - 97.6/101.7 MB 4.3 MB/s eta 0:00:01\n", 1008 | " -------------------------------------- - 97.9/101.7 MB 4.4 MB/s eta 0:00:01\n", 1009 | " -------------------------------------- - 98.2/101.7 MB 4.5 MB/s eta 0:00:01\n", 1010 | " -------------------------------------- - 98.5/101.7 MB 4.5 MB/s eta 0:00:01\n", 1011 | " -------------------------------------- - 98.7/101.7 MB 4.5 MB/s eta 0:00:01\n", 1012 | " 
-------------------------------------- - 98.9/101.7 MB 4.6 MB/s eta 0:00:01\n", 1013 | " --------------------------------------- 99.2/101.7 MB 4.7 MB/s eta 0:00:01\n", 1014 | " --------------------------------------- 99.5/101.7 MB 4.7 MB/s eta 0:00:01\n", 1015 | " --------------------------------------- 99.8/101.7 MB 4.8 MB/s eta 0:00:01\n", 1016 | " --------------------------------------- 100.0/101.7 MB 4.8 MB/s eta 0:00:01\n", 1017 | " --------------------------------------- 100.3/101.7 MB 4.8 MB/s eta 0:00:01\n", 1018 | " --------------------------------------- 100.6/101.7 MB 4.8 MB/s eta 0:00:01\n", 1019 | " --------------------------------------- 100.6/101.7 MB 4.8 MB/s eta 0:00:01\n", 1020 | " --------------------------------------- 100.8/101.7 MB 4.7 MB/s eta 0:00:01\n", 1021 | " --------------------------------------- 101.0/101.7 MB 4.7 MB/s eta 0:00:01\n", 1022 | " --------------------------------------- 101.4/101.7 MB 4.8 MB/s eta 0:00:01\n", 1023 | " --------------------------------------- 101.7/101.7 MB 4.9 MB/s eta 0:00:01\n", 1024 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1025 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1026 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1027 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1028 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1029 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1030 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1031 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1032 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1033 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1034 | " --------------------------------------- 
101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1035 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1036 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1037 | " --------------------------------------- 101.7/101.7 MB 5.0 MB/s eta 0:00:01\n", 1038 | " ---------------------------------------- 101.7/101.7 MB 3.7 MB/s eta 0:00:00\n", 1039 | "Downloading graphviz-0.20.3-py3-none-any.whl (47 kB)\n", 1040 | " ---------------------------------------- 0.0/47.1 kB ? eta -:--:--\n", 1041 | " ---------------------------------------- 47.1/47.1 kB 2.5 MB/s eta 0:00:00\n", 1042 | "Downloading python_dateutil-2.9.0.post0-py2.py3-none-any.whl (229 kB)\n", 1043 | " ---------------------------------------- 0.0/229.9 kB ? eta -:--:--\n", 1044 | " --------------------------------------- 225.3/229.9 kB 6.9 MB/s eta 0:00:01\n", 1045 | " ---------------------------------------- 229.9/229.9 kB 4.7 MB/s eta 0:00:00\n", 1046 | "Installing collected packages: python-dateutil, graphviz, catboost\n", 1047 | " Attempting uninstall: python-dateutil\n", 1048 | " Found existing installation: python-dateutil 2.7.5\n", 1049 | " Uninstalling python-dateutil-2.7.5:\n", 1050 | " Successfully uninstalled python-dateutil-2.7.5\n", 1051 | "Successfully installed catboost-1.2.7 graphviz-0.20.3 python-dateutil-2.9.0.post0\n" 1052 | ] 1053 | }, 1054 | { 1055 | "name": "stderr", 1056 | "output_type": "stream", 1057 | "text": [ 1058 | "ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts.\n", 1059 | "conda-repo-cli 1.0.75 requires requests_mock, which is not installed.\n", 1060 | "conda-repo-cli 1.0.75 requires clyent==1.2.1, but you have clyent 1.2.2 which is incompatible.\n", 1061 | "conda-repo-cli 1.0.75 requires python-dateutil==2.8.2, but you have python-dateutil 2.9.0.post0 which is incompatible.\n", 1062 | "onelogin 3.1.6 requires python-dateutil~=2.7.0, but you have python-dateutil 2.9.0.post0 which is incompatible.\n", 1063 | "onelogin 3.1.6 requires typing-extensions~=4.3.0, but you have typing-extensions 4.12.2 which is incompatible.\n" 1064 | ] 1065 | } 1066 | ], 1067 | "source": [ 1068 | "!pip install catboost\n" 1069 | ] 1070 | }, 1071 | { 1072 | "cell_type": "code", 1073 | "execution_count": 13, 1074 | "id": "c2f47c1f-64b1-4332-9ce5-628e2a6766ac", 1075 | "metadata": {}, 1076 | "outputs": [ 1077 | { 1078 | "name": "stderr", 1079 | "output_type": "stream", 1080 | "text": [ 1081 | "C:\\Users\\mvenk\\AppData\\Local\\Temp\\ipykernel_16564\\1098097717.py:63: FutureWarning: The behavior of DataFrame concatenation with empty or all-NA entries is deprecated. In a future version, this will no longer exclude empty or all-NA columns when determining the result dtypes. To retain the old behavior, exclude the relevant entries before the concat operation.\n", 1082 | " results = pd.concat([results, temp_result], ignore_index=True)\n", 1083 | "C:\\Users\\mvenk\\anaconda3\\Lib\\site-packages\\xgboost\\core.py:158: UserWarning: [16:30:57] WARNING: C:\\buildkite-agent\\builds\\buildkite-windows-cpu-autoscaling-group-i-06abd128ca6c1688d-1\\xgboost\\xgboost-ci-windows\\src\\learner.cc:740: \n", 1084 | "Parameters: { \"use_label_encoder\" } are not used.\n", 1085 | "\n", 1086 | " warnings.warn(smsg, UserWarning)\n" 1087 | ] 1088 | }, 1089 | { 1090 | "data": { 1091 | "text/html": [ 1092 | "
\n", 1093 | "\n", 1106 | "\n", 1107 | " \n", 1108 | " \n", 1109 | " \n", 1110 | " \n", 1111 | " \n", 1112 | " \n", 1113 | " \n", 1114 | " \n", 1115 | " \n", 1116 | " \n", 1117 | " \n", 1118 | " \n", 1119 | " \n", 1120 | " \n", 1121 | " \n", 1122 | " \n", 1123 | " \n", 1124 | " \n", 1125 | " \n", 1126 | " \n", 1127 | " \n", 1128 | " \n", 1129 | " \n", 1130 | " \n", 1131 | " \n", 1132 | " \n", 1133 | " \n", 1134 | " \n", 1135 | " \n", 1136 | " \n", 1137 | " \n", 1138 | " \n", 1139 | " \n", 1140 | " \n", 1141 | " \n", 1142 | " \n", 1143 | " \n", 1144 | " \n", 1145 | " \n", 1146 | " \n", 1147 | " \n", 1148 | " \n", 1149 | " \n", 1150 | " \n", 1151 | " \n", 1152 | " \n", 1153 | " \n", 1154 | " \n", 1155 | " \n", 1156 | " \n", 1157 | " \n", 1158 | " \n", 1159 | " \n", 1160 | " \n", 1161 | " \n", 1162 | " \n", 1163 | " \n", 1164 | " \n", 1165 | " \n", 1166 | " \n", 1167 | " \n", 1168 | " \n", 1169 | " \n", 1170 | " \n", 1171 | " \n", 1172 | " \n", 1173 | " \n", 1174 | " \n", 1175 | "
ClassifierAccuracyPrecisionRecallF1-Score
0Support Vector Machine0.9688390.9645080.9688390.966334
1Decision Tree0.9320110.9326970.9320110.932049
2Random Forest0.9291780.9303040.9291780.929217
3AdaBoost0.2974500.2893780.2974500.230668
4XGBoost0.9291780.9298160.9291780.929155
5CatBoost0.9348440.9357260.9348440.934917
6Naive Bayes0.4702550.4105750.4702550.429052
\n", 1176 | "
" 1177 | ], 1178 | "text/plain": [ 1179 | " Classifier Accuracy Precision Recall F1-Score\n", 1180 | "0 Support Vector Machine 0.968839 0.964508 0.968839 0.966334\n", 1181 | "1 Decision Tree 0.932011 0.932697 0.932011 0.932049\n", 1182 | "2 Random Forest 0.929178 0.930304 0.929178 0.929217\n", 1183 | "3 AdaBoost 0.297450 0.289378 0.297450 0.230668\n", 1184 | "4 XGBoost 0.929178 0.929816 0.929178 0.929155\n", 1185 | "5 CatBoost 0.934844 0.935726 0.934844 0.934917\n", 1186 | "6 Naive Bayes 0.470255 0.410575 0.470255 0.429052" 1187 | ] 1188 | }, 1189 | "execution_count": 13, 1190 | "metadata": {}, 1191 | "output_type": "execute_result" 1192 | } 1193 | ], 1194 | "source": [ 1195 | "# Import necessary libraries\n", 1196 | "from sklearn.svm import SVC\n", 1197 | "from sklearn.tree import DecisionTreeClassifier\n", 1198 | "from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier\n", 1199 | "from xgboost import XGBClassifier\n", 1200 | "from catboost import CatBoostClassifier\n", 1201 | "from sklearn.naive_bayes import GaussianNB\n", 1202 | "from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score\n", 1203 | "from sklearn.model_selection import train_test_split\n", 1204 | "import pandas as pd\n", 1205 | "\n", 1206 | "# Extract features (E to I) and target (J column)\n", 1207 | "X = data[['Header_and_Main_Declaration', 'Incomprehensible_Code', \n", 1208 | " 'Comprehensible_Code_with_logical_errors', \n", 1209 | " 'Comprehensible_code_with_syntax_errors', \n", 1210 | " 'Correct_code_and_output']]\n", 1211 | "\n", 1212 | "y = data['Final_Marks']\n", 1213 | "\n", 1214 | "# Handle any missing values by assigning the result to X\n", 1215 | "X = X.fillna(0)\n", 1216 | "\n", 1217 | "# Split the data into training and testing sets\n", 1218 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)\n", 1219 | "\n", 1220 | "# Define the classifiers\n", 1221 | "classifiers = {\n", 1222 | " 'Support Vector 
Machine': SVC(),\n", 1223 | " 'Decision Tree': DecisionTreeClassifier(),\n", 1224 | " 'Random Forest': RandomForestClassifier(),\n", 1225 | " 'AdaBoost': AdaBoostClassifier(),\n", 1226 | " 'XGBoost': XGBClassifier(use_label_encoder=False, eval_metric='mlogloss'),\n", 1227 | " 'CatBoost': CatBoostClassifier(silent=True),\n", 1228 | " 'Naive Bayes': GaussianNB()\n", 1229 | "}\n", 1230 | "\n", 1231 | "# Initialize a dataframe to store results\n", 1232 | "results = pd.DataFrame(columns=['Classifier', 'Accuracy', 'Precision', 'Recall', 'F1-Score'])\n", 1233 | "\n", 1234 | "# Loop over classifiers, fit and predict, and store results\n", 1235 | "for classifier_name, classifier in classifiers.items():\n", 1236 | " # Train the classifier\n", 1237 | " classifier.fit(X_train, y_train)\n", 1238 | " \n", 1239 | " # Predict on the test set\n", 1240 | " y_pred = classifier.predict(X_test)\n", 1241 | " \n", 1242 | " # Calculate performance metrics\n", 1243 | " accuracy = accuracy_score(y_test, y_pred)\n", 1244 | " precision = precision_score(y_test, y_pred, average='weighted', zero_division=0)\n", 1245 | " recall = recall_score(y_test, y_pred, average='weighted', zero_division=0)\n", 1246 | " f1 = f1_score(y_test, y_pred, average='weighted', zero_division=0)\n", 1247 | " \n", 1248 | " # Append the result using pd.concat\n", 1249 | " temp_result = pd.DataFrame({\n", 1250 | " 'Classifier': [classifier_name],\n", 1251 | " 'Accuracy': [accuracy],\n", 1252 | " 'Precision': [precision],\n", 1253 | " 'Recall': [recall],\n", 1254 | " 'F1-Score': [f1]\n", 1255 | " })\n", 1256 | " \n", 1257 | " results = pd.concat([results, temp_result], ignore_index=True)\n", 1258 | "\n", 1259 | "# Display the results\n", 1260 | "results\n" 1261 | ] 1262 | }, 1263 | { 1264 | "cell_type": "code", 1265 | "execution_count": null, 1266 | "id": "e9c589a3-7a15-4476-aac8-66e9c414aa83", 1267 | "metadata": {}, 1268 | "outputs": [], 1269 | "source": [] 1270 | } 1271 | ], 1272 | "metadata": { 1273 | "kernelspec": { 
1274 | "display_name": "Python 3 (ipykernel)", 1275 | "language": "python", 1276 | "name": "python3" 1277 | }, 1278 | "language_info": { 1279 | "codemirror_mode": { 1280 | "name": "ipython", 1281 | "version": 3 1282 | }, 1283 | "file_extension": ".py", 1284 | "mimetype": "text/x-python", 1285 | "name": "python", 1286 | "nbconvert_exporter": "python", 1287 | "pygments_lexer": "ipython3", 1288 | "version": "3.11.7" 1289 | } 1290 | }, 1291 | "nbformat": 4, 1292 | "nbformat_minor": 5 1293 | } 1294 | -------------------------------------------------------------------------------- /Lab-7/ss: -------------------------------------------------------------------------------- 1 | s 2 | -------------------------------------------------------------------------------- /Lab6/ss: -------------------------------------------------------------------------------- 1 | s 2 | -------------------------------------------------------------------------------- /Lab_1 (1).ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "code", 19 | "execution_count": 4, 20 | "metadata": { 21 | "colab": { 22 | "base_uri": "https://localhost:8080/" 23 | }, 24 | "id": "iGgjnTmemMAx", 25 | "outputId": "7f16022b-9d02-44b3-b9f0-aae0119a14a4" 26 | }, 27 | "outputs": [ 28 | { 29 | "output_type": "stream", 30 | "name": "stdout", 31 | "text": [ 32 | "Enter a String: hemesh\n", 33 | "Count of vowels in the given string: 2\n", 34 | "Count of consonants in the given string: 4\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "#1Q)\n", 40 | "def count_vowels(string):\n", 41 | " vowels = 'AEIOUaeiou'\n", 42 | " vowel_count = 0;\n", 43 | " for char in string:\n", 44 | " if char in vowels:\n", 45 | " 
vowel_count = vowel_count + 1\n", 46 | " return vowel_count\n", 47 | "\n", 48 | "def count_consonants(string):\n", 49 | " vowels = 'AEIOUaeiou'\n", 50 | " consonant_count = 0;\n", 51 | " for char in string:\n", 52 | " if char not in vowels:\n", 53 | " consonant_count = consonant_count + 1\n", 54 | " return consonant_count\n", 55 | "\n", 56 | "\n", 57 | "\n", 58 | "input_string = input('Enter a String: ')\n", 59 | "print('Count of vowels in the given string:',count_vowels(input_string))\n", 60 | "print('Count of consonants in the given string:',count_consonants(input_string))" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "source": [ 66 | "#4Q)\n", 67 | "# this function is for taking matrix input\n", 68 | "def construct_matrix(rows,columns,matrix):\n", 69 | " for i in range(rows):\n", 70 | " matrix_row = []\n", 71 | " for j in range(columns):\n", 72 | " element = int(input(f'Enter element for row {i+1} and column {j+1}: '))\n", 73 | " matrix_row.append(element)\n", 74 | " matrix.append(matrix_row)\n", 75 | "#this function is for matrix transpose\n", 76 | "def transposes_matrix(matrix,rows,columns,transpose_matrix):\n", 77 | " for i in range(columns):\n", 78 | " transpose_matrix_row = []\n", 79 | " for j in range(rows):\n", 80 | " transpose_matrix_row.append(matrix[j][i])\n", 81 | " transpose_matrix.append(transpose_matrix_row)\n", 82 | "\n", 83 | "#main program starts here\n", 84 | "#enter number of rows and colums\n", 85 | "rows = int(input('Enter number of rows: '))\n", 86 | "columns = int(input('Enter number of columns: '))\n", 87 | "\n", 88 | "matrix = []\n", 89 | "\n", 90 | "construct_matrix(rows,columns,matrix)\n", 91 | "print('Input Matrix: ')\n", 92 | "for i in range(rows):\n", 93 | "\n", 94 | " for j in range(columns):\n", 95 | " print(matrix[i][j], end = \" \")\n", 96 | " print()\n", 97 | "\n", 98 | "\n", 99 | "transpose_matrix = []\n", 100 | "\n", 101 | "transposes_matrix(matrix,rows,columns,transpose_matrix)\n", 102 | "print('Transpose Matrix: 
')\n", 103 | "for i in range(rows):\n", 104 | "\n", 105 | " for j in range(columns):\n", 106 | " print(transpose_matrix[i][j], end = \" \")\n", 107 | " print()\n", 108 | "\n" 109 | ], 110 | "metadata": { 111 | "colab": { 112 | "base_uri": "https://localhost:8080/" 113 | }, 114 | "id": "YP9oSPVRnchR", 115 | "outputId": "7c2d84f1-d656-4723-fddf-8e1798d80005" 116 | }, 117 | "execution_count": 9, 118 | "outputs": [ 119 | { 120 | "output_type": "stream", 121 | "name": "stdout", 122 | "text": [ 123 | "Enter number of rows: 2\n", 124 | "Enter number of columns: 2\n", 125 | "Enter element for row 1 and column 1: 1\n", 126 | "Enter element for row 1 and column 2: 2\n", 127 | "Enter element for row 2 and column 1: 3\n", 128 | "Enter element for row 2 and column 2: 4\n", 129 | "Input Matrix: \n", 130 | "1 2 \n", 131 | "3 4 \n", 132 | "Transpose Matrix: \n", 133 | "1 3 \n", 134 | "2 4 \n" 135 | ] 136 | } 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "source": [ 142 | "#2Q)\n", 143 | "# this function is for taking matrix input\n", 144 | "def construct_matrix(rows, columns):\n", 145 | " matrix = []\n", 146 | " for i in range(rows):\n", 147 | " matrix_row = []\n", 148 | " for j in range(columns):\n", 149 | " element = int(input(f'Enter element for row {i+1} and column {j+1}: '))\n", 150 | " matrix_row.append(element)\n", 151 | " matrix.append(matrix_row)\n", 152 | " return matrix\n", 153 | "# this function is for taking matrix multiplication\n", 154 | "def matrix_multiplication(matrix_a, matrix_b):\n", 155 | " rows_A = len(matrix_a)\n", 156 | " columns_A = len(matrix_a[0])\n", 157 | " rows_B = len(matrix_b)\n", 158 | " columns_B = len(matrix_b[0])\n", 159 | "\n", 160 | " result_matrix = [[0] * columns_B for ele in range(rows_A)]\n", 161 | "\n", 162 | " for i in range(rows_A):\n", 163 | " for j in range(columns_B):\n", 164 | " result_matrix[i][j] = sum(matrix_a[i][k] * matrix_b[k][j] for k in range(columns_A))\n", 165 | "\n", 166 | " return result_matrix\n", 167 | 
"\n", 168 | "rows_A = int(input('Enter number of rows for matrix A: '))\n", 169 | "columns_A = int(input('Enter number of columns for matrix A: '))\n", 170 | "\n", 171 | "rows_B = int(input('Enter number of rows for matrix B: '))\n", 172 | "columns_B = int(input('Enter number of columns for matrix B: '))\n", 173 | "\n", 174 | "if columns_A != rows_B:\n", 175 | " print('Error: Multiplication is not possible')\n", 176 | " exit()\n", 177 | "\n", 178 | "print('Enter elements for Matrix A:')\n", 179 | "matrix_a = construct_matrix(rows_A, columns_A)\n", 180 | "\n", 181 | "print('Enter elements for Matrix B:')\n", 182 | "matrix_b = construct_matrix(rows_B, columns_B)\n", 183 | "\n", 184 | "print('Input Matrix A: ')\n", 185 | "\n", 186 | "for i in range(rows_A):\n", 187 | "\n", 188 | " for j in range(columns_A):\n", 189 | " print(matrix_a[i][j], end = \" \")\n", 190 | " print()\n", 191 | "\n", 192 | "print('Input Matrix B: ')\n", 193 | "\n", 194 | "for i in range(rows_B):\n", 195 | "\n", 196 | " for j in range(columns_B):\n", 197 | " print(matrix_b[i][j], end = \" \")\n", 198 | " print()\n", 199 | "\n", 200 | "result_matrix = matrix_multiplication(matrix_a, matrix_b)\n", 201 | "\n", 202 | "print('Result Matrix: ')\n", 203 | "for i in range(rows_A):\n", 204 | "\n", 205 | " for j in range(columns_B):\n", 206 | " print(result_matrix[i][j], end = \" \")\n", 207 | " print()\n" 208 | ], 209 | "metadata": { 210 | "colab": { 211 | "base_uri": "https://localhost:8080/" 212 | }, 213 | "id": "cYIqnTcJsRb-", 214 | "outputId": "f0aeee35-ea43-4923-9b0b-e28350376a66" 215 | }, 216 | "execution_count": 17, 217 | "outputs": [ 218 | { 219 | "output_type": "stream", 220 | "name": "stdout", 221 | "text": [ 222 | "Enter number of rows for matrix A: 2\n", 223 | "Enter number of columns for matrix A: 2\n", 224 | "Enter number of rows for matrix B: 2\n", 225 | "Enter number of columns for matrix B: 2\n", 226 | "Enter elements for Matrix A:\n", 227 | "Enter element for row 1 and column 1: 1\n", 228 
| "Enter element for row 1 and column 2: 2\n", 229 | "Enter element for row 2 and column 1: 3\n", 230 | "Enter element for row 2 and column 2: 4\n", 231 | "Enter elements for Matrix B:\n", 232 | "Enter element for row 1 and column 1: 5\n", 233 | "Enter element for row 1 and column 2: 6\n", 234 | "Enter element for row 2 and column 1: 7\n", 235 | "Enter element for row 2 and column 2: 8\n", 236 | "Input Matrix A: \n", 237 | "1 2 \n", 238 | "3 4 \n", 239 | "Input Matrix B: \n", 240 | "5 6 \n", 241 | "7 8 \n", 242 | "Result Matrix: \n", 243 | "19 22 \n", 244 | "43 50 \n" 245 | ] 246 | } 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "source": [ 252 | "#3Q)\n", 253 | "# this function is for inputting list\n", 254 | "def input_list():\n", 255 | " result = []\n", 256 | " i = 0\n", 257 | " while True:\n", 258 | " user_input = input(f'Enter element {i+1} (or type \"exit\" to finish): ')\n", 259 | " if user_input.lower() == \"exit\":\n", 260 | " break\n", 261 | " try:\n", 262 | " element = int(user_input)\n", 263 | " result.append(element)\n", 264 | " i += 1\n", 265 | " except ValueError:\n", 266 | " print(\"Please enter a valid integer or 'exit' to finish.\")\n", 267 | " return result\n", 268 | "# this function is for getting count of commont elements in list\n", 269 | "def common_count(list_a, list_b):\n", 270 | " count = 0\n", 271 | " for ele_a in list_a:\n", 272 | " if ele_a in list_b:\n", 273 | " count += 1\n", 274 | " list_b.remove(ele_a)\n", 275 | " return count\n", 276 | "\n", 277 | "print('Enter elements for the first list:')\n", 278 | "list_a = input_list()\n", 279 | "\n", 280 | "print('Enter elements for the second list:')\n", 281 | "list_b = input_list()\n", 282 | "\n", 283 | "common_ele_count = common_count(list_a, list_b)\n", 284 | "print(f'Number of common elements: {common_ele_count}')\n" 285 | ], 286 | "metadata": { 287 | "colab": { 288 | "base_uri": "https://localhost:8080/" 289 | }, 290 | "id": "-fhM4JsYwYA7", 291 | "outputId": 
"9c750c39-8ef9-49fe-855c-318a4bf294ef" 292 | }, 293 | "execution_count": 20, 294 | "outputs": [ 295 | { 296 | "output_type": "stream", 297 | "name": "stdout", 298 | "text": [ 299 | "Enter elements for the first list:\n", 300 | "Enter element 1 (or type \"exit\" to finish): 1\n", 301 | "Enter element 2 (or type \"exit\" to finish): 2\n", 302 | "Enter element 3 (or type \"exit\" to finish): 3\n", 303 | "Enter element 4 (or type \"exit\" to finish): 4\n", 304 | "Enter element 5 (or type \"exit\" to finish): exit\n", 305 | "Enter elements for the second list:\n", 306 | "Enter element 1 (or type \"exit\" to finish): 5\n", 307 | "Enter element 2 (or type \"exit\" to finish): 2\n", 308 | "Enter element 3 (or type \"exit\" to finish): 3\n", 309 | "Enter element 4 (or type \"exit\" to finish): 2\n", 310 | "Enter element 5 (or type \"exit\" to finish): 4\n", 311 | "Enter element 6 (or type \"exit\" to finish): \n", 312 | "Please enter a valid integer or 'exit' to finish.\n", 313 | "Enter element 6 (or type \"exit\" to finish): exit\n", 314 | "Number of common elements: 3\n" 315 | ] 316 | } 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "source": [], 322 | "metadata": { 323 | "id": "iqqU15uf1QRz" 324 | }, 325 | "execution_count": null, 326 | "outputs": [] 327 | } 328 | ] 329 | } -------------------------------------------------------------------------------- /lab_2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "code", 19 | "execution_count": 24, 20 | "metadata": { 21 | "colab": { 22 | "base_uri": "https://localhost:8080/" 23 | }, 24 | "id": "uzfbpsfgPudm", 25 | "outputId": "c65f5337-21a7-4511-bca2-1892d49eed9e" 26 | }, 27 | 
"outputs": [ 28 | { 29 | "output_type": "stream", 30 | "name": "stdout", 31 | "text": [ 32 | "Dimensionality of the vector space: 3\n", 33 | "Number of vectors in the vector space: 10\n", 34 | "Rank of Matrix A: 3\n", 35 | "\n", 36 | "Matrix A:\n", 37 | "[[20 6 2]\n", 38 | " [16 3 6]\n", 39 | " [27 6 2]\n", 40 | " [19 1 2]\n", 41 | " [24 4 2]\n", 42 | " [22 1 5]\n", 43 | " [15 4 2]\n", 44 | " [18 4 2]\n", 45 | " [21 1 4]\n", 46 | " [16 2 4]]\n", 47 | "\n", 48 | "Matrix C:\n", 49 | "[[386]\n", 50 | " [289]\n", 51 | " [393]\n", 52 | " [110]\n", 53 | " [280]\n", 54 | " [167]\n", 55 | " [271]\n", 56 | " [274]\n", 57 | " [148]\n", 58 | " [198]]\n", 59 | "\n", 60 | "Pseudo-Inverse of A:\n", 61 | "[[-0.01008596 -0.03124505 0.01013951 0.0290728 0.0182907 0.01161794\n", 62 | " -0.00771348 0.00095458 0.01743623 -0.00542016]\n", 63 | " [ 0.09059668 0.07263726 0.03172933 -0.09071908 -0.01893196 -0.06926996\n", 64 | " 0.05675464 0.03152577 -0.07641966 0.00357352]\n", 65 | " [ 0.00299878 0.15874243 -0.05795468 -0.06609024 -0.06295043 0.03348017\n", 66 | " 0.01541831 -0.01070461 0.00029003 0.05938755]]\n", 67 | "\n", 68 | "Cost of each product available for sale (Matrix X):\n", 69 | "[[ 1.]\n", 70 | " [55.]\n", 71 | " [18.]]\n" 72 | ] 73 | } 74 | ], 75 | "source": [ 76 | "import numpy as np\n", 77 | "import pandas as pd\n", 78 | "\n", 79 | "df = pd.read_excel('Lab Session Data.xlsx', sheet_name=0)\n", 80 | "\n", 81 | "A = df[['Candies (#)', 'Mangoes (Kg)', 'Milk Packets (#)']].to_numpy()\n", 82 | "C = df['Payment (Rs)'].to_numpy()\n", 83 | "\n", 84 | "C = C.reshape(-1, 1)\n", 85 | "\n", 86 | "dimensionality = A.shape[1]\n", 87 | "print(f\"Dimensionality of the vector space: {dimensionality}\")\n", 88 | "\n", 89 | "num_vectors = A.shape[0]\n", 90 | "print(f\"Number of vectors in the vector space: {num_vectors}\")\n", 91 | "\n", 92 | "rank_A = np.linalg.matrix_rank(A)\n", 93 | "print(f\"Rank of Matrix A: {rank_A}\")\n", 94 | "\n", 95 | "A_pseudo_inv = np.linalg.pinv(A)\n", 96 | 
"\n", 97 | "X = A_pseudo_inv @ C\n", 98 | "\n", 99 | "print(\"\\nMatrix A:\")\n", 100 | "print(A)\n", 101 | "\n", 102 | "print(\"\\nMatrix C:\")\n", 103 | "print(C)\n", 104 | "\n", 105 | "print(\"\\nPseudo-Inverse of A:\")\n", 106 | "print(A_pseudo_inv)\n", 107 | "\n", 108 | "print(\"\\nCost of each product available for sale (Matrix X):\")\n", 109 | "print(X)\n" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "source": [], 115 | "metadata": { 116 | "id": "miX7qSvFQVS4" 117 | }, 118 | "execution_count": null, 119 | "outputs": [] 120 | } 121 | ] 122 | } --------------------------------------------------------------------------------