├── README.md
├── RNN-based Decoder for Convolutional Codes.ipynb
└── coding.py
/README.md:
--------------------------------------------------------------------------------
1 | # On Recurrent Neural Networks for Sequence-based Processing in Communications
2 | ## In this notebook we show how to build a decoder for convolutional codes based on recurrent neural networks
3 | Accompanying code of paper ["On Recurrent Neural Networks for Sequence-based Processing in Communications" by Daniel Tandler, Sebastian Dörner, Sebastian Cammerer, Stephan ten Brink](https://ieeexplore.ieee.org/document/9048728)
4 |
5 | If you find this code helpful please cite this work using the following bibtex entry:
6 |
7 | ```tex
8 | @inproceedings{RNN-Conv-Decoding-Tandler2019,
9 | author = {Daniel Tandler and
10 | Sebastian D{\"{o}}rner and
11 | Sebastian Cammerer and
12 | Stephan ten Brink},
13 | booktitle = {2019 53rd Asilomar Conference on Signals, Systems, and Computers},
14 | title = {On Recurrent Neural Networks for Sequence-based Processing in Communications},
15 | year = {2019},
16 | pages = {537-543}
17 | }
18 | ```
19 |
20 |
21 | ## Installation/Setup
22 |
23 | An example of the used code is given in the Jupyter Notebook (.ipynb file), the coding.py file is just for arbitrary code generation and not required to run the notebook.
24 |
25 | You can directly run the notebook with code and short explanations in google colab:
26 |
27 | [Run this Notebook in Google Colaboratory: Link to colab.google.com](https://colab.research.google.com/github/sdnr/RNN-Conv-Decoder/blob/master/RNN-based%20Decoder%20for%20Convolutional%20Codes.ipynb)
28 |
--------------------------------------------------------------------------------
/RNN-based Decoder for Convolutional Codes.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "nbformat": 4,
3 | "nbformat_minor": 0,
4 | "metadata": {
5 | "accelerator": "GPU",
6 | "colab": {
7 | "name": "Copy of RNN-based Decoder for Convolutional Codes.ipynb",
8 | "provenance": [],
9 | "collapsed_sections": []
10 | },
11 | "kernelspec": {
12 | "display_name": "Python 3",
13 | "language": "python",
14 | "name": "python3"
15 | },
16 | "language_info": {
17 | "codemirror_mode": {
18 | "name": "ipython",
19 | "version": 3
20 | },
21 | "file_extension": ".py",
22 | "mimetype": "text/x-python",
23 | "name": "python",
24 | "nbconvert_exporter": "python",
25 | "pygments_lexer": "ipython3",
26 | "version": "3.6.8"
27 | },
28 | "varInspector": {
29 | "cols": {
30 | "lenName": 16,
31 | "lenType": 16,
32 | "lenVar": 40
33 | },
34 | "kernels_config": {
35 | "python": {
36 | "delete_cmd_postfix": "",
37 | "delete_cmd_prefix": "del ",
38 | "library": "var_list.py",
39 | "varRefreshCmd": "print(var_dic_list())"
40 | },
41 | "r": {
42 | "delete_cmd_postfix": ") ",
43 | "delete_cmd_prefix": "rm(",
44 | "library": "var_list.r",
45 | "varRefreshCmd": "cat(var_dic_list()) "
46 | }
47 | },
48 | "oldHeight": 654.4,
49 | "position": {
50 | "height": "676px",
51 | "left": "714px",
52 | "right": "63px",
53 | "top": "-5px",
54 | "width": "800px"
55 | },
56 | "types_to_exclude": [
57 | "module",
58 | "function",
59 | "builtin_function_or_method",
60 | "instance",
61 | "_Feature"
62 | ],
63 | "varInspector_section_display": "block",
64 | "window_display": false
65 | }
66 | },
67 | "cells": [
68 | {
69 | "cell_type": "markdown",
70 | "metadata": {
71 | "id": "xFHXWn4HnIA2"
72 | },
73 | "source": [
74 | "# On Recurrent Neural Networks for Sequence-based Processing in Communications\n",
75 | "## In this notebook we show how to build a decoder for convolutional codes based on recurrent neural networks\n",
76 | "Accompanying code of paper [\"On Recurrent Neural Networks for Sequence-based Processing in Communications\" by Daniel Tandler, Sebastian Dörner, Sebastian Cammerer, Stephan ten Brink](https://ieeexplore.ieee.org/document/9048728)\n",
77 | "\n",
78 | "If you find this code helpful please cite this work using the following bibtex entry:\n",
79 | "\n",
80 | "```tex\n",
81 | "@inproceedings{RNN-Conv-Decoding-Tandler2019,\n",
82 | " author = {Daniel Tandler and\n",
83 | " Sebastian D{\\\"{o}}rner and\n",
84 | " Sebastian Cammerer and\n",
85 | " Stephan ten Brink},\n",
86 | " booktitle = {2019 53rd Asilomar Conference on Signals, Systems, and Computers},\n",
87 | " title = {On Recurrent Neural Networks for Sequence-based Processing in Communications},\n",
88 | " year = {2019},\n",
89 | " pages = {537-543}\n",
90 | "}\n",
91 | "```"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "source": [
97 | "# downgrading numpy to not run into error with tf1's cudnngru\n",
98 | "!pip3 install numpy==1.19.2"
99 | ],
100 | "metadata": {
101 | "colab": {
102 | "base_uri": "https://localhost:8080/"
103 | },
104 | "id": "bCqo_Me1BAUe",
105 | "outputId": "8014e8e8-70c5-4c38-b37e-a4f88d4ea905"
106 | },
107 | "execution_count": 1,
108 | "outputs": [
109 | {
110 | "output_type": "stream",
111 | "name": "stdout",
112 | "text": [
113 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n",
114 | "Requirement already satisfied: numpy==1.19.2 in /usr/local/lib/python3.7/dist-packages (1.19.2)\n"
115 | ]
116 | }
117 | ]
118 | },
119 | {
120 | "cell_type": "code",
121 | "metadata": {
122 | "id": "QEy0wTWmvk71",
123 | "colab": {
124 | "base_uri": "https://localhost:8080/"
125 | },
126 | "outputId": "e94acd5f-3a1c-4fea-b3d7-b69df3c3faf4"
127 | },
128 | "source": [
129 | "# magic command to use TF 1.X in Colaboratory when importing tensorflow\n",
130 | "%tensorflow_version 1.x \n",
131 | "import tensorflow as tf # imports the tensorflow library to the python kernel\n",
132 | "tf.logging.set_verbosity(tf.logging.ERROR) # sets the amount of debug information from TF (INFO, WARNING, ERROR)\n",
133 | "\n",
134 | "print(\"Using tensorflow version:\", tf.__version__)"
135 | ],
136 | "execution_count": 2,
137 | "outputs": [
138 | {
139 | "output_type": "stream",
140 | "name": "stdout",
141 | "text": [
142 | "TensorFlow 1.x selected.\n",
143 | "Using tensorflow version: 1.15.2\n"
144 | ]
145 | }
146 | ]
147 | },
148 | {
149 | "cell_type": "code",
150 | "metadata": {
151 | "id": "o0V_CfUbnIBB",
152 | "colab": {
153 | "base_uri": "https://localhost:8080/"
154 | },
155 | "outputId": "72fc5df0-40ce-4bea-f21f-327b4b75c68d"
156 | },
157 | "source": [
158 | "import tensorflow as tf\n",
159 | "import numpy as np\n",
160 | "print(\"numpy version:\",np.__version__)\n",
161 | "import matplotlib.pyplot as plt"
162 | ],
163 | "execution_count": 3,
164 | "outputs": [
165 | {
166 | "output_type": "stream",
167 | "name": "stdout",
168 | "text": [
169 | "numpy version: 1.19.2\n"
170 | ]
171 | }
172 | ]
173 | },
174 | {
175 | "cell_type": "markdown",
176 | "metadata": {
177 | "id": "Lwaxsc7YnIBY"
178 | },
179 | "source": [
180 | "# Code Setup\n",
181 | "\n",
182 | "We first set up a code class that holds all necessary parameters and provides functions to quickly generate large samples of encoded bits.\n",
183 | "\n",
184 | "For this notebook, only code examples for memory 1,2,4 and 6 are provided.\n",
185 | "\n",
186 | "To generate other convolutional codes check out the accompanying coding.py which uses [CommPy](https://github.com/veeresht/CommPy) to generate arbitrary codes."
187 | ]
188 | },
189 | {
190 | "cell_type": "code",
191 | "metadata": {
192 | "id": "rz8aiownnIBc"
193 | },
194 | "source": [
195 | "class code:\n",
196 | " def __init__(self,m):\n",
197 | " self.m = m # Number of delay elements in the convolutional encoder\n",
198 | " self.tb_depth = 5*(self.m + 1) # Traceback depth of the decoder\n",
199 | " self.code_rate = 0.5\n",
200 | " if m == 1:\n",
201 | " self.d1 = 0o1\n",
202 | " self.d2 = 0o3\n",
203 | " self.impulse_response = np.array([0, 1, 1, 1])\n",
204 | " self.viterbi_reference = np.array([7.293792e-02,5.801720e-02,4.490250e-02,3.349593e-02,2.429049e-02,1.684274e-02,1.124068e-02,7.277303e-03,4.354604e-03,2.546695e-03,1.382015e-03,7.138968e-04])\n",
205 | " elif m == 2:\n",
206 | " self.d1 = 0o5\n",
207 | " self.d2 = 0o7\n",
208 | " self.impulse_response = np.array([1, 1, 0, 1, 1, 1])\n",
209 | " self.viterbi_reference = np.array([9.278817e-02,6.424232e-02,4.195904e-02,2.531590e-02,1.424276e-02,7.385386e-03,3.617080e-03,1.526589e-03,6.319029e-04,2.502278e-04,7.633503e-05,2.566724e-05])\n",
210 | " elif m == 4:\n",
211 | " self.d1 = 0o23\n",
212 | " self.d2 = 0o35\n",
213 | " self.impulse_response = np.array([1, 1, 0, 1, 0, 1, 1, 0, 1, 1])\n",
214 | " self.viterbi_reference = np.array([1.266374e-01,7.990744e-02,4.546113e-02,2.301058e-02,1.045569e-02,4.220632e-03,1.526512e-03,5.214676e-04,1.482288e-04,3.666830e-05,7.778123e-06,1.444509e-06])\n",
215 | " elif m == 6:\n",
216 | " self.d1 = 0o133\n",
217 | " self.d2 = 0o171\n",
218 | " self.impulse_response = np.array([1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1])\n",
219 | " self.viterbi_reference = np.array([1.547330e-01,8.593706e-02,3.985466e-02,1.544436e-02,5.221681e-03,1.378203e-03,3.501900e-04,8.042758e-05,1.676778e-05,2.989088e-06,3.444674e-07,np.NaN])\n",
220 | " else:\n",
221 | " print(\"Code not available!\")\n",
222 | " \n",
223 | " def zero_pad(self,u):\n",
224 | " return np.reshape(np.stack([u,np.zeros_like(u)],axis=1),(-1,))\n",
225 | " \n",
226 | " def encode_sequence(self,u,terminate=False):\n",
227 | " if terminate:\n",
228 | " return np.convolve(self.zero_pad(u),self.impulse_response,mode='full')[:-1] % 2\n",
229 | " else:\n",
230 | " return np.convolve(self.zero_pad(u),self.impulse_response,mode='full')[:len(u)*2] % 2\n",
231 | " \n",
232 | " def encode_batch(self,u,terminate=False):\n",
233 | " x0 = self.encode_sequence(u[0],terminate)\n",
234 | " x = np.empty((u.shape[0],x0.shape[0]),dtype=np.int8)\n",
235 | " x[0] = x0\n",
236 | " for i in range(len(u)-1):\n",
237 | " x[i+1] = self.encode_sequence(u[i+1],terminate)\n",
238 | " return x"
239 | ],
240 | "execution_count": 4,
241 | "outputs": []
242 | },
243 | {
244 | "cell_type": "markdown",
245 | "metadata": {
246 | "id": "h7CbqjGjnIBp"
247 | },
248 | "source": [
249 | "### Our SNR definition"
250 | ]
251 | },
252 | {
253 | "cell_type": "code",
254 | "metadata": {
255 | "id": "H4AoHWSOnIBu"
256 | },
257 | "source": [
258 | "def ebnodb2std(ebnodb, coderate=1):\n",
259 | " ebno = 10**(ebnodb/10)\n",
260 | " return (1/np.sqrt(2*coderate*ebno)).astype(np.float32)"
261 | ],
262 | "execution_count": 5,
263 | "outputs": []
264 | },
265 | {
266 | "cell_type": "markdown",
267 | "metadata": {
268 | "id": "q8NToLOvnIB6"
269 | },
270 | "source": [
271 | "### Choose which Convolutional code you want to use"
272 | ]
273 | },
274 | {
275 | "cell_type": "code",
276 | "metadata": {
277 | "id": "MLNYe-pKnIB-"
278 | },
279 | "source": [
280 | "#code = code(m=1) # memory 1 rate 0.5 code with generator polynomials 0o1 and 0o3 (octal)\n",
281 | "#code = code(m=2) # memory 2 rate 0.5 code with generator polynomials 0o5 and 0o7 (octal)\n",
282 | "#code = code(m=4) # memory 4 rate 0.5 code with generator polynomials 0o23 and 0o35 (octal)\n",
283 | "code = code(m=6) # memory 6 rate 0.5 code with generator polynomials 0o133 and 0o171 (octal)"
284 | ],
285 | "execution_count": 6,
286 | "outputs": []
287 | },
288 | {
289 | "cell_type": "markdown",
290 | "metadata": {
291 | "id": "vHxCyjidnICJ"
292 | },
293 | "source": [
294 | "## Parameters"
295 | ]
296 | },
297 | {
298 | "cell_type": "code",
299 | "metadata": {
300 | "id": "sYiAZ51CnICN",
301 | "colab": {
302 | "base_uri": "https://localhost:8080/"
303 | },
304 | "outputId": "f38cf17c-cd15-4612-cf3d-b222bb72ea1b"
305 | },
306 | "source": [
307 | "model_name = \"%s%sm%s_Model\" % (oct(code.d1),oct(code.d2),code.m)\n",
308 | "saver_path = \"trained_models/\"+model_name\n",
309 | "\n",
310 | "gradient_depth = code.tb_depth\n",
311 | "additional_input = 0\n",
312 | "decision_offset = int(len(code.impulse_response)/2)\n",
313 | "sequence_length = 15\n",
314 | "\n",
315 | "rnn_layers = 3\n",
316 | "rnn_units_per_layer = 256\n",
317 | "dense_layers = [16]\n",
318 | "\n",
319 | "print(\"Code Rate:\", code.code_rate)\n",
320 | "print(\"RNN layers:\", rnn_layers)\n",
321 | "print(\"Units per layer:\", rnn_units_per_layer)\n",
322 | "print(\"Gradient depth:\", gradient_depth)\n",
323 | "print(\"ConvCode traceback length thump rule:\",code.tb_depth)"
324 | ],
325 | "execution_count": 7,
326 | "outputs": [
327 | {
328 | "output_type": "stream",
329 | "name": "stdout",
330 | "text": [
331 | "Code Rate: 0.5\n",
332 | "RNN layers: 3\n",
333 | "Units per layer: 256\n",
334 | "Gradient depth: 35\n",
335 | "ConvCode traceback length thump rule: 35\n"
336 | ]
337 | }
338 | ]
339 | },
340 | {
341 | "cell_type": "markdown",
342 | "metadata": {
343 | "id": "pgCc5a_DnICZ"
344 | },
345 | "source": [
346 | "## Tensorflow Graph"
347 | ]
348 | },
349 | {
350 | "cell_type": "code",
351 | "metadata": {
352 | "id": "Y8vm-YG3nICc"
353 | },
354 | "source": [
355 | "graph = tf.Graph()\n",
356 | "with graph.as_default():\n",
357 | " \n",
358 | " # Encoded Sequence Input\n",
359 | " x = tf.placeholder(tf.float32,shape=[2*gradient_depth+sequence_length,None,2*(1+2*additional_input)],name=\"coded_sequence\")\n",
360 | " \n",
361 | " # Decoding\n",
362 | " multi_rnn_cell = tf.contrib.cudnn_rnn.CudnnGRU(rnn_layers,rnn_units_per_layer,direction='bidirectional')\n",
363 | " multi_rnn_cell.build(input_shape=[2*gradient_depth+sequence_length,None,(1+2*additional_input)*2])\n",
364 | "\n",
365 | " out,(new_state,) = multi_rnn_cell(x)\n",
366 | " \n",
367 | " out_sequence = out[gradient_depth:gradient_depth+sequence_length,:,:]\n",
368 | " \n",
369 | " # final dense layers:\n",
370 | " for size in dense_layers:\n",
371 | " out_sequence = tf.layers.dense(out_sequence,size,activation=tf.nn.relu)\n",
372 | " u_hat = tf.layers.dense(out_sequence,1,activation=tf.nn.sigmoid)\n",
373 | " \n",
374 | " u_hat = tf.squeeze(u_hat)\n",
375 | " \n",
376 | " u_hat_bits = tf.cast(tf.greater(u_hat,0.5),tf.int8)\n",
377 | " \n",
378 | " \n",
379 | " # Loss function\n",
380 | " u_label = tf.placeholder(tf.int8,shape=[sequence_length,None],name=\"uncoded_bits\")\n",
381 | " loss = tf.losses.log_loss(labels=u_label,predictions=u_hat)\n",
382 | " correct_predictions = tf.equal(u_hat_bits, u_label)\n",
383 | " ber = 1.0 - tf.reduce_mean(tf.cast(correct_predictions, tf.float32),axis=1)\n",
384 | "\n",
385 | "\n",
386 | " # Training\n",
387 | " lr = tf.placeholder(tf.float32, shape=[])\n",
388 | " optimizer = tf.train.RMSPropOptimizer(lr)\n",
389 | " step = optimizer.minimize(loss)\n",
390 | " \n",
391 | " # Init\n",
392 | " init = tf.global_variables_initializer()\n",
393 | " \n",
394 | " # Saver\n",
395 | " saver = tf.train.Saver()"
396 | ],
397 | "execution_count": 8,
398 | "outputs": []
399 | },
400 | {
401 | "cell_type": "markdown",
402 | "metadata": {
403 | "id": "wFhX5TfMnICo"
404 | },
405 | "source": [
406 | "### Let's print all trainable variables of the graph we just defined:\n",
407 | "Note that special CudnnGRU layers generate some kind of \"sub\"-graph and therefore their variables are not shown here but in a so-called opaque_kernel."
408 | ]
409 | },
410 | {
411 | "cell_type": "code",
412 | "metadata": {
413 | "id": "TKjS78WEnICr",
414 | "colab": {
415 | "base_uri": "https://localhost:8080/"
416 | },
417 | "outputId": "4fed0087-504a-4df6-d58b-32f27ba907b4"
418 | },
419 | "source": [
420 | "def model_summary(for_graph): #from TensorFlow slim.model_analyzer.analyze_vars source\n",
421 | " print(\"{:60}{:21}{:14}{:>17}\".format('Name','Shape','Variables','Size'))\n",
422 | " print('{:-<112}'.format(''))\n",
423 | " total_size = 0\n",
424 | " total_bytes = 0\n",
425 | " for var in for_graph.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):\n",
426 | " # if var.num_elements() is None or [] assume size 0.\n",
427 | " var_shape = var.get_shape()\n",
428 | " var_size = var.get_shape().num_elements() or 0\n",
429 | " var_bytes = var_size * var.dtype.size\n",
430 | " total_size += var_size\n",
431 | " total_bytes += var_bytes\n",
432 | " print(\"{:60}{:21}{:14}{:>11} bytes\".format(var.name, str(var_shape), str(var_size), var_bytes))\n",
433 | " print()\n",
434 | " print('\\033[1mTotal number of variables:\\t{}\\033[0m'.format(total_size))\n",
435 | " print('\\033[1mTotal bytes of variables:\\t{}\\033[0m'.format(total_bytes))\n",
436 | "\n",
437 | "model_summary(graph)"
438 | ],
439 | "execution_count": 9,
440 | "outputs": [
441 | {
442 | "output_type": "stream",
443 | "name": "stdout",
444 | "text": [
445 | "Name Shape Variables Size\n",
446 | "----------------------------------------------------------------------------------------------------------------\n",
447 | "cudnn_gru/opaque_kernel:0 0 0 bytes\n",
448 | "dense/kernel:0 (512, 16) 8192 32768 bytes\n",
449 | "dense/bias:0 (16,) 16 64 bytes\n",
450 | "dense_1/kernel:0 (16, 1) 16 64 bytes\n",
451 | "dense_1/bias:0 (1,) 1 4 bytes\n",
452 | "\n",
453 | "\u001b[1mTotal number of variables:\t8225\u001b[0m\n",
454 | "\u001b[1mTotal bytes of variables:\t32900\u001b[0m\n"
455 | ]
456 | }
457 | ]
458 | },
459 | {
460 | "cell_type": "markdown",
461 | "metadata": {
462 | "id": "LP8nLUmKnIC2"
463 | },
464 | "source": [
465 | "## Generator function\n",
466 | "Let's define a generator function that first generates a large dataset pair of bit sequences and encoded bit sequences.\n",
467 | "\n",
468 | "In a second step, this functions slices those datasets in shorter snippets which are then fed to the NN decoder."
469 | ]
470 | },
471 | {
472 | "cell_type": "code",
473 | "metadata": {
474 | "id": "Caj6X4x1nIC5"
475 | },
476 | "source": [
477 | "def generator(batch_size,iterations,gradient_depth,sequence_length,additional_input,decision_offset,sigma,apriori):\n",
478 | " offset = code.tb_depth + 3\n",
479 | " full_uncoded_sequences = np.random.randint(0,100,[batch_size,iterations+2*gradient_depth+sequence_length+offset+2*additional_input],dtype=np.int8)\n",
480 | " full_uncoded_sequences = np.less(full_uncoded_sequences,np.array([apriori*100],dtype=np.int8)).astype(np.int8)\n",
481 | " full_coded_sequences = code.encode_batch(full_uncoded_sequences)\n",
482 | " full_coded_sequences = np.reshape(full_coded_sequences,[batch_size,-1,int(1/code.code_rate)])\n",
483 | " \n",
484 | " # Feeding\n",
485 | " for i in range(iterations):\n",
486 | " encoded_sequences = full_coded_sequences[:,offset+i:offset+i+2*gradient_depth+sequence_length+2*additional_input,:]\n",
487 | " labels = full_uncoded_sequences[:,offset+i+gradient_depth+additional_input+decision_offset:offset+i+gradient_depth+additional_input+decision_offset+sequence_length]\n",
488 | " \n",
489 | " # BPSK Modulation\n",
490 | " modulated_sequences = (encoded_sequences.astype(np.float32) - 0.5) * 2.0\n",
491 | "\n",
492 | " # AWGN\n",
493 | " noise = np.random.normal(size=modulated_sequences.shape).astype(np.float32)\n",
494 | " noised_sequences = modulated_sequences + noise * sigma\n",
495 | " \n",
496 | " # Input Processing\n",
497 | " stack_array = []\n",
498 | " for k in range(2*gradient_depth+sequence_length):\n",
499 | " stack_array.append(noised_sequences[:,k:k+2*additional_input+1,:])\n",
500 | " input_x = np.stack(stack_array,axis=1)\n",
501 | " input_x = np.reshape(input_x,newshape=[batch_size,sequence_length+2*gradient_depth,(1+2*additional_input)*2])\n",
502 | "\n",
503 | " # Transpose dimensions 1 and 0 because CudnnGRU layers need [time,batch,input] feeding\n",
504 | " input_x = np.transpose(input_x,axes=[1,0,2])\n",
505 | " input_labels = np.transpose(labels,axes=[1,0])\n",
506 | " \n",
507 | " yield input_x,input_labels"
508 | ],
509 | "execution_count": 10,
510 | "outputs": []
511 | },
512 | {
513 | "cell_type": "markdown",
514 | "metadata": {
515 | "id": "uVX0fZaTnIDF"
516 | },
517 | "source": [
518 | "## Starting a tensorflow session\n",
519 | "We create a session for the previously defined graph and save the initial state of the graph in training_stage 0"
520 | ]
521 | },
522 | {
523 | "cell_type": "code",
524 | "metadata": {
525 | "id": "eyffvGZ9nIDH",
526 | "colab": {
527 | "base_uri": "https://localhost:8080/",
528 | "height": 35
529 | },
530 | "outputId": "b6850dad-3d33-4415-dea6-2095e8dcf36a"
531 | },
532 | "source": [
533 | "sess_config = tf.ConfigProto()\n",
534 | "#sess_config.gpu_options.per_process_gpu_memory_fraction = 0.3 # to limit the amount of GPU memory usage\n",
535 | "sess_config.gpu_options.allow_growth = True\n",
536 | "sess = tf.Session(graph=graph, config=sess_config)\n",
537 | "sess.run(init)\n",
538 | "\n",
539 | "trained_stages = 0\n",
540 | "saver.save(sess,saver_path,global_step=trained_stages)"
541 | ],
542 | "execution_count": 11,
543 | "outputs": [
544 | {
545 | "output_type": "execute_result",
546 | "data": {
547 | "text/plain": [
548 | "'trained_models/0o1330o171m6_Model-0'"
549 | ],
550 | "application/vnd.google.colaboratory.intrinsic+json": {
551 | "type": "string"
552 | }
553 | },
554 | "metadata": {},
555 | "execution_count": 11
556 | }
557 | ]
558 | },
559 | {
560 | "cell_type": "markdown",
561 | "metadata": {
562 | "id": "sy0m9X_nnIDR"
563 | },
564 | "source": [
565 | "## Auxiliary functions"
566 | ]
567 | },
568 | {
569 | "cell_type": "code",
570 | "metadata": {
571 | "id": "3Ihe8ZHonIDU"
572 | },
573 | "source": [
574 | "# generates a python dictionary linking numpy feeds to tensorflow tensors\n",
575 | "def gen_feed_dict(x_feed, u_feed, lr_feed=1e-3):\n",
576 | " feed_dict = {\n",
577 | " x: x_feed,\n",
578 | " u_label: u_feed,\n",
579 | " lr: lr_feed,\n",
580 | " }\n",
581 | " return feed_dict\n",
582 | "\n",
583 | "# runs a single batch to predict u_hat and calculate the BER\n",
584 | "def test_step(x_feed, u_feed):\n",
585 | " return sess.run(ber,feed_dict=gen_feed_dict(x_feed,u_feed))\n",
586 | "\n",
587 | "# runs a monte carlo simulation of several test steps to get meaningful BERs\n",
588 | "def test(test_parameters, plot=False, plot_baseline=False, ber_at_time=int(sequence_length/2)):\n",
589 | " test_sigma = ebnodb2std(test_parameters['ebnodb'],code.code_rate)\n",
590 | " ber = np.zeros([len(test_parameters['ebnodb']),sequence_length])\n",
591 | " for i in range(len(test_sigma)):\n",
592 | " for x_feed,u_feed in generator(test_parameters['batch_size'],test_parameters['iterations'],gradient_depth,sequence_length,additional_input,decision_offset,test_sigma[i],0.5):\n",
593 | " curr_ber = test_step(x_feed, u_feed)\n",
594 | " ber[i] += curr_ber\n",
595 | " # logging\n",
596 | " print(\"SNR:\",test_parameters['ebnodb'][i])\n",
597 | " print(\"BER:\",ber[i]/test_parameters['iterations'])\n",
598 | " ber = ber/test_parameters['iterations']\n",
599 | " print(\"Final BER:\",ber)\n",
600 | " if (plot):\n",
601 | " plot_bler_vs_ebnodb(test_parameters['ebnodb'], ber[:,ber_at_time], plot_baseline)\n",
602 | " return ber\n",
603 | "\n",
604 | "# runs a single training step\n",
605 | "def train_step(x_feed,u_feed,lr_feed):\n",
606 | " return sess.run([step,loss,ber],feed_dict=gen_feed_dict(x_feed,u_feed,lr_feed))\n",
607 | " \n",
608 | "# runs a training set according to training_params\n",
609 | "def train(training_params):\n",
610 | " global trained_stages\n",
611 | " pl = training_params['learning']\n",
612 | " #early_stopping = training_params['early_stopping']\n",
613 | " for epoch in pl:\n",
614 | " # learning params\n",
615 | " batch_size = epoch[0]\n",
616 | " iterations = epoch[1]\n",
617 | " learning_rate = epoch[2]\n",
618 | " ebnodb = epoch[3]\n",
619 | " apriori = epoch[4]\n",
620 | " train_sigma = ebnodb2std(ebnodb,code.code_rate)\n",
621 | " # logging\n",
622 | " logging_interval = int(iterations/10)\n",
623 | " logging_it_counter = 0\n",
624 | " logging_interval_loss = 0.0\n",
625 | " logging_interval_ber = np.zeros([sequence_length])\n",
626 | " \n",
627 | " print(\"\\nTraining Epoch - Batch Size: %d, Iterations: %d, Learning Rate: %.4f, EbNodB %.1f (std: %.3f), P_apriori %.2f\" % (batch_size,iterations,learning_rate,ebnodb,train_sigma,apriori))\n",
628 | " # training\n",
629 | " for x_feed,u_feed in generator(batch_size,iterations,gradient_depth,sequence_length,additional_input,decision_offset,train_sigma,apriori):\n",
630 | " _,curr_loss,curr_ber = train_step(x_feed,u_feed,learning_rate)\n",
631 | " # logging\n",
632 | " logging_interval_loss += curr_loss\n",
633 | " logging_interval_ber += curr_ber\n",
634 | " logging_it_counter += 1\n",
635 | "\n",
636 | " if logging_it_counter%logging_interval == 0:\n",
637 | " #if early_stopping and previous_logging_interval_loss < logging_interval_loss:\n",
638 | " # print(\"\")\n",
639 | "\n",
640 | " print(\" Iteration %d to %d - Avg. Loss: %.3E Avg. BER: %.3E Min. @ BER[%d]=%.3E\" % (logging_it_counter-logging_interval,\n",
641 | " logging_it_counter,\n",
642 | " logging_interval_loss/logging_interval,\n",
643 | " np.mean(logging_interval_ber/logging_interval),\n",
644 | " np.argmin(logging_interval_ber/logging_interval),\n",
645 | " np.min(logging_interval_ber/logging_interval)))\n",
646 | " logging_interval_loss = 0.0\n",
647 | " logging_interval_ber = 0.0\n",
648 | " \n",
649 | " # save weights\n",
650 | " trained_stages += 1\n",
651 | " saver.save(sess,saver_path,global_step=trained_stages)\n",
652 | " print(\" -> saved as training stage: %s-%d\" % (model_name,trained_stages))\n",
653 | "\n",
654 | "# plots a BER curve\n",
655 | "def plot_bler_vs_ebnodb(ebnodb, ber, baseline=False):\n",
656 | " image = plt.figure(figsize=(12,6))\n",
657 | " plt.plot(ebnodb, ber, '-o')\n",
658 | " if baseline:\n",
659 | " plt.plot(ebnodb, baseline_ber, '--')\n",
660 | " plt.legend(['RNN Decoder', 'Viterbi Decoder']);\n",
661 | " plt.yscale('log')\n",
662 | " plt.xlabel('EbNo (dB)', fontsize=16)\n",
663 | " plt.ylabel('Bit-error rate', fontsize=16)"
664 | ],
665 | "execution_count": 12,
666 | "outputs": []
667 | },
668 | {
669 | "cell_type": "markdown",
670 | "metadata": {
671 | "id": "2nLozXmgnIDd"
672 | },
673 | "source": [
674 | "## Training\n",
675 | "Let's define training parameters and begin with the training.\n",
676 | "\n",
677 | "Notice that we use so-called a priori ramp-up training [1].\n",
678 | "That is, setting the a priori probability of either ones or zeros in bit vector u to a small value and later, in subsequent training epochs, raising it up till 0.5 where ones and zeros are uniformly distributed again."
679 | ]
680 | },
681 | {
682 | "cell_type": "code",
683 | "metadata": {
684 | "id": "DYsTNnVvnIDg"
685 | },
686 | "source": [
687 | "train_snr_db = 1.5\n",
688 | "training_params = {\n",
689 | " 'learning' : [ #batch_size, iterations, learning_rate, training_ebnodb, apriori\n",
690 | " [100, 1000, 0.001, train_snr_db, 0.01],\n",
691 | " [100, 1000, 0.001, train_snr_db, 0.1],\n",
692 | " [100, 1000, 0.001, train_snr_db, 0.2],\n",
693 | " [100, 1000, 0.001, train_snr_db, 0.3],\n",
694 | " [100, 1000, 0.001, train_snr_db, 0.4],\n",
695 | " [100, 500000, 0.0001, train_snr_db, 0.5],\n",
696 | " [500, 100000, 0.0001, train_snr_db, 0.5],\n",
697 | " [1000, 50000, 0.0001, train_snr_db, 0.5],\n",
698 | " [2000, 50000, 0.0001, train_snr_db, 0.5],\n",
699 | " ]\n",
700 | "}"
701 | ],
702 | "execution_count": 13,
703 | "outputs": []
704 | },
705 | {
706 | "cell_type": "code",
707 | "metadata": {
708 | "id": "60hOY8pGnIDp",
709 | "colab": {
710 | "base_uri": "https://localhost:8080/",
711 | "height": 1000
712 | },
713 | "outputId": "479f02b3-0c1c-4ad7-8ed0-1974759e441e"
714 | },
715 | "source": [
716 | "train(training_params)"
717 | ],
718 | "execution_count": 14,
719 | "outputs": [
720 | {
721 | "output_type": "stream",
722 | "name": "stdout",
723 | "text": [
724 | "\n",
725 | "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.01\n",
726 | " Iteration 0 to 100 - Avg. Loss: 2.168E-01 Avg. BER: 1.960E-02 Min. @ BER[14]=1.820E-02\n",
727 | " Iteration 100 to 200 - Avg. Loss: 4.481E-02 Avg. BER: 1.110E-02 Min. @ BER[3]=1.020E-02\n",
728 | " Iteration 200 to 300 - Avg. Loss: 2.168E-02 Avg. BER: 9.087E-03 Min. @ BER[0]=8.700E-03\n",
729 | " Iteration 300 to 400 - Avg. Loss: 2.077E-02 Avg. BER: 1.069E-02 Min. @ BER[6]=1.040E-02\n",
730 | " Iteration 400 to 500 - Avg. Loss: 1.726E-02 Avg. BER: 8.720E-03 Min. @ BER[13]=8.200E-03\n",
731 | " Iteration 500 to 600 - Avg. Loss: 1.409E-02 Avg. BER: 5.440E-03 Min. @ BER[7]=3.600E-03\n",
732 | " Iteration 600 to 700 - Avg. Loss: 7.269E-03 Avg. BER: 2.087E-03 Min. @ BER[5]=1.600E-03\n",
733 | " Iteration 700 to 800 - Avg. Loss: 6.081E-03 Avg. BER: 1.820E-03 Min. @ BER[2]=1.300E-03\n",
734 | " Iteration 800 to 900 - Avg. Loss: 4.063E-03 Avg. BER: 1.260E-03 Min. @ BER[6]=8.000E-04\n",
735 | " Iteration 900 to 1000 - Avg. Loss: 1.967E-03 Avg. BER: 6.600E-04 Min. @ BER[10]=3.000E-04\n",
736 | " -> saved as training stage: 0o1330o171m6_Model-1\n",
737 | "\n",
738 | "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.10\n",
739 | " Iteration 0 to 100 - Avg. Loss: 1.519E-01 Avg. BER: 6.061E-02 Min. @ BER[4]=5.730E-02\n",
740 | " Iteration 100 to 200 - Avg. Loss: 1.139E-01 Avg. BER: 4.728E-02 Min. @ BER[2]=4.320E-02\n",
741 | " Iteration 200 to 300 - Avg. Loss: 9.829E-02 Avg. BER: 4.007E-02 Min. @ BER[0]=3.650E-02\n",
742 | " Iteration 300 to 400 - Avg. Loss: 7.084E-02 Avg. BER: 2.793E-02 Min. @ BER[1]=2.380E-02\n",
743 | " Iteration 400 to 500 - Avg. Loss: 5.512E-02 Avg. BER: 2.075E-02 Min. @ BER[5]=1.790E-02\n",
744 | " Iteration 500 to 600 - Avg. Loss: 4.875E-02 Avg. BER: 1.853E-02 Min. @ BER[0]=1.540E-02\n",
745 | " Iteration 600 to 700 - Avg. Loss: 5.106E-02 Avg. BER: 1.949E-02 Min. @ BER[0]=1.540E-02\n",
746 | " Iteration 700 to 800 - Avg. Loss: 4.781E-02 Avg. BER: 1.792E-02 Min. @ BER[0]=1.440E-02\n",
747 | " Iteration 800 to 900 - Avg. Loss: 4.288E-02 Avg. BER: 1.667E-02 Min. @ BER[1]=1.220E-02\n",
748 | " Iteration 900 to 1000 - Avg. Loss: 4.000E-02 Avg. BER: 1.522E-02 Min. @ BER[3]=1.200E-02\n",
749 | " -> saved as training stage: 0o1330o171m6_Model-2\n",
750 | "\n",
751 | "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.20\n",
752 | " Iteration 0 to 100 - Avg. Loss: 1.995E-01 Avg. BER: 8.715E-02 Min. @ BER[0]=7.990E-02\n",
753 | " Iteration 100 to 200 - Avg. Loss: 1.838E-01 Avg. BER: 7.964E-02 Min. @ BER[0]=7.010E-02\n",
754 | " Iteration 200 to 300 - Avg. Loss: 1.764E-01 Avg. BER: 7.613E-02 Min. @ BER[0]=6.960E-02\n",
755 | " Iteration 300 to 400 - Avg. Loss: 1.582E-01 Avg. BER: 6.832E-02 Min. @ BER[3]=6.040E-02\n",
756 | " Iteration 400 to 500 - Avg. Loss: 1.508E-01 Avg. BER: 6.343E-02 Min. @ BER[0]=5.520E-02\n",
757 | " Iteration 500 to 600 - Avg. Loss: 1.463E-01 Avg. BER: 6.268E-02 Min. @ BER[2]=5.650E-02\n",
758 | " Iteration 600 to 700 - Avg. Loss: 1.455E-01 Avg. BER: 6.221E-02 Min. @ BER[0]=5.070E-02\n",
759 | " Iteration 700 to 800 - Avg. Loss: 1.361E-01 Avg. BER: 5.781E-02 Min. @ BER[0]=5.190E-02\n",
760 | " Iteration 800 to 900 - Avg. Loss: 1.379E-01 Avg. BER: 5.850E-02 Min. @ BER[0]=5.270E-02\n",
761 | " Iteration 900 to 1000 - Avg. Loss: 1.389E-01 Avg. BER: 5.840E-02 Min. @ BER[2]=5.300E-02\n",
762 | " -> saved as training stage: 0o1330o171m6_Model-3\n",
763 | "\n",
764 | "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.30\n",
765 | " Iteration 0 to 100 - Avg. Loss: 3.464E-01 Avg. BER: 1.693E-01 Min. @ BER[0]=1.521E-01\n",
766 | " Iteration 100 to 200 - Avg. Loss: 3.178E-01 Avg. BER: 1.538E-01 Min. @ BER[0]=1.445E-01\n",
767 | " Iteration 200 to 300 - Avg. Loss: 3.141E-01 Avg. BER: 1.505E-01 Min. @ BER[0]=1.394E-01\n",
768 | " Iteration 300 to 400 - Avg. Loss: 3.154E-01 Avg. BER: 1.534E-01 Min. @ BER[1]=1.452E-01\n",
769 | " Iteration 400 to 500 - Avg. Loss: 3.079E-01 Avg. BER: 1.483E-01 Min. @ BER[3]=1.389E-01\n",
770 | " Iteration 500 to 600 - Avg. Loss: 3.123E-01 Avg. BER: 1.508E-01 Min. @ BER[0]=1.375E-01\n",
771 | " Iteration 600 to 700 - Avg. Loss: 2.892E-01 Avg. BER: 1.386E-01 Min. @ BER[1]=1.267E-01\n",
772 | " Iteration 700 to 800 - Avg. Loss: 2.950E-01 Avg. BER: 1.413E-01 Min. @ BER[2]=1.317E-01\n",
773 | " Iteration 800 to 900 - Avg. Loss: 2.759E-01 Avg. BER: 1.323E-01 Min. @ BER[1]=1.236E-01\n",
774 | " Iteration 900 to 1000 - Avg. Loss: 2.751E-01 Avg. BER: 1.343E-01 Min. @ BER[0]=1.246E-01\n",
775 | " -> saved as training stage: 0o1330o171m6_Model-4\n",
776 | "\n",
777 | "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.40\n",
778 | " Iteration 0 to 100 - Avg. Loss: 5.074E-01 Avg. BER: 2.843E-01 Min. @ BER[1]=2.710E-01\n",
779 | " Iteration 100 to 200 - Avg. Loss: 4.780E-01 Avg. BER: 2.661E-01 Min. @ BER[1]=2.488E-01\n",
780 | " Iteration 200 to 300 - Avg. Loss: 5.105E-01 Avg. BER: 2.867E-01 Min. @ BER[0]=2.677E-01\n",
781 | " Iteration 300 to 400 - Avg. Loss: 4.990E-01 Avg. BER: 2.801E-01 Min. @ BER[0]=2.573E-01\n",
782 | " Iteration 400 to 500 - Avg. Loss: 4.972E-01 Avg. BER: 2.794E-01 Min. @ BER[0]=2.627E-01\n",
783 | " Iteration 500 to 600 - Avg. Loss: 4.795E-01 Avg. BER: 2.658E-01 Min. @ BER[0]=2.501E-01\n",
784 | " Iteration 600 to 700 - Avg. Loss: 4.837E-01 Avg. BER: 2.711E-01 Min. @ BER[2]=2.534E-01\n",
785 | " Iteration 700 to 800 - Avg. Loss: 4.583E-01 Avg. BER: 2.494E-01 Min. @ BER[0]=2.356E-01\n",
786 | " Iteration 800 to 900 - Avg. Loss: 4.650E-01 Avg. BER: 2.541E-01 Min. @ BER[1]=2.361E-01\n",
787 | " Iteration 900 to 1000 - Avg. Loss: 4.607E-01 Avg. BER: 2.525E-01 Min. @ BER[1]=2.432E-01\n",
788 | " -> saved as training stage: 0o1330o171m6_Model-5\n",
789 | "\n",
790 | "Training Epoch - Batch Size: 100, Iterations: 500000, Learning Rate: 0.0001, EbNodB 1.5 (std: 0.841), P_apriori 0.50\n"
791 | ]
792 | },
793 | {
794 | "output_type": "error",
795 | "ename": "KeyboardInterrupt",
796 | "evalue": "ignored",
797 | "traceback": [
798 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
799 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
800 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtraining_params\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
801 | "\u001b[0;32m\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(training_params)\u001b[0m\n\u001b[1;32m 55\u001b[0m \u001b[0;31m# training\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 56\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgenerator\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0miterations\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mgradient_depth\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0msequence_length\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0madditional_input\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdecision_offset\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtrain_sigma\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mapriori\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 57\u001b[0;31m \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcurr_loss\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcurr_ber\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlearning_rate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 58\u001b[0m \u001b[0;31m# logging\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m \u001b[0mlogging_interval_loss\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcurr_loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
802 | "\u001b[0;32m\u001b[0m in \u001b[0;36mtrain_step\u001b[0;34m(x_feed, u_feed, lr_feed)\u001b[0m\n\u001b[1;32m 31\u001b[0m \u001b[0;31m# runs a single training step\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mtrain_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlr_feed\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 33\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mber\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgen_feed_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlr_feed\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 34\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 35\u001b[0m \u001b[0;31m# runs a training set according to training_params\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
803 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 954\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 955\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 956\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 957\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 958\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
804 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1178\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1179\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1180\u001b[0;31m feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m 1181\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1182\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
805 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1357\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1358\u001b[0m return self._do_call(_run_fn, feeds, fetches, targets, options,\n\u001b[0;32m-> 1359\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1360\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1361\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
806 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m 1363\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1364\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1365\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1366\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1367\u001b[0m \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
807 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1348\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_extend_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1349\u001b[0m return self._call_tf_sessionrun(options, feed_dict, fetch_list,\n\u001b[0;32m-> 1350\u001b[0;31m target_list, run_metadata)\n\u001b[0m\u001b[1;32m 1351\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1352\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
808 | "\u001b[0;32m/tensorflow-1.15.2/python3.7/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_call_tf_sessionrun\u001b[0;34m(self, options, feed_dict, fetch_list, target_list, run_metadata)\u001b[0m\n\u001b[1;32m 1441\u001b[0m return tf_session.TF_SessionRun_wrapper(self._session, options, feed_dict,\n\u001b[1;32m 1442\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1443\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1444\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1445\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_call_tf_sessionprun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
809 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
810 | ]
811 | }
812 | ]
813 | },
814 | {
815 | "cell_type": "markdown",
816 | "metadata": {
817 | "id": "S2jKzN85nIDx"
818 | },
819 | "source": [
820 | "## Restoring already trained models\n",
821 |         "Since we used the TensorFlow saver after each training epoch, we can load already trained models and then evaluate them."
822 | ]
823 | },
824 | {
825 | "cell_type": "code",
826 | "metadata": {
827 | "id": "R5_apwuCnID1"
828 | },
829 | "source": [
830 | "#saver.restore(sess,\"%s-%d\" % (saver_path,9))"
831 | ],
832 | "execution_count": 15,
833 | "outputs": []
834 | },
835 | {
836 | "cell_type": "markdown",
837 | "metadata": {
838 | "id": "ODSuWtQWnID9"
839 | },
840 | "source": [
841 | "## Evaluation: Monte Carlo BER simulation"
842 | ]
843 | },
844 | {
845 | "cell_type": "code",
846 | "metadata": {
847 | "id": "0ulrSB8gnIEA",
848 | "colab": {
849 | "base_uri": "https://localhost:8080/"
850 | },
851 | "outputId": "a79ef38a-81a8-49d9-c64b-e24c9a5cb280"
852 | },
853 | "source": [
854 | "test_parameters = {\n",
855 | " 'batch_size' : 2000,\n",
856 | " 'iterations' : 100,\n",
857 | " 'ebnodb' : np.arange(0,5.6,0.5)\n",
858 | "}\n",
859 | "sim_ber = test(test_parameters,plot=False,plot_baseline=False)"
860 | ],
861 | "execution_count": 16,
862 | "outputs": [
863 | {
864 | "output_type": "stream",
865 | "name": "stdout",
866 | "text": [
867 | "SNR: 0.0\n",
868 | "BER: [0.45576 0.457 0.4558 0.455955 0.45695 0.45717 0.45542 0.45647\n",
869 | " 0.457045 0.455375 0.45677 0.45647 0.45766 0.45634 0.45731 ]\n",
870 | "SNR: 0.5\n",
871 | "BER: [0.44117 0.44179 0.43963 0.43956 0.43945 0.43944 0.43984 0.439545\n",
872 | " 0.44075 0.44156 0.4421 0.44107 0.44037 0.44228 0.44171 ]\n",
873 | "SNR: 1.0\n",
874 | "BER: [0.42144 0.4202 0.42042 0.421925 0.421415 0.42074 0.42227 0.42213\n",
875 | " 0.422155 0.42186 0.42179 0.422345 0.42302 0.423785 0.42311 ]\n",
876 | "SNR: 1.5\n",
877 | "BER: [0.398795 0.400125 0.39785 0.397695 0.39824 0.399015 0.39951 0.398105\n",
878 | " 0.40013 0.39963 0.398385 0.400075 0.40104 0.39912 0.39943 ]\n",
879 | "SNR: 2.0\n",
880 | "BER: [0.371985 0.37340001 0.37313 0.373795 0.373295 0.373975\n",
881 | " 0.374525 0.37428 0.37472 0.37539 0.375845 0.374815\n",
882 | " 0.376405 0.375295 0.3757 ]\n",
883 | "SNR: 2.5\n",
884 | "BER: [0.34763 0.34754 0.34817 0.347765 0.348205 0.34797 0.34716 0.346935\n",
885 | " 0.348895 0.348965 0.350425 0.349585 0.350415 0.35102 0.3515 ]\n",
886 | "SNR: 3.0\n",
887 | "BER: [0.318415 0.31746 0.317815 0.31865 0.31829 0.318725 0.319555 0.319345\n",
888 | " 0.31983 0.319635 0.32059 0.32066 0.322305 0.321975 0.322405]\n",
889 | "SNR: 3.5\n",
890 | "BER: [0.292285 0.2923 0.2931 0.293585 0.292635 0.29359 0.293415 0.29419\n",
891 | " 0.29523 0.295885 0.29455 0.29592 0.297125 0.298355 0.298075]\n",
892 | "SNR: 4.0\n",
893 | "BER: [0.27332 0.272845 0.27345 0.27313 0.27271 0.27439 0.2748 0.273955\n",
894 | " 0.27474 0.27544 0.27759 0.27654 0.277185 0.278665 0.280505]\n",
895 | "SNR: 4.5\n",
896 | "BER: [0.25164 0.25231 0.251455 0.25138 0.252965 0.253595 0.25241 0.252215\n",
897 | " 0.25412 0.25438 0.255165 0.25613 0.256195 0.257295 0.258125]\n",
898 | "SNR: 5.0\n",
899 | "BER: [0.23532 0.236625 0.235905 0.23755 0.2362 0.23742 0.237885 0.237495\n",
900 | " 0.23814 0.23905 0.239465 0.240705 0.24107 0.242395 0.242365]\n",
901 | "SNR: 5.5\n",
902 | "BER: [0.21717 0.21814 0.21866 0.21848 0.217555 0.218565\n",
903 | " 0.219365 0.219425 0.22072499 0.221625 0.222785 0.22249\n",
904 | " 0.22271 0.22363 0.225355 ]\n",
905 | "Final BER: [[0.45576 0.457 0.4558 0.455955 0.45695 0.45717\n",
906 | " 0.45542 0.45647 0.457045 0.455375 0.45677 0.45647\n",
907 | " 0.45766 0.45634 0.45731 ]\n",
908 | " [0.44117 0.44179 0.43963 0.43956 0.43945 0.43944\n",
909 | " 0.43984 0.439545 0.44075 0.44156 0.4421 0.44107\n",
910 | " 0.44037 0.44228 0.44171 ]\n",
911 | " [0.42144 0.4202 0.42042 0.421925 0.421415 0.42074\n",
912 | " 0.42227 0.42213 0.422155 0.42186 0.42179 0.422345\n",
913 | " 0.42302 0.423785 0.42311 ]\n",
914 | " [0.398795 0.400125 0.39785 0.397695 0.39824 0.399015\n",
915 | " 0.39951 0.398105 0.40013 0.39963 0.398385 0.400075\n",
916 | " 0.40104 0.39912 0.39943 ]\n",
917 | " [0.371985 0.37340001 0.37313 0.373795 0.373295 0.373975\n",
918 | " 0.374525 0.37428 0.37472 0.37539 0.375845 0.374815\n",
919 | " 0.376405 0.375295 0.3757 ]\n",
920 | " [0.34763 0.34754 0.34817 0.347765 0.348205 0.34797\n",
921 | " 0.34716 0.346935 0.348895 0.348965 0.350425 0.349585\n",
922 | " 0.350415 0.35102 0.3515 ]\n",
923 | " [0.318415 0.31746 0.317815 0.31865 0.31829 0.318725\n",
924 | " 0.319555 0.319345 0.31983 0.319635 0.32059 0.32066\n",
925 | " 0.322305 0.321975 0.322405 ]\n",
926 | " [0.292285 0.2923 0.2931 0.293585 0.292635 0.29359\n",
927 | " 0.293415 0.29419 0.29523 0.295885 0.29455 0.29592\n",
928 | " 0.297125 0.298355 0.298075 ]\n",
929 | " [0.27332 0.272845 0.27345 0.27313 0.27271 0.27439\n",
930 | " 0.2748 0.273955 0.27474 0.27544 0.27759 0.27654\n",
931 | " 0.277185 0.278665 0.280505 ]\n",
932 | " [0.25164 0.25231 0.251455 0.25138 0.252965 0.253595\n",
933 | " 0.25241 0.252215 0.25412 0.25438 0.255165 0.25613\n",
934 | " 0.256195 0.257295 0.258125 ]\n",
935 | " [0.23532 0.236625 0.235905 0.23755 0.2362 0.23742\n",
936 | " 0.237885 0.237495 0.23814 0.23905 0.239465 0.240705\n",
937 | " 0.24107 0.242395 0.242365 ]\n",
938 | " [0.21717 0.21814 0.21866 0.21848 0.217555 0.218565\n",
939 | " 0.219365 0.219425 0.22072499 0.221625 0.222785 0.22249\n",
940 | " 0.22271 0.22363 0.225355 ]]\n"
941 | ]
942 | }
943 | ]
944 | },
945 | {
946 | "cell_type": "code",
947 | "metadata": {
948 | "id": "qDuKGcjUnIEI",
949 | "colab": {
950 | "base_uri": "https://localhost:8080/",
951 | "height": 393
952 | },
953 | "outputId": "c48acfce-b0ec-41c0-f9e7-f9dd05b5dacf"
954 | },
955 | "source": [
956 | "plot_bler_vs_ebnodb(test_parameters['ebnodb'], sim_ber[:,0])\n",
957 | "plt.plot(np.arange(0,5.6,0.5),code.viterbi_reference)\n",
958 | "plt.ylim(1e-6,0.5)\n",
959 | "plt.grid()\n",
960 | "plt.legend(['RNN-Decoder','Viterbi reference'])\n",
961 | "plt.show();"
962 | ],
963 | "execution_count": 17,
964 | "outputs": [
965 | {
966 | "output_type": "display_data",
967 | "data": {
968 | "text/plain": [
969 | ""
970 | ],
971 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAt4AAAF4CAYAAABjOf4xAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdeXgUZbrG4d/bnZUtCAIqi4AggqAIUUEUAoKAGlF0BAe3AQEXXMd1dI7LjMvozDguuKCiuAGKiIAoKoosogKCAiKKigquqKBgQrbv/FEdspCEDulOpTvPfV11pau6uurl1JmZp6vf+j5zziEiIiIiItEV8LsAEREREZHaQMFbRERERKQaKHiLiIiIiFQDBW8RERERkWqg4C0iIiIiUg0UvEVEREREqkGC3wVEm5llApn169cffeCBB1bLObf8kcumLVkUFBuqMWBG84apNKyTWC01RNuWP3L5/rdscvMLSAwG2KdBStz82wpt376dunXr+l2G7AFdu9ik6xa7dO1il65ddCxfvnyzc65J6e1WW8bxTk9Pd8uWLauWc/W64002bcnaZXtSMEC3/RsSDBjBQICEgBEwIyFgBIOhv2YEA0ZC0PvrrQdICBbbN+D9DQRKrpc4bqCifQMEApAQCIQ+U7l9X1n1HX9/aTXZuQU7/22piUFuH9qFkw9rXi3/N46mGSs2cdfcdWzakkXzhqlcNbBDXPy7apP58+eTkZHhdxlSSbpusUvXLnbp2kWHmS13zqWX3h73d7z98G0ZoRsgJ78A52BHbgF5BfkUOEdeviO/wJHvvL95BQXk5xdfdzvX8wpC+xbUvC9LWbn5XP7cSv758lqSgkZC0PuykBT6mxAIkBg0EoMBEoIBEgOFr0N/A0Zigre9xGcDpY5T4WeLtiXucs7C7aHXgcK6DDPb+e+YsWIT101fRVZuPgCbtmRx3fRVAHERvgu/VHy7JYv99KVCRESkWil4R8F+DVPLvOPdvGEqU8f2rPLxnXMUOLyQHgrnBQVFwbzkekGJwF78dX6xffILKHff0se7bc4n5dQFAzo1Ize/gLz8AnILnPc334W2eX+35+STV7heUFDsPe+LR25e0Wer4ztGYUhPCBrbd+Ttcs6s3HyumvYhk9//mqQEL7wnBQNe2A8ayaFticFAsfdt5+vC7Ukl9vG+TJS1T+H7iYXrwQCBgJVdfCXE+5cKERGRmk7BOwquGtihRMABrxXjqoEdInJ8MyNoEAwEI3K8ypr0zlflfrG4fWiXiJ4rvyAU2ssK8SVCu7dP8dC+c98C73XhfsWPl5Pv/c0LnefxxRvKrCM332EG23fk7TxuTl4BOaHj5eR55yjcFmnBgIWCuxWF+FJfAsoP+972F1dsKvH/k+B9qbhp5hoKnCM5IUhyQoDkxECJ1ykJwZLbErxfLWqiEm1C776pO/oiIlKjxH3wLny4sl27dtV2zsL/oY/XPuFof7Eozusrr74vGK+t+aHcLxVTxoT3a4ULtQV5YdwL4oXBvERgLxXcc/JdGdsKyM1zJY5T+NmibUXH3ZFbwLbsPHYUe7/w89t35JdZ75asXK547sNK/d8pGLCdITx5ZzAPlB3ew9mnxP67P15ZwV939EVEpKaL++DtnJsFzEpPTx9dnec9+bDmnHxY87h8aKH4F4t46xWOxJcKM9vZW16TlPfQ7z4Nkpkypic78grYkZfv/c0t9jovP7Rexutd9i3a57esvDL3yc7Nr3ILUVnBf9OvWeSVOnBWbj7XTV/FO59vJilh13CfFCwZ6pPK/KJQ9uci0f5TGerPFxGJfXEfvCU6Cr9YxJt4/rWivC8V1w7uSOu9q3coqbz8gl2CelWD/1c//1HmubJy81n42ebQvvmhXxSq/vCA199fdBc+qVRgLxn0vdc7g/0uYb/ozr73ZaDk5xZ+9hP/ff1TduR5bUy6my8iEpsUvEVKiddf
K2rSLxUJodFt6iZH7phLN/xabpvQ4mv7ldiWH2oF2pGXH/rrvc4OhfqcvJJ38XNKhf6S73vbc/JLfWEo466/t4+3XvrufGVl5eZz+dSV3DpnLamJQVITg6QkBamTGCQ1KbSeGCQ1KVDi/cLXqUmh9xOD1Cl8Xer95IRAiVF/okW9+SJSWyh4i9Qi8fpLBVSuTSgYMC9kJvnzgDKUDP9Fwbwo/Bduz8krYOxTy8s8hgP6d2xGdm4+WTn5ZOV6y+ZtO8jKyeePnHzvvdCyJ9M2pJYI8gFSk4LUSUwIhfjALiF+l/WkXQN90ZeCIK+t/p7rZ6xWb76I1AoK3iISF2KtTagy4b95BUOUhjuSkHNuZ499VvGgXupvUYgvKLFeIsTn5PNbVi4//rbrZyPRxlM4hOdzy77ZGdLLu3tfGPBL37FPSQyU2JaSGCRYzX35hdSfLyKFFLxFJG7Ea5tQpB76LQywDaNRZEhufiiwF7sDXzLUF4S25ZGVm1/uvACFowD9lp1LVo73S0DxY+2JpIRA0Z37MsN65QN+apLXh19ewNdoOyJSXNwHbz+GExQRiaSa1J+/O4XjyDdISQxr/4rmBXj+/KPK/Ex5d++zc0MBvVjozy7xfsEubTnZoSVaAX/ztpxdZhv2Rtv5iIWfbS5zdJ2KHshNKjXSTtH7wZ3j9ldHX34h9eeLVE7cB2+/hhMUEYmkeO3P35O7+dV1976igJ+VU1Cif768gD912TdlHjsrt4B3v/h5l37+qjJj19CeECCpeKgPhfSyh8wsOUJP2UNueq8Xrd/MvfM+KzXazkeA7uaLlCfug7eIiNRcNbk3PxIBf9H6zWGPtuOc80a+KWMUnNKj63j7lTfSTqmReEqNuPNbVu7OwL9zVJ/QeXLyC/boIdxCWbkFXDZ1JX9/aXWpFp1AUStPsQd1C9t0UhK8tp2U4i0/pdqCUspYj2bfvnrzJRoUvEVExFfx2psPlbujb1Y4NnwQUqqzyiLOOXLzXamhNkuHdG8EnpFPLCv3OKd2a7GzjafwV4Cs3Hy2/JHDd8XWC/fZ04dyk4KBEg/SpiR4ffkpCYFigb6c4J8YJDmxdA+/t9/i9Zvjfux8tQn5Q8FbREQkSmKpPx+88J+UYCQl7H7m3YpG27nppIMrdd68/AKy84padHbkhdp58vJDvfdegN9RfLSdYm092cVbfkLbftmeU3K/nHyy8/Y85BeOnX/zrDVlT3xVoj2njNlwS/XkJxdr9yk9s27x3v7ix0sIRKaHXw/9+kfBW0REJIrUn797CcEA9YIB6iVHP5YUhvzCQJ9d6g58Vm5+hWPnZx66X4nWneKTcP2xPa/8ibfyo9fDXzKs776Hf8KCL3Z5eDgrN59/vvwxrfeu6935L9YGVHis6nxwtypqcpuQgreIiIhUWk3uz69IOCG/orv5twzpvEfnLSgo6uEv3pNfvM++aAbcXWfWLd76UzrUF39Ityo9/Ju35XDy+MVlvmeG18pTrG0nuVhLz86Wn1ALT/H1nZ8p1gpUVt9+crHPJAZ3/6tLWWr63XwFbxEREdkj8dqfH8m7+YUCASMl4AVMvxT28Pe56y2+25q9y/t710viztMO2TksZ3ZeyXae7OLtPaFfDXbkeds3b8srMaznjtCvB3kFe9baEwzYLgG9dJ9+cmJR6C8M708u2VDm3fy75q5T8BYRERGpaWKtNz9chT381ww6qMwvFjec0Il+BzWL6DnL6t/fOeRmqT797LxQL35uUegvDPM7in3mj5w8ftnuPQPg9e4XHaO8nP9tGb9g+EHBW0RERKSUeO3Nh+ptE6rO/n3nHL3ueJNvy7ibv1/D1KifPxxxH7w1c6WIiIhISfHYJmRmXF3O3fyqtAlF0p51rscQ59ws59yYtLQ0v0sRERERkSg6+bDm3D60C80bpmJ4D8TePrRLjfn1Iu7veIuIiIhI7VGT24Ti/o63iIiIiEhNoOAtIiIiIlINFLyjKS8HXP7u9xMR
ERGRuKce72j6eAa9Fl8GP/SFthne0ridN/2TiIiIiNQqCt7RtFcbNu/dk32/+wg+me1tq79fUQhv2wfq7+NffSIiIiJSbRS8o6nl4aw76GL27dMHfv0SvpjvLZ++Ah8+6+3T5CAvhLfpA617QYqGPRQRERGJRwre1cEMGrX1lvSRUFAA33/khfAv34blk+C9h8CC0Lxb0R3xFodDQrKvpYuIiIhIZCh4+yEQgP26esvRl0HeDvjm/aI74gv/AwvugoRU2P+ooraUZl28z4qIiIhIzFHwrgkSkqHNMd5y7N8hawt8tRi+eNsL4q//3dsvtRG06V10R7xRG99KFhEREZHKUfCuiVIbwkEneAvAb9/ClwuK7oh/PMPb3rBVUX94mz5Qr4k/9YqIiIjIbil4x4IG+8Ghw73FOdj8WVF/+JqX4IMnvf2adfFaUtpmQKuekFzPx6JFREREpLiYDN5m1ha4Hkhzzp3mdz3VygyaHOgtR46B/Dz4bmXR3fD3J8CS+yGQ6D2c2TbDC+PNu0Mw0d/aRURERGqxag/eZjYROBH40TnXudj2QcA9QBB41Dl3R3nHcM59AYwys2nRrrfGCyZAi3Rv6X0l5PwB37wbCuJvw/zbYf5tkFQPWh/ttaS0zYCmHTWRj4iIiEg18uOO9xPA/cCThRvMLAiMBwYAG4GlZjYTL4TfXurzI51zP1ZPqTEoqQ4c0M9bAP74BTYsLDaG+Kve9rpNi9pS2vSBhi39qVdERESklqj24O2cW2BmrUttPgJYH7qTjZlNAYY4527Huzsue6pOI+g0xFsAtnxdNFrKF/Nh1fPe9kYHFLWltD7G+5yIiIiIRIw556r/pF7wnl3YamJmpwGDnHPnhdbPAo50zo0r5/ONgVvx7pA/GgroZe03BhgD0KxZs+5TpkyJ8L9k97Zt20a9ejX0IUfnqLv9K/b69SP2+vVD0rauJiE/G4fxe/0D2NLwEH7d61C2pnWkIFj7JvKp0ddOKqRrF5t03WKXrl3s0rWLjr59+y53zqWX3h6TD1c6534Gzg9jvwnABID09HSXkZER5cp2NX/+fPw47x7Jz4VNy7Ev5tPgi7dpsHEmrb6ZDsFkaHVkqC0lw5v4JxD0udjoi6lrJyXo2sUmXbfYpWsXu3TtqldNCd6bgOJNxi1C26rMzDKBzHbt2kXicPEtmAitenhLxrWwYxt89Y43bOEX82HeLcAtkJLmtaO0zYAOgyGthb91i4iIiMSAmhK8lwLtzawNXuAeDvw5Egd2zs0CZqWnp4+OxPFqleR6cOBx3gKw7aeiEP7F2/DJbJhzJbQ6CjoPhU4naxIfERERkXL4MZzgZCAD2NvMNgI3OuceM7NxwFy8kUwmOufWVHdtshv1mkCX07zFOfj5c1jzIqye5gXwV67xHs7sfCocdKI3A6eIiIiIAP6ManJGOdvnAHMifT61mkSJGezdDvpc5S0/rIHVL3jLSxfB7Muh3QDvTniHwZBU1++KRURERHxVU1pNokatJtWk2cHe0u/vsOkDL4CvmQ7rXobEOl747nwqtOsPCbVvhBQRERGRuA/eUs3MoEV3bznun/D1O6EQPsP7m5wGHTO9O+Ft+ngzb4qIiIjUAko9Ej2BgDdNfeujYfCd3gOZq1+AtTNh5dNQZ284+GTvTnjLHt7+IiIiInEq7oO3erxriGAitO/vLbl3w/rXvRC+4hlY+ig0aA4Hn+KF8P0O8+6ci4iIiMSRuA/e6vGugRJTvHaTjpneWOHrXvFC+HsPw5L7oVFbL4B3PhWadvS7WhEREZGIiPvgLTVccj045E/ekvUrrJ3lhfCF/4EFd0HTg71+8M6nQqM2flcrIiIissfiPnir1SSGpO4F3c72lm0/Fj2Q+eY/vKV5dy+AH3wKNNjP72pFREREKiXun2Zzzs1yzo1JS0vzuxSpjHpN4cgxMGouXLYKBtwCBXkw92/w307w+Amw9DHY/rPflYqIiIiEJe6Dt8SBhq2g
16UwdgGMWwYZ18H2H+HlK+Df7eHpU2Hls5C91e9KRURERMoV960mEmf2bg8Z10Cfq+GH1UWzZc64AILJ0H6A145y4CBIquN3tSIiIiI7KXhLbDKDfbp4y7E3wsZloYl6XoRPZkNiXTjoeC+EH3AsJCT5XbGIiIjUcnEfvPVwZS1gBi0P95aBt8JXi70Q/vFLsOp5SEmDjid5IbxNbwgE/a5YREREaqG4D94ax7uWCQS9cN2mNxz/b/hiPqya5o2QsuIpqNs0NFvmadDicM2WKSIiItUm7oO31GLBRK/nu/0AyM2Cz17z7oR/8CS8PwHSWnpDE3Y5DfY5RLNlioiISFQpeEvtkJgKnYZ4y47f4ZM5Xgh/9wF4515o3C40W+Zp0ORAv6sVERGROKTgLbVPcn04dJi3/PELrJ3phfC374S3/wXNukDnoSRnt/S7UhEREYkjcR+89XClVKhOI+h+rrf8/n3RbJnzbqYHAdgyA44cC62PUSuKiIiIVEncP1mmmSslbPX3gR7nw3mvw6Uf8nWrofDVOzApEx48CpZNhJztflcpIiIiMSrug7fIHtmrNV+2PQuu+BiGjIdAAsy+HP7bEeZeD7986XeFIiIiEmMUvEUqkpgKh53pTVc/ci606w/vPQT3HgbPDoP186CgwO8qRUREJAbEfY+3SESYQase3vLbd7D8ca/15Omh0Lg9HDEGup7hPbgpIiIiUgbd8RaprAb7Qt+/weVr4JQJkNIAXrkK/tMR5lwNmz/zu0IRERGpgRS8RfZUQrI3JOHoN+G8N+Gg47274Penw1OnwLpX1YYiIiIiOyl4i0RCi+4wdIL3MGbf6+HHtTB5GNzXDZaMh6wtflcoIiIiPov74G1mmWY2YevWrX6XIrVBvabQ52q4bBWcNhHqNYO5f/NGQ5l9uRfIRUREpFaK++CtcbzFF8FEbwr6UXO9EVEOHgornoEHenjjgq+dDQX5flcpIiIi1Sjug7eI7/Y9FE4eD1eshWNvhJ+/gKkj4J5DYdHd3rT1IiIiEvcUvEWqS93GcMwVcOmHcPpTsFdreOMmrw3lpXHw/Sq/KxQREZEo0jjeItUtmACdTvKWH9bA+xPgw6mw4ilodRQcMRo6ZnrtKiIiIhI3dMdbxE/NDobMe+Cva+G4f8Jvm2DaX+B/h8Dbd8G2n/yuUERERCJEwVukJkjdC466GC5ZAWdMgSYd4K1/wt2dYPpY2LTc7wpFRESkitRqIlKTBILQYbC3/PRpqA1lMnw0BZqnw5FjodPJkJDkd6UiIiJSSbrjLVJTNTkQTvi3NxrKoH9B1q8wfTTcfTC8dRv8/r3fFYqIiEglKHiL1HQpDaDH+TBuGYx4AfbrCm//ywvg00bC1++Bc35XKSIiIrsR960mZpYJZLZr187vUkSqJhCA9v295efPYemjsOJpWP2CN1b4EWO9SXsSU/yuVERERMoQ93e8NXOlxKXGB8Cg2702lBP+A3k74KULvYcx37gZtm70u0IREREpJe6Dt0hcS64Hh58HF74LZ78ELXvA4v95wxFOPQs2LFIbioiISA0R960mIrWCGbTN8JZfv/LaUD54EtbOhGadvUl5upwOSXX8rVNERKQW0x1vkXiz1/5w3D+8NpTMe71tsy71pqZ/7Qb4dYOv5YmIiNRWCt4i8SqpDnQ/B85fBOfOgbZ9YMkDcE9XmPxn2LjM7wpFRERqFbWaiMQ7M2jdy1u2boRlE73l0ZehbV/oczXsf5TfVYqIiMQ93fEWqU3SWsCx/weXrYL+N8MPq+HxwfD48fD5W3oQU0REJIoUvEVqo+T6cPRlcOlHMOgO+OULeOpkeGwAfDpXAVxERCQKFLxFarOkOtDjArhkJZzwX/j9B3j2dJjQB9bOgoICvysUERGJGwreIuLNdnn4KLjkAzjpfsj+DaaeCQ/1glXToCDf7wpFRERinoK3iBQJJkK3s2DcMhj6iBe4XxgF44+Alc9Cfp7fFYqIiMQsBW8R
2VUwAQ453ZsR80+TICEVZlwA93WD5U9AXo7fFYqIiMScmA3eZnaymT1iZlPN7Di/6xGJS4EAHHwynL8Qhk+GOo28yXju7QrvTYDcbL8rFBERiRm+BG8zm2hmP5rZ6lLbB5nZOjNbb2bXVnQM59wM59xo4HxgWDTrFan1zOCg42H0W3DmC5DWEl65Cu45BN65H3K2+12hiIhIjefXHe8ngEHFN5hZEBgPDAY6AWeYWScz62Jms0stTYt99IbQ50Qk2sygXX8Y+SqcMwuadIDXrof/dYGF//EeyhQREZEy+TJzpXNugZm1LrX5CGC9c+4LADObAgxxzt0OnFj6GGZmwB3AK865D6JbsYiUYAZtenvL1+/Bgjth3i2w+F5veMIjx0LqXn5XKSIiUqOY82mijFDwnu2c6xxaPw0Y5Jw7L7R+FnCkc25cOZ+/BDgHWAqsdM49VMY+Y4AxAM2aNes+ZcqUKPxLKrZt2zbq1atX7eeVqtO1q5z6v33G/l89z94/v0deMJVNzU9gY4uTyE1Kq/ZadO1ik65b7NK1i126dtHRt2/f5c659NLbfbnjHQnOuXuBe3ezzwRgAkB6errLyMiohspKmj9/Pn6cV6pO166yMoDR8P1qEhbcxf4fv8D+382B9JFw1MVQf59qq0TXLjbpusUuXbvYpWtXvWrSqCabgJbF1luEtlWJmWWa2YStW7dW9VAiEo59OsPpk+Ci96BjJrz7APzvEJhzFWzd6Hd1IiIivqlJwXsp0N7M2phZEjAcmFnVgzrnZjnnxqSlVf/P3SK1WpMOMHSCNxnPIafDsolwT1eYeQn88qXf1YmIiFQ7v4YTnAwsATqY2UYzG+WcywPGAXOBtcBzzrk1ftQnIhHU+AAYcj9csgK6nQ0fTob7usOLF8Dmz/yuTkREpNr4NarJGeVsnwPMieS5zCwTyGzXrl0kDysildWwFZz4X+h9FbxzLyx73AvhnYfCMVdCs05+VygiIhJVNanVJCrUaiJSwzTYFwbdDpetgl6Xwqdz4cGeMGUEfLvS7+pERESiJu6Dt4jUUPWawICbvQDe+2r4ciFM6APPnA7fLPW7OhERkYhT8BYRf9VpBP2uh8tXQb8bYONSeKw/PDkENiz2uzoREZGIifvgreEERWJESprX/33ZKhjwD/jhY3jieJg4GD5/E3ya7EtERCRS4j54q8dbJMYk14Nel8BlH8Ggf8GvG+CpU+DR/rDuVQVwERGJWXEfvEUkRiWmQo/z4dKVcOLdsP1HmDwMHu4NH8+EggK/KxQREakUBW8RqdkSkr1p5y/+AIY8ADnb4bmz4MGjYNU0KMj3u0IREZGwxH3wVo+3SJwIJsJhI+Ci92Hoo4CDF0bB+CNg5bOQn+t3hSIiIhWK++CtHm+ROBNMgEP+BBcsgdOf9FpSZlzgzYa57HHI2+F3hSIiImWK++AtInEqEIBOQ2DsQjhjKtTdG2ZfBvcexj7fzVMPuIiI1DhhB28zq2tml5jZNDN7y8zah7YPN7ODoleiiEgFzKDDIDhvHpw5Hervy0Hr7oWJA+G7D/2uTkREZKewgreZtQQ+Au4C2gO9gfqht/sCV0alughQj7dILWEG7Y6FUa/zSYdL4JcvYEIGvPxXyPrV7+pERETCvuP9H2AHcCDQHbBi770NHBPhuiJGPd4itUwgwPf7HgsXL4fDR8OyiV7/9wdPqf1ERER8FW7wHgDc6Jz7Cig9e8UmoHlEqxIRqarUhnD8nTDmbWjcHmaOg8cGwLcr/a5MRERqqXCDdxLweznvpQF5kSlHRCTC9j0ERr4KJz8EW77y2k9mXwF//OJ3ZSIiUsuEG7w/Ak4t573BwPLIlCMiEgVm0PUMGLcMjhwLyx/32k+WT1L7iYiIVJtwg/ddwCgzewTvwUqATmZ2MzAq9L6ISM2W2hAG/8sbgrBJB5h1CTzWHzZ94HdlIiJSC4QVvJ1z04ELgT8Bb4Q2PwlcBoxzzr0anfKqTqOaiMgu
9ukMf3kFTpkAWzfCI/1g1qVqPxERkagKexxv59xDeA9RDgTOxGsxaeGcmxCl2iJCo5qISJnM4NBhXvtJjwu9UU/u6+aNglKQ73d1IiISh8Idx/tsM2vsnNvunHvDOfesc26uc+53M2tkZmdHu1ARkahIaQCDboPzF0HTTjD7cnj0WNioR1dERCSywr3j/ThwQDnvtQm9LyISu5p1gnNfhqGPwm/feeF75sWw/We/KxMRkTgRbvC2Ct6ri4YTFJF4YAaH/AnGLYWeF8HKZ732k6WPqf1ERESqLKG8N8ysK9Ct2KZMM+tcardUYDjwWRRqExHxR0oDGHgrHHYWzLkSXr4CPpgEx/8HWh7ud3UiIhKjyg3ewBDgxtBrB1xfzn4/4w0pKCISX5oeBOfMgjXTYe713tCDh50J/W+Gunv7XZ2IiMSYilpN/ofXv90Wr9VkaGi9+LIf0NQ5NzPKde4xDScoIlViBp1P9dpPjroEPpzitZ+8/4jaT0REpFLKDd7Oua3Oua+ccxvwQvac0Hrx5XvnnKu2aveAhhMUkYhIrg/H/QMueAf27eq1oEzoA1+/53dlIiISI8KdQOcr51xOtIsREanxmnSAs1+CPz3hjXgy8TiYcSFs+8nvykREpIYLewIdMxtjZivM7A8zyy+9RLNIEZEaxQwOPsVrP+l1GXz0HNzXHd57GPI1yJOIiJQt7Al0gPuApUAK3rjdTwO/AZ8Dt0SrQBGRGiu5Hgy4GS5cAs27wStXe+0nXy3xuzIREamBwr3jfRlwO3BBaP0B59w5eA9eZuGNbCIiUjvt3R7OehFOfxKytsDjg+DF82Hbj35XJiIiNUi4wbs9sAAoCC1JAM65X4FbgUujUp2ISKwwg05DYNz7cMxfYfULXvvJuw+q/URERIDwg3cWEAiNYPI93p3uQtvwhhUUEZGkunDs/8EFS6DF4fDqtfBwb9iw2O/KRETEZ+EG71VAu9DrhcDfzKynmR0O3AR8EoXaRERi197t4MwXYNjTsOM3eOJ4eGE0/P6935WJiIhPwg3eE4C9Qq//DtQDFgHvAgcCf418aSIiMc4MOmbCRe9D76vg4xlwXzosGQ/5uX5XJyIi1SzccbynOuduD71eDxwMDAROAdo558b8cRwAACAASURBVOZHrcIq0syVIuK7pDrQ7wa48F1o1QPm/g0eOgY2LPK7MhERqUa7Dd5mlmRmd4faSgBwzm13zr3hnJvpnNsc3RKrRjNXikiN0fgAGPE8DH8WcrfDEyfAtFHw23d+VyYiItVgt8E7NGPlWCA1+uWIiMQ5MzjoBK/9pM81sHYW3J8Oi+9V+4mISJwLt8d7BdAlmoWIiNQqianQ929w0bvQ+mh4/e/w0NHw5QK/KxMRkSgJN3j/FbjSzE40M4tmQSIitUqjtvDnqXDGFMjNgkmZ8Pxf4Ldv/a5MREQiLCHM/Z4H0oCXgFwz+wlwxd53zrn9I12ciEit0WEwtM2AxffAorvh07nQ52rocSEkJPldnYiIREC4wXseJYO2iIhEWmIqZFwLhw6HV6+DN26Elc/A8Xd5oVxERGJaWMHbOXdulOsQEZFCe7WGMybDp6/BK1fDk0Og08kw8FZIa+F3dSIisofC7fEWEZHqduBx3tjffW/wWk/uP9xrQ8nL8bsyERHZAwreIiI1WWIK9LkKLnoPDugHb9wEDx8DGxb7XZmIiFSSgreISCzYa38Y/gycMRVy/oAnjocZF8L2Gj2HmYiIFKPgLSISSzoM8u5+H305fDTVm3xn+SQoKPC7MhER2Q0FbxGRWJNUB/rfBOcvgqadYNYl8Pgg+GGN35WJiEgFdhu8zSzJzF40s97VUZCIiISpaUc492U4+UH4eT08dAy8dgPs2OZ3ZSIiUobdBm/nXA7QP5x9q4uZdTSzh8xsmpld4Hc9IiK+MYOuf4Zxy+CwM+Gd+2D8kbB2NjhNvyAiUpOEG6YXAz0icUIzm2hmP5rZ6lLbB5nZOjNbb2bX
VnQM59xa59z5wOlAr0jUJSIS0+o0gpPuhZGvQUoaTB0Bk4fDr1/5XZmIiISEG7z/Cowys3Fm1sLMgmYWKL5U4pxPAIOKbzCzIDAeGAx0As4ws05m1sXMZpdamoY+cxLwMjCnEucWEYlvrY6EsW/Dcf+ELxd6d7819reISI1gLoyfIs2s8HH58nZ2zrlwp5/HzFoDs51znUPrPYGbnHMDQ+vXhQ56exjHetk5d0I5740BxgA0a9as+5QpU8ItMWK2bdtGvXr1qv28UnW6drFL186TnP0T7dY/SpPN77K9Tks+PfACtjY82O+yyqXrFrt07WKXrl109O3bd7lzLr309nDD8i2UH7ojoTnwTbH1jcCR5e1sZhnAUCCZCu54O+cmABMA0tPTXUZGRgRKrZz58+fjx3ml6nTtYpeuXXF/gnWvUnfOVRy28m/QdQQMuAXq7u13YbvQdYtdunaxS9eueoUVvJ1zN0W5jkpxzs0H5vtchohIbOgwCNr0hgV3eg9frpsD/W+Gw86CQI15bl5EJO5V+r9xzayembU0s0j+LrEJaFlsvUVoW5WZWaaZTdi6dWskDiciEps09reIiO/CDt5mNtDMlgFbgA3AFjN738wGRKCOpUB7M2tjZknAcGBmBI6Lc26Wc25MWlpaJA4nIhLbNPa3iIhvwgreZjYQbwSResA/gAuBfwL1gTmVCd9mNhlYAnQws41mNso5lweMA+YCa4HnnHO6DSMiEg0a+1tExBfhPlx5E/AacKJzrnCEE8zsFmA2cDPwejgHcs6dUc72OURhaEAzywQy27VrF+lDi4jEtsKxv7uOgNmXe2N/HzgIBt8Je+3vd3UiInEn3FaTQ4HxxUM3QGj9AaBrpAuLFLWaiIjshsb+FhGpFuEG7x1Ag3Leqx96X0REYlUwEY66GMa9D+2OhTdugoePgQ2L/a5MRCRuhBu85wP/MLM2xTeaWSu8NpS3IltW5GhUExGRSkhrAcOfgTOmQs4f8MTxMONC2L7Z78pERGJeuMH7WiANWGdmC8xsqpm9DXwGNASuiVaBVaVWExGRPdBhEFz0Lhx9OXw0Fe5Ph+WToKBg958VEZEyhRW8nXPrgEOAe/Fmi+wGpAD3AF2dc59FrUIREfFHUt2isb+bdNTY3yIiVbTb4G1mSWZ2N9DCOXelc+5I51z70N+rnXPfVUOdIiLil6Yd4S9zYMgDGvtbRKQKdhu8nXM5wFggNfrlRJ56vEVEIsAMDhsRGvt7hMb+FhHZA+H2eK8AukSzkGhRj7eISATVaQQn3Qcj50JKmjf29+Th8OtXflcmIlLjhRu8/wpcaWYnmplFsyAREYkBrXpo7G8RkUoKN3g/DzQGXgKyzOwbM/u62KJbHSIitU3h2N8Xvaexv0VEwhDulPHzgJhs4tOU8SIiUdawpTf297pXYM7V3tjfXUfAgFug7t5+VyciUmOEG7wvBXY457KjWUw0OOdmAbPS09NH+12LiEhc6zAY2vSGBXd5D19+8rIXvg87CwLh/sAqIhK/whlOMAH4GRgQ/XJERCSmFR/7u2knb+zviQPh+9V+VyYi4rtwhhPMA34A8qNfjoiIxIXiY3//8jk83BvmXq+xv0WkVgv3t7+ngfOiWYiIiMSZ0mN/L7kfxh8Ba2dp7G8RqZXC7fHeAPzZzJbijWzyHaUetnTOTYxsaSIiEhcKx/7uOgJmXwFTz4QDB8HgO2Gv/f2uTkSk2oQbvMeH/jYHupfxvgNqZPDWqCYiIjVE4djf7z0Eb93ujf3d52roOQ4SkvyuTkQk6sJtNWmzm6VtVKqLAM1cKSJSg5Qe+3vezRr7W0RqjbDueDvnNEGOiIhEThljf3fYpx8ccYjXmiIiEocqNbCqmR1iZuPM7EYz2ye0rZ2Z1Y9OeSIiEtc6DIaL3oWjL6fZD2977SdrZ/tdlYhIVIQVvM0s2cyeB1YA9wL/B+wXevtO4ProlCciInEvNPb3B93+DfWbwdQRMG0kbP/Z
78pERCIq3DvetwL9gbOAZoAVe+8VYGCE6xIRkVpmW/22MPot6Hs9fDzTG3pwzYt+lyUiEjHhBu8zgBucc88Cv5R670ugdSSLEhGRWiqY6I10MvZtSGsBz58LU8+CbT/6XZmISJWFG7wbA2srOEZyZMqJPDPLNLMJW7du9bsUEREJV7OD4bx5cOyN8OmrXu/3qmmaeEdEYlq4wftLoGc57x0BrItMOZGn4QRFRGJUMAGOuQLGLoRGbeGFUTBlBPz+vd+ViYjskXCD95PAtWY2AkgMbXNm1he4nBo6eY6IiMSBpgfBqNdgwD/g83le7/fKybr7LSIxJ9zgfSfwMvAU8Gto2yLgDeBV59x9UahNRETEEwhCr0vg/MXQpCPMOB+ePR1++9bvykREwhZW8HbO5TvnhgN9gP8Aj+INK9jPOTciivWJiIgU2bsd/GUODLoDvlzo9X5/8KTufotITAhr5spCzrmFwMIo1SIiIrJ7gSD0uADaHwczL/aWNS9C5r3ejJgiIjVUpWauBDCzgJm9aWbto1GQiIhIWBofAOfMhuP/DV+/Bw/0hGUTdfdbRGqsSgdvvMlzMgBNEy8iIv4KBOCI0XDhO9D8MJh9OTx5Evy6we/KRER2sSfBW0REpGbZqzWcPRNO/B9sWgEPHAXvPwIFBX5XJiKyk4K3iIjEBzNI/wtcuARa9YA5V8KkTPj5c78rExEB9ix4FwA3AzExhpNmrhQRqWUatoQzX4CT7ofvV8GDvWDJA1CQ73dlIlLLhRW8zexsM2sM4Dw3O+e+D73XyMzOjmaRVaGZK0VEaiEz6HYWXPQutOkNc6+DxwfD5s/8rkxEarFw73g/DhxQznttQu+LiIjULA32gz9PhVMehp/WwUNHw+J7dPdbRHwRbvC2Ct6rC+RFoBYREZHIM4NDh8NF78EBx8Lr/wePDYAfP/G7MhGpZcqdQMfMugLdim3KNLPOpXZLBYYD+u1ORERqtvr7wPBnYPULMOcqePgYyLgWjroUgpWaT05EZI9U9N80Q4AbQ68dcH05+/0MjIpkUSIiIlFhBl1O8/q+51wJ826Bj2fCyQ9As4P9rk5E4lxFrSb/w+vfbovXajI0tF582Q9o6pybGeU6RUREIqdeUzj9SfjTJNi6ER7uA/P/Bfm5flcmInGs3DvezrmtwFYAM2sDfOecy6muwkRERKLu4JOh9THwytUw/zZYO8u7+73vIX5XJiJxKKyHK51zXyl0i4hIXKrbGE57DIY9A9t+gEf6wpu3Qp7+Z09EIqvc4G1m+WZ2ROh1QWi9vEWjmoiISGzreKI38knn02DBnTChD2z6wO+qRCSOVPRw5S3AxmKvXfTLERER8VGdRjD0Yeg8FGZdCo/2h16XQp9rIDHF7+pEJMZV1ON9c7HXN1VLNSIiIjXBgQPhwnfhteth0X/hk5e93u8W6X5XJiIxLNwJdERERGqX1IYwZDyc+QLkbPcm3XntBsjN8rsyEYlRFQZvM6tvZgPN7EQzqxfa1sHMJpvZGjObb2ZDq6dUERERH7TrDxcugW7nwDv3edPOf/2u31WJSAyq6OHKA4E1wBxgJvCpmXUHFgL9gW1AZ+B5M+tfDbWWrq+umS0zsxOr+9wiIlLLpDSAzP/B2S9Bfg5MHASvXOvdCRcRCVNFd7z/AWQDxwE9gI+BGcAKoKVz7kigFfA2cG24JzSziWb2o5mtLrV9kJmtM7P1ZhbO8a4Bngv3vCIiIlXWNgMuWAKHnwfvPQgP9oINi/yuSkRiREXBuxdwi3NunnPufeBioDlwv3MuG8A59wdwH96d73A9AQwqvsHMgsB4YDDQCTjDzDqZWRczm11qaWpmA/C+CPxYifOKiIhUXXI9OOHfcO7L3voTJ8DLV8KObf7WJSI1njlX9iiBobG5j3HOLQmtJ+HdAT/cObe82H49gMXOuWDYJzVrDcx2znUOrfcEbnLODQytXwfgnLu9nM/fCtTFC+lZwCnOuYIy9hsDjAFo
1qxZ9ylTpoRbYsRs27aNevXqVft5pep07WKXrl1sisXrFsjPps2XT9Ni42yyU5qwrsM4tux1qN9lVbtYvHbi0bWLjr59+y53zu0yDFJF43gHgPxi64WvSyf1SIzv3Rz4ptj6RuDI8nZ2zl0PYGbnApvLCt2h/SYAEwDS09NdRkZGBEqtnPnz5+PHeaXqdO1il65dbIrd6zYIvn6X1JcuouuH/wfdz4UB//D6wmuJ2L12omtXvSoK3gDNzaxt6HWw2LYtxfZpEfmywuOce8Kvc4uIiOzUqgecvwjeuhWWjIfP3oCT7vFGRBERCdld8J5WxrYZpdaNqt/13gS0LLbeIrStyswsE8hs165dJA4nIiJStsRUOO6f0OlkmHEhPH0qHHYmHHerNya4iNR6FQXvv1RbFbAUaG9mbfAC93Dgz5E4sHNuFjArPT19dCSOJyIiUqEW6TB2Abz9L1h8D6yfB5n3eLNhikitVtGU8ZOicUIzmwxkAHub2UbgRufcY2Y2DpiL19Iy0Tm3JhrnFxERibrEFOh/I3TMhJcugmdPh65nwuA7ILm+39WJiE9212oScc65M8rZPgdvsp6IUquJiIj4pnk3GPM2vH0HLLobvloEQx+Blkf4XZmI+KDCKePjgXNulnNuTFpamt+liIhIbZSQBMf+H5w7B1wBTBwIb90G+bl+VyYi1Szug7eIiEiNsH9POH8xHDLM6/+eOAh+/tzvqkSkGsV98DazTDObsHXrVr9LERGR2i6lAZzyEJz2OPy8Hh46BpZPgnImsxOR+BL3wVutJiIiUuN0HgoXvOONgDLrEpgyArZv9rsqEYmyuA/eIiIiNVJaczhrBgy8Dda/Dg8e5U28IyJxS8FbRETEL4EA9LwIRr8FdRrDM6fCnKsgN8vvykQkCuI+eKvHW0REarx9Onvhu8dF8P4EeLgPfPeh31WJSITFffBWj7eIiMSExBQYdBuc9SLs+A0eORYW/Q8K8v2uTEQiJO6Dt4iISEw5oJ/34GWHwfDGjTDpJNjyjd9ViUgEKHiLiIjUNHUawelPwskPwncr4cFesGqa31WJSBUpeIuIiNREZtD1z3D+Imh6ELwwCl44D7K2+F2ZiOyhuA/eerhSRERiWqM23nTzfW+A1dPhoaNhwyK/qxKRPRD3wVsPV4qISMwLJkCfq2DU6xBMgidOhNdvhLwcvysTkUqI++AtIiISN1p0h7ELoPs5sPh/8Gg/+Gmd31WJSJgUvEVERGJJcj3IvAeGT4bfvoWHe8P7j4BzflcmIruh4C0iIhKLDjoeLlgCrY+BOVfCM6fB7z/4XZWIVCDug7cerhQRkbhVvxmMeB6O/7f3wOWDPeGTl/2uSkTKEffBWw9XiohIXDODI0Z7vd9pLWDKn2HmxbBjm9+ViUgpcR+8RUREaoUmHWDUG3D05fDBU/DwMbBxmd9ViUgxCt4iIiLxIiEJ+t8E574M+bnw2HEw/1+Qn+d3ZSKCgreIiEj8ad3Lm/Gy86kw/zZ4fDD88oXfVYnUegreIiIi8Si1IZz6CJz6mDfW90PHwIqnNeygiI8UvEVEROJZl9PggsWw32Hw0kXw3Fnwxy9+VyVSK8V98NZwgiIiUus1bAlnz4QBt8C6V+GBnrB+nt9VidQ6cR+8NZygiIgIEAhAr0th9JteG8rTQ+GVayE32+/KRGqNuA/eIiIiUsy+h8CY+XDEWHjvQZiQAd+v8rkokdpBwVtERKS2SUyF4++EES9A1i/wSD945z4oKPC7MpG4puAtIiJSW7XvDxcsgfbHwWs3wFNDYOsmv6sSiVsK3iIiIrVZ3cYw7Gk46T7YuBwe7Amrp/tdlUhcUvAWERGp7cyg29lw/kJo3B6m/QWmj4VsjQgmEkkK3iIiIuJpfACMnAt9roVVz8ODR8NX7/hdlUjcUPAWERGRIsEE6HsdjHzVG4LwiRNg3i2Ql+N3ZSIxT8FbREREdtXyCDh/EXQdAQv/A48NgJ8+9bsqkZgW
98FbM1eKiIjsoeT6MOR+OP0p2PIVPNwblj4GzvldmUhMivvgrZkrRUREqqjTSd6wg/v3hJevgGeHwbYf/a5KJObEffAWERGRCGiwrzfhzuA74Yv58EBPWPeq31WJxBQFbxEREQlPIABHjoWxb0P9fWHyMJh9OYH8bL8rE4kJCt4iIiJSOU07wuh5cNQlsOxxui//K/y0zu+qRGo8BW8RERGpvIRkOO4fcPZLJOb+DhP6wuoX/K5KpEZT8BYREZE917YPy9Lvhn06w7SR8Mq1GvNbpBwK3iIiIlIlOcmN4dyXoceF8N6DMOlE+O1bv8sSqXEUvEVERKTqgokw6HY4bSJ8v9ob8/vLBX5XJVKjKHiLiIhI5HQ+Fca8Bal7wZNDYNHdmnBHJETBW0RERCKrSQcY/SZ0GgJv3ARTRkC2ZpAWUfAWERGRyEuuD6c9DoPugM/mwoQMrwVFpBZT8BYREZHoMIMeF3gPXuZmwaP9YeVkv6sS8Y2Ct4iIiERXqx4wdgG0SIcZ58PsyyFvh99ViVS7mAzeZpZhZgvN7CEzy/C7HhEREdmNek3hrBnQ6zJYNhEmDoQtX/tdlUi1qvbgbWYTzexHM1tdavsgM1tnZuvN7NrdHMYB24AUYGO0ahUREZEICibAgJth2DPw8+fekIPr3/C7KpFq48cd7yeAQcU3mFkQGA8MBjoBZ5hZJzPrYmazSy1NgYXOucHANcDN1Vy/iIiIVEXHE2HMfKi/Hzx9Gsz/FxQU+F2VSNQlVPcJnXMLzKx1qc1HAOudc18AmNkUYIhz7nbgxAoO9yuQHI06RUREJIoaHwDnveH1e8+/DTYuhaEToE4jvysTiRpzPgxqHwres51znUPrpwGDnHPnhdbPAo50zo0r5/NDgYFAQ+BB59z8cvYbA4wBaNasWfcpU6ZE9h8Shm3btlGvXr1qP69Una5d7NK1i026brGrStfOOfb79lXarX+UnKRGrO58Ddvqt4tsgVIu/ecuOvr27bvcOZdeenu13/GOBOfcdGB6GPtNACYApKenu4yMjChXtqv58+fjx3ml6nTtYpeuXWzSdYtdVb92fWHj6aQ8fw7pK6+D4++Cbud4wxFKVOk/d9WrpoxqsgloWWy9RWhblZlZpplN2LpVM2aJiIjUWC26w5i3ofXRMOtSeOkib+xvkThSU4L3UqC9mbUxsyRgODAzEgd2zs1yzo1JS0uLxOFEREQkWuo2hhHToM81sPIZeHQA/PKF31WJRIwfwwlOBpYAHcxso5mNcs7lAeOAucBa4Dnn3Jrqrk1ERER8FghC37/Bn5+Hrd/Awxmw7hW/qxKJiGoP3s65M5xz+zrnEp1zLZxzj4W2z3HOHeicO8A5d2ukzqdWExERkRh04HHebJeN2sDk4fDGzZCf53dVIlVSU1pNokatJiIiIjFqr/1h5FzvQctF/4WnT4FtP/ldlcgei/vgLSIiIjEsMQVOuheGjIdv3vdmu/zmfb+rEtkjcR+81WoiIiISBw47E0a9DglJ8PhgeO9h8GEuEpGqiPvgrVYTERGROLHvId5U8+36wytXwwvnwY5tflclEra4D94iIiISR1L3guGTod/fYc10ePRY+OlTv6sSCYuCt4iIiMSWQAB6XwlnToftP8EjfWHNDL+rEtmtuA/e6vEWERGJUwf09YYcbNoRnj8H5l4P+bl+VyVSrrgP3urxFhERiWNpLeDcOXDEGFhyP0w6CX7/3u+qRMoU98FbRERE4lxCEhx/Fwx9FL5bCQ8dAxsW+12VyC4UvEVERCQ+HPInOG8epDSASZmw+F4NOSg1ioK3iIiIxI9mnWD0W3DQCfD63+G5syD7N7+rEgFqQfDWw5UiIiK1TEoDOP1JOO6f8MkcmJABP3zsd1Ui8R+89XCliIhILWQGR10M58yCnG3eeN8fPed3VVLLJfhdgJ9yc3PZuHEj2dnZUTtHWloaa9eujdrxpWwpKSm0aNGCxMREv0sRERE/te7lDTn4
/F9g+mj45n0YeCskJPtdmdRCtTp4b9y4kfr169O6dWvMLCrn+P3336lfv35Uji1lc87x888/s3HjRtq0aeN3OSIi4rf6+8A5M2HezfDOffDtCjh9kjcUoUg1ivtWk4pkZ2fTuHHjqIVu8YeZ0bhx46j+kiEiIjEmmOj1fJ/+JPy0Dh7uDZ+/5XdVUsvEffDe3cOVCt3xSddVRETK1GkIjHkL6jaFp06BBXdBQYHfVUktEffBu6Y/XBkMBunatSudO3cmMzOTLVu2ALBhwwbMjPvuu2/nvuPGjeOJJ54A4Nxzz6V58+bs2LEDgM2bN9O6desyz3HTTTfRvHlzunbtSvv27Rk6dCgffxzdp7vPPfdcpk2bFtVziIiI7JG928PoedDlNHjznzB5OGT96ndVUgvEffCOpBkrNtHrjjdpc+3L9LrjTWas2FTlY6amprJy5UpWr15No0aNGD9+/M73mjZtyj333ENOTk6Znw0Gg0ycODGs81x++eWsXLmSzz77jGHDhtGvXz9++umnKtcfKXl5eX6XICIitUlSXRj6CBz/b/j8TXi4D3z3od9VSZxT8A7TjBWbuG76KjZtycIBm7Zkcd30VREJ34V69uzJpk1Fx2vSpAnHHnsskyZNKnP/yy67jLvvvrvSoXXYsGEcd9xxPPvsswAsX76cPn360L17dwYOHMh3330HwPr16+nfvz+HHnoo3bp14/PPP8c5x1VXXUXnzp3p0qULU6dOBbwHGseNG0eHDh3o378/P/74487zlXf8jIwMLrvsMtLT07nnnnsq9W8QERGpMjM4YjT85RUoyINHB8AHT/ldlcSxWj2qSXE3z1rDx9+WP7PViq+3kJNfsgcsKzefq6d9xOT3vy7zM532a8AVGa3COn9+fj7z5s1j1KhRJbZfc801DB48mJEjR+7ymVatWnH00Ufz1FNPkZmZGdZ5CnXr1o1PPvmE3NxcLr74Yl566SWaNGnC1KlTuf7665k4cSIjRozg2muv5ZRTTiE7O5uCggKmT5/OypUr+fDDD9m8eTOHH344vXv3ZsmSJaxbt46PP/6YH374gU6dOjFy5MgKjw+Qk5PDsmXLKlW7iIhIRLU83BtycNpImDkOvnnPuxOemOJ3ZRJnFLzDVDp07257uLKysujatSubNm2iY8eODBgwoMT7bdu25cgjj9x5d7q06667jiFDhnDCCSdU6rzOOQDWrVvH6tWrd543Pz+ffffdl99//51NmzZxyimnAN642ACLFi3ijDPOIBgM0qxZM/r06cPSpUtZsGDBzu377bcf/fr1q/D4hYYNG1apukVERKKi7t5w1ovw1m2w8N/w/UfeCCh7tfa7MokjCt4hN2YeXOH7ve54k01bsnbZ3rxhKlPH9iz3c7///nuFxy3s8f7j/9u79+goq3OP49/HcItcyqGgCLEVK6UkGBIIQUAuwZag0CAK1HjEg9QWsVYKbbQ1dRVbbSlWtHCsiAeLCBVEWi5FK+2BlGLkbqByEcVFe1JQY6hCoNz3+eOdpGGYSQbIzDsTfp+1ZiWzZ89+n3EDebJ93r2PHCE3N5enn36a+++//4w+Dz30ECNGjKB///5nvb9jx45kZGTw8sv/Po2rsLCQFStWAFBSUhLyum+99RZZWVk450hLS+PNN988p7gjFW78Sk2bNq2T64iIiFywS5LghochJQt+O87bcvCW5+CLuX5HJvVEva/xrm07wUgV5HYiuWHSGW3JDZMoyO10QeNWuvTSS5k+fTpPPPHEWTXbX/rSl0hNTWX58uUh31tYWMgvfvGLquePPfYYJSUlYZPuxYsXs3LlSvLz8+nUqRNlZWVVifGJEyfYvn07zZs3JyUlhSVLlgBw7Ngxjhw5Qt++fVm4cCGnTp2irKyMNWvWkJ2dTb9+/ara9+/fz+rV3t6o4cYXERGJW51uhHF/hpafg998DTbP8TsiqSfqfeJdV9sJ3pzZnp/dci3tWyZjeCvdP7vlWm7ObF83
gQKZmZmkp6fz0ksvnfVaYWEhpaWlId+XlpZGt27dahz7ySefrNpOcN68eaxatYo2bdrQqFEjXnnld448BgAAELZJREFUFR588EG6du1KRkYGxcXFALz44otMnz6d9PR0evfuzQcffMDw4cNJT0+na9euDBw4kKlTp9K2bVuGDx9Ox44dSU1N5c4776RXL+//AtQ0voiISNxq1QG+/ke45suwfAKsn+V3RFIPWGWtb32XlZXlgm/i27lzJ507d47qdXVkvH8udH6LiooYMGBA3QUkMaO5S0yat8RVr+fu5DFYdBe8s8I7+bL3t/2OqE7V67nzkZltds5lBbfX+xVvERERkfPWoDGMegFSb4aVP/ROuhQ5T7q5UkRERKQmSQ3h1tleEr7qUTh5HHIe8vYBFzkHSrxFREREapPUAG5+xkvC10yFU8fgy48o+ZZzosRbREREJBKXJMFXZ0BSY3jjl1799+ApSr4lYkq8RURERCJ1ySUw5Amv7GTdr7zke8g0r12kFkq8RURERM6FGeT+1Eu+1z4Jp45D3gxvRVykBvr1zEc5OTm8/vrrZ7Q99dRTjB8/nmXLljFlyhQAlixZwo4dO855/MmTJ59xsE6lffv2MWLEiPMLOqCgoIC0tDQKCgouaBwREZGEZAY3/AgG/ABK5sPvxsGpk7W/Ty5qWvH2UX5+PgsWLCA3999H0S5YsICpU6fSr18/8vLyAC/xHjp0KKmpqRGPHXz6ZXXt2rXjlVdeqfX9DRqE/+Mxa9YsDhw4QFJSZL/d1zaeiIhIwjGDAd+HpEbwv494ZSe3zoYGjfyOTOJUvV/xrqsj46NhxIgRrFixguPHjwOwd+9e9u3bR9++fZkzZw733XcfxcXFLFu2jIKCAjIyMtizZw979uxh8ODBdO/enb59+7Jr1y4AxowZwz333EPPnj154IEHANi6dSu9evWiY8eOPPfcc1XX6dKly1nxFBUV0bdvX/Ly8khNTeXUqVMUFBTQo0cP0tPTefbZZwHIy8ujoqKC7t27s3DhQsrKyrj11lvp0aMHPXr04I033gC8FffRo0fTp08fRo8eXWO/sWPHMmDAAK6++mqmT59eFdPcuXOrTsocPXo0QNhxREREfNF3kld6snMZvHynl4CLhFDvlyCdc8uB5VlZWd+oseNr34cP/lq3F297LVxfGPblVq1akZ2dzWuvvcawYcNYsGABo0aNwqrdHd27d2/y8vIYOnRoVXnIDTfcwMyZM+nYsSPr16/n3nvvZdWqVQCUlpZSXFxMUlISkydPZtu2baxbt47Dhw+TmZnJkCFDagx5y5YtvP3223To0IFZs2bxmc98ho0bN3Ls2DH69OnDoEGDWLZsGc2aNaOkpASA22+/nYkTJ3L99dfz97//ndzcXHbu3AnAjh07WLt2LcnJyTX227VrF6tXr+bQoUN06tSJ8ePHs3v3bh599FGKi4tp3bo1Bw4cAGDChAlhxxEREfFFr295K9+vfg9eyofb5kPDZL+jkjhT7xPveFdZblKZeM+ePbvG/hUVFRQXFzNy5MiqtmPH/v2b9ciRI88o/xg2bBjJyckkJyeTk5PDhg0byMjICDt+dnY2HTp0AGDlypVs27atqizl008/5d133616vdKf/vSnM2rQDx48SEVFBeCtjicnJ9fab8iQITRu3JjGjRtz2WWX8eGHH7Jq1SpGjhxJ69atAe8XlZrGadasWY3/7URERKIq+xveDZfL7of5I+H2hdCoqd9RSRxR4l3pxinRGffQoRpfHjZsGBMnTmTLli0cOXKE7t2719j/9OnTtGzZsmq1OVjTpmf+BbegvUWDn9f0fuccM2bMOKMGPVxM69ato0mTJjWOV1O/xo0bV32flJRUY416TeOIiIj4qtud3j7fS+6BebfC7S9DkxZ+RyVxot7XeMe7Zs2akZOTw9ixY8nPzw/Zp3nz5hwKJPAtWrSgQ4cOLFq0CPCS461bt4Ydf+nSpRw9epTy8nKK
ioro0aNHxLHl5ubyzDPPcOLECQB2797N4cOHz+o3aNAgZsyYUfU83C8FkfarNHDgQBYtWkR5eTlAVanJuY4jIiISU12/BiOeh9KN8OJw+NcnfkckcUKJdxzIz89n69atYRPv2267jccff5zMzEz27NnD/PnzmT17Nl27diUtLY2lS5eGHTs9PZ2cnByuu+46Hn74Ydq1axdxXHfffTepqal069aNLl26MG7cuJAr0dOnT2fTpk2kp6eTmprKzJkzQ44Xab9KaWlpFBYW0r9/f7p27cqkSZPOaxwREZGYSxsOo+bC/q0wNw+OHPA7IokD5pzzO4aYyMrKcps2bTqjbefOnXTu3Dmq1z106BDNmzeP6jUktAud36KiIgYMGFB3AUnMaO4Sk+YtcWnuarB7JSy8Az57Ddy5FJq18TuiM2juosPMNjvnsoLbteItIiIiEi1fHOTdZHngfZgzBA594HdE4iMl3iIiIiLR9IUcuGMxHPwH/PpG+LTU74jEJ0q8RURERKLtqj4w+ndw+GMv+f7nXr8jEh9c9In3xVLjfrHRvIqISNy5Mtur8z56EH49BMr3+B2RxNhFnXg3adKE8vJyJWn1jHOO8vJy7fMtIiLxp303GPN7OPkv+PVNUPaO3xFJDF3UB+ikpKRQWlpKWVlZ1K5x9OhRJYA+aNKkCSkpKX6HISIicra218KYFfBCnpd8/9cyuDzN76gkBhIy8TazS4CfAC2ATc65F85nnIYNG551/HldKyoqIjMzM6rXEBERkQRzWWe46zV44avebiejl0C7DL+jkiiLeamJmT1vZh+Z2dtB7YPN7B0ze8/Mvl/LMMOAFOAEoFuDRUREJPG0vgbuehUaNfcO2SndVPt7JKH5UeM9BxhcvcHMkoCngRuBVCDfzFLN7Foz+33Q4zKgE1DsnJsEjI9x/CIiIiJ1o1UHuGsFJLeCuTfD3970OyKJopgn3s65NUDwuanZwHvOufedc8eBBcAw59xfnXNDgx4f4a1y/zPw3lOxi15ERESkjrX8nLfy3bwtzLsF3v+z3xFJlMRLjXd74P+qPS8FetbQ/7fADDPrC6wJ18nMvgl8M/C0wsz8uHW4NfCxD9eVC6e5S1yau8SkeUtcmru69MMBsbya5i46Ph+qMV4S73PinDsCfD2CfrOAWdGPKDwz2+Scy/IzBjk/mrvEpblLTJq3xKW5S1yau9iKl328/wFcWe15SqBNRERERKReiJfEeyPQ0cw6mFkj4DZgmc8xiYiIiIjUGT+2E3wJeBPoZGalZvZ159xJ4D7gdWAn8LJzbnusY4sSX0td5IJo7hKX5i4xad4Sl+YucWnuYsh0XLqIiIiISPTFS6mJiIiIiEi9psQ7is7xNE6JE+FOV5X4ZmZXmtlqM9thZtvNbILfMUlkzKyJmW0ws62BuXvE75gkcmaWZGZvmdnv/Y5FImdme83sr2ZWYmY6MjNGVGoSJYHTOHcDX8Hbl3wjkO+c2+FrYFIrM+sHVABznXNd/I5HImNmVwBXOOe2mFlzYDNws/7OxT8zM6Cpc67CzBoCa4EJzrl1PocmETCzSUAW0MI5N9TveCQyZrYXyHLOaQ/vGNKKd/SEPI3T55gkAmFOV5U455zb75zbEvj+EN6N2u39jUoi4TwVgacNAw+tCiUAM0sBhgD/43csIolAiXf0hDqNU0mASAyY2VVAJrDe30gkUoFyhRLgI+CPzjnNXWJ4CngAOO13IHLOHLDSzDYHTvqWGFDiLSL1ipk1AxYD33HOHfQ7HomMc+6Ucy4D7wC1bDNTmVecM7OhwEfOuc1+xyLn5XrnXDfgRuBbgTJLiTIl3tGj0zhFYixQH7wYmO+c+63f8ci5c859AqwGBvsdi9SqD5AXqBVeAAw0s3n+hiSRcs79I/D1I+B3eCWyEmVKvKNHp3GKxFDgBr3ZwE7n3DS/45HImVkbM2sZ+D4Z76b0Xf5GJbVxzv3AOZfinLsK72fcKufcHT6HJREw
s6aBm9Axs6bAIEA7ecWAEu8oqeencdZroU5X9TsmiUgfYDTeqltJ4HGT30FJRK4AVpvZNrxFiz8657Q1nUj0XA6sNbOtwAZghXPuDz7HdFHQdoIiIiIiIjGgFW8RERERkRhQ4i0iIiIiEgNKvEVEREREYkCJt4iIiIhIDCjxFhERERGJASXeIiIiIiIxoMRbRMRHZjbGzFyYxych+l0TwZiV7x8b4rV5gZMG6/IzdDezI2bWvpZ+VwXiGlOtbXLQZz5pZn8zs9nB45nZU2b2al3GLiISSw38DkBERAAYCZQGtZ28wDF/ZGbznHPHL3Cc2jwOPF95BPV5uh44BTQEUoFHgO5m1s05dzrQ5+fA+2aW45xbfUERi4j4QIm3iEh8KHHOvVeH463EOwZ6HDCjDsc9g5l1B3KAb1/gUOsDJ/4C/MXMTgHPAZ3wTv/FObffzJYDBYASbxFJOCo1ERFJLO3MbImZVZhZuZk9bWbJIfptBJYAhWZ2aU0DmlkLM/tvM9tnZsfM7B0zm2hmFkE8dwPbnHPbg8a81Mx+FYixwsyWASmRfkjgYOBrw6D2BUCumV15DmOJiMQFJd4iIvEhycwaBD1C/Rs9D3gPuAV4EvgG8EyYMX8ItAHuD3fRwDVWAHcBTwBfBf4ATAMeiyDuwcBfQrQ/i5eUTwvE+g7wmxrGqfz8yYFV9IeA7cDbQf3+gvez6ysRxCYiEldUaiIiEh92hWhbAQwNanvVOfe9wPcrzcwBPzaznzrndlfv6Jzbbma/AR4ws2ecc5+GuMZNePXVdznn5lQbtynwXTOb5pz7OFTAZnY5cBWwNai9E3A7UOicm1JtzGbAPaHGAo4GPd8FDK1W3135mcrMrBS4Dng+zFgiInFJK94iIvFhONAj6PGdEP1eDnq+AO/f8uww4/4IaIZXFx1KP+A0Z69GzwMaAb1qiLld4GtZUHvPQEyhYg3nOrzP3BMYBRzGS9YvD9G3rNq1RUQShla8RUTiw9sR3lz5YZjnIbfyc869b2azgQlm9ssQXVoBB0LsfPJBtdfDaRL4eiyo/YpaYg1lc7WbKzeY2RpgPzAJeDCo77+AUHXtIiJxTSveIiKJJXgFuPJ5TVv5/QRIwqubDnYAaGVmjYLa21Z7PZzywNf/CGrfHxQbYZ6H5Zz7EPgYSA/xcqvAayIiCUWJt4hIYhkV9Pw2vFKR9eHe4JzbBzwNjOfsnUX+jPezYGRQ+38Cx4E3a4hlL15t9tVB7esDMYWKNSJmdgXQmqAyFjNLAj6Hd7OmiEhCUamJiEh8yDCz1iHaN1UrwQC4ycwex9unOxuvhnuuc+7dWsafAnwT6A/8rVr7a8BaYKaZtcHbSeQmvB1JfhbuxkoA59xxM1tPUH25c+6dwE2dPw7smrIRb0/xm2qIr2dg7+5LgM/j1aSfAmYG9esCXAqsqfnjiojEHyXeIiLxYVGY9jacWVZxB/BdvNXr43iHzHwvxPvO4JwrN7NpwOSg9tNmNgT4KV4t9WfxVrInAU9FEPdC4HEza+qcO1ytfRxQEYitEbAKb6eTtWHGqWx3ePXlm4F7nHMbgvoNDbxeFEFsIiJxxZxzfscgIiIJysxa4B11f69zbl4MrrcDWOycezja1xIRqWuq8RYRkfPmnDsI/Bxvr/BITro8b2Y2DO8GzSeieR0RkWhRqYmIiFyoaXi7plwB7IvidZKBO5xzn0TxGiIiUaNSExERERGRGFCpiYiIiIhIDCjxFhERERGJASXeIiIiIiIxoMRbRERERCQGlHiLiIiIiMTA/wPMbA0b3IXMYgAAAABJRU5ErkJggg==\n"
972 | },
973 | "metadata": {
974 | "needs_background": "light"
975 | }
976 | }
977 | ]
978 | },
979 | {
980 | "cell_type": "code",
981 | "metadata": {
982 | "id": "TWgUOTgynIEQ"
983 | },
984 | "source": [
985 | ""
986 | ],
987 | "execution_count": null,
988 | "outputs": []
989 | }
990 | ]
991 | }
--------------------------------------------------------------------------------
/coding.py:
--------------------------------------------------------------------------------
'''
These scripts require the Python library CommPy!

Install:

$ git clone https://github.com/veeresht/CommPy.git
$ cd CommPy
$ pip install .          (legacy alternative: python3 setup.py install)
'''
10 |
11 |
12 |
13 | import numpy as np
14 | from commpy.channelcoding import convcode as cc
15 |
class code:
    """Rate-1/2 convolutional code: encoding, Viterbi decoding, impulse response.

    Two encoder implementations are provided:
      * ``commpy_encode_*`` — CommPy's trellis-based encoder (reference).
      * ``encode_*``        — fast encoder that convolves the zero-padded
                              message with the code's impulse response
                              (mod 2); matches the CommPy output.
    """

    def __init__(self, d1, d2, m):
        """Build the trellis and precompute the impulse response.

        Args:
            d1, d2: generator polynomials of G(D) = [d1, d2] in the
                (octal) representation expected by CommPy's Trellis.
            m: number of delay elements (memory) of the convolutional
                encoder.
        """
        self.d1 = d1
        self.d2 = d2
        self.m = m  # number of delay elements in the convolutional encoder
        # G(D) corresponding to the convolutional encoder
        self.generator_matrixNSC = np.array([[self.d1, self.d2]])
        # Trellis data structure used by CommPy's encoder/decoder
        self.trellisNSC = cc.Trellis(np.array([self.m]), self.generator_matrixNSC)
        # Traceback depth of the Viterbi decoder (common 5*(m+1) heuristic)
        self.tb_depth = 5 * (self.m + 1)
        self.code_rate = self.trellisNSC.k / self.trellisNSC.n  # the code rate
        # Impulse response: encoded unit impulse (a single 1 followed by m
        # zeros). encode_sequence() uses it to encode by plain convolution.
        impulse = np.concatenate(
            [np.array([1], dtype=np.int8), np.zeros([self.m], dtype=np.int8)],
            axis=0,
        )
        self.impulse_response = self.commpy_encode_sequence(impulse).astype(np.int8)

    def commpy_encode_sequence(self, u, terminate=False):
        """Encode one message ``u`` with CommPy.

        If ``terminate`` is False, the 2*memory tail bits appended by
        CommPy's terminated encoding are stripped off.
        """
        coded = cc.conv_encode(u, self.trellisNSC, code_type='default')
        if terminate:
            return coded
        return coded[:-2 * self.trellisNSC.total_memory]

    def commpy_encode_batch(self, u, terminate=False):
        """Encode each row of the 2-D batch ``u``; returns an int8 array."""
        return np.stack(
            [self.commpy_encode_sequence(row, terminate) for row in u]
        ).astype(np.int8)

    def commpy_decode_sequence(self, y):
        """Viterbi-decode one (unquantized) received sequence ``y``."""
        return cc.viterbi_decode(y, self.trellisNSC, self.tb_depth, 'unquantized')

    def commpy_decode_batch(self, y):
        """Viterbi-decode each row of ``y``; returns an int8 array.

        Delegates to commpy_decode_sequence() so the two code paths
        cannot drift apart.
        """
        return np.stack(
            [self.commpy_decode_sequence(row) for row in y]
        ).astype(np.int8)

    def zero_pad(self, u):
        """Interleave a zero after every bit of ``u`` (rate-matching trick)."""
        return np.reshape(np.stack([u, np.zeros_like(u)], axis=1), (-1,))

    def encode_sequence(self, u, terminate=False):
        """Encode ``u`` by convolving with the precomputed impulse response.

        With ``terminate`` the tail driving the encoder back to the zero
        state is kept; otherwise the output is truncated to 2*len(u) bits.
        """
        full = np.convolve(self.zero_pad(u), self.impulse_response, mode='full')
        if terminate:
            return full[:-1] % 2
        return full[:len(u) * 2] % 2

    def encode_batch(self, u, terminate=False):
        """Encode each row of the 2-D batch ``u``; returns an int8 array."""
        return np.stack(
            [self.encode_sequence(row, terminate) for row in u]
        ).astype(np.int8)
--------------------------------------------------------------------------------