├── .gitignore ├── README.md ├── SConscript ├── examples ├── mnist.c ├── mnist.h ├── mnist_model.c ├── mnist_sm.c └── model │ ├── mnist-keras.ipynb │ ├── mnist-lg.onnx │ └── mnist-sm.onnx └── src ├── add.c ├── conv2d.c ├── dense.c ├── info.c ├── matmul.c ├── maxpool.c ├── model.c ├── onnx.h ├── relu.c ├── softmax.c └── transpose.c /.gitignore: -------------------------------------------------------------------------------- 1 | # Prerequisites 2 | *.d 3 | 4 | # Object files 5 | *.o 6 | *.ko 7 | *.obj 8 | *.elf 9 | 10 | # Linker output 11 | *.ilk 12 | *.map 13 | *.exp 14 | 15 | # Precompiled Headers 16 | *.gch 17 | *.pch 18 | 19 | # Libraries 20 | *.lib 21 | *.a 22 | *.la 23 | *.lo 24 | 25 | # Shared objects (inc. Windows DLLs) 26 | *.dll 27 | *.so 28 | *.so.* 29 | *.dylib 30 | 31 | # Executables 32 | *.exe 33 | *.out 34 | *.app 35 | *.i*86 36 | *.x86_64 37 | *.hex 38 | 39 | # Debug files 40 | *.dSYM/ 41 | *.su 42 | *.idb 43 | *.pdb 44 | 45 | # Kernel Module Compile Results 46 | *.mod* 47 | *.cmd 48 | .tmp_versions/ 49 | modules.order 50 | Module.symvers 51 | Mkfile.old 52 | dkms.conf 53 | 54 | # Project 55 | .vscode/ 56 | .sconsign.dblite 57 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![](https://raw.githubusercontent.com/onnx/onnx/master/docs/ONNX_logo_main.png) 2 | 3 | # onnx-backend 4 | 5 | **通用神经网络模型 onnx 在 RT-Thread 上的后端** 6 | 7 | [ONNX](https://onnx.ai/) (Open Neural Network Exchange) 是机器学习模型的通用格式,可以帮助大家方便地融合不同机器学习框架的模型。 8 | 9 | 如果能在 RT-Thread 上解析并运行 onnx 的模型,那么就可以在 [RT-Thread](https://www.rt-thread.org) 上运行几乎所有主流机器学习框架了,例如 Tensorflow, Keras, Pytorch, Caffe2, mxnet, 因为它们生成的模型都可以转换为 onnx。 10 | 11 | ## 支持的算子 12 | 13 | - Conv2D 14 | - Relu 15 | - Maxpool 16 | - Softmax 17 | - Matmul 18 | - Add 19 | - Flatten 20 | - Transpose 21 | 22 | ## 手写体例程 23 | 24 | 当前只有一个手写体识别的例程:利用 Keras 训练一个卷积神经网络模型,保存为 onnx 模型,再在 RT-Thread 上解析模型进行 inference,当前在 STM32F407 上测试通过。 25 | 26 | 不过这个例程分成了 3 个小的 demo,放在 examples 目录下,用来更直观地展示 onnx-backend 的工作流程,最小的 demo 只需要 16KB 内存就可以了,因此在 STM32F103C8T6 上也可以运行: 27 | 28 | | 例程文件 | 说明 | 29 | | ------------- | ---------------------------------------- | 30 | | mnist.c | 纯手动构建模型,模型参数保存在 mnist.h | 31 | | mnist_sm.c | 纯手动构建模型,模型参数从 onnx 文件加载 | 32 | | mnist_model.c | 自动从 onnx 文件加载模型和参数 | 33 | 34 | #### Keras 模型结构 35 | 36 | ``` 37 | _________________________________________________________________ 38 | Layer (type) Output Shape Param # 39 | ================================================================= 40 | conv2d_5 (Conv2D) (None, 28, 28, 2) 20 41 | _________________________________________________________________ 42 | max_pooling2d_5 (MaxPooling2 (None, 14, 14, 2) 0 43 | _________________________________________________________________ 44 | dropout_5 (Dropout) (None, 14, 14, 2) 0 45 | _________________________________________________________________ 46 | conv2d_6 (Conv2D) (None, 14, 14, 2) 38 47 | _________________________________________________________________ 48 | max_pooling2d_6 (MaxPooling2 (None, 7, 7, 2) 0 49 | _________________________________________________________________ 50 | dropout_6 (Dropout) (None, 7, 7, 2) 0 51 | _________________________________________________________________ 52 | flatten_3 (Flatten) (None, 98) 0 53 | _________________________________________________________________ 54 | dense_5 (Dense) (None, 4) 396 55 | _________________________________________________________________ 56 | dense_6 (Dense) (None, 10) 50 57 
| ================================================================= 58 | Total params: 504 59 | Trainable params: 504 60 | Non-trainable params: 0 61 | _________________________________________________________________ 62 | 63 | ``` 64 | 65 | ``` 66 | msh />onnx_mnist 1 67 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 68 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 69 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 70 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 71 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 72 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@ 73 | @@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 74 | @@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 75 | @@@@@@@@@@@@@@@@@@@@ @@@@@@@@ @@@@@@@@@@@@@@ 76 | @@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@ @@@@@@@@@@@@@@ 77 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@ @@@@@@@@@@@@@@ 78 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@ @@@@@@@@@@@@@@ 79 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 80 | @@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@ 81 | @@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 82 | @@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@@@ 83 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 84 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 85 | @@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 86 | @@@@@@@@@@ @@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 87 | @@@@@@@@@@ @@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 88 | @@@@@@@@@@ @@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@@@ 89 | @@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@@@@@ 90 | @@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@@@@@@@ 91 | @@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 92 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 93 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 94 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 95 | 96 | Predictions: 97 | 0.007383 0.000000 0.057510 0.570970 0.000000 0.105505 0.000000 0.000039 0.257576 0.001016 98 | 99 | The number is 3 100 | 101 | ``` 102 | 103 | ``` 104 | msh />onnx_mnist 0 105 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 106 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 107 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 108 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 109 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 110 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 111 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 112 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 113 | @@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 114 | @@@@@@ @@@@@@@@@@@@@@@@@@ 115 | @@@@ @@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 116 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 117 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 118 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 119 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 120 | @@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@ 121 | @@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 122 | @@@@@@@@@@@@@@@@ @@@@ @@@@@@@@@@@@@@ 123 | @@@@@@@@@@@@@@ @@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 124 | @@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 125 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@ 126 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 127 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@ 128 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@ 129 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 130 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ @@@@@@@@@@@@@@@@@@@@ 131 | @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 132 | 
@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ 133 | 134 | Predictions: 135 | 0.000498 0.000027 0.017220 0.028220 0.000643 0.002182 0.000000 0.753116 0.026616 136 | 137 | The number is 7 138 | 139 | ``` 140 | 141 | ## 注意事项 142 | 143 | 由于 onnx 的模型是 Google Protobuf v3 的格式,所以这个后端依赖于2个软件包,默认也会选中这两个软件包: 144 | 145 | - protobuf-c 146 | - onnx-parser 147 | 148 | 149 | ## Todo List 150 | 151 | - 模型量化 152 | - 解析更加复杂的模型,生成计算图, 153 | - 针对不同算子进行硬件加速。 154 | 155 | 156 | ## 联系方式 157 | 158 | - 维护:Wu Han 159 | - 主页:http://wuhanstudio.cc 160 | - 邮箱:wuhanstudio@qq.com 161 | 162 | -------------------------------------------------------------------------------- /SConscript: -------------------------------------------------------------------------------- 1 | from building import * 2 | import rtconfig 3 | 4 | # get current directory 5 | cwd = GetCurrentDir() 6 | # The set of source files associated with this SConscript file. 7 | src = Glob('src/*.c') 8 | 9 | if GetDepend('ONNX_BACKEND_USING_MNIST_EXAMPLE'): 10 | src += Glob('examples/mnist.c') 11 | 12 | if GetDepend('ONNX_BACKEND_USING_MNIST_SMALL_EXAMPLE'): 13 | src += Glob('examples/mnist_sm.c') 14 | 15 | if GetDepend('ONNX_BACKEND_USING_MNIST_MODEL_EXAMPLE'): 16 | src += Glob('examples/mnist_model.c') 17 | 18 | path = [cwd + '/src'] 19 | path += [cwd + '/examples'] 20 | 21 | LOCAL_CCFLAGS = '' 22 | 23 | group = DefineGroup('onnx-backend', src, depend = ['PKG_USING_ONNX_BACKEND'], CPPPATH = path, LOCAL_CCFLAGS = LOCAL_CCFLAGS) 24 | 25 | Return('group') 26 | -------------------------------------------------------------------------------- /examples/mnist.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "mnist.h" 9 | #include "onnx.h" 10 | 11 | int mnist(int argc, char const *argv[]) 12 | { 13 | int img_index = 0; 14 | if(argc == 2) 15 | { 16 | img_index = atoi(argv[1]); 17 | } 18 | print_img(img[img_index]); 19 | 20 | // 1. Conv2D 21 | int64_t shapeW3[] = {2, 1, 3, 3}; 22 | int64_t dimW3 = 4; 23 | int64_t permW3_t[] = { 0, 2, 3, 1}; 24 | float* W3_t = transpose(W3, shapeW3, dimW3, permW3_t); 25 | 26 | float* conv1 = (float*) malloc(sizeof(float)*28*28*2); 27 | memset(conv1, 0, sizeof(sizeof(float)*28*28*2)); 28 | conv2D(img[img_index], 28, 28, 1, W3, 2, 3, 3, 1, 1, 1, 1, B3, conv1, 28, 28); 29 | 30 | free(W3_t); 31 | 32 | // 2. Relu 33 | float* relu1 = (float*) malloc(sizeof(float)*28*28*2); 34 | relu(conv1, 28*28*2, relu1); 35 | 36 | free(conv1); 37 | 38 | // 3. Maxpool 39 | float* maxpool1 = (float*) malloc(sizeof(float)*14*14*2); 40 | memset(maxpool1, 0, sizeof(sizeof(float)*14*14*2)); 41 | maxpool(relu1, 28, 28, 2, 2, 2, 0, 0, 2, 2, 14, 14, maxpool1); 42 | 43 | free(relu1); 44 | 45 | // 4. Conv2D 46 | int64_t shapeW2[] = {2, 2, 3, 3}; 47 | int64_t dimW2 = 4; 48 | int64_t perm_t[] = { 0, 2, 3, 1}; 49 | float* W2_t = transpose(W2, shapeW2, dimW2, perm_t); 50 | 51 | float* conv2 = (float*) malloc(sizeof(float)*14*14*2); 52 | memset(conv2, 0, sizeof(sizeof(float)*14*14*2)); 53 | conv2D(maxpool1, 14, 14, 2, W2_t, 2, 3, 3, 1, 1, 1, 1, B2, conv2, 14, 14); 54 | 55 | free(W2_t); 56 | free(maxpool1); 57 | 58 | // 5. Relu 59 | float* relu2 = (float*) malloc(sizeof(float)*14*14*2); 60 | relu(conv2, 14*14*2, relu2); 61 | 62 | free(conv2); 63 | 64 | // 6. 
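// (Illustrative note, continuing the step comment above.) The maxpool() calls in this
// example shrink each spatial dimension by 2 using a 2x2 window with stride 2 on
// channel-last (HWC) buffers. A minimal sketch of that computation, assuming HWC layout,
// no padding, and placeholder names in/out/h/w/c, is kept commented out here; the real
// implementation with configurable kernel, stride and padding lives in src/maxpool.c.
//
//     /* for an h x w x c input, producing an (h/2) x (w/2) x c output */
//     for (int y = 0; y < h / 2; y++)
//         for (int x = 0; x < w / 2; x++)
//             for (int k = 0; k < c; k++)
//             {
//                 float m = in[((2 * y) * w + 2 * x) * c + k];
//                 for (int dy = 0; dy < 2; dy++)
//                     for (int dx = 0; dx < 2; dx++)
//                     {
//                         float v = in[((2 * y + dy) * w + (2 * x + dx)) * c + k];
//                         if (v > m) m = v;
//                     }
//                 out[(y * (w / 2) + x) * c + k] = m;
//             }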
Maxpool 65 | float* maxpool2 = (float*) malloc(sizeof(float)*7*7*2); 66 | memset(maxpool2, 0, sizeof(sizeof(float)*7*7*2)); 67 | maxpool(relu2, 14, 14, 2, 2, 2, 0, 0, 2, 2, 7, 7, maxpool2); 68 | 69 | free(relu2); 70 | 71 | // Flatten NOT REQUIRED 72 | 73 | // 7. Dense 74 | int64_t shapeW1[] = {98, 4}; 75 | int64_t dimW1 = 2; 76 | int64_t permW1_t[] = { 1, 0}; 77 | float* W1_t = transpose(W1, shapeW1, dimW1, permW1_t); 78 | 79 | float* dense1 = (float*) malloc(sizeof(float)*4); 80 | memset(dense1, 0, sizeof(sizeof(float)*4)); 81 | dense(maxpool2, W1_t, 98, 4, B1, dense1); 82 | 83 | free(W1_t); 84 | free(maxpool2); 85 | 86 | // 8. Dense 87 | int64_t shapeW[] = {4, 10}; 88 | int64_t dimW = 2; 89 | int64_t permW_t[] = { 1, 0}; 90 | float* W_t = transpose(W, shapeW, dimW, permW_t); 91 | 92 | float* dense2 = (float*) malloc(sizeof(float)*10); 93 | memset(dense2, 0, sizeof(sizeof(float)*10)); 94 | dense(dense1, W_t, 4, 10, B, dense2); 95 | 96 | free(W_t); 97 | free(dense1); 98 | 99 | // 9. Softmax 100 | float* output = (float*) malloc(sizeof(float)*10); 101 | memset(output, 0, sizeof(sizeof(float)*10)); 102 | softmax(dense2, 10, output); 103 | 104 | // 10. Result 105 | float max = 0; 106 | int max_index = 0; 107 | printf("\nPredictions: \n"); 108 | for(int i = 0; i < 10; i++) 109 | { 110 | printf("%f ", output[i]); 111 | if(output[i] > max) 112 | { 113 | max = output[i]; 114 | max_index = i; 115 | } 116 | } 117 | printf("\n"); 118 | printf("\nThe number is %d\n", max_index); 119 | 120 | free(dense2); 121 | free(output); 122 | 123 | return 0; 124 | } 125 | MSH_CMD_EXPORT(mnist, mnist simple example) 126 | -------------------------------------------------------------------------------- /examples/mnist.h: -------------------------------------------------------------------------------- 1 | #ifndef __MNIST_H__ 2 | #define __MNIST_H__ 3 | 4 | #include 5 | #include 6 | 7 | #define IMG0 {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3803922, 0.37647063, 0.3019608, 0.46274513, 0.2392157, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.3529412, 0.5411765, 0.9215687, 0.9215687, 0.9215687, 0.9215687, 0.9215687, 0.9215687, 0.9843138, 0.9843138, 0.9725491, 0.9960785, 0.9607844, 0.9215687, 0.74509805, 0.08235294, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.54901963, 0.9843138, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.7411765, 0.09019608, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.8862746, 0.9960785, 0.81568635, 0.7803922, 0.7803922, 0.7803922, 0.7803922, 0.54509807, 0.2392157, 0.2392157, 0.2392157, 0.2392157, 0.2392157, 0.5019608, 0.8705883, 0.9960785, 0.9960785, 0.7411765, 0.08235294, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14901961, 0.32156864, 0.050980397, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.13333334, 0.8352942, 0.9960785, 0.9960785, 0.45098042, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32941177, 0.9960785, 0.9960785, 0.9176471, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32941177, 0.9960785, 0.9960785, 0.9176471, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4156863, 0.6156863, 0.9960785, 0.9960785, 0.95294124, 0.20000002, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.098039225, 0.45882356, 0.8941177, 0.8941177, 0.8941177, 0.9921569, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.94117653, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.26666668, 0.4666667, 0.86274517, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.9960785, 0.5568628, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.14509805, 0.73333335, 0.9921569, 0.9960785, 0.9960785, 0.9960785, 0.8745099, 0.8078432, 0.8078432, 0.29411766, 0.26666668, 0.8431373, 0.9960785, 0.9960785, 0.45882356, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.4431373, 0.8588236, 0.9960785, 0.9490197, 0.89019614, 0.45098042, 0.34901962, 0.121568635, 0.0, 0.0, 0.0, 0.0, 0.7843138, 0.9960785, 0.9450981, 0.16078432, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6627451, 0.9960785, 0.6901961, 0.24313727, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.18823531, 0.9058824, 0.9960785, 0.9176471, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.07058824, 0.48627454, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32941177, 0.9960785, 0.9960785, 0.6509804, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.54509807, 0.9960785, 0.9333334, 0.22352943, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.8235295, 0.9803922, 0.9960785, 0.65882355, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.9490197, 0.9960785, 0.93725497, 0.22352943, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.34901962, 0.9843138, 0.9450981, 0.3372549, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.019607844, 0.8078432, 0.96470594, 0.6156863, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.015686275, 0.45882356, 0.27058825, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0} 8 | #define IMG0_LABEL 7 9 | 10 | #define IMG1 {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.121568635, 0.5176471, 0.9960785, 0.9921569, 0.9960785, 0.8352942, 0.32156864, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08235294, 0.5568628, 0.91372555, 0.98823535, 0.9921569, 0.98823535, 0.9921569, 0.98823535, 0.8745099, 0.078431375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.48235297, 0.9960785, 0.9921569, 0.9960785, 0.9921569, 0.87843144, 0.7960785, 0.7960785, 0.8745099, 1.0, 0.8352942, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7960785, 0.9921569, 0.98823535, 0.9921569, 0.8313726, 0.078431375, 0.0, 0.0, 0.2392157, 0.9921569, 0.98823535, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16078432, 0.95294124, 0.87843144, 0.7960785, 0.7176471, 0.16078432, 0.59607846, 0.11764707, 0.0, 0.0, 1.0, 0.9921569, 0.40000004, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15686275, 0.078431375, 0.0, 0.0, 0.40000004, 0.9921569, 0.19607845, 0.0, 0.32156864, 0.9921569, 0.98823535, 0.078431375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.32156864, 0.83921576, 0.121568635, 0.4431373, 0.91372555, 0.9960785, 0.91372555, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24313727, 0.40000004, 0.32156864, 0.16078432, 0.9921569, 0.909804, 0.9921569, 0.98823535, 0.91372555, 0.19607845, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.59607846, 0.9921569, 0.9960785, 0.9921569, 0.9960785, 0.9921569, 0.9960785, 0.91372555, 0.48235297, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.59607846, 0.98823535, 0.9921569, 0.98823535, 0.9921569, 0.98823535, 0.75294125, 0.19607845, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24313727, 0.7176471, 0.7960785, 0.95294124, 0.9960785, 0.9921569, 0.24313727, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.15686275, 0.6745098, 0.98823535, 0.7960785, 0.078431375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08235294, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.7176471, 0.9960785, 0.43921572, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.24313727, 0.7960785, 0.6392157, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.2392157, 0.9921569, 0.5921569, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08235294, 0.83921576, 0.75294125, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.043137256, 0.8352942, 0.9960785, 0.5921569, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.40000004, 0.9921569, 0.5921569, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16078432, 0.8352942, 0.98823535, 0.9921569, 0.43529415, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.16078432, 1.0, 0.8352942, 0.36078432, 0.20000002, 0.0, 0.0, 0.121568635, 0.36078432, 0.6784314, 0.9921569, 0.9960785, 0.9921569, 0.5568628, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.6745098, 0.98823535, 0.9921569, 0.98823535, 0.7960785, 0.7960785, 0.91372555, 0.98823535, 0.9921569, 0.98823535, 0.9921569, 0.50980395, 0.078431375, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.08235294, 0.7960785, 1.0, 0.9921569, 0.9960785, 0.9921569, 0.9960785, 0.9921569, 0.9568628, 0.7960785, 0.32156864, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.078431375, 0.5921569, 0.5921569, 0.9921569, 0.67058825, 0.5921569, 0.5921569, 0.15686275, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0} 11 | #define IMG1_LABEL 3 12 | 13 | #define TOTAL_IMAGE 2 14 | 15 | static const float img[][784] = {IMG0, IMG1}; 16 | static const signed char label[] = {IMG0_LABEL, IMG1_LABEL}; 17 | 18 | static const float W3[] = {-0.3233681, -0.4261553, -0.6519891, 0.79061985, -0.2210753, 0.037107922, 0.3984157, 0.22128074, 0.7975414, 0.2549885, 0.3076058, 0.62500215, -0.58958095, 0.20375429, -0.06477713, -1.566038, -0.37670124, -0.6443057}; 19 | static const float B3[] = {-0.829373, -0.14096421}; 20 | 21 | static const float W2[] = {0.0070440695, 0.23192555, 0.036849476, -0.14687373, -0.15593372, 0.0044246824, 0.27322513, -0.027562773, 0.23404223, -0.6354651, -0.55645454, -0.77057034, 0.15603222, 0.71015775, 0.23954256, 1.8201442, -0.018377468, 1.5745461, 1.7230825, -0.59662616, 1.3997843, 0.33511618, 0.56846994, 0.3797911, 0.035079807, -0.18287429, -0.032232445, 0.006910181, -0.0026898328, -0.0057844054, 0.29354542, 0.13796881, 0.3558416, 0.0022847173, 0.0025906325, -0.022641085}; 22 | static const float B2[] = {-0.11655525, -0.0036503011}; 23 | 24 | static const float W1[] = {0.15791991, -0.22649878, 0.021204736, 0.025593571, 0.008755621, -0.775102, -0.41594088, -0.12580238, -0.3963741, 0.33545518, -0.631953, -0.028754484, -0.50668705, -0.3574023, -3.7807872, -0.8261617, 0.102246165, 0.571127, -0.6256297, 0.06698781, 0.55969477, 0.25374785, -3.075965, -0.6959133, 0.2531965, 0.31739804, -0.8664238, 0.12750633, 0.83136076, 0.2666574, -2.5865922, -0.572031, 0.29743987, 0.16238026, -0.99154145, 0.077973805, 0.8913329, 0.16854058, -2.5247803, -0.5639109, 0.41671264, -0.10801031, -1.0229865, 0.2062031, 0.39889312, -0.16026731, -1.9185526, -0.48375717, 0.057339806, -1.2573057, -0.23117211, 1.051854, -0.7981992, -1.6263007, -0.26003376, -0.07649365, -0.4646075, 0.755821, 0.13187818, 0.24743222, -1.5276812, 0.1636555, -0.075465426, -0.058517877, -0.33852127, 1.3052516, 0.14443535, 0.44080895, -0.31031442, 0.15416017, 0.0053661224, -0.03175326, -0.15991405, 0.66121936, 0.0832211, 0.2651985, -0.038445678, 0.18054117, -0.0073251156, 0.054193687, -0.014296916, 0.30657783, 0.006181963, 0.22319937, 0.030315898, 0.12695274, -0.028179673, 0.11189027, 0.035358384, 0.046855893, -0.026528472, 0.26450494, 0.069981076, 0.107152134, -0.030371506, 
0.09524366, 0.24802336, -0.36496836, -0.102762334, 0.49609017, 0.04002767, 0.020934932, -0.054773595, 0.05412083, -0.071876526, -1.5381132, -0.2356421, 1.5890793, -0.023087852, -0.24933836, 0.018771818, 0.08040064, 0.051946845, 0.6141782, 0.15780787, 0.12887044, -0.8691056, 1.3761537, 0.43058, 0.13476849, -0.14973496, 0.4542634, 0.13077497, 0.23117822, 0.003657386, 0.42742714, 0.23396699, 0.09209521, -0.060258932, 0.4642852, 0.10395402, 0.25047097, -0.05326261, 0.21466804, 0.11694269, 0.22402634, 0.12639907, 0.23495848, 0.12770525, 0.3324459, 0.0140223345, 0.106348366, 0.10877733, 0.30522102, 0.31412345, -0.07164018, 0.13483422, 0.45414954, 0.054698735, 0.07451815, 0.097312905, 0.27480683, 0.4866108, -0.43636885, -0.13586079, 0.5724732, 0.13595985, -0.0074526076, 0.11859829, 0.24481037, -0.37537888, -0.46877658, -0.5648533, 0.86578417, 0.3407381, -0.17214134, 0.040683553, 0.3630519, 0.089548275, -0.4989473, 0.47688767, 0.021731026, 0.2856471, 0.6174715, 0.7059148, -0.30635756, -0.5705427, -0.20692639, 0.041900065, 0.23040071, -0.1790487, -0.023751246, 0.14114629, 0.02345284, -0.64177734, -0.069909826, -0.08587972, 0.16460821, -0.53466517, -0.10163383, -0.13119817, 0.14908728, -0.63503706, -0.098961875, -0.23248474, 0.15406314, -0.48586813, -0.1904713, -0.20466608, 0.10629631, -0.5291871, -0.17358926, -0.36273107, 0.12225631, -0.38659447, -0.24787207, -0.25225234, 0.102635615, -0.14507034, -0.10110793, 0.043757595, -0.17158166, -0.031343404, -0.30139172, -0.09401665, 0.06986169, -0.54915506, 0.66843456, 0.14574362, -0.737502, 0.7700305, -0.4125441, 0.10115133, 0.05281194, 0.25467375, 0.22757779, -0.030224197, -0.0832025, -0.66385627, 0.51225215, -0.121023245, -0.3340579, -0.07505331, -0.09820366, -0.016041134, -0.03187605, -0.43589246, 0.094394326, -0.04983066, -0.0777906, -0.12822862, -0.089667186, -0.07014707, -0.010794195, -0.29095307, -0.01319235, -0.039757702, -0.023403417, -0.15530063, -0.052093383, -0.1477549, -0.07557954, -0.2686017, -0.035220042, -0.095615104, -0.015471024, -0.03906604, 0.024237331, -0.19604297, -0.19998372, -0.20302829, -0.04267139, -0.18774728, -0.045169186, -0.010131819, 0.14829905, -0.117015064, -0.4180649, -0.20680964, -0.024034742, -0.15787442, -0.055698488, -0.09037726, 0.40253848, -0.35745984, -0.786149, -0.0799551, 0.16205557, -0.14461482, -0.2749642, 0.2683253, 0.6881363, -0.064145364, 0.11361358, 0.59981894, 1.2947721, -1.2500908, 0.6082035, 0.12344158, 0.15808935, -0.17505693, 0.03425684, 0.39107767, 0.23190938, -0.7568858, 0.20042256, 0.079169095, 0.014275463, -0.12135842, 0.008516737, 0.26897284, 0.05706199, -0.52615446, 0.12489152, 0.08065737, -0.038548164, -0.08894516, 7.250979E-4, 0.28635752, -0.010820533, -0.39301336, 0.11144395, 0.06563818, -0.033744805, -0.07450528, -0.027328406, 0.3002447, 0.0029921278, -0.47954947, -0.04527057, -0.010289918, 0.039380465, -0.09236952, -0.1924659, 0.15401903, 0.21237805, -0.38984418, -0.37384143, -0.20648403, 0.29201767, -0.1299253, -0.36048025, -0.5544466, 0.45723814, -0.35266167, -0.94797707, -1.2481197, 0.88701195, 0.33620682, 0.0035414647, -0.22769359, 1.4563162, 0.54950374, 0.38396382, -0.41196275, 0.3758704, 0.17687413, 0.038129736, 0.16358295, 0.70515764, 0.055063568, 0.6445265, -0.2072113, 0.14618243, 0.10311305, 0.1971523, 0.174206, 0.36578146, -0.09782787, 0.5229244, -0.18459272, -0.0013945608, 0.08863555, 0.24184574, 0.15541393, 0.1722381, -0.10531331, 0.38215113, -0.30659106, -0.16298945, 0.11549875, 0.30750987, 0.1586183, -0.017728966, -0.050216004, 0.26232007, -1.2994286, -0.22700997, 0.108534105, 
0.7447398, -0.39803517, 0.016863048, 0.10067235, -0.16355589, -0.64953077, -0.5674107, 0.017935256, 0.98968256, -1.395801, 0.44127485, 0.16644385, -0.19195901}; 25 | static const float B1[] = {1.2019119, -1.1770505, 2.1698284, -1.9615222}; 26 | 27 | static const float W[] = {0.55808353, 0.78707385, -0.040990848, -0.122510895, -0.41261443, -0.036044, 0.1691557, -0.14711425, -0.016407091, -0.28058195, 0.018765535, 0.062936015, 0.49562064, 0.33931744, -0.47547337, -0.1405672, -0.88271654, 0.18359914, 0.020887045, -0.13782434, -0.052250575, 0.67922074, -0.28022966, -0.31278887, 0.44416663, -0.26106882, -0.32219923, 1.0321393, -0.1444394, 0.5221766, 0.057590708, -0.96547794, -0.3051688, 0.16859075, -0.5320585, 0.42684716, -0.5434046, 0.014693736, 0.26795483, 0.15921915}; 28 | static const float B[] = {0.041442648, 1.461427, 0.07154641, -1.2774754, 0.80927604, -1.6933714, -0.29740578, -0.11774022, 0.3292682, 0.6596958}; 29 | 30 | // ASCII lib from (https://www.jianshu.com/p/1f58a0ebf5d9) 31 | static const char codeLib[] = "@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\\|()1{}[]?-_+~<>i!lI;:,\"^`'. "; 32 | static void print_img(const float * buf) 33 | { 34 | for(int y = 0; y < 28; y++) 35 | { 36 | for (int x = 0; x < 28; x++) 37 | { 38 | int index = 0; 39 | if(buf[y*28+x] > 0.6f) index =69; 40 | if(index < 0) index = 0; 41 | printf("%c",codeLib[index]); 42 | printf("%c",codeLib[index]); 43 | } 44 | printf("\n"); 45 | } 46 | } 47 | 48 | #endif //__MNIST_H__ 49 | -------------------------------------------------------------------------------- /examples/mnist_model.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | 6 | #include "mnist.h" 7 | #include "onnx.h" 8 | 9 | #define MNIST_TEST_IMAGE 1 10 | #define ONNX_MODEL_NAME "mnist-sm.onnx" 11 | 12 | #define THREAD_PRIORITY 8 13 | #define THREAD_STACK_SIZE 5120 14 | #define THREAD_TIMESLICE 5 15 | 16 | static rt_thread_t tid1 = RT_NULL; 17 | 18 | static void mnist_model_entry(void* parameter) 19 | { 20 | // 0. Load Model 21 | Onnx__ModelProto* model = onnx_load_model(ONNX_MODEL_NAME); 22 | if(model == NULL) 23 | { 24 | printf("Failed to load model %s\n", ONNX_MODEL_NAME); 25 | return; 26 | } 27 | 28 | // 1. Initialize input 29 | int64_t* shapeInput = (int64_t*) malloc(sizeof(int64_t)*3); 30 | shapeInput[0] = 28; shapeInput[1] = 28; shapeInput[2] = 1; 31 | 32 | float* input = (float*) malloc(sizeof(int64_t)*28*28); 33 | memcpy(input, img[MNIST_TEST_IMAGE], sizeof(float)*28*28); 34 | 35 | print_img(input); 36 | printf("\n"); 37 | 38 | // 2. Run Model 39 | float* output = onnx_model_run(model, input, shapeInput); 40 | 41 | // 3. Print Result 42 | float max = 0; 43 | int max_index = 0; 44 | printf("\nPredictions: \n"); 45 | for(int i = 0; i < 10; i++) 46 | { 47 | printf("%f ", output[i]); 48 | if(output[i] > max) 49 | { 50 | max = output[i]; 51 | max_index = i; 52 | } 53 | } 54 | printf("\n"); 55 | printf("\nThe number is %d\n", max_index); 56 | 57 | // 4. 
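// (Inserted note, continuing the step comment above.) onnx_model_run() returns a newly
// allocated buffer holding the network output, so the caller releases it together with
// the input shape array and the unpacked protobuf model, as done below. Presumably the
// function walks the graph parsed from the .onnx file and dispatches each node to the
// operators implemented under src/ (Conv2D, Relu, MaxPool, MatMul, Add, Softmax);
// mnist_sm.c shows the same network evaluated layer by layer instead.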
Free model 58 | free(shapeInput); 59 | free(output); 60 | onnx__model_proto__free_unpacked(model, NULL); 61 | } 62 | 63 | static void mnist_model(int argc, char *argv[]) 64 | { 65 | tid1 = rt_thread_create("tonnx_model", 66 | mnist_model_entry, RT_NULL, 67 | THREAD_STACK_SIZE, 68 | THREAD_PRIORITY, THREAD_TIMESLICE); 69 | 70 | if (tid1 != RT_NULL) 71 | { 72 | 73 | rt_thread_startup(tid1); 74 | } 75 | else 76 | { 77 | rt_kprintf("Failed to start onnx thread\n"); 78 | } 79 | } 80 | MSH_CMD_EXPORT(mnist_model, load mnist onnx model from file); 81 | -------------------------------------------------------------------------------- /examples/mnist_sm.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | 6 | #include "mnist.h" 7 | #include "onnx.h" 8 | 9 | #define MNIST_TEST_IMAGE 1 10 | #define ONNX_MODEL_NAME "/mnist-sm.onnx" 11 | 12 | #define THREAD_PRIORITY 8 13 | #define THREAD_STACK_SIZE 5120 14 | #define THREAD_TIMESLICE 5 15 | 16 | static rt_thread_t tid1 = RT_NULL; 17 | 18 | static void mnist_sm_entry(void* parameter) 19 | { 20 | // Load Model 21 | Onnx__ModelProto* model = onnx_load_model(ONNX_MODEL_NAME); 22 | if(model == NULL) 23 | { 24 | printf("Failed to load model %s\n", ONNX_MODEL_NAME); 25 | return; 26 | } 27 | 28 | // Set input image: NWHC 29 | print_img(img[MNIST_TEST_IMAGE]); 30 | 31 | // 0. Initialize input shape 32 | int64_t* shapeInput = (int64_t*) malloc(sizeof(int64_t)*3); 33 | int64_t* shapeOutput = (int64_t*) malloc(sizeof(int64_t)*3); 34 | shapeInput[0] = 28; 35 | shapeInput[1] = 28; 36 | shapeInput[2] = 1; 37 | 38 | // 1. Transpose 39 | // float* input = transpose_layer(model->graph, img[img_index], shapeInput, shapeOutput, "Transpose6"); 40 | // memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 41 | 42 | // 2. Conv2D 43 | float* conv1 = conv2D_layer(model->graph, img[MNIST_TEST_IMAGE], shapeInput, shapeOutput, "conv2d_5"); 44 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 45 | // free(input); 46 | 47 | // 3. Relu 48 | float* relu1 = relu_layer(model->graph, conv1, shapeInput, shapeOutput, "Relu1"); 49 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 50 | free(conv1); 51 | 52 | // 4. Maxpool 53 | float* maxpool1 = maxpool_layer(model->graph, relu1, shapeInput, shapeOutput, "max_pooling2d_5"); 54 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 55 | free(relu1); 56 | 57 | // 5. Conv2D 58 | float* conv2 = conv2D_layer(model->graph, maxpool1, shapeInput, shapeOutput, "conv2d_6"); 59 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 60 | free(maxpool1); 61 | 62 | // 6. Relu 63 | float* relu2 = relu_layer(model->graph, conv2, shapeInput, shapeOutput, "Relu"); 64 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 65 | free(conv2); 66 | 67 | // 7. Maxpool 68 | float* maxpool2 = maxpool_layer(model->graph, relu2, shapeInput, shapeOutput, "max_pooling2d_6"); 69 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 70 | free(relu2); 71 | 72 | // 8. Transpose 73 | // float* maxpool2_t = transpose_layer(model->graph, maxpool2, shapeInput, shapeOutput, "Transpose1"); 74 | // memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 75 | // free(maxpool2); 76 | 77 | // 9. Flatten 78 | shapeInput[1] = shapeInput[0] * shapeInput[1] * shapeInput[2]; 79 | shapeInput[2] = 1; 80 | shapeInput[0] = 1; 81 | 82 | // 10. 
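// (Inserted note, continuing the step comment above.) In this model each Keras Dense
// layer appears in the onnx graph as a MatMul node followed by an Add node, which is
// why every dense layer below is evaluated in two steps (matmul_layer() then
// add_layer()). A rough sketch of the combined math, with placeholder names
// x[in], W[in*out], b[out], y[out]:
//
//     for (int j = 0; j < out; j++)
//     {
//         float acc = b[j];                   /* Add node: bias    */
//         for (int i = 0; i < in; i++)
//             acc += x[i] * W[i * out + j];   /* MatMul node: x*W  */
//         y[j] = acc;
//     }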
Dense 83 | float* matmul1 = matmul_layer(model->graph, maxpool2, shapeInput, shapeOutput, "dense_5"); 84 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 85 | free(maxpool2); 86 | 87 | // 11. Add 88 | float* dense1 = add_layer(model->graph, matmul1, shapeInput, shapeOutput, "Add1"); 89 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 90 | free(matmul1); 91 | 92 | // 12. Dense 93 | float* matmul2 = matmul_layer(model->graph, dense1, shapeInput, shapeOutput, "dense_6"); 94 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 95 | free(dense1); 96 | 97 | // 13. Add 98 | float* dense2 = add_layer(model->graph, matmul2, shapeInput, shapeOutput, "Add"); 99 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 100 | free(matmul2); 101 | 102 | // 14. Softmax 103 | float* output = softmax_layer(model->graph, dense2, shapeInput, shapeOutput, "Softmax"); 104 | free(dense2); 105 | 106 | // 15. Identity 107 | // Do Nothing Here 108 | 109 | // Result 110 | float max = 0; 111 | int max_index = 0; 112 | printf("\nPredictions: \n"); 113 | for(int i = 0; i < 10; i++) 114 | { 115 | printf("%f ", output[i]); 116 | if(output[i] > max) 117 | { 118 | max = output[i]; 119 | max_index = i; 120 | } 121 | } 122 | printf("\n"); 123 | printf("\nThe number is %d\n", max_index); 124 | 125 | // Free model 126 | free(shapeInput); 127 | free(shapeOutput); 128 | free(output); 129 | onnx__model_proto__free_unpacked(model, NULL); 130 | 131 | return; 132 | } 133 | 134 | static void mnist_sm(int argc, char const *argv[]) 135 | { 136 | 137 | tid1 = rt_thread_create("tmnist_sm", 138 | mnist_sm_entry, RT_NULL, 139 | THREAD_STACK_SIZE, 140 | THREAD_PRIORITY, THREAD_TIMESLICE); 141 | 142 | if (tid1 != RT_NULL) 143 | { 144 | 145 | rt_thread_startup(tid1); 146 | } 147 | else 148 | { 149 | rt_kprintf("Failed to start mnist-sm thread\n"); 150 | } 151 | 152 | } 153 | MSH_CMD_EXPORT(mnist_sm, mnist small model) 154 | -------------------------------------------------------------------------------- /examples/model/mnist-keras.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# 导入库" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "#coding:utf-8\n", 17 | "from tensorflow.examples.tutorials.mnist import input_data\n", 18 | "\n", 19 | "import numpy as np\n", 20 | "np.set_printoptions(suppress=True)\n", 21 | "\n", 22 | "import matplotlib.pyplot as plt\n", 23 | "%matplotlib inline" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "# 导入数据集" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": {}, 37 | "outputs": [ 38 | { 39 | "name": "stderr", 40 | "output_type": "stream", 41 | "text": [ 42 | "W0829 10:37:44.431263 12720 deprecation.py:323] From d:\\anaconda3\\envs\\tensorflow\\lib\\site-packages\\tensorflow\\contrib\\learn\\python\\learn\\datasets\\mnist.py:262: extract_images (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", 43 | "Instructions for updating:\n", 44 | "Please use tf.data to implement this functionality.\n" 45 | ] 46 | }, 47 | { 48 | "name": "stdout", 49 | "output_type": "stream", 50 | "text": [ 51 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n" 52 | ] 53 | }, 54 | { 55 | "name": "stderr", 56 | "output_type": "stream", 57 | "text": [ 58 | 
"W0829 10:37:44.709223 12720 deprecation.py:323] From d:\\anaconda3\\envs\\tensorflow\\lib\\site-packages\\tensorflow\\contrib\\learn\\python\\learn\\datasets\\mnist.py:267: extract_labels (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", 59 | "Instructions for updating:\n", 60 | "Please use tf.data to implement this functionality.\n", 61 | "W0829 10:37:44.722234 12720 deprecation.py:323] From d:\\anaconda3\\envs\\tensorflow\\lib\\site-packages\\tensorflow\\contrib\\learn\\python\\learn\\datasets\\mnist.py:110: dense_to_one_hot (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", 62 | "Instructions for updating:\n", 63 | "Please use tf.one_hot on tensors.\n", 64 | "W0829 10:37:44.785238 12720 deprecation.py:323] From d:\\anaconda3\\envs\\tensorflow\\lib\\site-packages\\tensorflow\\contrib\\learn\\python\\learn\\datasets\\mnist.py:290: DataSet.__init__ (from tensorflow.contrib.learn.python.learn.datasets.mnist) is deprecated and will be removed in a future version.\n", 65 | "Instructions for updating:\n", 66 | "Please use alternatives such as official/mnist/dataset.py from tensorflow/models.\n" 67 | ] 68 | }, 69 | { 70 | "name": "stdout", 71 | "output_type": "stream", 72 | "text": [ 73 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 74 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 75 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 76 | ] 77 | } 78 | ], 79 | "source": [ 80 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True) #MNIST数据输入" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 4, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "X_train = mnist.train.images\n", 90 | "y_train = mnist.train.labels\n", 91 | "X_test = mnist.test.images\n", 92 | "y_test = mnist.test.labels\n", 93 | "\n", 94 | "# 输入图像大小是 28x28 大小\n", 95 | "X_train = X_train.reshape([-1, 28, 28, 1])\n", 96 | "X_test = X_test.reshape([-1, 28, 28, 1])" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": 8, 102 | "metadata": {}, 103 | "outputs": [ 104 | { 105 | "data": { 106 | "text/plain": [ 107 | "" 108 | ] 109 | }, 110 | "execution_count": 8, 111 | "metadata": {}, 112 | "output_type": "execute_result" 113 | }, 114 | { 115 | "data": { 116 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAANgElEQVR4nO3dXaxV9ZnH8d9vEKKxjS+jMowwUvC1zgVVJBonE8dK43iDTaz2JFaqzZxqcAKmJmMck3rhRTMZiiYmNTSS0kmlqWlVNM0MLyEhhFgFwxyw2Oo0WCgERBQO0dgRn7k4y8kRz1r7sNfaL+c8309ysvdez15rPdnhx1p7//def0eEAEx+f9HrBgB0B2EHkiDsQBKEHUiCsANJnNbNndnmo3+gwyLCYy2vdWS3fbPt39l+y/ZDdbYFoLPc7ji77SmSfi9poaR9kl6VNBARv61YhyM70GGdOLIvkPRWRPwhIv4s6eeSFtXYHoAOqhP2CyXtHfV4X7HsM2wP2t5me1uNfQGoqc4HdGOdKnzuND0iVkpaKXEaD/RSnSP7PkmzRj2eKWl/vXYAdEqdsL8q6RLbX7I9TdI3Ja1tpi0ATWv7ND4iPrZ9v6T/kjRF0qqIeL2xzgA0qu2ht7Z2xnt2oOM68qUaABMHYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiCJtudnlyTbeyQNSzoh6eOImN9EUwCaVyvshX+IiMMNbAdAB3EaDyRRN+whaZ3t7bYHx3qC7UHb22xvq7kvADU4Itpf2f7riNhv+wJJ6yX9c0Rsrnh++zsDMC4R4bGW1zqyR8T+4vaQpOckLaizPQCd03bYbZ9p+4uf3pf0NUm7mmoMQLPqfBo/XdJztj/dzjMR8Z+NdAWgcbXes5/yznjPDnRcR96zA5g4CDuQBGEHkiDsQBKEHUiiiR/CoMfuvvvu0lqr0ZZ33323sn7FFVdU1rdu3VpZ37JlS2Ud3cORHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSmDTj7AMDA5X1q666qrJeNVbd784+++y21z1x4kRlfdq0aZX1Dz/8sLL+wQcflNZ27txZue7tt99eWX/nnXcq6/gsjuxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kMSEurrs8uXLS2tLly6tXHfKlCl1do0e2LRpU2W91XcrDh482GQ7EwZXlwWSI+xAEoQdSIKwA0kQdiAJwg4kQdiBJCbUOPvevXtLazNnzqxcd2hoqLLe6nfZndTq2urPP/98lzo5dQsXLqys33XXXaW12bNn19p3q3H4O+64o7Q2mX8L3/Y4u+1Vtg/Z3jVq2bm219t+s7g9p8lmATRvPKfxP5F080nLHpK0MSIukbSxeAygj7UMe0RslnTkpMWLJK0u7q+WdGvDfQFoWLvXoJseEQckKSIO2L6g7Im2ByUNtrkfAA3p+AUnI2KlpJVS/Q/oALSv3aG3g7ZnSFJxe6i5lgB0QrthXytpcXF/saQXmmkHQKe0HGe3vUbSDZLOk3RQ0vclPS/pF5L+RtIfJX0jIk7+EG+sbdU6jb/00ktLa1deeWXluhs2bKisDw8Pt9UTqs2ZM6e09tJLL1Wu22pu+FYefPDB0lrVtREmurJx9pbv2SOi7AoBX63VEYCu4uuyQBKEHUiCsANJEHYgCcIOJDGhfuKKyeW2226rrD/77LO1tn/48OHS2vnnn19r2/2MS0kDyRF2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEh2fEQa53XfffaW1a665pqP7Pv3000trV199deW627dvb7qdnuPIDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJcN34SWDGjBmltTvvvLNy3WXLljXdzmdU9WaPeXnzrjh27Fhl/ayzzupSJ81r+7rxtlfZPmR716hlj9r+k+0dxd8tTTYLoHnjOY3/iaSbx1i+IiLmFX+/brYtAE1rGfaI2CzpSBd6AdBBdT6gu9/2UHGaf07Zk2wP2t5me1uNfQGoqd2w/0jSXEnzJB2QtLzsiRGxMiLmR8T8NvcFoAFthT0iDkbEiYj4RNKPJS1oti0ATWsr7LZHj6d8XdKusucC6A8tf89ue42kGySdZ3ufpO9LusH2PEkhaY+k73awx0nvpptuqqy3+u314OBgaW3OnDlt9TTZrVq1qtctdF3LsEfEwBiLn+5ALwA6iK/LAkkQdiAJwg4kQdiBJAg7kASXkm7AxRdfXFl/6qmnKus33nhjZb2TPwV9++23K+vvvfdere0/8sgjpbWPPvqoct0nn3yysn7ZZZe11ZMk7d+/v+11JyqO7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBOPs4/TAAw+U1pYsWVK57ty5cyvrx48fr6y///77lfXHH3+8tNZqPHnr1q2V9Vbj8J109OjRWusPDw+X1l588cVa256IOLIDSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKMs4/TddddV1prNY6+du3ayvry5aUT6kiSNm/eXFmfqObNm1dZv+iii2ptv+r38m+88UatbU9EHNmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2cfp3nvvLa0NDQ1VrvvYY4813c6k0Op6+9OnT6+1/Q0bNtRaf7JpeWS3Pcv2Jtu7bb9ue2mx/Fzb622/Wdye0/l2AbRrPKfxH0v6XkRcIelaSUtsf1nSQ5I2RsQlkjYWjwH0qZZhj4gDEfFacX9Y0m5JF0paJGl18bTVkm7tVJMA6jul9+y2Z0v6iqTfSJoeEQekkf8QbF9Qss6gpMF6bQKoa9xht/0FSb+UtCwijo13ssGIWClpZbGNaKdJAPWNa+jN9lSNBP1nEfGrYvFB2zOK+gxJhzrTIoAmtDyye+QQ/rSk3RHxw1GltZIWS/pBcftCRzrsE0eOHCmtMbTWnmuvvbbW+q0usf3EE0/U2v5kM57T+OslfUvSTts7imUPayTkv7D9HUl/lPSNzrQIoAktwx4RWySVvUH/arPtAOgUvi4LJEHYgSQIO5AEYQeSIOxAEvzEFR21c+fO0trll19ea9vr1q2rrL/88su1tj/ZcGQHkiDsQBKEHUiCsANJEHYgCcIOJEHYgSQYZ0dHzZ49u7R22mnV//yOHj1aWV+xYkU7LaXFkR1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkmCcHbUMDAxU1s8444zS2vDwcOW6g4PVs4bxe/VTw5EdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5JwRFQ/wZ4l6aeS/krSJ5JWRsQTth+V9E+S3ime+nBE/LrFtqp3hr4zderUyvorr7xSWa+6NvyaNWsq173nnnsq6xhbRIw56/J4vlTzsaTvRcRrtr8oabvt9UVtRUT8e1NNAuic8czPfkDSge
L+sO3dki7sdGMAmnVK79ltz5b0FUm/KRbdb3vI9irb55SsM2h7m+1ttToFUMu4w277C5J+KWlZRByT9CNJcyXN08iRf/lY60XEyoiYHxHzG+gXQJvGFXbbUzUS9J9FxK8kKSIORsSJiPhE0o8lLehcmwDqahl225b0tKTdEfHDUctnjHra1yXtar49AE0Zz6fx10v6lqSdtncUyx6WNGB7nqSQtEfSdzvSIXqq1dDsM888U1nfsWNHaW39+vWlNTRvPJ/Gb5E01rhd5Zg6gP7CN+iAJAg7kARhB5Ig7EAShB1IgrADSbT8iWujO+MnrkDHlf3ElSM7kARhB5Ig7EAShB1IgrADSRB2IAnCDiTR7SmbD0t6e9Tj84pl/ahfe+vXviR6a1eTvV1UVujql2o+t3N7W79em65fe+vXviR6a1e3euM0HkiCsANJ9DrsK3u8/yr92lu/9iXRW7u60ltP37MD6J5eH9kBdAlhB5LoSdht32z7d7bfsv1QL3ooY3uP7Z22d/R6frpiDr1DtneNWnau7fW23yxux5xjr0e9PWr7T8Vrt8P2LT3qbZbtTbZ3237d9tJieU9fu4q+uvK6df09u+0pkn4vaaGkfZJelTQQEb/taiMlbO+RND8iev4FDNt/L+m4pJ9GxN8Wy/5N0pGI+EHxH+U5EfEvfdLbo5KO93oa72K2ohmjpxmXdKukb6uHr11FX7erC69bL47sCyS9FRF/iIg/S/q5pEU96KPvRcRmSUdOWrxI0uri/mqN/GPpupLe+kJEHIiI14r7w5I+nWa8p69dRV9d0YuwXyhp76jH+9Rf872HpHW2t9se7HUzY5geEQekkX88ki7ocT8nazmNdzedNM1437x27Ux/Xlcvwj7W9bH6afzv+oi4StI/SlpSnK5ifMY1jXe3jDHNeF9od/rzunoR9n2SZo16PFPS/h70MaaI2F/cHpL0nPpvKuqDn86gW9we6nE//6+fpvEea5px9cFr18vpz3sR9lclXWL7S7anSfqmpLU96ONzbJ9ZfHAi22dK+pr6byrqtZIWF/cXS3qhh718Rr9M4102zbh6/Nr1fPrziOj6n6RbNPKJ/P9I+tde9FDS1xxJ/138vd7r3iSt0chp3f9q5IzoO5L+UtJGSW8Wt+f2UW//IWmnpCGNBGtGj3r7O428NRyStKP4u6XXr11FX1153fi6LJAE36ADkiDsQBKEHUiCsANJEHYgCcIOJEHYgST+D0dqK8VlJwIwAAAAAElFTkSuQmCC\n", 117 | "text/plain": [ 118 | "
" 119 | ] 120 | }, 121 | "metadata": { 122 | "needs_background": "light" 123 | }, 124 | "output_type": "display_data" 125 | } 126 | ], 127 | "source": [ 128 | "plt.imshow(X_train[0].reshape((28, 28)), cmap='gray')" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": 9, 134 | "metadata": {}, 135 | "outputs": [ 136 | { 137 | "data": { 138 | "text/plain": [ 139 | "" 140 | ] 141 | }, 142 | "execution_count": 9, 143 | "metadata": {}, 144 | "output_type": "execute_result" 145 | }, 146 | { 147 | "data": { 148 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAPsAAAD4CAYAAAAq5pAIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAOHUlEQVR4nO3dS4xc5ZnG8ecBko2ThXEbsIixkwiZGUYaggwaCWMFRTGXje1FRjEXMRqgIxSkALMYcxFBMrbQaMiAN4aOQHFGgSjyRVgRKEFWZDMb5BsDhjaBQYztYPkCixCxyIDfWfRx1DF9vtOu26n2+/9Jrao6b52q1+V++pyqr875HBECcPY7p+0GAAwGYQeSIOxAEoQdSIKwA0mcN8gns81H/0CfRYSnWt7Vlt32Dbbfsf2e7dXdPBaA/nKn4+y2z5X0e0nflXRY0i5JqyLi7cI6bNmBPuvHlv1qSe9FxPsR8WdJv5S0vIvHA9BH3YT9YkmHJt0+XC37K7ZHbe+2vbuL5wLQpW4+oJtqV+ELu+kRMSZpTGI3HmhTN1v2w5LmT7r9NUkfdtcOgH7pJuy7JF1q++u2vyzp+5K29aYtAL3W8W58RHxm+x5Jv5F0rqTnIuKtnnUGoKc6Hnrr6Ml4zw70XV++VANg5iDsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJEHYgiY6nbMbZYcGCBcX6nXfeWaw/9NBDxXpplmB7yslG/2J8fLxYf/jhh4v1rVu3FuvZdBV22x9I+kTS55I+i4jFvWgKQO/1Yst+XUSc6MHjAOgj3rMDSXQb9pD0W9t7bI9OdQfbo7Z3297d5XMB6EK3u/HXRMSHti+Q9IrtAxGxc/IdImJM0pgk2a7/tAZAX3W1ZY+ID6vLY5K2Srq6F00B6L2Ow257lu2vnrouaZmk/b1qDEBvuTQOWlzR/oYmtubSxNuB5yNibcM67Mb3wdy5c2trDzzwQHHdW265pVifM2dOsd40Vt7NOHvT7+ahQ4eK9auuuqq2duLE2TuAFBFTvrAdv2ePiPcl/X3HHQEYKIbegCQIO5AEYQeSIOxAEoQdSKLjobeOnoyht440HUa6Zs2a2lrT/2+/h7+OHz9erJeMjIwU6wsXLizW33777dra5Zdf3klLM0Ld0BtbdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgnH2GWDXrl3F+pVXXllb63acvTRWLUnXXXddsd7NoaRLliwp1nfs2FGsl/7t55139p5FnXF2IDnCDiRB2IEkCDuQBGEHkiDsQBKEHUiCcfYhcNlllxXrTePsH330UW2t6XjypnHw++67r1i/9957i/V169bV1g4ePFhct0nT7+7Jkydra3fffXdx3bGxsY56GgaMswPJEXYgCcIOJEHYgSQIO5AEYQeSIOxAEoyzzwBN4/ClsfJupyYeHR0t1jds2FCsl6ZN3rt3b3HdlStXFuubNm0q1ku/2xdddFFx3Zk8pXPH4+y2n7N9zPb+ScvOt/2K7Xery9m9bBZA701nN/5nkm44bdlqSdsj4lJJ26vbAIZYY9gjYqekj09bvFzSxur6RkkretwXgB7r9ERcF0bEEUmKiCO2L6i7o+1RSeU3fgD6ru9n3YuIMUljEh/QAW3qdOjtqO15klRdHutdSwD6odOwb5N0e3X9dkkv9qYdAP3SuBtv+wVJ35Y0YvuwpB9LelzSr2zfIemgpO/1s8nsDhw40NpzNx0P/8477xTrpWPtm46VX726PMjTdM77fn7/YCZqDHtErKopfafHvQDoI74uCyRB2IEkCDuQBGEHkiDsQBJn77y1iSxdurS21nR4bNPQ2vj4eLG+aNGiYv21116rrc2dO7e4btPh102933jjjcV6NmzZgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJxtnPAjfffHNt7a677iqu23SYaNNYd9P6pbH0bg5RlaT169cX602nqs6GLTuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJME4+1mu2ym5+7n+q6++Wlz3/vvvL9YZRz8zbNmBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnG2c8Czz//fG1twYIFxXVHRkaK9abzzs+aNatYL3nkkUeKdcbRe6txy277OdvHbO+ftOxR23+w/Xr1c1N/2wTQrensxv9M0g1TLP+PiLii+nmpt20B6LXGsEfETkkfD6AXAH3UzQd099h+o9rNn113J9ujtnfb3t3FcwHoUqdh3yDpm5KukHRE0hN1d4yIsYhYHBGLO3wuAD3QUdgj4mhEfB4RJyX9VNLVvW0LQK91FHbb8ybdXClpf919AQwHT+O84C9I+rakEUlHJf24un2FpJD0gaQfRMSRxiezuzs4GgPXNM7+2GOPFesrVqyore3bt6+4btP86k3nlc8qIqY8IX/jl2oiYtUUi5/tuiMAA8XXZYEkCDuQBGEHkiDsQBKEHUiiceitp082g4feSlMPHz9+fICdzCwvv/xybe36668vrtt0Kuknn3yyo57OdnVDb2zZgSQIO5AEYQeSIOxAEoQdSIKwA0kQdiAJTiVdWbp0abH+xBO1J+PRgQMHiuvedtttHfV0Nli7dm1tbdmyZcV1Fy1a1Ot2UmPLDiRB2IEkCDuQBGEHkiDsQBKEHUiCsANJpBlnLx2PLklPP/10sX7s2LHaWuZx9KYpm5955pnamj3lYdfoE7bsQBKEHUiCsANJEHYgCcIOJEHYgSQIO5BEmnH2lStXFutNx07v2LGjl+3MGE1TNm/evLlYL72uTXMWNJ0nAGemcctue77t39ket/2W7R9Vy8+3/Yrtd6vL2f1vF0CnprMb/5mkf4mIv5H0D5J+aPtvJa2WtD0iLpW0vboNYEg1hj0ijkTE3ur6J5
LGJV0sabmkjdXdNkpa0a8mAXTvjN6z214o6VuSXpN0YUQckSb+INi+oGadUUmj3bUJoFvTDrvtr0jaLOneiPjjdA9iiIgxSWPVY8zYiR2BmW5aQ2+2v6SJoP8iIrZUi4/anlfV50mqPywMQOsat+ye2IQ/K2k8In4yqbRN0u2SHq8uX+xLhz2yc+fOYv2cc8p/90qnmr711luL646Pjxfre/bsKdabLFiwoLZ27bXXFtdtGpJcsaL8UUzTHl5peO2pp54qrttUx5mZzm78NZJuk/Sm7derZQ9qIuS/sn2HpIOSvtefFgH0QmPYI+K/JNX9+f5Ob9sB0C98XRZIgrADSRB2IAnCDiRB2IEk3HSYYU+fbIi/Qbdp06ZivTTe3M1YsyTt27evWG9yySWX1NbmzJlTXLfb3pvWL03ZvH79+uK6J06cKNYxtYiY8j+FLTuQBGEHkiDsQBKEHUiCsANJEHYgCcIOJME4e6VpSueXXnqptrZ48eLiuidPnizW+znW3bTup59+Wqw3nc553bp1xfrWrVuLdfQe4+xAcoQdSIKwA0kQdiAJwg4kQdiBJAg7kATj7NM0MjJSW1uzZk1Xjz06Wp4da8uWLcV6N8d9N52bnWmTZx7G2YHkCDuQBGEHkiDsQBKEHUiCsANJEHYgicZxdtvzJf1c0kWSTkoai4inbD8q6S5Jx6u7PhgR9Qd9a2aPswMzRd04+3TCPk/SvIjYa/urkvZIWiHpHyX9KSL+fbpNEHag/+rCPp352Y9IOlJd/8T2uKSLe9segH47o/fsthdK+pak16pF99h+w/ZztmfXrDNqe7ft3V11CqAr0/5uvO2vSNohaW1EbLF9oaQTkkLSGk3s6v9zw2OwGw/0Wcfv2SXJ9pck/VrSbyLiJ1PUF0r6dUT8XcPjEHagzzo+EMYTpy59VtL45KBXH9ydslLS/m6bBNA/0/k0fomkVyW9qYmhN0l6UNIqSVdoYjf+A0k/qD7MKz0WW3agz7raje8Vwg70H8ezA8kRdiAJwg4kQdiBJAg7kARhB5Ig7EAShB1IgrADSRB2IAnCDiRB2IEkCDuQBGEHkmg84WSPnZD0v5Nuj1TLhtGw9jasfUn01qle9ragrjDQ49m/8OT27ohY3FoDBcPa27D2JdFbpwbVG7vxQBKEHUii7bCPtfz8JcPa27D2JdFbpwbSW6vv2QEMTttbdgADQtiBJFoJu+0bbL9j+z3bq9vooY7tD2y/afv1tuenq+bQO2Z7/6Rl59t+xfa71eWUc+y11Nujtv9QvXav276ppd7m2/6d7XHbb9n+UbW81deu0NdAXreBv2e3fa6k30v6rqTDknZJWhURbw+0kRq2P5C0OCJa/wKG7aWS/iTp56em1rL9b5I+jojHqz+UsyPiX4ekt0d1htN496m3umnG/0ktvna9nP68E21s2a+W9F5EvB8Rf5b0S0nLW+hj6EXETkkfn7Z4uaSN1fWNmvhlGbia3oZCRByJiL3V9U8knZpmvNXXrtDXQLQR9oslHZp0+7CGa773kPRb23tsj7bdzBQuPDXNVnV5Qcv9nK5xGu9BOm2a8aF57TqZ/rxbbYR9qqlphmn875qIuFLSjZJ+WO2uYno2SPqmJuYAPCLpiTabqaYZ3yzp3oj4Y5u9TDZFXwN53doI+2FJ8yfd/pqkD1voY0oR8WF1eUzSVk287RgmR0/NoFtdHmu5n7+IiKMR8XlEnJT0U7X42lXTjG+W9IuI2FItbv21m6qvQb1ubYR9l6RLbX/d9pclfV/Sthb6+ALbs6oPTmR7lqRlGr6pqLdJur26frukF1vs5a8MyzTeddOMq+XXrvXpzyNi4D+SbtLEJ/L/I+mhNnqo6esbkv67+nmr7d4kvaCJ3br/08Qe0R2S5kjaLund6vL8IertPzUxtfcbmgjWvJZ6W6KJt4ZvSHq9+rmp7deu0NdAXje+LgskwTfogCQIO5AEYQeSIOxAEoQdSIKwA0kQdiCJ/wftgrMNjgT54AAAAABJRU5ErkJggg==\n", 149 | "text/plain": [ 150 | "
" 151 | ] 152 | }, 153 | "metadata": { 154 | "needs_background": "light" 155 | }, 156 | "output_type": "display_data" 157 | } 158 | ], 159 | "source": [ 160 | "plt.imshow(X_train[1].reshape((28, 28)), cmap='gray')" 161 | ] 162 | }, 163 | { 164 | "cell_type": "markdown", 165 | "metadata": {}, 166 | "source": [ 167 | "# 构建模型" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 14, 173 | "metadata": {}, 174 | "outputs": [ 175 | { 176 | "name": "stderr", 177 | "output_type": "stream", 178 | "text": [ 179 | "Using TensorFlow backend.\n" 180 | ] 181 | } 182 | ], 183 | "source": [ 184 | "# Importing the Keras libraries and packages\n", 185 | "# Importing the Keras libraries and packages\n", 186 | "from keras.models import Sequential\n", 187 | "from keras.layers import Dense\n", 188 | "from keras.layers import Conv2D\n", 189 | "from keras.layers import MaxPooling2D\n", 190 | "from keras.layers import Dropout\n", 191 | "from keras.layers import Flatten" 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": 25, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "def build_classifier():\n", 201 | " # Initialising the CNN\n", 202 | " classifier = Sequential()\n", 203 | "\n", 204 | " # Adding the first CNN layer and some Dropout regularisation\n", 205 | " classifier.add(Conv2D(filters = 2, kernel_size = 3, strides = 1, padding = \"SAME\", activation = \"relu\", input_shape = (28, 28, 1)))\n", 206 | " classifier.add(MaxPooling2D(pool_size=(2, 2), padding='SAME'))\n", 207 | " classifier.add(Dropout(0.3))\n", 208 | "\n", 209 | " classifier.add(Conv2D(filters = 2, kernel_size = 3, strides = 1, padding = \"SAME\", activation = \"relu\"))\n", 210 | " classifier.add(MaxPooling2D(pool_size=(2, 2), padding='SAME'))\n", 211 | " classifier.add(Dropout(0.3))\n", 212 | "\n", 213 | " classifier.add(Flatten())\n", 214 | " classifier.add(Dense(kernel_initializer=\"uniform\", units = 4))\n", 215 | "\n", 216 | " # Adding the output layer\n", 217 | " classifier.add(Dense(kernel_initializer=\"uniform\", units = 10, activation=\"softmax\"))\n", 218 | " classifier.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics=['accuracy'])\n", 219 | "\n", 220 | " return classifier\n" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 26, 226 | "metadata": {}, 227 | "outputs": [ 228 | { 229 | "name": "stdout", 230 | "output_type": "stream", 231 | "text": [ 232 | "_________________________________________________________________\n", 233 | "Layer (type) Output Shape Param # \n", 234 | "=================================================================\n", 235 | "conv2d_3 (Conv2D) (None, 28, 28, 2) 20 \n", 236 | "_________________________________________________________________\n", 237 | "max_pooling2d_3 (MaxPooling2 (None, 14, 14, 2) 0 \n", 238 | "_________________________________________________________________\n", 239 | "dropout_3 (Dropout) (None, 14, 14, 2) 0 \n", 240 | "_________________________________________________________________\n", 241 | "conv2d_4 (Conv2D) (None, 14, 14, 2) 38 \n", 242 | "_________________________________________________________________\n", 243 | "max_pooling2d_4 (MaxPooling2 (None, 7, 7, 2) 0 \n", 244 | "_________________________________________________________________\n", 245 | "dropout_4 (Dropout) (None, 7, 7, 2) 0 \n", 246 | "_________________________________________________________________\n", 247 | "flatten_2 (Flatten) (None, 98) 0 \n", 248 | 
"_________________________________________________________________\n", 249 | "dense_3 (Dense) (None, 4) 396 \n", 250 | "_________________________________________________________________\n", 251 | "dense_4 (Dense) (None, 10) 50 \n", 252 | "=================================================================\n", 253 | "Total params: 504\n", 254 | "Trainable params: 504\n", 255 | "Non-trainable params: 0\n", 256 | "_________________________________________________________________\n" 257 | ] 258 | } 259 | ], 260 | "source": [ 261 | "classifier = build_classifier()\n", 262 | "classifier.summary()" 263 | ] 264 | }, 265 | { 266 | "cell_type": "markdown", 267 | "metadata": {}, 268 | "source": [ 269 | "# 训练模型" 270 | ] 271 | }, 272 | { 273 | "cell_type": "code", 274 | "execution_count": 27, 275 | "metadata": {}, 276 | "outputs": [], 277 | "source": [ 278 | "from keras.callbacks import ModelCheckpoint\n", 279 | "checkpointer = ModelCheckpoint(filepath='minions.hdf5', verbose=1, save_best_only=True, monitor='val_loss',mode='min')" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": 28, 285 | "metadata": {}, 286 | "outputs": [ 287 | { 288 | "name": "stdout", 289 | "output_type": "stream", 290 | "text": [ 291 | "Train on 55000 samples, validate on 10000 samples\n", 292 | "Epoch 1/50\n", 293 | "55000/55000 [==============================] - 10s 178us/step - loss: 1.3555 - acc: 0.5203 - val_loss: 0.8780 - val_acc: 0.7317\n", 294 | "\n", 295 | "Epoch 00001: val_loss improved from inf to 0.87800, saving model to minions.hdf5\n", 296 | "Epoch 2/50\n", 297 | "55000/55000 [==============================] - 9s 171us/step - loss: 1.0732 - acc: 0.6216 - val_loss: 0.8069 - val_acc: 0.7405\n", 298 | "\n", 299 | "Epoch 00002: val_loss improved from 0.87800 to 0.80690, saving model to minions.hdf5\n", 300 | "Epoch 3/50\n", 301 | "55000/55000 [==============================] - 9s 172us/step - loss: 1.0243 - acc: 0.6383 - val_loss: 0.7807 - val_acc: 0.7610\n", 302 | "\n", 303 | "Epoch 00003: val_loss improved from 0.80690 to 0.78067, saving model to minions.hdf5\n", 304 | "Epoch 4/50\n", 305 | "55000/55000 [==============================] - 9s 172us/step - loss: 0.9878 - acc: 0.6533 - val_loss: 0.7382 - val_acc: 0.7770\n", 306 | "\n", 307 | "Epoch 00004: val_loss improved from 0.78067 to 0.73821, saving model to minions.hdf5\n", 308 | "Epoch 5/50\n", 309 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.9613 - acc: 0.6623 - val_loss: 0.7169 - val_acc: 0.7922\n", 310 | "\n", 311 | "Epoch 00005: val_loss improved from 0.73821 to 0.71689, saving model to minions.hdf5\n", 312 | "Epoch 6/50\n", 313 | "55000/55000 [==============================] - 10s 173us/step - loss: 0.9434 - acc: 0.6650 - val_loss: 0.6947 - val_acc: 0.7987\n", 314 | "\n", 315 | "Epoch 00006: val_loss improved from 0.71689 to 0.69465, saving model to minions.hdf5\n", 316 | "Epoch 7/50\n", 317 | "55000/55000 [==============================] - 9s 171us/step - loss: 0.9078 - acc: 0.6775 - val_loss: 0.6258 - val_acc: 0.8173\n", 318 | "\n", 319 | "Epoch 00007: val_loss improved from 0.69465 to 0.62578, saving model to minions.hdf5\n", 320 | "Epoch 8/50\n", 321 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.8136 - acc: 0.7133 - val_loss: 0.5246 - val_acc: 0.8476\n", 322 | "\n", 323 | "Epoch 00008: val_loss improved from 0.62578 to 0.52461, saving model to minions.hdf5\n", 324 | "Epoch 9/50\n", 325 | "55000/55000 [==============================] - 10s 174us/step - loss: 0.7273 - acc: 
0.7461 - val_loss: 0.4556 - val_acc: 0.8652\n", 326 | "\n", 327 | "Epoch 00009: val_loss improved from 0.52461 to 0.45560, saving model to minions.hdf5\n", 328 | "Epoch 10/50\n", 329 | "55000/55000 [==============================] - 10s 174us/step - loss: 0.6914 - acc: 0.7601 - val_loss: 0.4300 - val_acc: 0.8750\n", 330 | "\n", 331 | "Epoch 00010: val_loss improved from 0.45560 to 0.43003, saving model to minions.hdf5\n", 332 | "Epoch 11/50\n", 333 | "55000/55000 [==============================] - 10s 177us/step - loss: 0.6735 - acc: 0.7685 - val_loss: 0.4067 - val_acc: 0.8877\n", 334 | "\n", 335 | "Epoch 00011: val_loss improved from 0.43003 to 0.40667, saving model to minions.hdf5\n", 336 | "Epoch 12/50\n", 337 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6610 - acc: 0.7749 - val_loss: 0.4092 - val_acc: 0.8806\n", 338 | "\n", 339 | "Epoch 00012: val_loss did not improve from 0.40667\n", 340 | "Epoch 13/50\n", 341 | "55000/55000 [==============================] - 9s 166us/step - loss: 0.6581 - acc: 0.7785 - val_loss: 0.3992 - val_acc: 0.8874\n", 342 | "\n", 343 | "Epoch 00013: val_loss improved from 0.40667 to 0.39921, saving model to minions.hdf5\n", 344 | "Epoch 14/50\n", 345 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6510 - acc: 0.7780 - val_loss: 0.3958 - val_acc: 0.8838\n", 346 | "\n", 347 | "Epoch 00014: val_loss improved from 0.39921 to 0.39576, saving model to minions.hdf5\n", 348 | "Epoch 15/50\n", 349 | "55000/55000 [==============================] - 9s 170us/step - loss: 0.6450 - acc: 0.7811 - val_loss: 0.4030 - val_acc: 0.8782\n", 350 | "\n", 351 | "Epoch 00015: val_loss did not improve from 0.39576\n", 352 | "Epoch 16/50\n", 353 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.6391 - acc: 0.7837 - val_loss: 0.3956 - val_acc: 0.8850\n", 354 | "\n", 355 | "Epoch 00016: val_loss improved from 0.39576 to 0.39564, saving model to minions.hdf5\n", 356 | "Epoch 17/50\n", 357 | "55000/55000 [==============================] - 9s 167us/step - loss: 0.6345 - acc: 0.7848 - val_loss: 0.3838 - val_acc: 0.8887\n", 358 | "\n", 359 | "Epoch 00017: val_loss improved from 0.39564 to 0.38377, saving model to minions.hdf5\n", 360 | "Epoch 18/50\n", 361 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.6337 - acc: 0.7848 - val_loss: 0.3913 - val_acc: 0.8818\n", 362 | "\n", 363 | "Epoch 00018: val_loss did not improve from 0.38377\n", 364 | "Epoch 19/50\n", 365 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6338 - acc: 0.7870 - val_loss: 0.3845 - val_acc: 0.8875\n", 366 | "\n", 367 | "Epoch 00019: val_loss did not improve from 0.38377\n", 368 | "Epoch 20/50\n", 369 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6345 - acc: 0.7847 - val_loss: 0.3817 - val_acc: 0.8917\n", 370 | "\n", 371 | "Epoch 00020: val_loss improved from 0.38377 to 0.38166, saving model to minions.hdf5\n", 372 | "Epoch 21/50\n", 373 | "55000/55000 [==============================] - 9s 171us/step - loss: 0.6208 - acc: 0.7914 - val_loss: 0.3702 - val_acc: 0.8914\n", 374 | "\n", 375 | "Epoch 00021: val_loss improved from 0.38166 to 0.37016, saving model to minions.hdf5\n", 376 | "Epoch 22/50\n", 377 | "55000/55000 [==============================] - 10s 175us/step - loss: 0.6217 - acc: 0.7905 - val_loss: 0.3771 - val_acc: 0.8924\n", 378 | "\n", 379 | "Epoch 00022: val_loss did not improve from 0.37016\n", 380 | "Epoch 23/50\n", 381 | "55000/55000 
[==============================] - 10s 175us/step - loss: 0.6203 - acc: 0.7882 - val_loss: 0.3732 - val_acc: 0.8911\n", 382 | "\n", 383 | "Epoch 00023: val_loss did not improve from 0.37016\n", 384 | "Epoch 24/50\n", 385 | "55000/55000 [==============================] - 9s 171us/step - loss: 0.6259 - acc: 0.7895 - val_loss: 0.3805 - val_acc: 0.8883\n", 386 | "\n", 387 | "Epoch 00024: val_loss did not improve from 0.37016\n", 388 | "Epoch 25/50\n", 389 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6204 - acc: 0.7907 - val_loss: 0.3785 - val_acc: 0.8908\n", 390 | "\n", 391 | "Epoch 00025: val_loss did not improve from 0.37016\n", 392 | "Epoch 26/50\n", 393 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.6194 - acc: 0.7916 - val_loss: 0.3764 - val_acc: 0.8924\n", 394 | "\n", 395 | "Epoch 00026: val_loss did not improve from 0.37016\n", 396 | "Epoch 27/50\n", 397 | "55000/55000 [==============================] - 9s 165us/step - loss: 0.6291 - acc: 0.7876 - val_loss: 0.3873 - val_acc: 0.8820\n", 398 | "\n", 399 | "Epoch 00027: val_loss did not improve from 0.37016\n", 400 | "Epoch 28/50\n", 401 | "55000/55000 [==============================] - 10s 175us/step - loss: 0.6165 - acc: 0.7912 - val_loss: 0.3765 - val_acc: 0.8919\n", 402 | "\n", 403 | "Epoch 00028: val_loss did not improve from 0.37016\n", 404 | "Epoch 29/50\n", 405 | "55000/55000 [==============================] - 10s 178us/step - loss: 0.6232 - acc: 0.7895 - val_loss: 0.3781 - val_acc: 0.8919\n", 406 | "\n", 407 | "Epoch 00029: val_loss did not improve from 0.37016\n", 408 | "Epoch 30/50\n", 409 | "55000/55000 [==============================] - 10s 173us/step - loss: 0.6226 - acc: 0.7904 - val_loss: 0.3909 - val_acc: 0.8823\n", 410 | "\n", 411 | "Epoch 00030: val_loss did not improve from 0.37016\n", 412 | "Epoch 31/50\n", 413 | "55000/55000 [==============================] - 10s 177us/step - loss: 0.6230 - acc: 0.7898 - val_loss: 0.3971 - val_acc: 0.8787\n", 414 | "\n", 415 | "Epoch 00031: val_loss did not improve from 0.37016\n", 416 | "Epoch 32/50\n", 417 | "55000/55000 [==============================] - 9s 168us/step - loss: 0.6197 - acc: 0.7899 - val_loss: 0.3747 - val_acc: 0.8887\n", 418 | "\n", 419 | "Epoch 00032: val_loss did not improve from 0.37016\n", 420 | "Epoch 33/50\n", 421 | "55000/55000 [==============================] - 9s 171us/step - loss: 0.6168 - acc: 0.7918 - val_loss: 0.3770 - val_acc: 0.8871\n", 422 | "\n", 423 | "Epoch 00033: val_loss did not improve from 0.37016\n", 424 | "Epoch 34/50\n", 425 | "55000/55000 [==============================] - 9s 164us/step - loss: 0.6146 - acc: 0.7914 - val_loss: 0.3842 - val_acc: 0.8843\n", 426 | "\n", 427 | "Epoch 00034: val_loss did not improve from 0.37016\n", 428 | "Epoch 35/50\n", 429 | "55000/55000 [==============================] - 9s 169us/step - loss: 0.6208 - acc: 0.7908 - val_loss: 0.3780 - val_acc: 0.8909\n", 430 | "\n", 431 | "Epoch 00035: val_loss did not improve from 0.37016\n", 432 | "Epoch 36/50\n", 433 | "55000/55000 [==============================] - 9s 164us/step - loss: 0.6172 - acc: 0.7935 - val_loss: 0.3705 - val_acc: 0.8917\n", 434 | "\n", 435 | "Epoch 00036: val_loss did not improve from 0.37016\n", 436 | "Epoch 37/50\n", 437 | "55000/55000 [==============================] - 9s 172us/step - loss: 0.6171 - acc: 0.7930 - val_loss: 0.3812 - val_acc: 0.8872\n", 438 | "\n", 439 | "Epoch 00037: val_loss did not improve from 0.37016\n", 440 | "Epoch 38/50\n", 441 | "55000/55000 
[==============================] - 9s 169us/step - loss: 0.6223 - acc: 0.7928 - val_loss: 0.3910 - val_acc: 0.8887\n", 442 | "\n", 443 | "Epoch 00038: val_loss did not improve from 0.37016\n", 444 | "Epoch 39/50\n", 445 | "55000/55000 [==============================] - 10s 175us/step - loss: 0.6133 - acc: 0.7920 - val_loss: 0.3759 - val_acc: 0.8900\n", 446 | "\n", 447 | "Epoch 00039: val_loss did not improve from 0.37016\n", 448 | "Epoch 40/50\n", 449 | "55000/55000 [==============================] - 10s 177us/step - loss: 0.6144 - acc: 0.7934 - val_loss: 0.3715 - val_acc: 0.8932\n" 450 | ] 451 | }, 452 | { 453 | "name": "stdout", 454 | "output_type": "stream", 455 | "text": [ 456 | "\n", 457 | "Epoch 00040: val_loss did not improve from 0.37016\n", 458 | "Epoch 41/50\n", 459 | "55000/55000 [==============================] - 9s 166us/step - loss: 0.6202 - acc: 0.7912 - val_loss: 0.3847 - val_acc: 0.8881\n", 460 | "\n", 461 | "Epoch 00041: val_loss did not improve from 0.37016\n", 462 | "Epoch 42/50\n", 463 | "55000/55000 [==============================] - 9s 170us/step - loss: 0.6162 - acc: 0.7936 - val_loss: 0.3791 - val_acc: 0.8903\n", 464 | "\n", 465 | "Epoch 00042: val_loss did not improve from 0.37016\n", 466 | "Epoch 43/50\n", 467 | "55000/55000 [==============================] - 10s 174us/step - loss: 0.6174 - acc: 0.7920 - val_loss: 0.3777 - val_acc: 0.8899\n", 468 | "\n", 469 | "Epoch 00043: val_loss did not improve from 0.37016\n", 470 | "Epoch 44/50\n", 471 | "55000/55000 [==============================] - 9s 173us/step - loss: 0.6199 - acc: 0.7920 - val_loss: 0.3805 - val_acc: 0.8934\n", 472 | "\n", 473 | "Epoch 00044: val_loss did not improve from 0.37016\n", 474 | "Epoch 45/50\n", 475 | "55000/55000 [==============================] - 9s 166us/step - loss: 0.6163 - acc: 0.7916 - val_loss: 0.3764 - val_acc: 0.8931\n", 476 | "\n", 477 | "Epoch 00045: val_loss did not improve from 0.37016\n", 478 | "Epoch 46/50\n", 479 | "55000/55000 [==============================] - 10s 174us/step - loss: 0.6176 - acc: 0.7918 - val_loss: 0.3700 - val_acc: 0.8965\n", 480 | "\n", 481 | "Epoch 00046: val_loss improved from 0.37016 to 0.37002, saving model to minions.hdf5\n", 482 | "Epoch 47/50\n", 483 | "55000/55000 [==============================] - 10s 182us/step - loss: 0.6180 - acc: 0.7921 - val_loss: 0.3797 - val_acc: 0.8936\n", 484 | "\n", 485 | "Epoch 00047: val_loss did not improve from 0.37002\n", 486 | "Epoch 48/50\n", 487 | "55000/55000 [==============================] - 10s 183us/step - loss: 0.6133 - acc: 0.7931 - val_loss: 0.3770 - val_acc: 0.8926\n", 488 | "\n", 489 | "Epoch 00048: val_loss did not improve from 0.37002\n", 490 | "Epoch 49/50\n", 491 | "55000/55000 [==============================] - 11s 196us/step - loss: 0.6175 - acc: 0.7912 - val_loss: 0.3755 - val_acc: 0.8888\n", 492 | "\n", 493 | "Epoch 00049: val_loss did not improve from 0.37002\n", 494 | "Epoch 50/50\n", 495 | "55000/55000 [==============================] - 10s 174us/step - loss: 0.6087 - acc: 0.7945 - val_loss: 0.3695 - val_acc: 0.8977\n", 496 | "\n", 497 | "Epoch 00050: val_loss improved from 0.37002 to 0.36951, saving model to minions.hdf5\n" 498 | ] 499 | } 500 | ], 501 | "source": [ 502 | "history = classifier.fit(X_train, y_train, epochs = 50, batch_size = 50, validation_data=(X_test, y_test), callbacks=[checkpointer])" 503 | ] 504 | }, 505 | { 506 | "cell_type": "markdown", 507 | "metadata": {}, 508 | "source": [ 509 | "# 查看训练过程" 510 | ] 511 | }, 512 | { 513 | "cell_type": "code", 514 | 
"execution_count": 19, 515 | "metadata": {}, 516 | "outputs": [], 517 | "source": [ 518 | "def plot_history(history) :\n", 519 | " SMALL_SIZE = 20\n", 520 | " MEDIUM_SIZE = 22\n", 521 | " BIGGER_SIZE = 24\n", 522 | "\n", 523 | " plt.rc('font', size=SMALL_SIZE) # controls default text sizes\n", 524 | " plt.rc('axes', titlesize=SMALL_SIZE) # fontsize of the axes title\n", 525 | " plt.rc('axes', labelsize=MEDIUM_SIZE) # fontsize of the x and y labels\n", 526 | " plt.rc('xtick', labelsize=SMALL_SIZE) # fontsize of the tick labels\n", 527 | " plt.rc('ytick', labelsize=SMALL_SIZE) # fontsize of the tick labels\n", 528 | " plt.rc('legend', fontsize=SMALL_SIZE) # legend fontsize\n", 529 | " plt.rc('figure', titlesize=BIGGER_SIZE) # fontsize of the figure title\n", 530 | "\n", 531 | " fig = plt.figure()\n", 532 | " fig.set_size_inches(15,10)\n", 533 | " plt.plot(history['loss'])\n", 534 | " plt.plot(history['val_loss'])\n", 535 | " plt.title('Model Loss')\n", 536 | " plt.xlabel('epoch')\n", 537 | " plt.ylabel('loss')\n", 538 | " plt.legend(['train', 'test'],loc='upper left')\n", 539 | " plt.show()" 540 | ] 541 | }, 542 | { 543 | "cell_type": "code", 544 | "execution_count": 29, 545 | "metadata": {}, 546 | "outputs": [ 547 | { 548 | "data": { 549 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAA5IAAAJ1CAYAAACrR8+cAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd5xddZ3/8ddnSmZSJ50ZQ0kInQQCBEJzDSAioBgUlwVkCaBYlwXLCq5IUBb1t1iCZVFUguKiKwooS1tKMHQCIi10QyhJSO9lyvf3x70TQ0iduTPnltfz8ZjHzZxz7rnvm+jj4dvv93y/kVJCkiRJkqStVZV1AEmSJElSabFISpIkSZK2iUVSkiRJkrRNLJKSJEmSpG1ikZQkSZIkbROLpCRJkiRpm1gkJUkqsIgYHxEpIiZ18j4T8/eZWJhkkiQVhkVSklTy8mUrRURbRIzczHX3rHftxG6M2C3WK55Tss4iSSpvFklJUrloAQI4e2MnI2JX4D356yRJUidYJCVJ5WIuMB04MyJqNnL+4+SK5s3dmkqSpDJkkZQklZOrgEbgA+sfjIha4AzgAeCZTb05InaNiF9GxBsRsTYi3sz/vusmrt8uIn4eEXMjYlVEPBERZ2wuYEQMjIhvRsSM/HuWRMRdEfG+bf62nRQRdRFxQUQ8GRErI2JpREyLiH/cxPUn5LPOjog1+b+feyPiMxtct3NE/DQiXsp/x4UR8VREXBkRg7rn20mSutLG/h9bSZJK1XXAd8mNPt643vETgO2AC4BdNvbGiDgQuBPoC/wReBbYAzgN+FBEHJVSmr7e9YPIFdOdgfvyP03AlcAdm/iMnYCpwHBgGnAb0Jtc8b0tIj6ZUrpq27/2touIHsDt5Kb7Pgf8COgFnAT8NiLGpJS+st715wA/AeYAfwLmA0OBfYAzgR/nr2sCHgX6AbcAvwfqgRHA6cAPgQVd/w0lSV3JIilJKhsppWUR8RtgYkRsn1J6PX/qE8BS4H+Ar2z4vogI4Jfkys/HUkq/Xu/cycBvgGsjYq+UUlv+1DfJlcjvp5TOX+/6HwIPbiLiNcBOwCkppd+s957+5ArmFRHxx5TS3G3/9tvsC+RK5K3ACSmllnyWS4BHgAsj4uaU0gP56z8JrAX2TSm9tf6NImLwer+eBAwEzkspTd7gut5AG5KkkufUVklSubkKqAbOgnWjgEcDv04prdzEew4lN/r44PolEiCl9Ftyo427A4fn71lLbqRyGTBpg+unA2+7R/49+5Irbr9fv0Tm37MYuJjcyN1Htv6rdspZQAI+314i81neAr6R//XjG7ynBWje8EYppfkbuf+qjVy3IqX0juOSpNJjkZQklZWU0sPAU8BZEVFFrgxVkSuYm7J//vXuTZxvP75f/nUPctNAn0gpLdnI9VM3cuyQ/GtDREza8Adof0Zyz83kLIiI6Etuiu+bKaXnNnLJht8XcuW4F/BMRHwvIiZExJCNvPePwHLgRxHx+4g4JyL2zo/6SpLKhFNbJUnl6CrgCuD95J7feyyl9JfNXN+Qf529ifPtx/tvcP2mpqDO2cix9kVmjs7/bEqfzZwrlG39vqSUvhsR84HPAOcC5wEpIu4FvtT+/GhK6dWIOIjcSO37gQ/nb/FaRFyeUrqioN9EkpQJRyQlSeXoV+SmVv4EGAb8dAvXt48qNm7ifNMG17W/breJ6zd2n/b3/GtKKTbzc+YWshbCtn5fAFJKv0wpHUyuFB8P/Bz4B+D2iBi63nUzUkon568bS26RoypgckRsdJ9PSVJpsUhKkspO/pnD64HtgRXkVnPdnPbRyvGbON9+/PH863PASmBMRDRs5vr1PZR/ffcWsnS5lNIy4GVg2Ca2Njki//r4Rs6RUlqcUrolpfQJYAq5xXXe8b1SSi0ppcdSSt8GTskfntDZ/JKk7FkkJUnl6qvAicAx+eK0OfcDzwOHR8RJ65/I//4PwAvkFt0hpdRM7pnBvmyw2E5EjCW3EM/b5Kd+TgM+HBFnbSxERIxef2Svi/0CCOA/I6J6vQyDgYvWu6b9+PsjYmOPxLTnXZm/7qCI2NhI7XbrXydJKm0+IylJKksppVnArK28NkXEGcD/kdtD8SZyo467kxtBWwb883pbf0BuG5GjgPPy5bF9H8mTye2feMJGPupUcgvZ/DwizgUeBhaTGzndBxhFblGetzby3m1xeERM2cS5x/PPKV4OHAt8CPhrRNxCbjGdj5Irh/8vpXTfeu/7DbA6Iu4DZpIroe
8GDgQeI7cHZ/t3/Gz+2cmXgEXASOCDwBrg+538bpKkImCRlCSJ3GqvEXEguZHM95IrPvPJTYv9Rkrp+Q2unx8RhwGX5a8dS25U89PkitY7imRK6fWIOAD4F3LbfJxGbquSOcCzwA/IrTjbWSPzPxvTH7gipbQ2Io4GPk+u/P0Lue09/kpuD8gNpwNfABxDboXb44DVwKvAl4H/yo/SQu7vq47clir7Az2BN8gV0e+klJ4uwPeTJGUsUkpZZ5AkSZIklRCfkZQkSZIkbZOiKZIRcVJE/CAipkXE0ohIEXFtAe57ev5eKSI+XoiskiRJklTJiukZya8C+wLLgdeBPTp7w4jYgdzzJsvpng2eJUmSJKnsFc2IJHA+sBvQj9xCBZ0SEQFcDSwAruzs/SRJkiRJOUUzIplSuqf9z7kO2GnnAkeS2xT6yELcUJIkSZJUREWykCJiT+BbwOSU0p8jYpuL5ODBg9Pw4cMLnk2SJEmSSsFjjz02P6U0ZGPnyq5IRkQN8Ctym1B/paP3GT58ONOnTy9YLkmSJEkqJRHx6qbOlV2RBL4G7AccnlJalXUYSZIkSSo3xbTYTqdFxEHkRiG/k1J6sAPvPycipkfE9Hnz5hU+oCRJkiSVgbIpkutNaX0BuKgj90gp/TSlNDalNHbIkI1OBZYkSZKkilc2RZLcPpG7AXsCqyMitf8AF+evuSp/7PuZpZQkSZKkEldOz0iuAX6+iXP7k3tu8j7geWCbp71KkiRJknJKskhGRC0wEmhOKb0MkF9Y5+ObuH4SuSJ5TUrpZ92VU5IkSZLKUdEUyYiYAEzI/9qYfz0kIqbk/zw/pfTF/J+HATOAV4Hh3ZVRkiRJklRERRIYA5yxwbGd8z+QK41fRJIkSZKUqUgpZZ2hKI0dOzZNnz59q65ds2YNCxcuZNmyZbS2tnZxMnWl6upq+vbty8CBA6mrq8s6jiRJkpSZiHgspTR2Y+eKaUSyJK1Zs4ZZs2YxYMAAhg8fTm1tLRGRdSx1QEqJ5uZmli5dyqxZs9hxxx0tk5IkSdJGlNP2H5lYuHAhAwYMYPDgwfTo0cMSWcIigh49ejB48GAGDBjAwoULs44kSZIkFSWLZCctW7aMfv36ZR1DBdavXz+WLVuWdQxJkiSpKFkkO6m1tZXa2tqsY6jAamtrfd5VkiRJ2gSLZAE4nbX8+G8qSZIkbZpFUpIkSZK0TSySkiRJkqRtYpFUSZo0aRIRwdSpU7OOIkmSJFUci6QKYubMmUQEEydOzDqKJEmSpC5mkVRJ+tznPseMGTM46KCDso4iSZIkVZyarANIHTF48GAGDx6cdQxJkiSpIjkiqU6bNGkSI0aMAOCaa64hItb9TJkyhalTpxIRTJo0iUceeYTjjz+egQMHEhHMnDkTgHvuuYdzzjmHvfbai379+tGzZ09GjRrFJZdcwurVqzf6mRt7RjIiGD9+PPPnz+ecc86hqamJuro69t57b66++uqu/quQJEmSKoIjkuq08ePHs3jxYiZPnsy+++7LhAkT1p0bM2YMixcvBuDBBx/km9/8JocffjhnnXUW8+fPp0ePHgB8+9vf5rnnnuPQQw/l+OOPZ/Xq1dx///1MmjSJqVOncuedd1JdXb1VeRYvXsxhhx1Gjx49OOmkk1i9ejXXX389Z511FlVVVZxxxhmF/0uQJEmSKohFsoTMXrKKVWtb2XlIn6yjvM348eMZPnw4kydPZsyYMUyaNOlt59tHDe+44w6uvPJKPvnJT77jHj/+8Y8ZMWIEEfG24xdddBGXXnop119/PSeffPJW5fnrX//K2WefzU9+8pN15fP8889nn3324dvf/rZFUpIkSeoki2QXuuRPz/Dsm0sLdr+1LW20tLXRq0fh/tn2elc/Lv7g3gW73+aMGTNmoyUSYOedd97o8fPOO49LL72U22+/fauLZK9evfjud7/7thHMvfbai8MOO4w///nPLFu2jL59+277F5AkSZIE+IxkSYmAlLJO0XGbW2F1xYoVXHbZZRx44IE0NDRQVVVFRKxbUOeNN97Y6s/Zdddd6dev3zuO77DDDgDrptpKkiRJ6hhHJLtQoUf6Fq9cy6yFK9ltu77U127d84LFpLGxcaPHm5ubOfLII3nkkUcYNWoUJ598MkOGDKG2thaASy65hDVr1mz15/Tv33+jx2tqcv9xb21t3cbkkiRJktZnkSwhNdW5AeTm1raSLJIbPv/Y7qabbuKRRx7hjDPOYMqUKW87N3v2bC655JJuSCdJkiRpazm1tYTUVueKWHNr8c1vbX8esSOjfS+99BIAH/nIR95x7t577+1cMEmSJEkFZ5EsIbVVfx+RLDYDBgwgIpg1a9Y2v3f48OEA79gT8pVXXuHLX/5yAdJJkiRJKiSntpaQqqqgpqqqKItknz59GDduHNOmTeO0005jt912o7q6mhNOOGGL7/3gBz/ILrvswne/+12eeuop9ttvP2bNmsXNN9/M8ccf36FyKkmSJKnrWCRLTG11FOXUVoBf/epXnH/++dx2221cd911pJTYfvvt1404bkrv3r25++67ueCCC5g6dSrTpk1j55135qKLLuLzn/88v/3tb7vnC0iSJEnaKpFKeT+JLjR27Ng0ffr0LV43Y8YM9txzz25IlDNz/grWtrax23bug9jVuvvfVpIkSSomEfFYSmnsxs75jGSJqa0uzqmtkiRJkiqHRbLE1FYHrW2JtjZHkiVJkiRlwyJZYmqri3flVkmSJEmVwSJZYv6+l6RFUpIkSVI2LJIl5u8jkk5tlSRJkpQNi2SJcWqrJEmSpKxZJEtMVVVQXRUWSUmSJEmZsUiWoNwWIE5tlSRJkpQNi2QJci9JSZIkSVmySJag2upwRFKSJElSZiySJai2uoqWtjba2iyTkiRJkrqfRbIErVu5tc3prZIkSZK6n0WyBNVWB+BekpIkSZKyYZEsQe0jki0uuCNJkiQpAxbJEtReJNcWUZGcOXMmEcHEiRO79XMnTZpERDB16tRu/VxJkiSpklkkS1B1VVBd5cqtkiRJkrJhkSxRtdVVNLcUz4ikJEmSpMphkSxRtdVVRbNq66RJkxgxYgQA11xzDRGx7mfKlCnrrrv99ts57rjjGDx4MHV1dYwcOZIvfelLLF68+B33fPLJJznllFMYPnw4dXV1DBkyhP3335/zzjuP5uZmAIYPH84ll1wCwBFHHPG2z5UkSZLUdWqyDqCOqa0KVjUXx9TW8ePHs3jxYiZPnsy+++7LhAkT1p0bM2YMAF//+te5+OKLGThwIB/4wAcYOnQoTz75JJdffjm33HILDz74IP369QNyJXLcuHFEBCeccAIjRoxg6dKlvPTSS/z4xz/m0ksvpba2lvPOO48bb7yRe++9lzPOOIPhw4dn8fUlSZKkimORLFG1NVW0rGyjLSWqMh6BGz9+PMOHD2fy5MmMGTOGSZMmve38Pffcw8UXX8whhxzCLbfcQv/+/dedmzJlCmeeeSYXX3wx3/ve94DcqObq1au58cYb+dCHPvS2ey1atIhevXoBcN5557F48WLuv
fdeJk6cyPjx47v0e0qSJEnKsUh2pVsvgDlPdcmtB7a10bu5DXpUQ2eKZONoOPZbhQu2EVdccQUAV1111dtKJMDEiROZPHkyv/71r9cVyXY9e/Z8x70GDBjQdUElSZIkbRWLZIlqr47FMbl18x588EFqa2v53e9+x+9+97t3nF+7di3z5s1jwYIFDBo0iJNPPpnJkyczYcIETjrpJN773vdy2GGHMXLkyAzSS5IkSdqQRbIrdeFIX0tzK6/MXcaOA3vRv1ePLvucQliwYAEtLS3rFsbZlOXLlzNo0CAOOuggpk2bxn/8x39w/fXX86tf/QqA3XffnYsvvphTTjmlO2JLkiRJ2gRXbS1RtdW5MclS2EuyoaGBAQMGkFLa7M9OO+207j2HHHIIN998M4sWLeL+++/noosuYu7cuZx66qnceeedGX4bSZIkSRbJElUVQVUEza3FsQVIdXU1AK2tre84d/DBB7No0SKeeeaZbb5vXV0dhx56KF//+tfXPWt50003bdXnSpIkSeoaFskSFRG5vSSLpEgOGDCAiGDWrFnvOHf++ecD8IlPfII333zzHedXrFjBQw89tO73adOmsWTJkndcN3fuXIB1q7YCDBo0CGCjnytJkiSpa/iMZAmrrY6imdrap08fxo0bx7Rp0zjttNPYbbfdqK6u5oQTTuCoo47iW9/6FhdeeCG77rorxx13HCNGjGD58uW8+uqr3HvvvRx++OHcdtttAHznO9/hjjvuYPz48ey888706dOHZ555hltvvZUBAwZwzjnnrPvcI444gqqqKi688EKefvrpdau6fvWrX83k70GSJEmqBJFScRSRYjN27Ng0ffr0LV43Y8YM9txzz25I9E6vLVzJ8jUt7NnUL5PP39BLL73E+eefzwMPPMCiRYtIKXH11VczceJEAO677z6uuOIK7rvvPubPn09DQwPDhg3jyCOP5NRTT2Xs2LEA3HHHHVx33XU8/PDDvPHGG7S0tLD99ttzzDHH8IUvfOFtz1ICXHvttVx++eU8//zzrF69GoBC/Oc6y39bSZIkKWsR8VhKaexGz1kkN64UiuScJauZt2w1o4Y1EJ3ZS1IbZZGUJElSJdtckfQZyRJWWx0kSmPlVkmSJEnlwyJZwmqrc/98xbLgjiRJkqTKYJEsYe1FssUiKUmSJKkbWSRLWG117rnItU5tlSRJktSNLJIlrLoqqIpwRFKSJElSt7JIlrCIoLa6ymckJUmSJHUri2SJq60OV22VJEmS1K0skgWQ5V6cjkh2DfdXlSRJkjbNItlJ1dXVNDc3Z/b57SOSFp/Cam5uprq6OusYkiRJUlGySHZS3759Wbp0aWafX1tdRSLR0maRLKSlS5fSt2/frGNIkiRJRcki2UkDBw5k0aJFzJ8/n7Vr13b7yGD7XpJOb+28lBJr165l/vz5LFq0iIEDB2YdSZIkSSpKNVkHKHV1dXXsuOOOLFy4kJkzZ9La2tqtn7+2pY23lq2hZUEPevZwKmZnVVdX07dvX3bccUfq6uqyjiNJkiQVJYtkAdTV1dHU1ERTU1O3f/b85Wv40KV3MumDezHxsBHd/vmSJEmSKo9TW0vcwF496FFdxeylq7OOIkmSJKlCWCRLXFVVsF1DHXOWWCQlSZIkdQ+LZBloaujJbIukJEmSpG5ikSwDTQ31jkhKkiRJ6jYWyTLQmC+S3b31iCRJkqTKZJEsA0396lnb2saCFWuzjiJJkiSpAlgky0BjQ08Ap7dKkiRJ6hYWyTLQ1FAP4II7kiRJkrqFRbIMtBfJOUtWZZxEkiRJUiWwSJaBQX3qqKkKRyQlSZIkdQuLZBmorgq26+cWIJIkSZK6h0WyTDQ21DsiKUmSJKlbWCTLRGNDPXOWWiQlSZIkdT2LZJl4V0M9s5esIqWUdRRJkiRJZc4iWSYaG3qyurmNJauas44iSZIkqcxZJMuEe0lKkiRJ6i4WyTLRuK5IupekJEmSpK5lkSwTjkhKkiRJ6i4WyTIxpE8dVYF7SUqSJEnqchbJMlFTXcXQvu4lKUmSJKnrFU2RjIiTIuIHETEtIpZGRIqIa7fxHoMi4uMRcUNEvBQRqyJiSUTcFxFnR0TRfN+u0NhQ74ikJEmSpC5Xk3WA9XwV2BdYDrwO7NGBe3wU+C9gNnAPMAvYDvgw8DPg2Ij4aCrTzRabGup5Ye6yrGNIkiRJKnPFNEJ3PrAb0A/4dAfv8QJwArB9Sum0lNKFKaWzyJXS14CPkCuVZamxITe1tUx7siRJkqQiUTRFMqV0T0rpxc6MFqaU7k4p/Sml1LbB8TnAlflfx3ciZlFraqhn5dpWlq1pyTqKJEmSpDJWNEWyGzTnX8u2ZTU19ARcuVWSJElS16qIIhkRNcA/53+9LcssXcm9JCVJkiR1h4ooksC3gFHALSml27MO01Ua80VyzpJVGSeRJEmSVM7KvkhGxLnAF4DngNO3cO05ETE9IqbPmzevW/IV0tC+9UTAm4sdkZQkSZLUdcq6SEbEZ4HJwLPAESmlhZu7PqX005TS2JTS2CFDhnRLxkLqUVPF4D51PiMpSZIkqUuVbZGMiPOAHwJPkyuRczKO1C2aGuqZvdQiKUmSJKnrlGWRjIgvA98DniBXIt/KOFK3aexX7zOSkiRJkrpUSRbJiKiNiD0iYuRGzl1EbnGdx4CjUkrzuz1ghpoa6l21VZIkSVKXqsk6QLuImABMyP/amH89JCKm5P88P6X0xfyfhwEzgFeB4evd4wzg60ArMA04NyI2/KiZKaUpGx4sF40NPVm2uoXla1roU1c0/7ySJEmSykgxNY0xwBkbHNs5/wO50vhFNm9E/rUaOG8T19wLTOlAvpLQtG4LkNXsMrRPxmkkSZIklaOimdqaUpqUUorN/Axf79qZGx7byntESml8N3+1brV+kZQkSZKkrlA0RVKF0dTQE4DZLrgjSZIkqYtYJMvM0H51gCOSkiRJkrqORbLM1NdWM6h3D/eSlCRJktRlLJJlqLGhntmLndoqSZIkqWtYJMuQe0lKkiRJ6koWyTLU2FDPHKe2SpIkSeoiFsky1NTQk8Urm1m1tjXrKJIkSZLKkEWyDDX2y+8l6aikJEmSpC5gkSxDTQ25IulekpIkSZK6gkWyDDXmi6R7SUqSJEnqChbJMtTU0BPAlVslSZIkdQmLZBnq2aOa/r1qHZGUJEmS1CUskmWqsZ97SUqSJEnqGhbJMtXUUM+cpS62I0mSJKnwLJJlqrGhJ7MXOyIpSZIkqfAskmWqqaGeBSvWsrq5NesokiRJksqMRbJMtW8B8tbSNRknkSRJklRuLJJlqilfJGcv8TlJSZIkSYVlkSxT7UVyzlKfk5QkSZJUWBbJMtXY0BPALUAkSZIkFZxFskz1qauhb10NcyySkiRJkgrMIlnGmvrX+4ykJEmSpIKzSJaxxoaejkhKkiRJKjiLZBlr6lfvM5KSJEmSCs4iWcYaG+qZt3wNza1tWUeRJEmSVEYskmWsqaGelGCuW4BIkiRJKiCLZBlrbN9L0umtkiRJkgrIIlnGmtxLUpIkSVIX
sEiWMUckJUmSJHUFi2QZ61dfQ68e1Y5ISpIkSSooi2QZiwgaG+qZs3RV1lEkSZIklRGLZJlranAvSUmSJEmFZZEsc00NPX1GUpIkSVJBWSTLXFNDPW8tW0NLa1vWUSRJkiSVCYtkmWtsqKe1LTF/+dqso0iSJEkqExbJMteU3wJk9hIX3JEkSZJUGBbJMtfYryeAC+5IkiRJKhiLZJn7+4ikRVKSJElSYVgky1z/XrXU1VQxx6mtkiRJkgrEIlnmIsK9JCVJkiQVlEWyAjQ21LuXpCRJkqSCsUhWgKaGno5ISpIkSSoYi2QFaGyoZ+7S1bS1payjSJIkSSoDFskK8K6GelraEvNXrMk6iiRJkqQyYJGsAI0Nub0kfU5SkiRJUiFYJCuAe0lKkiRJKiSLZAVozBdJRyQlSZIkFYJFsgIM7NWDHtVVvLlkVdZRJEmSJJUBi2QFqKoKtmuoc0RSkiRJUkFYJCtEUz/3kpQkSZJUGBbJCtHYUO+IpCRJkqSCsEhWiKZ8kUwpZR1FkiRJUomzSFaIxoZ61ra2sXDF2qyjSJIkSSpxFskK4V6SkiRJkgrFIlkhmhp6Au4lKUmSJKnzLJIVYoeBvagKuOu5uVlHkSRJklTiLJIVYmDvHpx9+Aiue+Q1Hp25MOs4kiRJkkqYRbKCnH/0bgzr35ML//AUa1pas44jSZIkqURZJCtIrx41XHriKF56azn/NfXlrONIkiRJKlEWyQpzxO5DOWHfd/Hje17mpbeWZR1HkiRJUgmySFagiz6wFz17VPOVPzxNW1vKOo4kSZKkEmORrEBD+tbx78ftySMzF/Lb6a9lHUeSJElSibFIVqiPjt2eg3ceyGW3zOCtpe4tKUmSJGnrWSQrVERw2YmjWdPSxiU3P5t1HEmSJEklxCJZwXYe0odzj9yF/31yNnfNmJt1HEmSJEklwiJZ4c75h5Hstl0fLrrxaZavack6jiRJkqQSYJGscD1qqvjmh/dh9tLVfOeO57OOI0mSJKkEWCTFATsN4GPjdmLKAzN54rXFWceRJEmSVOQskgLgS+/fnaF967jwD0/R3NqWdRxJkiRJRcwiKQD61ddyyQmjmDF7KT+b9res40iSJEkqYhZJrfP+UY0cs/d2fP/OF3h1wYqs40iSJEkqUhZJvc0lJ4yitrqKf7/haVJKWceRJEmSVIQsknqbxoZ6vvz+3bnvpfnc8Jc3so4jSZIkqQhZJPUOp43bif137M83bn6WhSvWZh1HkiRJUpGxSOodqqqCb354H5atbuHS/3026ziSJEmSioxFUhu1e2NfPvWekfzh8TeY9uK8rONIkiRJKiIWSW3S547chRGDe/PvNzzNqrWtWceRJEmSVCQsktqk+tpqLjtxNLMWrmTyXS9mHUeSJElSkbBIarMOGTmIfxy7PVdNe4Vn31yadRxJkiRJRcAiqS36ynF70r9nLf/2+7+yutkprpIkSVKls0hqi/r36sFlHx7N028s5UvXP0lKKetIkiRJkjJkkdRWOWbvRr50zO786a9v8r07fV5SkiRJqmQ1WQdQ6fjM+JH8bf4KrrjrRXYe3JsJ+w3LOpIkSZKkDDgiqa0WEVx24mjGjRjIv13/JI/OXJh1JEmSJEkZsEhqm/SoqeLKjx3AsAE9+eSvHmPWgpVZR5IkSZLUzSyS2mYDevfgFxMPpLUtceaUR1iyqjnrSJIkSZK6UVEUyYg4KSJ+EHPwk/kAACAASURBVBHTImJpRKSIuLaD99o+In4REW9GxJqImBkR34+IAYXOXclGDO7NT04/gFkLV/KZXz9Gc2tb1pEkSZIkdZOiKJLAV4HPAWOANzp6k4gYCTwGnAk8AnwPeAX4V+DBiBjU+ahqd/DOg7jsxNHc/9ICvnbT024LIkmSJFWIYimS5wO7Af2AT3fiPj8GhgLnppQmpJQuSCkdSa5Q7g78R6eT6m0+OnYHPjN+JNc98ho/m/a3rONIkiRJ6gZFUSRTSveklF5MnRjSioidgfcBM4EfbXD6YmAFcHpE9O5wUG3UF9+3O8eNbuSyW2dwxzNzso4jSZIkqYsVRZEskCPzr3eklN72wF5KaRlwP9ALOLi7g5W7qqrgOx8dwz7DGvjX3zzB028syTqSJEmSpC5UTkVy9/zrC5s4/2L+dbduyFJxevao5qozxjKgVy1nX/Moc5aszjqSJEmSpC5STkWyIf+6qeGw9uP9N3WDiDgnIqZHxPR58+YVNFwlGNq3np9PPJDlq1s4+5pHWbGmJetIkiRJkrpAORXJLYn86yafw0wp/TSlNDalNHbIkCHdFKu87NnUjx+euj8zZi/lvN8+QWubK7lKkiRJ5aacimT7iGPDJs732+A6dZEj9hjK1z6wF//37Fy+fdtzWceRJEmSVGA1WQcooOfzr5t6BnLX/OumnqFUAU08bASvzF/BT//8CiMG9+aUg3bMOpIkSZKkAimnEcl78q/vi4i3fa+I6AscBqwCHuruYJXqax/Yi/fsNoSLbnya+1+an3UcSZIkSQVSckUyImojYo+IGLn+8ZTSy8AdwHDgsxu87RKgN/DLlNKKbgkqaqqr+OGp+zFySB8+de1jPDdnadaRJEmSJBVApJT9YigRMQGYkP+1ETgGeAWYlj82P6X0xfy1w4G/Aa+mlIZvcJ+RwAPAUOAmYAYwDjiC3JTWQ1NKC7Ym09ixY9P06dM7/J30d68vWsmHf/wAq9a28uOP7c+7d3UhI0mSJKnYRcRjKaWxGztXLCOSY4Az8j/H5I/tvN6xk7bmJvlRybHAFHIF8gvASOAK4JCtLZEqrO0H9OKGzx7GsAE9mXj1o1z3yKysI0mSJEnqhKIYkSxGjkgW3rLVzXzuv//CvS/M45P/sDNffv8eVFXFlt8oSZIkqduVwoikKkDf+lp+fsZYTj94J37y51f4zK8fZ9Xa1qxjSZIkSdpGFkl1q5rqKr7+ob256AN7cfuzc/innz7IW8tWZx1LkiRJ0jawSKrbRQRnHz6Cn54+lhfmLufEHz3A83OWZR1LkiRJ0laySCozR++1Hb/71CG0tLXxkf96gHtfmJd1JEmSJElbwSKpTI0a1sCNnz2MHQb24qwpj3LtQ69mHUmSJEnSFlgklbmmhp787lOH8J7dhvDVG5/m0pufpbXN1YQlSZKkYmWRVFHoU1fDT08/gImHDudn9/2NT137GCvXtmQdS5IkSdJGWCRVNGqqq5h0wt5c/MG9uGvGXE7+yUPMXeqKrpIkSVKxsUiq6Jx52Aiu+uexvDxvORN+dD/Pvrk060iSJEmS1mORVFE6as/ciq4pwUevfIC7n5ubdSRJkiRJeRZJFa2935Vb0XX44N6cNWU65173F2YvWZV1LEmSJKniWSRV1Bob6rn+U4dy7pG7cPszczjy8nu54q4XWd3cmnU0SZIkqWJZJFX0evao5vPv2507P/8ejtxjKN/9vxc46jv38r9PziYltwmRJEmSuptFUiVjh4G9+NFp+3PdJw6mb30Nn/3vx/mnnz7kYjySJElSN7NIquQcMnIQ/3vuu/mPE0fxwtx
lfOAH0/jKDU+xYPmarKNJkiRJFcEiqZJUXRWcNm4npn7xCCYeOoL/efQ1jrh8Kr+47280t7ZlHU+SJEkqaxZJlbSGXrV87YN7cdt572bfHfrz9Zuf5f3f/zP3vjAv62iSJElS2bJIqizsMrQvvzzrIH5+xlha2xJn/OIRzp7yKH+bvyLraJIkSVLZsUiqbEQER+25Hbef/w9ceOwePPy3hbzve/dy2S0zWLGmJet4kiRJUtmwSKrs1NVU88n3jOTuL76HE/cbxlXTXuGsKY+ypsW9JyVJkqRCsEiqbA3tW8//O2lfvn/yGB7+20K+fP2T7jspSZIkFUBN1gGkrvahMcN4fdEq/vP259l+QC++eMzuWUeSJEmSSppFUhXhM+NH8vqilfzwnpcYNqAnpxy0Y9aRJEmSpJJlkVRFiAi+8aFRvLl4NV+98WmaGuoZv/vQrGNJkiRJJclnJFUxaqqr+NFp+7P7dn357K8f55k3l2QdSZIkSSpJFklVlD51NVx95oH061nLWVMe5c3Fq7KOJEmSJJUci6Qqznb96rn6zANZuaaVs6Y8ytLVzVlHkiRJkkqKRVIVaY/Gflx5+gG89NZyPnPt4zS3tmUdSZIkSSoZFklVrMN2Gcy3PrIP9700nwv/8JR7TEqSJElbyVVbVdFOOmB7Xlu4ksl3vcgOA3rxr+/dNetIkiRJUtGzSKrinffeXXl90Sq+d+cLbD+gJx85YPusI0mSJElFzSKpihcRfPPDo5m9ZBVf/v2TNDXUc+gug7OOJUmSJBUtn5GUgB41VfzXxw5g5yG9+eS1j/HC3GVZR5IkSZKKlkVSymvoWcvVZx5Ez9pqJv7iEeYuXZ11JEmSJKkoWSSl9Qzr35NfTDyQxauaOWvKo6xY05J1JEmSJKnoWCSlDYwa1sCPTtuf5+Ys43P//Tgt7jEpSZIkvY1FUtqII3Yfyjc+NIp7np/HxX98Jus4kiRJUlGxSEqbcOq4HTn78BH8+uFZvDJvedZxJEmSpKJhkZQ24+PvHgHArU/PyTiJJEmSVDwsktJmNDX0ZP8d+3PLU7OzjiJJkiQVDYuktAXHjW7imTeX8uqCFVlHkSRJkoqCRVLagvePagSc3ipJkiS1s0hKW7D9gF7su0N/bnV6qyRJkgQUsEhGRFVEfDwifhARX4yIvoW6t5S140Y18tfXl/D6opVZR5EkSZIyt81FMiIuiIiVETF+g1P/C/wE+CzwbeDBiOjd+YhS9o4d1QTArU85vVWSJEnqyIjkMcBS4N72AxHxvvzxN4BLgUeAPYGzCpBRytyOg3oxalg/bnna6a2SJElSR4rkLsCzKaW03rGPAAn4p5TS14AjgUXAqZ2PKBWHY0c18ZdZi3lz8aqso0iSJEmZ6kiRHARsOCxzODAnpfQAQEppFfAAMLxT6aQictzo3PTW21y9VZIkSRWuI0UyAeuefYyIBmAP4P4NrlsC9O94NKm4jBjcmz2b+nGr01slSZJU4TpSJP8GjIuI9vd+AAjgvg2uGwLM70Q2qegcN6qR6a8uYu7S1VlHkSRJkjLTkSL5R2A74IaIOBf4T6AVuKn9gogIYD9ypVMqG8eObiIlp7dKkiSpsnWkSH4bmAF8EPg+0AhcnlJ6db1rDic3IrnhKKU64/nb4P4rsk5R0XYZ2ofdtuvDLU85vVWSJEmVa5uLZEppCTAWOAP4N2B8SunCDS4bBEwGftPphPq7F++Au78By+dlnaSiHTuqiUdmLmTesjVZR5EkSZIy0ZERSVJKq1JKv0opXZ5S+vNGzt+YUjo/pfRk5yNqnXGfgta18NjVWSepaMfvk5veevszTm+VJElSZepQkdyciBgUEdWFvq+AIbvBLkfDoz+DlrVZp6lYuw7tw8ghvZ3eKkmSpIq1zUUyIsZExL9FxB4bHH9fRLwGvAXMi4hPFCqk1nPwp2D5XHjmhqyTVKyI4LjRTTz0ygIWLHd6qyRJkipPR0Yk/wW4DFjafiAitgP+AAwjt89kf+DKiDiwECG1npFHweDd4aEfQ0pZp6lYx45qoi3BHc/OzTqKJEmS1O06UiQPBZ5MKb253rHTgV7kVnGtBz5Mbm/Jf+l0Qr1dBIz7JMx+Al57OOs0FWvPpr4MH9TL6a2SJEmqSB0pkkOB1zY4djTQDFySUmpJKd0ITAfGdTKfNmbff4L6/rlRSWWifXrrAy8vYNEKn1eVJElSZelIkewLLN/g2EHA4/mtQdq9TG6qqwqtR2844AyY8SdYPCvrNBXruNFNtLYl/s/prZIkSaowHSmSi4Cd2n+JiDFAA3D/Ru7d3PFo2qwDPwEEPHJV1kkq1t7v6scOA3tyy9NOb5UkSVJl6UiRnA6Mi4j2aavnk1tg5+4NrtsV8H9hd5X+O8CeH4THr4G1K7JOU5EiguNGNXH/S/NZstL/z0SSJEmVoyNFcjJQDTwQEQvILbTzN+D29gsiYjAwGniiECG1CQd/BlYvgb9el3WSinXs6CaaWxN3znB6qyRJkirHNhfJlNIdwFnAq0AdMBX4QEqpdb3LTidXNqd2PqI2aYeD4F37wcM/gba2rNNUpH23b2BY/57c6vRWSZIkVZCOjEiSUpqSUto5pdQnpXRkSum5DS65EhgA/LzTCbVpEblRyfkvwMsbzixWd4gIjh3VyJ9fmM/S1U5vlSRJUmXoUJHckpTSqpTSkg1GKdUV9poAfRrdCiRDx45uYm1rG3fPeCvrKJIkSVK36FSRjIhhEXFKRHwx/3NKRLjlR3eq6QEHfhxevgvmPZ91moq03w79aexXzy1POb1VkiRJlaFDRTIi+kfEr4GZwLXAt/M/1wIzI+LaiOhfsJTavLFnQnUdPHxl1kkqUlVV8P5RjUx9YR7L17RkHUeSJEnqcttcJCOiJ7mtPv4JCOBh4L+B64CH8sdOAe7KX6uu1nsw7PNR+OtvYOXCrNNUpONGN7G2pY17nnN6qyRJkspfR0YkzwPGAA8Co1NKh6aUTk8pfSyldBi5bT/uz19zbuGiarPGfRqaV8Ljv8w6SUUau9MAhvatc3qrJEmSKkJHiuQ/AouA41NKMzY8mT92ArCY3KilukPjKBj+bnjkKmh1emV3a5/ees/zb7FyrX//kiRJKm8dKZK7AveklJZs6oKU0mLgnvy16i4HfwaWvg7P/SnrJBXp2FFNrG5uY+rz87KOIkmSJHWpjhTJRO45SBWb3Y6BAcPhIRfdycJBIwYyqHcPp7dKkiSp7HWkSL4MvCci+m7qgojoB4wHXupgLnVEVTWM+xS89hC88XjWaSpOdVVwzKhG7n7uLVY3u4WqJEmSyldHiuTvgIHAHyPiHVNXI2IX4AZgAPA/nYunbTbmNOjR161AMnL86CZWrm11eqskSZLKWkeK5PeAp4H3AM9GxLSI+GVEXBMR04AZwBH5a75fuKjaKvX9YL+PwdN/gGVzsk5TccaNGMiAXrXc+rTTWyVJklS+trlIppRWkiuK1+fffxjwMeD0/J+r8ueOzF+r7jbuHGhrgUd/nnWSilNTXcUxezdy1wynt0qSJKl8dWREkp
TSgpTSPwIjyBXIC4AL838ekVL6x5TSgsLF1DYZuDPsfixM/zk0r846TcU5dnQTy9e0cN+L87OOIkmSJHWJms68OaU0C/h1gbKokMZ9Cp6/BZ76Hex/etZpKsqhIwfR0LOWW56ezXv32i7rOJIkSVLBbbFIRsSOnfmAfNlUdxvxDzB079yiO/t9DMIdW7pLbXUVR++1Hbc/M4c1La3U1VRnHUmSJEkqqK0ZkZxJbu/Ijkhb+RkqtAg4+NPwx8/BzGm5Yqluc/zoJq5/7HUeeGkBR+wxNOs4kiRJUkFtTcmbRceLpLI0+qNw58Xw0JUWyW526C6D6Ftfwy1PzbZISpIkqexssUimlIZ3Qw51hdp6GHsW/PlyWPhKbhEedYu6mmqO3nM77nh2Lpe1tlFb3aF1rSRJkqSi5P+6LXdjz4aqanjkqqyTVJxjRzexZFUzD77sAsaSJEkqLxbJctevCfb+MDz+K1i9NOs0FeXduw6mT10NNz/5ZtZRJEmSpIIqqiIZEdtHxC8i4s2IWBMRMyPi+xExYBvvc3hE3JR//+qImBURt0TE+7sqe1E7+FOwdhk88d9ZJ6ko9bXVHLN3I7c+NYfVza1Zx5EkSZIKpmiKZESMBB4DzgQeAb4HvAL8K/BgRAzayvt8GpgGHJV//R5wL/Ae4NaI+PfCpy9yww6AHcbltgJps9B0pxP3G8ayNS3c/dxbWUeRJEmSCqZoiiTwY2AocG5KaUJK6YKU0pHkiuDuwH9s6QYRUQt8E1gNHJBSOj2ldGFK6XRgLLAG+PeIqOuyb1Gsxn0KFv0NHvlp1kkqyiEjBzG0bx03/OWNrKNIkiRJBVMURTIidgbeR27Pyh9tcPpiYAVwekT03sKtBgINwAsppefXP5FSmgG8APQE+hQgdmnZ8wTY5Wi47QK446uOTHaT6qrgQ2PexdTn32LRirVZx5EkSZIKoiiKJHBk/vWOlFLb+idSSsuA+4FewMFbuM9bwDxgt4jYdf0TEbEbsCvwREqp8pbRrK6BU34DB34CHvgB/PZ0WLM861QVYcJ+w2huTfzvU7OzjiJJkiQVRLEUyd3zry9s4vyL+dfdNneTlFICPkvuez0WEddExDcj4pfknr98BvhoAfKWpuoaOP5yOPb/wQu3wtXvhyVOuexqezX1Y7ft+nCj01slSZJUJoqlSDbkX5ds4nz78f5bulFK6XfkRjgXA/8MXACcTm567NXkFvDZqIg4JyKmR8T0efPmbWX0EjTuk3Dq/8DCmXDVkfDmX7JOVNYiggn7DWP6q4uYtWBl1nEkSZKkTiuWIrklkX9NW7ww4mPAneRWbN2T3JTYPYG7gB8Cv9nUe1NKP00pjU0pjR0yZEinQxe1XY+Gs++A6h7wi2Ph2T9mnaisfWjMMABuesJRSUmSJJW+YimS7SOODZs432+D6zYq/xzkL8hNYT09pfRcSmlVSuk5cqOSjwEfjYjxnY9cBrbbCz5xFzSOgv85HaZ9F9IWu7o6YFj/nowbMZAbnniD5N+xJEmSSlyxFMn2FVY39Qxk+8I5m3qGst37gFrg3o0s2tMG/Dn/6wEdCVmW+gyFM/4Eoz4Cd10CN30OWlxdtCucuN8wXpm3gqfe2Oz/HyJJkiQVvWIpkvfkX98XEW/LFBF9gcOAVcBDW7hP+/6Qm5qX2n7cprS+2p7wkZ/Dey6AJ66FX02AlQuzTlV2jh3dRI/qKveUlCRJUskriiKZUnoZuAMYTm7V1fVdAvQGfplSWtF+MCL2iIg9Nrh2Wv71pIjYZ/0TETEGOIncc5Z3Fy59mYiAIy6ED/8MXn8UfnYUzH9xy+/TVmvoWctRew7lT399k5bWti2/QZIkSSpSRVEk8z5Dbh/IKyLixvy2HXcD55Ob0vrvG1w/I/+zTkrpEXIrs/YEHo2I30TEtyPit8DDQD0wOaX0TBd/l9K1z0fhjJth9dJcmXzl3qwTlZUJ+w1j/vK13PfS/KyjSJIkSR1WNEUyPyo5FpgCjAO+AIwErgAOSSkt2MpbnQ2cCTwIHJO/z9HAfcApKaXzC5u8DO04LrcIT98muPbD8Ng1WScqG+N3H0JDz1r3lJQkSVJJq8k6wPpSSq+RK4Fbc21s4ngiV0anFCxYJRowPLc9yO8mwp/OhQUvwnsvgarqrJOVtLqaao7fp4kbHn+DFWta6F1XVP8VlCRJkrZK0YxIqgjVN8Cpv4MDPw4P/CBXKtt8tq+zTtxvGKuaW7nj2TlZR5EkSZI6xCKpzauugeO/A++dBDP+CA9ckXWiknfAjgPYfkBPbvzLm1lHkSRJkjrEIqmtc9h5sOcJcPc34PXpWacpaVVVwYfGvItpL85j3rI1WceRJEmStplFUlsnAk64IrcAz/VnweolWScqaRPGDKMtwZ/+6qikJEmSSo9FUluv5wD4yM9hyetw8+chpawTlaxdt+vLqGH9uPEJV2+VJElS6bFIatvsOA7GXwhPXw9P/DrrNCVtwphhPPn6El56a3nWUSRJkqRtYpHUtnv352H4u+GWL8G8F7JOU7JO2PddVAXc5KikJEmSSoxFUtuuqho+/FOoqc89L9m8OutEJWlov3oO22UwN/zlDZLThCVJklRCLJLqmH7vggn/BXOfgjsvzjpNyTpxv2G8vmgVj726KOsokiRJ0lazSKrjdn8/jPs0PHwlPH9r1mlK0jF7N9Kztpob/uL0VkmSJJUOi6Q65+hLoHE03PgZWOpWFtuqd10N79t7O25+cjZrW9qyjiNJkiRtFYukOqemDk66GlrWwO8/AW2tWScqORP2G8aSVc1Mff6trKNIkiRJW8Uiqc4bvCsc95/w6n0w7btZpyk5795lMIN693BPSUmSJJUMi6QKY8ypMPqjMPWbMOuhrNOUlJrqKj6477u4c8ZbLFnVnHUcSZIkaYsskiqMCDj+u9B/B/j9x2GVq5BuixP3G8baljZue3p21lEkSZKkLbJIqnDq+8FHfgHLZsMf/wXcG3Gr7bN9AzsP7u3qrZIkSSoJFkkV1vYHwFFfgxl/gum/yDpNyYgIJuw3jIdeWcgbi1dlHUeSJEnaLIukCu+Qf4GRR8HtX4G5z2adpmRMGDMMgD8+4TYqkiRJKm4WSRVeVRWceCXU9YXrz4K1K7NOVBJ2HNSLA3YawA1/eZ3ktGBJkiQVMYukukafoXDiT2DejNzIpLbKhP2G8cLc5cyYvSzrKJIkSdImWSTVdXY5Cg49Fx67Gp69Kes0JeEDo5uoqQpuck9JSZIkFTGLpLrWkRfBu/bPreK64OWs0xS9Ab17MH73Idz0xJu0tjm9VZIkScXJIqmuVdMDTsqv3vqT/8/encfHVdf7H399J/u+NUmTpjttutKWtkDLWpCtIqigIIqKoKIiXq/oXfT+rnqvV6/XqyJuoCiisigqCLJd2aHQvUD30i1t07RJs++Zme/vj++ZbE2XpMmcSfJ+Ph7zODPnnJl+0p5Mz/t8l3MBrP+dbgtyAu9dMI6K+lZW7jridykiIiIiIn1SkJShlzsZPv0KFM2Dxz4HD90AjYf9ripmvWtmIelJ8
bqnpIiIiIjELAVJiY6cifCxx+HSb8E7z8FPz3b3mpSjJCfEccWcsTy1sYLWjpDf5YiIiIiIHEVBUqInEIClt8GnX4asEnj4I/CXW6G1zu/KYs77FoyjsS3I37cc8rsUEREREZGjKEhK9BXMgFueg/O/Am/9AX66FHa96HdVMeWsKXmMzUzmUXVvFREREZEYpCAp/ohLgIu+Cjc/CwnJcP/V8NQ/Q0eL35XFhLiA4er5xby4rZItB+v9LkdEREREpAcFSfFXySI3Ec+Zn4aVP4O7z4cDa/2uKibcfO5kxqQn8Yn7VnOovtXvckREREREOilIiv8SU2H5d+HGR6G9CX55CbzwbQh1+F2Zrwoyk7n344uob+ngE/etpqkt6HdJIiIiIiKAgqTEkqnL4DMrYO4H4KXvwC/fBZXb/K7KV7OLs/jxDWew5WA9tz+4nlBY9+AUEREREf8pSEpsScmG998NH7wfasvg5+fB6z+FcNjvynyzbEYB37hqNs9tPcx/PLHZ73JERERERBQkJUbNuho++4ZrpXzmX+D+q6Bu9M5geuOSSdxy7mTuW7GHX7+22+9yRERERGSUU5CU2JVRCB96CK66Cw6sg58thc1/9bsq3/zL8plcNruQbz6xmf/brPtLioiIiIh/FCQlthkDZ3wUbn0FcifDH26Ev97uJuUZZeIChh9et4DTx2Vx+4PreXt/nd8liYiIiMgopSApw0PeVPjEs3DuF2Hd/XD3BXDwTb+rirqUxDh+8bFF5KYl8onfrOZAre67KSIiIiLRpyApw0d8Irzr6/DRx6C9EX5xMay4a9RNxFOQkcyvb1pMa3uIm+9bTUPr6L5NioiIiIhEn4KkDD9TLnC3CZl+GTz7Nfjd+6Ghwu+qomp6YQY/+8hC3jncyOceWE9HaHSFaRERERHxl4KkDE+puXDd7+DKH0LZG24inm1P+11VVJ07bQzfet8cXt5eyb//dRPW6h6TIiIiIhIdCpIyfBkDi26CT78EGcXw4HXw5JehY/SMG7xu8QQ+e+FUHlhZxj0v7/K7HBEREREZJRQkZfjLL4VPPgdnfw5W3QO/uAgObfa7qqi549JSrjy9iG8/tZUn3z7odzkiIiIiMgooSMrIEJ8El/8XfPhP0FQF91wIK++BUdDdMxAwfO8D81g4MYcvPryB9WU1fpckIiIiIiOcgqSMLNPe5SbimXIBPPVleOA6FyxHuOSEOO65cSGFmcnc8ps17Ktu9rskERERERnBFCRl5EnPhxv+AFd8F3a9CD9dAgfW+V3VkMtLT+LXNy0mGLZ8/NerqGvWbUFEREREZGgoSMrIZAyc9Wn41AtgAu42IaPA1Px07r5xIWXVzdz6u7W0B3VbEBEREREZfAqSMrIVzoaln4e9r42KVkmAs6fk8d1rT+f1XUf42K9WseNQg98liYiIiMgIoyApI98ZH4WkTHj9x35XEjXvW1DCf18zl03ldVx+5yt8/a+bqG1u97ssERERERkhFCRl5EvOhIUfg02PQm2Z39VEzXWLJ/Dil5fxoTPHc//re7jwey9y/+t7CIbU3VVERERETo2CpIwOZ93qxk2+8XO/K4mq3LRE/vO9c3nyC+cxqyiT//fYJpb/6BVe3THyZ7IVERERkaGjICmjQ1YJzH4frLsfWuv8ribqZozN5Pe3nMXdNy6ktSPMR+5dySfvX8Oeqia/SxMRERGRYUhBUkaPJbdBewOs/Y3flfjCGMNls8fy7BfP5yuXl7LinSou/cHLfPupLTS06lYhIiIiInLyFCRl9CieD5POg5U/h9DoDU7JCXF89sLTeOGOC7lqfjF3v7SLZd97iT+s3kc4bP0uT0RERESGAQVJGV2W3Ab1B9zEO6NcQWYy3/vAPB773DlMyE3hK396i6t+8iqr91T7XZqIiIiIxDgFSRldpl0KY6bDih+BVesbwLzx2fzpM0u58/r5VDW084Gfv87nH1zPgdoWv0sTERERkRilICmjSyAAumtkdgAAIABJREFUSz4HFW/Bnlf8riZmGGO4ev44nr/jAm6/eBrPbqpg2fde5B8f3sCaPdVYhW4RERER6cboBLFvixYtsmvWrPG7DBkKHa3wg9kwbiF8+A9+VxOT9tc08/OXdvLo+nIa24JML0znhjMn8L4zSshKSfC7PBERERGJAmPMWmvtoj63KUj2TUFyhHvxO/Dit+FzqyC/1O9qYlZTW5C/vlnOAyvLePtAHckJAa48vZgbzprAgvHZGGP8LlFEREREhoiC5AAoSI5wTVWuVfL06+CqH/ldzbDw9v46Hli1l8c2lNPcHmLG2Aw+fNYE3rtgHBnJaqUUERERGWkUJAdAQXIUePwLsOFB+OImSM/3u5pho6G1g8c2uFbKzQfrSUmI46p5xXz47AmcXpLtd3kiIiIiMkgUJAdAQXIUqNoBP14EF/wzLPsXv6sZdqy1vLm/jgdW7uXxNw/S0hFizrhMbjhzIlfNLyY9Kd7vEkVERETkFChIDoCC5CjxwPWwf5VrlUxI8buaYau+tYNH1x/ggZVlbK1oIC0xjkWTcsnPSHKP9KSu594jIyleYyxFREREYpiC5AAoSI4Se16F+94NV/4AFn3C72qGPWst68pqeXh1GVsONlDZ0EZVYxvB8NHfM0nxAcb0Dpje67GZyZxekkVBZrIPP4WIiIiIwPGDpPqeyeg28Rwomg+v/wTO+Li7z6QMmDGGhRNzWDgxp3NdOGypa+mgsrGNyoauR1XkdWMb+6qbWbe3hurmdrpf2xqfm8KiibmcMTGHRRNzmF6YQVxArZgiIiIiflOQlNHNGFj6efjTzbDjGSi9wu+KRpxAwJCTlkhOWiLTCzOOu28wFKa6qZ19NS2sL6th7d4aXn2nir+sPwBARlI88ydks2hiLosm5TB/fDZpGospIiIiEnXq2noM6to6ioQ64M75kDMJbvqb39VIL9Za9lW3sLasmjV7XLjcdqgBayFgYGZRJosm5rBwUi4LJ+YwLltjXUVEREQGg8ZIDoCC5Ciz4sfw7Ffhky/AuDP8rkZOoL61g/VltazdU83ashrWl9XS3B4CoCgrmQUTspldnMXs4kxmF2eRn5Hkc8UiIiIiw4+C5AAoSI4yrfXwg9kw7RK49ld+VyP9FAyF2VrRwNq9NazZW8Pb+2vZc6S5c3thZhKzi7OYU5zJrOIs5ozLZFx2imaNFRERETkOBckBUJAchZ75KrzxM/jCm5A93u9q5BTVt3awubyeTeX1bDpQx6byenYcbiAygWxWSgJzxmX2aLmcPCZNk/mIiIiIeBQkB0BBchSq3Qd3zoOzPwOXfcvvamQItHaE2FrRwEYvWG4qr2NrRQPtwTAAqYlxzCzKZHZxJnOKs5hVnMn0wgwS4zWbr4iIiIw+CpIDoCA5Sj1yM2x/Bv5xEyRn+V2NREFHKMw7hxvZVF7vBcw6NpfX0+SNuUyIM0wvzOhstZwzLpOZRZmkJmq2WBERERnZFCQHQEFylCpfD/dcCJf+p7stiIxK4bBlb3Vzj5bLTeX1VDe1A+6uMZPHpDGnW7fY
2cWZ5KQl+ly5iIiIyOBRkBwABclR7L4roXo3fGEDxCX4XY3ECGstFfWtbDzQFSw3HaijvK61c59x2SmUjs1g8pi0Ho+xmckENPZSREREhpnjBUn1zRLpbclt8OB1sPkxmHut39VIjDDGUJSVQlFWCpfMKuxcX93UzubyejZ64XLHoQZW7KyitSPcuU9yQoBJeWlMyktjcn7PkJmXlqjZY0VERGTYUYvkMahFchQLh+EnZ0JiKnzqJdePUaQfwmHLoYZWdlc2sftIE7srm9hzpIldVU3sq26mI9T1vZuRFM/k/DQvaKaSlhRPQlyAhPgASXEBEuINCXEBEr11iXEBEuMDbp84Q1Ln8wDxAYMFrAWLJfL17tZ1e9253nr7uu3BsKUjFCYYcssObxkMd3sesrR7y2A4THswTDBsGZOexJWnF5GcEBfNv2oREREZQuraOgAKkqPcml/DE/8AH3sCJp/ndzUyggRDYQ7UtrC7qondVU3sqXIBc8+RJvbXtDCcv5JzUhP4yNkTufHsiRRkJvtdjoiIiJwiBckBUJAc5Tpa4AdzoGQR3PCw39XIKBEMhWkPhekIulY/99wt24NhOjqXrnWwzVsXWR8MW4wBg/GWXQ3qBrfCrTNE2tmNofM98XGG+ECAxHi3jLR6JsQFiI8zJMYFiO++LmBIiA+QEAjw1v5a7n11N/+35RDxAcN75hVz87mTmV2s2Y9FRESGK42RFOmvhBQ485Pw4rehcjvkT/e7IhkF4r2gxjCc/PWsKXmcNSWPPVVN3LdiD39Ys48/rzvAkil53HzuZC6aUaAJh2JAXXMHL2w7zAvbDlOYmcz1i8czJT/d77JERGQYUovkMahFUmiqgh/MhnnXw3vu9LsakWGlrrmDh1aXcd+KPRysa2XKmDRuOmcS1ywsGTH34GxsC1LT1E5xdgpxMRySy2tb+L/Nh3h2cwUrd1UTDFvy0hKpa+kgGLacc1oeN5w5kUtmFZIYH/C7XBERiSHq2joACpICwONfgDcfgju2Q7K66In0V0cozFMbK7j3lV28ub+OrJQEbjhrAh9bMomxWcNjHGU4bNlf08KWinq2HKxn68EGtlTUs/dIM+Bm5Z1emMGMsRmUjs30lhmMSU/ypV5rLVsrGjrD48YD9QBMzU/jklljuXR2IfNLsqlqauOPa/bzwMoyDtS2MCY9iesWl3D94gmMz031pXYREYktCpIDoCApAJS9Ab+6DK79Fcy5xu9qRIYtay1r99Zw76u7eWZTBQFjuPL0Im4+dwpzSwZ2kSYUtrQHw4StJSUhblC6zja1Bdla0cDWbqFxa0UDjW1BwI0nnZSXxsyiDGaOzSQvPYl3Djey7VA92yoaqGps7/ysMemJlI7NYMbYTG+ZwbSCDFISB39m22AozJq9NTy76RD/t6WCfdUtGAMLxmdz6eyxXDKrkKnH6MIaClte3l7J71fu5fmth7HAhdPz+fBZE1k2o2BQWlsP17eyrqyWDftq2bCvhrqWIIlxpseY28iY3HhvluKuMbiRcboBEuMMcYEAIWsJh91Mw6Fw2Ft2PYLdlm6/cOfr1MQ4xmamMDYricLMZIqyUhibmUxBZpJmHY4hHaFw56RkpxWkM2VMmm6VJOIDBckBUJAUAMIh+N40mHoRXPNLv6sRGRH2VTfz69f28PDqMpraQyyelMOE3DTaQ2HaOkLeMkxbsPtzN6FQWzDkLV146C4lIY60pDhSE+NJTYwjLcktUxPjSEuMJzXJW3rbU5PiSI6Pc62NB+vZWlHP3urmzplzM5LimVGUwcyiTGYWdbU0Hq9rbmVDG9u8ILqtooFthxrYVtFAW9DdVzQSREsLvVbLjCSS4wMkJcQdvUwIkBR/9DIS7FraQ7y8o5JnNx3i+a2HqGnuIDE+wLmnjeGSWYVcPLOAgoz+tfoeqG3h4VVlPLR6H4cb2ijKSub6xRO4bvH4k25Bbu0Isam8jvVltaz3wuOB2hYAEuIMs4oyyc9I9m4r0/PWMpHJozp634LGe94e6ro/a1zAEBcwxHvL7s/jA4HOdb33aWwLUlHXSnN76Kjac9MSvXCZTGFmMmMjz7Pc87GZyWSmxA+LQGOt5UBtCxsP1PHW/joaWoMUZ6dQnJ3sLVMozEhy47J9FAyF2VvdzI5DDWyraGT74QZ2HGpgV2VTj9/xCbmpLCvN58IZBSyZkhfTob8jFGZXZROVDW0UZyczLieFpPjYrbe71o4Qtc0dpCTEDZtjXYaWguQAKEhKp0c/C1ufgC/vhLgEv6sRGTHqWzv4w+p9PLx6H83tIZLi3T0yk+JdYOp8nuBaqI5eF0dSQgADNLeHaG4P0tQeoqU9RFNbkOb2EE3tQZrbvKW3PhLqIiLhbsbYjM7AOLMok5KclEE5iQqFLXuPNHkBs6EzaHYPrf3h7h8aR7s3o29WSgIXzSjg0lmFnD89n7SkUx+D2hEK89yWw/x+5V5e2VFFXMDwrpkFfPisiZx72pjO1l9rLXuPNLNhXy3ry2pYv6+WzeX1nQFgXHYKCyZks2BCDvPHZzO7OPOUAoC1lrCFgOGU/m2stTS0BTlU10pFfSsH61o5VNfKwfrWznUVda0caWo/6r1J8QHyM5LcIz2p63mv12PSo9fCaa2lvK6Vt/fXueB4wC2rvfrjA4bUxDjqW4M93hcwMDazK1gWZSczLjuF4iz3elx2yqCFiXDYsq+mmW0VDew43Mj2Qw1sP9TIzspG2rtdaBmfk8r0wnSmF2YwvTCDCXmpbCqv58Wth3ltZxWtHWGS4gMsnZrHshkFLCst8LUrdl1LB1sP1rP5oOvFsPlgPdsPdf1MkZ+rMCOZ8bkpjM9JpSQ3lZIc93x8bgpFWUMzzjoUttQ2t1PT3M6RRresburwll2PyOuapnaaul1gSUuMoyg7haKs5M5joijbPY8sB7uHRaSnQdh6PQysxYYh5L221nY+D4dx+3k9FMIW4gIwLjt1SHp+jFYKkgOgICmdtjwBD38YPvpXmHKB39WIyCkKhS3NXrBsbg9RkJE0KOGrv1o7QtS3dNAWDNPaEaLNa3Ft7ehj2RGiNdjVUtvaESYh3nDBtHwWT84lYQhblfYeaeKBVWX8cc1+qpvamZCbyiWzCtld1cT6shpqmjsASE2MY15JNvMnZLNgvFv2t0U01rQFQxyub+sMlhV1rVQ1tlHZ0EZlZNnQ1mfgBMhMju8WNJPJS0skOzWB7JQEslMTyer2PDslgcyUhBMGCmstFfWtvBUJjd4yUkNcwDCtIJ3TS7KYOy6LuSXZzBibQXJCHE1tQQ7WtXCgtpXy2hbv4T2va+FgbWuPVl9w/65js5JJio/zAjwEvFsIGdN1q6FA5Lm3reu1C1vvHG6ktaPrs8dlp3QGxmmFGUwvTOe0gvTjtvi3doRYubuaF7a6mYcj45Sn5qexrLSAZTMKWDwpd0gmjbLWjZXeVN4VGLccrGd/TUvnPrlpicwqymRmUQazijMpzEimvK6V/TXN7KtuYV9NM/urmzlY39rjIlJ8wFCcndIVNHNSGJ+bSnwgQEtHiJa
OEK3toc7nLe0hWrs9b+no+bq1I0xjW5D61o5jXqxKS4wjJy2R3LREclK7L93x2NIe6jwmyuvccVLV2HbU5+SkJlCU5Vq6i7yAOSY9ibZgmJb2IC3tYZo7grR437ct3kW/Zu9niHwPt3S49d2PkVNRnJXM5Pw0Jo9JY/KYdKbkpzFlTBrjslNOqRW+PRimvLaFsupmyqqb2ecty6qbOdLYzvzx2VxQms/50/MZl50yKD+L3xQkB0BBUjq1N8F3p8DCm+CK7/hdjYiIL9qCIZ7ZdIjfv7GXVXuqOS0/vUdr4/TCjJievXYodYTCVDe1dwbL3kGzsqGNww2tVDe1H9Uq2FtmcrwLlqkJZHULmSmJcew41MDbB+o6x+IGDEwvzGDOuCxOL8lizrgsZhUNvNU3HLZUNbW58FDbwgEvaFbUt9AetIBr9Ym0DFvcc2vB4lqIrLcPtut5amKc18LYFRzTB+Hize6qps5QuXJXNe2hMGmJcZxz2hiWzSjgwtJ8irK6TuattXSELK1BF2LaOtxFnNaOcOe61s51LuDsqmxky8EGthysp6HbWOnJY9K80JjJrOJMZhVlUpCRdFKtt5Ewsr/Ghct91c3sq2lhX3Uz+2uae4y17i0xLkByQoCUxDhSEuJITojrfJ6SEEey9zw1Ma4rIKYlkus9z/UuZgzkGGkLhjhU1+YFyxYO1rUetaxr6Ti65viAG07g1ZqaGO/q9YYeRJapifEkJ8SRFB/AGIgzrjt6wBgCxl0kCXiv44x7HhfA2+727QiFKTvSzO6qJnZVNbGrsrHH71xCnGFCbiqTx6QztTNopjE5P418b4K02uaOznBYVt1M2ZGu5wfrWug+qiIxLkBJbgoTclPJSklg9e5qyutaAZhWkM4F0/O5oDSfxZNyY7o79vEoSA6AgqT08MB1cHgzfOGtrju8i4iMUsFQ2PexdcNVKGypb+mgtqWD2ub2rmVzB7XNHdT1WN/1urEtyOQxacwdl83ccZnMLclmVlGmuvB5mtuDrHjnCC9sO8yL2yo7x+UWZia58OiFw3A/T3vTEuOYUZTZGRpnFp14rPSpamkPcaC2mbClR1hMjg/E/O9dU1uQ6qZ2khICnYHRzwtM1lqqm9o7g+VuL1zurmpiz5HmHl2QM7yLG5ELBhFj0pOY4IXFCbmpjPeWE/JSKcxI7jHRm7WWdw438tL2Sl7aXtl5gSM5IcCSKXlesCxgUl7qsBl/OmyCpDGmBPgmcDmQBxwEHgW+Ya2t6ednzQW+DCwDCoA6YAtwr7X2/hO9X0FSelh7n7sVyGdWQOFsv6sRERGRY7DWsuNwIy9uO8yOQ40kJQRIjneBLDkh4Fq9vImtkiNBrdu2rgmu4shLSxyUGaEl9oTClvLalh7hEmBCXlq30JhyShcNmtuDrNxV3RksO/+M3FQXKqfns2Rqni/DK07WsAiSxpipwApc6HsM2AqciQuC24BzrLVHTvKzPg78EmgGngD2ANnAHKDcWnv9iT5DQVJ6aKiA/y2FZV+DC77sdzUiIiIiMszsPdLEy16oXLHzCM3tIRLiDIsn5XLJrEJuOmey3yUeZbgEyWeAS4HbrbV3dVv/feCLwN3W2ltP4nPOBl4FNgKXW2srem1PsNYe3YG7FwVJOcovLgYbhk+94HclIiIiIjKMtQVDrN1T09laWZCZzP2fONPvso4S80HSGDMF2IlrOZxqrQ1325aB6+JqgAJrbdMJPutl4DxgrrV240BrUpCUo7z8PXj+P+Aft0Jmkd/ViIiIiMgI0RYMxeT9Ro8XJGNlxO5F3vLZ7iESwFrbALwGpAJnH+9DvDGW5wFrgE3GmGXGmDuMMV8yxlxsjImVn1eGoxnvdsvtT/lbh4iIiIiMKLEYIk8kVoJVqbfcfoztO7zl9BN8zuJu+z/vPf4H+B7wd2CDMea0U6hTRrP8GZAzCbYpSIqIiIjI6BYrQTLLW9YdY3tkffYJPqfAW34QmAm83/vs04DfAnOBvxljEgdeqoxaxkDpctj1ErQ1+l2NiIiIiIhvYiVInkhk3uUTDeiM67a8xVr7F2ttvbV2J/AxXJfX6cA1ff4hxnzKGLPGGLOmsrJyMOqWkaZ0OYTaYOfzflciIiIiIuKbWAmSkRbHrGNsz+y137FE7jXZBjzZfYN1swo95r3sc0oka+091tpF1tpF+fn5J/ijZFSasASSs9W9VURERERGtVgJktu85bHGQE7zlscaQ9n7cxp6T9rjiQTNlH7UJtIlLh6mXwbbn4ZQ0O9qRERERER8EStBMnJjvkt7z6zq3f7jHKAFeOMEn/MWUAWMMcYU9rF9jrfcM/BSZdQrvQJaqmH/Kr8rERERERHxRUwESW8M47PAJOBzvTZ/A0gD7u9+D0ljzAxjzIxenxME7vZefrd7KDXGzAU+DgSBRwb5R5DRZOrFEEiAbU+eeF8RERERkREo3u8CuvkssAL4kTHmYmALcBawDNel9au99t/iLU2v9f8FXAx8FJhrjHkRyMdNsJMMfMla+85Q/AAySiRnwuTzYeuTcMl/uNlcRURERERGkZhokYTOVslFwH24APklYCrwI2CJtfbISX5OMy5IfgNIxbVwXoULqcuttd8f9OJl9Cm9Aqp3QtWOE+8rIiIiIjLCxFKLJNbafcBNJ7nvMZuBvDD5de8hMvhKl8OTd7jurfnHmiNKRERERGRkipkWSZFhJWscFM3TOEkRERERGZUUJEUGqnQ57FsFjZV+VyIiIiIiElUKkiIDVbocsO6ekiIiIiIio4iCpMhAjZ0LWeNh21N+VyIiIiIiElUKkiIDZYybvXXn89DR4nc1IiIiIiJRoyApcipKr4BgC+x60e9KRERERESiRkFS5FRMPBeSMjV7q4iIiIiMKgqSIqciPhFOexdsexrCYb+rERERERGJCgVJkVNVuhyaDsOBtX5XIiIiIiISFQqSIqdq2rsgEK/urSIiIiIyaihIipyqlByYuFRBUkRERERGDQVJkcFQuhwqt8KRnX5XIiIiIiIy5BQkRQZD6RVuuf1pf+sQEREREYkCBUmRwZAzCQpmw1Z1bxURERGRkU9BUmSwzFgOZa9Dc7XflYiIiIiIDCkFSZHBUnoF2BDseNbvSkREREREhpSCpMhgKVoA6WM1e6uIiIiIjHgKkiKDJRBwrZLvPAfBNr+rEREREREZMgqSIoOpdDm0N8LuV/yuRERERERkyChIigymyedDQpq6t4qIiIjIiKYgKTKYEpLhtItg21Ngrd/ViIiIiIgMCQVJkcFWuhwayuHgBr8rEREREREZEgqSIoNt2mVgAq5VUkRERERkBFKQFBlsaXkw/mzYqnGSIiIiIjIyKUiKDIXSK+DQ21Bb5nclIiIiIiKDTkFSZCjMeLdbbnva3zpERERERIaAgqTIUMibCmOmw7a/+V2JiIiIiMigU5AUGSqly2HPq9Ba53clIiIiIiKDSkFSZKiULodwEN75u9+ViIiIiIgMKgVJkaFSsgjSC+HNh/yuRERERERkUClIigyVQBwsvgV2PAuV2/yuRkRERERk0C
hIigylRTdDfDK8/mO/KxERERERGTQKkiJDKS0P5n0I3nwYGg/7XY2IiIiIyKBQkBQZaks+B6E2WP1LvysRERERERkUCpIiQ23MNJh+hQuSHS1+VyMiIiIicsoUJEWiYelt0HwE3nzQ70pERERERE6ZgqRINEw8B4rmw+s/gXDY72pERERERE6JgqRINBgDSz8PR96BHc/4XY2IiIiIyClRkBSJlllXQ2YJrNCtQERERERkeFOQFImWuAQ4+1bY+yqUr/e7GhERERGRAVOQFImmMz4KiRlqlRQRERGRYU1BUiSakrNg4cdg01+gdp/f1YiIiIiIDIiCpEi0nXWrW678ub91iIiIiIgMkIKkSLRlj4fZ74V190Nrvd/ViIiIiIj0m4KkiB+W3AZt9bD+t35XIiIiIiLSbwqSIn4YdwZMPAfe+DmEgn5XIyIiIiLSLwqSIn5ZchvUlcGWx/yuRERERESkXxQkRfwy/XLIO83dCsRav6sRERERETlpCpIifgkE4OzPQvk6KHvd72pERERERE6agqSIn+Z9CFJyXaukiIiIiMgwoSAp4qfEVFh8C2x7Eo7s9LsaEREREZGToiAp4rfFt0BcArz+E78rERERERE5KQqSIn7LKITTPwgbHoDmar+rERERERE5IQVJkViw5DYItsDqe/2uRERERETkhBQkRWJBwUw47V2w6h7oaPW7GhERERGR41KQFIkVS26DpsPw9h/9rkRERERE5LgUJEVixZQLoXCOm3THWr+rERERERE5JgVJkVhhjGuVrNwC7zzndzUiIiIiIsekICkSS+ZcAxlF8PpdflciIiIiInJMCpIisSQ+Ec78FOx6ESre9rsaEREREZE+KUiKxJpFN0FCmhsrKSIiIiISgxQkRWJNSg4s+Ai8/QjUH/S7GhERERGRoyhIisSisz8DNgSr7va7EhERERGRoyhIisSi3Mkw40pY8yuoesfvakREREREelCQFIlVy/4VAvHwy4t0OxARERERiSkKkiKxqmAmfPIFyCyB318Lb/wMrPW7KhERERERBUmRmJYzEW5+BqZfAU//Mzx+OwTb/a5KREREREY5BUmRWJeUAdf9Ds67A9bdD/dfDU1VflclIiIiIqOYgqTIcBAIwMX/BtfcC+Xr4J5lULHR76pEREREZJRSkBQZTuZeCzc9CeEOuPdS2PKE3xWJiIiIyCikICky3Ixb6CbhyS+Fhz8ML/+PJuERERERkahSkBQZjjKLXMvk3A/A8/8Jf7oZOlr8rkpERERERol4vwsQkQFKSIH3/wIKZsFz34TqXXD9A5BZ7HdlIiIiIjLCqUVSZDgzBs77Rxcgq3a4SXj2r/W7KhEREREZ4RQkRUaCGcvh5mchPhF+fQW89Ue/KxIRERGREUxBUmSkKJztJuEpWQR/vgX+/nUIh/2uSkRERERGIAVJkZEkbQzc+Cic8TF49QfwwAehudrvqkRERERkhFGQFBlp4hPhPXfC8u/B7pfg5+dC2Uq/qxIRERGREURBUmQkMgbO/KQbNxmIh/uWw4q7dL9JERERERkUCpIiI1nxAvj0yzD9cnj2a/Dgh9TVVUREREROmYKkyEiXkg3X/Q4u/w6883e4+wLYv8bvqkRERERkGFOQFBkNjIGzPwOfeMa9/tXl8PpP1dVVRERERAZEQVJkNClZCLe+DNMuhWf+BR7+CLTU+l2ViIiIiAwzCpIio01KDlz/e7j0W7D9abj7fDiwzu+qRERERGQYUZAUGY2MgaW3wU1PQTgEv7oMVt6jrq4iIiIiclJiKkgaY0qMMb8yxpQbY9qMMXuMMT80xuScwmeeb4wJGWOsMeY/B7NekWFv/Jlw6yswZRk89WX448ehtc7vqkREREQkxsX7XUCEMWYqsAIoAB4DtgJnAl8ALjfGnGOtPdLPz8wAfgM0A+mDW7HICJGaCx96CF6/C/7+DTj4JnzwN1A078TvDbZDSzU0VUHzEWiugqYj7nlmMcx4N6SNGfqfQURERESiKmaCJPBTXIi83Vp7V2SlMeb7wBeBbwG39vMz7wSygG977xeRvgQCcM4XYPxZ8Meb4JeXwLJ/gbR8Fwqbqtz9J5urer5uO0Hr5RP/ABOWwsz3wMwrIaskOj+PiIiIiAwpY2NgTJQxZgqwE9gDTLXWhrttywAOAgYosNY2neRnXg08CtyIC8y/Br5lrf3aybx/0aJFds0a3WtPRqGmKvjLp909JyPiklzLYmoupI6B1DzvdV7Xo/P1GDehz+HNsOWvsOVNxR4qAAAfhUlEQVRxqNzqPmfcQi9UXgV5U/35+URERETkpBhj1lprF/W1LVZaJC/yls92D5EA1toGY8xrwKXA2cBzJ/owY0wB8AvgUWvt74wxHx/kekVGrrQxcMMfoWobJKS4YJiY5ibo6Y+i093joq9B5XbY+jhs/iv8/evuUTDbC5XvgcLZ/f/8wRZsg/INLgDnz4BxZ0B8kr81iYiIiMSoWAmSpd5y+zG278AFyemcRJAE7sFNJNTfrrAiAq6ra8HMwfu8/OmQ/yU470tQWwZbnnAtlS/9N7z0Hcid0tVSWXyG+/OHWksN7FsFZW+4x4G1EGrr2h6X5FpQJy6BCUvcxETJWUNfl4iIiMgwECtBMnJ2dqwBV5H12Sf6IGPMJ4CrgeustYcGoTYRGUzZE2DJZ92j8TBs9ULl6z+B1+6EjGIovRzypkHWOMj0HukFEIgb2J9prQuw+1ZC2esuOB7e7LYF4qFoPpz5SZhwtmsdPbwF9q5w+752J7zyv2ACbtuEpW6/iUshY+zg/b2IiIiIDCOxEiRPJNLn7bgDOo0xk4AfAn+01v6h33+IMZ8CPgUwYcKE/r5dRPorvQAWfcI9Wmpg+zMuVL75EHQ099w3EA8ZRW422MxxbplV4r32lpGwGQ7BoU1ea6MXHBvK3eckZbrWxdnvd4Fw3EJITO35Z+VOcTPOArQ3wf7VsPd191nrfwur7nbbcia7QDnBa7XMm+p/F10ROVpHq/tOOLgBxkyDyef7XZGIyLAXK0Ey0uJ4rH5jmb32O5ZfAS3AZwdShLX2Hly3WBYtWuT/LEQio0lKDsy73j2sdbPC1h/o9iiHOu/5wQ2w7UkItvb8jEjYbKmF9ga3LnOcF/bOdo+CWf1r2UxMgykXugdAqAMOvgVlK1y43PYUbPi925ZWAFOXwfwPw6TzotNFV0R6CrbBoY1uzHP5evd9cXgLhINd+5z5KbjkPyAh2b86RUSGuVgJktu85fRjbJ/mLY81hjLiDFwYrTR9twp81RjzVeAxa+17+12liESHMZCW5x5Fp/e9T19hMxI0E9O9VsKzIXv84NYWlwAlC91j6echHIYjO1xX2L0rYNvT8NbDkD0RFnwE5n1o8GsQESfY1tXSWL6+a8KsSGhMyYXi+bD0EiheAGPnwKpfwBs/hT2vwbX3Du54cBGRUSRWbv8xFXiH49/+IwDkH+/2H8aYHwGpfWyaBpwPbADWAuuttT85Xk26/YeIDEh7sxv3uf63sPtlwLhWygUfgdJ3D34LSDjkWl/2r
oC2Rjfb7LiFkHLCIeUiw0/TEff7Vb7OBcdDmyHc4bYlZ7uwWDzfLYvmuzHZfV1Y3v4sPPoZaG+Ey/7Lda9Xt/TYU1sGu19x9zgec5rf1YiMSse7/UdMBEkAY8wzuJlZb7fW3tVt/feBLwJ3W2tv7bZ+BoC1dutJfPbH0X0kRSTaavbAhgdg/e+hfr870T39gy5UFs0b2GcG290J9N7XusZ/ttUfvd+YUihZ5D0WQ/5MiIuVTigi/WCtG6e8+l7Y9Bc3u3JylguK3YNj9sT+hcGGQ/DorbDzeZhxJVx1l7tXrvjHWqjc5sbKb30cDr7p1psAzH4fnHcHFM7yt0aRUWa4BMmpwAqgAHgM2AKcBSzDdWldaq090m1/C2CtPeH/GgqSIuKrcAh2vwTrf+dufRJqg7FzYcGNMPcDxz957T7Zz97XYP8aCLa4bWNK3fjPiee425QkZcCBdW6fA2vc+5q9r82ENHeyHQmWJYs06+xodWQnbH7UjUueebXrQh6L2hrh7T/Cmnuh4m3XZX3e9bDw41A4Z3BaEMNheOMn8PdvQFo+vP8emHzeqX+unLxw2LUwb3nctTYfecetL1nsAv7k82HzY7D6l64FecaVcMFXBn4xTkT6ZVgESQBjzHjgm8DlQB6uS+ujwDestdW99lWQFJHhp7kaNv7JdX09+CbEJboTowUfcRP6tDW425Tsfc11Vy1f78Z7mYALnxPP6ZolNj3/+H+WtVCzG/avdaFy/2p3Qh7pCpg1vitYjlvoWnTSxrhxoDKyNFTAxj/D239wx1REIB6mLIO517qZipMy/Ksx4vBWFx7ffMi1thfOcV1PT//g0NVXvh4euRmqd7n73V74z7H3exAODfwWSLEm1OG+47Y8AVv/5mbVNnEuxM+40h2LmcU939NcDW/8DFbeDW11MO0yFyhL+jy/9V9bA1TtgLzTIDnzxPuLxKhhEyRjiYKkiAy5g2+5GV/fetjd/iQlx804i4VAggt3E5e6x/gzXXe+U9XRChVvdQXL/WuhrqznPql5bgba9Mij0LXWpBe68Jpe6LanjRk5J7YjUWsdbP6ra9Xb8wrYsGvFmftBmPN+11r99iPuwkbdPohPhumXu1B52iXRndE02O5ao1bfC3tfdRdYZr0XFt/sxsdFY/xiWyM89U+w4Xfu4so1v4ScSUP/5x5L3YGu7utlb8DhTe7iT6Qrb/EC9++ZkuNfjf3R0eK6EW95ArY/5b7z4lPgtItdeJx+2cl1LW6phdW/cPcebqlxF0Iu+Ir7nowFR3bCqnvcsIbIsIPsCe6CSOFsN3N44Rx3mykNN5BhQEFyABQkRSRqgm3udibbnnYnFxOXuqvsCSnR+fMbKlyLTP0BaKyExkPQVAmNh93zxsNd3Wm7MwEXOtML3b355n4Apl0aey05QyXY5m5Lk5TpWhxi4efuaIUdz7jwuP1Z1406Z7JrzZtzLeT3MTl6OAz7V7lQuekv0FzlfqaZ73GhctL5Q3fCW7sP1t4H6+6HpsOuVXzRTa7bd9qYofkzT+TtR+CJL7rnV/7A/R0MtXAYqrZ1C46vu4lmwHVLH78Yxp7u1pWvh9q9Xe/NmdxrkqF5g3PRaTC0N7lbJG1+DN75u7s/cHKWu2Ax8z0w9SJ3i6WBaGt0Ldcr7nLfVxPPgfO/7Hp2RHvipHAYdj4HK3/ufs5AghvTWXo5VO92Mwkf2uRaKG3IvSc+GfJLjw6YJ+ppMpSsdS2pTZXu3yW9UJNQiYLkQChIioh4rHVjkxoPu0fT4a7nkdC5f7Vbpua5QDnvQ+6EdiSdhLTUwr5VXSf7B9a6oBaRkOZOkns8MvtY1/2R7VqUkrMG3robDrkZgt9+BLb81bWCpBXAnGvcv8W4M07+3yEUhN0vwtt/ci2EbfWuNXr2+1wQHX/mqf+bhsOw63nX+rj9aXd8Tb8MFt8CUy+Ojfuv1uyFP93iAva8G2D5dwe3W22wzQXC7i2OrbVuW1qBG/McuYVR4dyjg3xzdc9bnpRv6NmzIHdqV6tl8Xz3uxitbsvBNnjnOdj4iAuRHc0ukMx4twuPk84b3Isu7c2w7jfw2p3QcNC1Jp//FZh2ydB//7TWuZbHVb+A6p2QPtZ1w174ccgoPHr/jlZ3weDQZjfbdiRgNh7q2ietwE0oVDDbjWNPTHPjgxPTICm963nn+nSITzx+ne1N3kXByIXC7t/hh3uu635/5vgU15qaM8l7TOx6nj3R1RMLrHV/j9uedENDkjK8XjP57oJUWn7X8/QC93c2kv5vGmIKkgOgICki0g+hDnfy+OYD7uQx1O5OhOZ/yHWl7Ouk6lQ1VLhg13AQskrcCU/2hMFrjanb39U6VPaGO1HBunGFRfPciX7+DHei3FrnPWq7Pe/16LqzVR+MC50pOT0fkaDZ4+Gta2t03VI3/dmdCCZmwKyrBq8VsaMVdjzrAsH2Z9wJZtYEmHuNa1ECV0NbvbvQ0NboWjPaG7o999a3N7jXkfXBFndit+BGd9KdM/HUah0KoSC89N/wyvfcifM197pQfjLCIfdv3lLjQl9LDbRUQ+VW7yLEuq6LEGOmu8AYCY45kwd2ktt0BA6u7xku6/d7G41r/eqcnGvp0WMQT0XkYsbGR9ykOa117h6es652x+OEJUPfDT7Y5iY0e/WHLlQXzXMtlKXvHvyLE5XbvO6rD0JHk+t+feanYOZVJw51fWmqct8vkcfhTXB4S89QdzyBhKMDp4lzF/eaKt3v4VFMV4+S9D6GMrQ1uJbvmj3eY+/RM4Snjuk7YOZOcd/JQxnWQkHY9wZsfdJd9Krd636mglnud6up0h2HfYlP7iNkeo+U7J5/l4lp7iJhYrfHKBvSoSA5AAqSIiID1Fztws2GB93ssSbOjYOa9yEoXT6wsXehDncFf98q99i/qqvrX2/J2V2hMnuiO8npfD2h75aZcNg7ye/WtbBun9uWmO5aOSIn+iWL+t8dL9Kq21rfM3S21LqQ0eotOx/dXrfWHjuExiW67sRzP+Ba9YaqO3RrvZsUZeMjsPOFru55fYlPcSeySRnu7y4po9fzdHfrjoGedEfbntfgz590Yf2Cf3Inyd3DYe+w2FztncD2cX4ViHethJHgOP6soe3C21jpWi4PrHO9BsrecKEe3El/JFROXNr/AGut+13c+AhsetS1aCWmu/GOc691XUz96O4d6nATNb3yv26ysazxUDDTTXqTOwXyproW26yS/gWCcMhdWFl5N+x6AeKSXKv/WZ9y/6aDLRx2IbU98ohclPGed65v6Pa8ybuA0+QmVesc217gjXvvFhpTx/TvYpO17hiv2XN0wKzZ474vw8Gu/VPzoPgMd/ElskwvOLW/k/Zm93e/9W/ugmVLtfsOnHKha/GefkXPi5bBNhfSmyq7LSu7vT7cc1uo/eTqiE/pGSwjj6RMd6wVzXOPzHEjouVTQXIAFCRFRAZB5XZ480E3oVD9AddaOPv9MP8GF86O9Z9sU1VXYNy32nUjjYzTzChyXSxLznQn4tkT3GfX7nXhMvKo8V73Ht+ZktsVKjPHuZk6
973RdfU6vbBrZtwJZ7txS35OihEOu5aA3mETXFfQlOzo1tNU5bqPxSe5E6fuATExY2ROINJcDY/f7lrbukvMgNQcd0yl5LjJYlK81309zyyO3tjnvoSC7oLM3hVdM0O3eJPiZxR1hcqJ57jbC/VuybPWzfy88RHY+BfX8heX5C5izL3WXdTw8+frLhR0F7S2PO5+x6t3ud4DEXGJLjznTe0KmHmnuZCZUdT1s7fUuJbOVb9w3zGZ47q6r/o1jjcWhYJu9t2aPVC13bWMH1gPlVu6LoRllsC4Bd0C5oIT9yBpOuK6wG/9m5usKdji3jPtMhceT7t4cLptW+v+D2ir7xnej3refIz1Te53qXpX18+bktsVKiOPnMmx0X2/HxQkB0BBUkRkEEW6vr35oJtJNNjiTtrmXe+6vrbV92xtrN7l3heId5OMjD/LTThScmb/ukxZ64JPbZkXNHuFzbr97vMGo2uhjGzWujFtgfiubsaxMMHSqQiH3Ul/JFTufc11FQd3EhwJlmNPd9s2/sntH4h3E+XMucb1MhgOt7ew1v1sR3a68YxHdrrvmciy+3jn+BQXLjOL3c/d0QwTlsJZn3YtriPxYslQaW9yM5SXr3MXBA+scy3FEXnTerZajp3rhi1se9KFx7LXXTDLLIEZy114nHhO7P7utTe57skH3+x6HN7SddutxAwoOr1nuMybFtPHlILkAChIiogMkbYGN4vjhgfdrR66SyvwWhsXu/BYPD92WjhERjprXYvS3hVdwbLzpN/ApHPdrWNmXg1peX5WOrjCYderoXfArN3rbsN01qddwJHB0VztjeVd51otD6yFxgq3zcR1dZ0vnOMuVMx49/CevC3Y5sJkxVtd4bJiY1dvmfhk97NOuRAu/jc/K+2TguQAKEiKiERBzR53X7n0AhcecyYN35MFkZGovtyd+BbNG9wJekS6qy93rZUHN7jW/tLlkDvZ76qGTigIR3a41tpIuEwvgA/82u/KjqIgOQAKkiIiIiIiMpodL0gOr9GeIiIiIiIi4jsFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXBUkRERERERHpFwVJERERERER6RcFSREREREREekXY631u4aYZIypBPb6XUcfxgBVfhcho4KONYkmHW8SLTrWJFp0rEk0DdXxNtFam9/XBgXJYcYYs8Zau8jvOmTk07Em0aTjTaJFx5pEi441iSY/jjd1bRUREREREZF+UZAUERERERGRflGQHH7u8bsAGTV0rEk06XiTaNGxJtGiY02iKerHm8ZIioiIiIiISL+oRVJERERERET6RUFSRERERERE+kVBchgwxpQYY35ljCk3xrQZY/YYY35ojMnxuzYZfowx1xpj7jLGvGKMqTfGWGPM707wnqXGmCeNMdXGmGZjzFvGmH8wxsRFq24ZXowxecaYW4wxfzHGvGOMaTHG1BljXjXG3GyM6fP/Hx1rMlDGmP82xjxnjNnnHW/Vxpj1xph/N8bkHeM9Ot7klBljbvT+L7XGmFuOsc+VxpgXve/BRmPMSmPMx6Jdqwwv3jm/Pcaj4hjvidr3msZIxjhjzFRgBVAAPAZsBc4ElgHbgHOstUf8q1CGG2PMBmAe0AjsB2YAv7fWfuQY+18N/AloBR4GqoH3AKXAI9baD0SjbhlejDG3Aj8DDgIvAGVAIfB+IAt3TH3AdvtPSMeanApjTDuwDtgMHAbSgLOBRUA5cLa1dl+3/XW8ySkzxowH3gbigHTgk9baX/ba5zbgLuAI7lhrB64FSoD/tdbeEdWiZdgwxuwBsoEf9rG50Vr7vV77R/V7TUEyxhljngEuBW631t7Vbf33gS8Cd1trb/WrPhl+jDHLcAHyHeAC3El+n0HSGJPp7ZeFu2ixxlufDDwPLAE+ZK19KErlyzBhjLkIdyL/N2ttuNv6scAqYDxwrbX2T956HWtySowxydba1j7Wfwv4V+Bn1trPeut0vMkpM8YY4P+AycCfgTvoFSSNMZNwjQBNwEJr7R5vfQ6wGpgKLLXWvh7N2mV48IIk1tpJJ7Fv1L/X1LU1hhljpuBC5B7gJ702/zvuS+lGY0xalEuTYcxa+4K1doc9uatI1wL5wEORLyTvM1qBr3kvPzMEZcowZ6193lr7ePcQ6a2vAH7uvbyw2yYda3JK+gqRnj94y2nd1ul4k8FwO3ARcBPunKwvnwCSgB9HQiSAtbYG+C/vpRoEZDBE/XtNQTK2XeQtn+3jZKwBeA1IxXXdERkKkWPw6T62vQw0A0uNMUnRK0lGgA5vGey2TseaDJX3eMu3uq3T8SanxBgzE/gOcKe19uXj7Hq8Y+2pXvuI9CXJGPMRY8y/GmO+YIxZdozxjlH/XosfrA+SIVHqLbcfY/sOXIvldOC5qFQko80xj0FrbdAYsxuYDUwBtkSzMBmejDHxwEe9l93/s9OxJoPCGHMHbqxaFm585Lm4EPmdbrvpeJMB877Hfosb+/2vJ9j9eMfaQWNME1BijEm11jYPbqUyQozFHW/d7TbG3GStfanbuqh/rylIxrYsb1l3jO2R9dlRqEVGJx2DMti+A8wBnrTWPtNtvY41GSx34CZ2inga+Li1trLbOh1vcir+H7AAONda23KCfU/mWEvz9lOQlN5+DbwCbAIacCHwNuBTwFPGmCXW2je9faP+vaaurcOb8ZaaMUn8omNQTpox5nbgS7iJJ27s79u9pY41OS5r7VhrrcFdxX8/7sRrvTHmjH58jI436ZMx5kxcK+T/DtIEOTrW5Jistd/w5hw4ZK1tttZu9CbZ/D6QAny9Hx836MeagmRsi1w5yDrG9sxe+4kMNh2DMiiMMZ8D7sTdmmGZtba61y461mRQeSdef8ENAckD7u+2Wceb9Fu3Lq3bgX87ybed7LFWfwqlyegTmbTu/G7rov69piAZ27Z5y+nH2B6Zge5YYyhFTtUxj0HvP9TJuAlTdkWzKBlejDH/APwY2IgLkX3dRFnHmgwJa+1e3AWM2caYMd5qHW8yEOm4Y2Ym0Nr95vC42fQBfuGti9z373jHWhGuW+t+jY+UfjrsLbvfuSHq32sKkrHtBW95qTGmx7+VMSYDOAdoAd6IdmEyajzvLS/vY9v5uFmDV1hr26JXkgwnxph/An4AbMCFyMPH2FXHmgylYm8Z8pY63mQg2oB7j/FY7+3zqvc60u31eMfaFb32ETlZS7xl91AY9e81BckYZq3dCTwLTAI+12vzN3BXIe631h7r3kUip+oRoAq43hizKLLSu7ntf3ovf+ZHYRL7jDH/hptcZy1wsbW26ji761iTATPGzDDGjO1jfcAY8y2gAHcCVeN
t0vEm/WatbbHW3tLXA/irt9tvvHUPe69/jQugtxljJkU+yxiTQ9eMr5FuiiKdjDGzjTG5fayfiOvlA/C7bpui/r1mTu6e5OIXY8xUYAXuP8HHcNP1ngUsw3VpXWqtPeJfhTLcGGPeC7zXezkWuAx3ResVb12VtfaOXvs/ArQCDwHVwFW4aaYfAT5o9UUivRhjPgbch2sBuou+x2Tssdbe1+09OtZkQLzu0/+Du1faTuAIbubWC3CT7VTgLmZs7vYeHW8yaIwxX8d1b/2ktfaXvbZ9HvgR7rh8GGjH3Ty+BDdpzx2I9OIdU/+M66G4Gzdr61Tg3UAy8CTwPmtte7f3RPV7TUFyGDDGjAe+iWuqzgMOAo8C3+hjwgqR4+r2n92x7LXWTur1nnOAr+K6UiQD7wC/An5krQ0d9Qky6p3EcQbwkrX2wl7v07Em/WaMmQN8BjfkowQ3vX0T7oLr33DHz1H/X+p4k8FyvCDpbX8P7tY0Z+B6BG4Gfmyt/U0065ThwxhzAXAr7lYzY3E9EWtxQ0V+C/y2r1AYze81BUkRERERERHpF42RFBERERERkX5RkBQREREREZF+UZAUERERERGRflGQFJH/397dhWpW1XEc//4aa3xtQFLQ0ealbEIIU2hSRJoSQysqFIXsBElUkHil5EWBJxQFSy1wLAqM1LkySSl8g0EDJWGMMhC68BzP5JQvkccRcyaD+Xex18mH7T5nZp+pxjjfDwzrPGuvtfd6npvhx1prL0mSJGkUg6QkSZIkaRSDpCRJkiRpFIOkJEmSJGkUg6QkSStEkrkklWT9oR6LJOn/m0FSkiRJkjSKQVKSJEmSNIpBUpIkSZI0ikFSkqQlJDkqyTeT7EjyapI9SZ5OMp3k6F7b6bYHcTrJhiR3JXkxyd7W58okhy3ynCT5UpJHk8y3PjNJtiY5eT/juyrJb5K80sY3m+TuJJ9aot95SbYn2Z3k9SRPJPns8n8pSdJKkqo61GOQJOltKclJwEPAqcBfgd8Be4GPACcAfwC2VNV8az8NXAPcAXymtX0MeDfwcWA1cC9wUVXtm3hOgLuAS4F/Ao8CLwObgQ3t7/OrakdvfOva+DYBr7Vn7QZOBk4DnqyqLRPt54B1wHXAt4AdwGzrfzpQwCVV9fNl/2iSpBXBIClJ0oAW7h4HzgJuBa6uqtfbtSOAHwNTwM+q6sutfpouSALcA0xV1d527RTgEWAtcHlV3TbxrG8AW4EXgXOr6ulWvwq4BbgC2Alsqqp/tGvvAJ6kC4D3AZctBNp2/Rhgc1Vtn6ibowuSbwCfq6oHJ659G7gWeKaqTjmIn06StAIYJCVJGpDkAuB+4Ang7MkZxHb9KLrZvGOB46tqfiJI7gHWV9VLvT6XAbfTC2tJZoCNwNeq6ie9Pu8CnqGbZZyqqm2t/vPAL4A54NSq2nMA32mOLkjeVFVXDTznJWANsK6q/rS/+0mSVi73SEqSNGxhf+E9/RAJUFV/p5sRPIxuqeukh/shstkG7APen2Qt/Hv57MZWf+fAc95o/QC2TFw6f+GeBxIie361yHNm28cTR95PkrTCGCQlSRq2sZXfbS/Qecs/3gybx/X6Pjt0wxbWnm8fT2rl2lY+v7AMdsBMry10M4sAf9zfFxmw2Gzjq608fBn3lCStIINvjpMkSaxq5a/plo8uZecy7r+wtyS9z0OyxLXleMsMqyRJYxgkJUka9lwr766qrSP7rh+qbPsQT2gf/9LKXa08McnqhZfp9Gxo5Z8n6hbC66aRY5Mk6aC5tFWSpGEPtPLiZfT9ZJL+cleAL9D93ztTVbsAWjnb6qf6HZK8k+5YEOiOBVnwUCunkrgUVZL0P2WQlCRp2L3Ab4GPJflRkmP7DZJsTHL5QN8jgVuTrJ5o+z664zUAftBrf3Mrr03ywYk+q4Ab6fZD7gQmz3e8D/g93ezntiRremM7Jsm5+/2WkiQtg0tbJUkaUFX72hEb9wNfBy5N8hTdUtT3AO8FPkB39mN/6eudwKeBmSSPA0cDn6B7ic0vB9rfBpxNN2P5VJJHgHlgM91Lf+aBiyeXvbbxXQg8DFwInJfkMWA33VEhH6Z7q+x2JEn6DzNISpK0iKralWQz8BXgEuBDwEeBv9HtV/we3VmOfbN0R4JcTxcg17S624Hv948TqapK8kW65bRfBc4EjqDbR/lD4Iaqeo6eqno2yRnAFcBFwDl0Lwl6ge6Ij58ezPeXJGkxqVrqJXGSJOlAJZkGrgG+U1XTh3Y0kiT997hHUpIkSZI0ikFSkiRJkjSKQVKSJEmSNIp7JCVJkiRJozgjKUmSJEkaxSApSZIkSRrFIClJkiRJGsUgKUmSJEkaxSApSZIkSRrFIClJkiRJGuVfzcwDc38+i5cAAAAASUVORK5CYII=\n", 550 | "text/plain": [ 551 | "
" 552 | ] 553 | }, 554 | "metadata": { 555 | "needs_background": "light" 556 | }, 557 | "output_type": "display_data" 558 | } 559 | ], 560 | "source": [ 561 | "plot_history(history.history)" 562 | ] 563 | }, 564 | { 565 | "cell_type": "markdown", 566 | "metadata": {}, 567 | "source": [ 568 | "# 保存模型" 569 | ] 570 | }, 571 | { 572 | "cell_type": "markdown", 573 | "metadata": {}, 574 | "source": [ 575 | "保存为 Keras 模型" 576 | ] 577 | }, 578 | { 579 | "cell_type": "code", 580 | "execution_count": 30, 581 | "metadata": {}, 582 | "outputs": [], 583 | "source": [ 584 | "classifier.save(\"mnist.h5\")" 585 | ] 586 | }, 587 | { 588 | "cell_type": "markdown", 589 | "metadata": {}, 590 | "source": [ 591 | "保存为 onnx 模型" 592 | ] 593 | }, 594 | { 595 | "cell_type": "code", 596 | "execution_count": 31, 597 | "metadata": {}, 598 | "outputs": [], 599 | "source": [ 600 | "import onnx\n", 601 | "import keras2onnx" 602 | ] 603 | }, 604 | { 605 | "cell_type": "code", 606 | "execution_count": 32, 607 | "metadata": {}, 608 | "outputs": [], 609 | "source": [ 610 | "onnx_model = keras2onnx.convert_keras(classifier, 'mnist')\n", 611 | "onnx.save_model(onnx_model, 'mnist.onnx')" 612 | ] 613 | }, 614 | { 615 | "cell_type": "markdown", 616 | "metadata": {}, 617 | "source": [ 618 | "# 加载训练好的模型" 619 | ] 620 | }, 621 | { 622 | "cell_type": "code", 623 | "execution_count": 33, 624 | "metadata": {}, 625 | "outputs": [], 626 | "source": [ 627 | "import onnxruntime as rt\n", 628 | "sess = rt.InferenceSession(\"mnist.onnx\")" 629 | ] 630 | }, 631 | { 632 | "cell_type": "code", 633 | "execution_count": 34, 634 | "metadata": {}, 635 | "outputs": [], 636 | "source": [ 637 | "input_name = sess.get_inputs()[0].name\n", 638 | "output_name = sess.get_outputs()[0].name" 639 | ] 640 | }, 641 | { 642 | "cell_type": "code", 643 | "execution_count": 36, 644 | "metadata": {}, 645 | "outputs": [], 646 | "source": [ 647 | "res = sess.run([output_name], {input_name: X_test})\n", 648 | "res = np.array(res)" 649 | ] 650 | }, 651 | { 652 | "cell_type": "code", 653 | "execution_count": 44, 654 | "metadata": {}, 655 | "outputs": [ 656 | { 657 | "name": "stdout", 658 | "output_type": "stream", 659 | "text": [ 660 | "[0.00000001 0.00000004 0.0000312 0.00062566 0. 0.0000002\n", 661 | " 0. 
0.99688894 0.00009035 0.00236361]\n" 662 | ] 663 | }, 664 | { 665 | "data": { 666 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQgAAAEFCAYAAAAfaHkhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAPIElEQVR4nO3de6xdZZnH8e9DayigLZcBMZFrA0hiEBUVW4VSkhkGoxBbJpjxkozSjGkwDEJMRo1o/MP4hwhMUtJMHMZApiRtxIzTQJO2UKSQCSQKKra2iEiQKJZwL1B95o+9zng87GedfS77cnq+n2RnudezLu95y/n57rXevU5kJpLUzSHDboCk0WVASCoZEJJKBoSkkgEhqWRASCoZEJJKfQ2IiHh7RHwvIp6KiFcj4vGI+G5EHNXP80qaHdGviVIRsRTYCRwH/BD4JfB+4AJgF7A8M//Yl5NLmh2Z2ZcXcBeQwJUT1n+nWX9zj8dJX7589fdV/f71ZQQREacCe4HHgaWZ+edxtbcAvwMCOC4zX5rkWLPfQEl/JTOj2/p+XYNY2Sy3jA+HpiEvAPcBhwPn9un8kmZBvwLijGa5u6j/qlme3qfzS5oFC/t03CXN8rmiPrb+yG7FiFgDrJntRkmamn4FxGTGPu90vb6QmeuB9eA1CGmY+vURY2yEsKSoL56wnaQR1K+A2NUsq2sMpzXL6hqFpBHQr9ucS4E9tN/mPAQ41tuc0vAN9DZnZu4FtgAnA2snlL8OHAF8f7JwkDRcg5xq/SjwATpTrXcDy3qZau0IQuq/agTRt4AAiIgTgG8AFwHH0PlocQfw9czc1+MxDAipz4YSELPBgJD6b9BTrSUdBAwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklQwISSUDQlLJgJBUMiAklRYOuwEHs9WrV5e1K664onXfp556qrW+f//+1vptt93WWn/66afL2p49e1r31fzhCEJSyYCQVDIgJJUMCEklA0JSyYCQVDIgJJUiM4fdhlYRMdoNbPHYY4+VtZNPPnlwDenihRdeKGs///nPB9iS0fLkk0+WtW9/+9ut+z744IOz3ZyByczott4RhKSSASGpZEBIKhkQkkoGhKSSASGpZEBIKvk8iD5qe+bDWWed1brvo48+2lo/88wzW+vvec97WusrVqwoa+eee27rvr/97W9b6yeccEJrfSYOHDjQWv/DH/7QWn/b29427XM/8cQTrfW5PA+i4ghCUsmAkFQyICSVDAhJJQNCUqmngIiI1RFxU0TcGxHPR0RGxK2T7LMsIjZHxL6IeDkiHo6IqyJiwew0XVK/9Xqb8yvAu4AXgSeBd7RtHBGXAJuA/cDtwD7go8D1wHLgsmm2V9IA9fQ8iIi4gE4w7AHOB7YDt2XmJ7tsu7jZbgmwPDMfbNYvArYBHwQ+kZkbemrgHH4exCg76qijytrZZ5/duu9DDz3UWn/f+943rTb1YrK/B7J79+7W+mTzS44++uiytnbt2tZ9161b11ofZTN6HkRmbs/MX2VvT5dZDRwLbBgLh+YY++mMRAA+38t5JQ1XPy5SrmyWd3ap7QBeBpZFxKF9OLekWdSPgDijWb5hrJeZB4Bf07n2cWofzi1pFvXjuxhLmuVzRX1s/ZHVASJiDbBmNhslaeqG8WWtsYsh5fWMzFwPrAcvUkrD1I+PGGMjhCVFffGE7SSNqH4ExK5mefrEQkQsBE4BDgD1M+EljYR+fMTYBvwjcBHwXxNq5wGHAzsy89U+nFs9evbZZ8va9u3bZ3TsrVu3zmj/mVi1alVrvW3+B8AjjzxS1m6//fZptWku68cIYiPwDHB5RJwztrKZKPXN5u3cnVEizSM9jSAi4lLg0ubt8c3ygxFxS/O/n8nMawAy8/mIuIJOUNwdERvoTLX+GJ1boBvpTL+WNOJ6/YhxNvCZCetO5S9zGX4DXDNWyMw7IuJ84MvAKmARnenXVwM39jgjU9KQ9RQQmXkdcN1UDpyZ9wEXT71JkkaFz4OQVDIgJJV6+rr3MDmTUuMdd9xxrfW225S97L969eqytmnTptZ957IZfd1b0vxkQEgqGRCSSgaEpJIBIalkQEgqGRCSSsN4opQ0bZM9ev7YY49trbd9zR1g165drfX5xhGEpJIBIalkQEgqGRCSSgaEpJIBIalkQEgq+TwIjZzly5eXtW3btrXu+6Y3vam1vmLFitb6jh07WusHK58HIWnKDAhJJQNCUsmAkFQyICSVDAhJJQNCUsnnQWjkXHxx/RcbJ5vnsHXr1tb6/fffP602zVeOICSVDAhJJQNCUsmAkFQyICSVDAhJJQNCUsl5EBq4ww47rLV+0UUXlbXXXnutdd+vfe1rrfXXX3+9ta6/5ghCUsmAkFQyICSVDAhJJQNCUsmAkFTyNqcG7tprr22tv/vd7y5rd955Z+u+O3funFab1J0jCEklA0JSyYCQVDIgJJUMCEklA0JSyYCQVIrMHHYbWkXEaDdQb/CRj3yktX7HHXe01l966aWy1vZVcIAHHnigta7uMjO6rXcEIalkQEgqGRCSSgaEpJIBIalkQEgqGRCSSj4PQlN2zDHHtNZvvPHG1vqCBQta65s3by5rznMYLEcQkkoGhKSSASGpZEBIKhkQkko9BUREHBMRn4uIH0TEnoh4JSKei4gfR8RnI6LrcSJiWURsjoh9EfFyRDwcEVdFRPtlbEkjodfbnJcB64DfAduBJ4C3Ah8H/h34+4i4LMd9dzwiLgE2AfuB24F9wEeB64HlzTEljbCengcRESuBI4D/ycw/j1t/PPC/wAnA6szc1KxfDOwBlgDLM/PBZv0iYBvwQeATmbmhh3P7PIgBm2yewmRzEd773ve21vfu3dtab3vmw2T7anpm9DyIzNyWmf89Phya9U8DNzdvV4wrrQaOBTaMhUOz/X7gK83bz/fWdEnDMhsXKV9vlgfGrVvZLLv9GaQdwMvAsog4dBbOL6lPZhQQEbEQ+HTzdnwYnNEsd0/cJzMPAL+mc/3j1JmcX1J/zfS7GN8C3glszsy7xq1f0iyfK/YbW39kt2JErAHWzLBtkmZo2gEREV8Avgj8EvjUVHdvll0vQGbmemB9cx4vUkpDMq2PGBGxFrgB+AVwQWbum7DJ2AhhCd0tnrCdpBE05RFERFxFZy7Dz4ALM/P3XTbbBZwDnA48NGH/hcApdC5qPjbV86v/li5d2lqf7DbmZK6++urWurcyR8eURhAR8SU64fATOiOHbuEAnbkOAN1uaJ8HHA7szMxXp3J+SYPVc0BExFfpX
JR8iM7I4ZmWzTcCzwCXR8Q5446xCPhm83bd1JsraZB6+ogREZ8BvgH8CbgX+ELEGyZePZ6ZtwBk5vMRcQWdoLg7IjbQmWr9MTq3QDfSmX4taYT1eg3ilGa5ALiq2OYe4JaxN5l5R0ScD3wZWAUsojP9+mrgxhz1v/knqbeAyMzrgOumevDMvA+4eKr7SRoNPg9CUsmAkFTysffz1EknnVTWtmzZMqNjX3vtta31H/3oRzM6vgbHEYSkkgEhqWRASCoZEJJKBoSkkgEhqWRASCo5D2KeWrOmfqLfiSeeOKNj33PPPa11v4YzdziCkFQyICSVDAhJJQNCUsmAkFQyICSVDAhJJedBHKQ+9KEPtdavvPLKAbVEc5kjCEklA0JSyYCQVDIgJJUMCEklA0JSyYCQVHIexEHqwx/+cGv9zW9+87SPvXfv3tb6iy++OO1ja7Q4gpBUMiAklQwISSUDQlLJgJBUMiAklbzNqTf46U9/2lq/8MILW+v79u2bzeZoiBxBSCoZEJJKBoSkkgEhqWRASCoZEJJKBoSkUoz6n2KPiNFuoHQQyMzott4RhKSSASGpZEBIKhkQkkoGhKSSASGpZEBIKs2F50E8A/xm3Pu/adapd/bZ9MyXfjupKoz8RKmJIuLBzDxn2O2YS+yz6bHf/IghqYUBIak0FwNi/bAbMAfZZ9Mz7/ttzl2DkDQ4c3EEIWlADAhJpTkREBHx9oj4XkQ8FRGvRsTjEfHdiDhq2G0bpohYHRE3RcS9EfF8RGRE3DrJPssiYnNE7IuIlyPi4Yi4KiIWDKrdwxQRx0TE5yLiBxGxJyJeiYjnIuLHEfHZiOj6OzFf+23kr0FExFJgJ3Ac8EPgl8D7gQuAXcDyzPzj8Fo4PBHxE+BdwIvAk8A7gNsy85PF9pcAm4D9wO3APuCjwBnAxsy8bBDtHqaI+GdgHfA7YDvwBPBW4OPAEjr9c1mO+8WY1/2WmSP9Au4CErhywvrvNOtvHnYbh9g3FwCnAQGsaPrj1mLbxcDvgVeBc8atX0QngBO4fNg/0wD6bCWdX+5DJqw/nk5YJLDKfmt+zmE3YJJ/zFObf4Bfd/kHfQud/+d8CThi2G0d9quHgPinpv6fXWorm9o9w/45htyH/9r0w032W+c16tcgVjbLLZn55/GFzHwBuA84HDh30A2bg8b68s4utR3Ay8CyiDh0cE0aOa83ywPj1s3rfhv1gDijWe4u6r9qlqcPoC1zXdmXmXmAzihtIZ1R27wTEQuBTzdvx4fBvO63UQ+IJc3yuaI+tv7IAbRlrrMv230LeCewOTPvGrd+XvfbqAfEZMYe1T3at2LmhnnblxHxBeCLdO6QfWqquzfLg7LfRj0gxtJ5SVFfPGE71ezLLiJiLXAD8AvggszcN2GTed1vox4Qu5pldY3htGZZXaPQX5R92Xz+PoXOxbnHBtmoYYqIq4B/A35GJxye7rLZvO63UQ+I7c3ybyfOcIuItwDLgVeABwbdsDloW7O8qEvtPDp3g3Zm5quDa9LwRMSXgOuBn9AJh98Xm87rfhvpgMjMvcAW4GRg7YTy14EjgO9n5ksDbtpctJHO49Muj4j/f0pSRCwCvtm8XTeMhg1aRHyVzkXJh4ALM7PtsXLzut/m4lTrR4EP0JlFuBtYlvN3qvWlwKXN2+OBv6Mz1L23WfdMZl4zYfuNdKYMb6AzZfhjNFOGgX/IUf8PYoYi4jPALcCfgJvofu3g8cy8Zdw+87ffhj1Tq8cZbicA/0Fn/vxrdB5iewNw9LDbNuR+uY7O1fPq9XiXfZYDm4Fn6Xw8ewT4F2DBsH+eEemzBO623zqvkR9BSBqekb4GIWm4DAhJJQNCUsmAkFQyICSVDAhJJQNCUsmAkFQyICSVDAhJpf8Duxsu2QgN1noAAAAASUVORK5CYII=\n", 667 | "text/plain": [ 668 | "
" 669 | ] 670 | }, 671 | "metadata": { 672 | "needs_background": "light" 673 | }, 674 | "output_type": "display_data" 675 | } 676 | ], 677 | "source": [ 678 | "plt.imshow(X_test[0].reshape((28, 28)), cmap='gray')\n", 679 | "print(res[0][0])" 680 | ] 681 | }, 682 | { 683 | "cell_type": "code", 684 | "execution_count": null, 685 | "metadata": {}, 686 | "outputs": [], 687 | "source": [] 688 | } 689 | ], 690 | "metadata": { 691 | "kernelspec": { 692 | "display_name": "Python 3", 693 | "language": "python", 694 | "name": "python3" 695 | }, 696 | "language_info": { 697 | "codemirror_mode": { 698 | "name": "ipython", 699 | "version": 3 700 | }, 701 | "file_extension": ".py", 702 | "mimetype": "text/x-python", 703 | "name": "python", 704 | "nbconvert_exporter": "python", 705 | "pygments_lexer": "ipython3", 706 | "version": "3.7.3" 707 | } 708 | }, 709 | "nbformat": 4, 710 | "nbformat_minor": 2 711 | } 712 | -------------------------------------------------------------------------------- /examples/model/mnist-lg.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wuhanstudio/onnx-backend/56abccbf30dbd6436c406008aadf4345b4faad58/examples/model/mnist-lg.onnx -------------------------------------------------------------------------------- /examples/model/mnist-sm.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wuhanstudio/onnx-backend/56abccbf30dbd6436c406008aadf4345b4faad58/examples/model/mnist-sm.onnx -------------------------------------------------------------------------------- /src/add.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void add(const float *input, // pointer to vector 4 | const float *bias, // pointer to matrix 5 | const uint16_t dim_vec, // length of the vector 6 | float *output) 7 | { 8 | for (int i = 0; i < dim_vec; i++) 9 | { 10 | output[i] = input[i] + bias[i]; 11 | } 12 | } 13 | 14 | float* add_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name) 15 | { 16 | assert(graph != NULL && input != NULL && layer_name != "" ); 17 | 18 | Onnx__NodeProto* node = onnx_graph_get_node_by_name(graph, layer_name); 19 | const char* bias = node->input[1]; 20 | 21 | float* B = onnx_graph_get_weights_by_name(graph, bias); 22 | int64_t* shapeB = onnx_graph_get_dims_by_name(graph, bias); 23 | if(shapeB == NULL) 24 | { 25 | return NULL; 26 | } 27 | 28 | float* output = (float*) malloc(sizeof(float)*shapeB[0]); 29 | memset(output, 0, sizeof(sizeof(float)*shapeB[0])); 30 | add(input, B, shapeB[0], output); 31 | 32 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 33 | 34 | return output; 35 | } 36 | -------------------------------------------------------------------------------- /src/conv2d.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void conv2D(const float *input, // input image 4 | const uint16_t dim_im_in_x, // input image dimention x 5 | const uint16_t dim_im_in_y, // input image dimention y 6 | const uint16_t ch_im_in, // number of input image channels 7 | const float *weight, // kernel weights 8 | const uint16_t ch_im_out, // number of filters, i.e., output image channels 9 | const uint16_t dim_kernel_x, // filter kernel size x 10 | const uint16_t dim_kernel_y, // filter kernel size y 11 | const uint16_t padding_x, // padding sizes x 12 | const uint16_t 
padding_y, // padding sizes y 13 | const uint16_t stride_x, // stride x 14 | const uint16_t stride_y, // stride y 15 | const float *bias, // bias 16 | float *output, // output image 17 | const uint16_t dim_im_out_x, // output image dimension x 18 | const uint16_t dim_im_out_y // output image dimension y 19 | ) 20 | { 21 | int i, j, k, l, m, n; 22 | float conv_out = 0.0f; 23 | int in_row, in_col; 24 | 25 | // For each filter 26 | for (i = 0; i < ch_im_out; i++) 27 | { 28 | // For each image dimension 29 | for (j = 0; j < dim_im_out_y; j++) 30 | { 31 | for (k = 0; k < dim_im_out_x; k++) 32 | { 33 | conv_out = bias[i]; 34 | // For each kernel dimension 35 | for (m = 0; m < dim_kernel_y; m++) 36 | { 37 | for (n = 0; n < dim_kernel_x; n++) 38 | { 39 | // if-for implementation 40 | in_row = stride_y * j + m - padding_y; 41 | in_col = stride_x * k + n - padding_x; 42 | if (in_row >= 0 && in_col >= 0 && in_row < dim_im_in_y && in_col < dim_im_in_x) 43 | { 44 | // For each input channel 45 | for (l = 0; l < ch_im_in; l++) 46 | { 47 | conv_out += input[(in_row * dim_im_in_x + in_col) * ch_im_in + l] * 48 | weight[i * ch_im_in * dim_kernel_y * dim_kernel_x + (m * dim_kernel_x + n) * ch_im_in + 49 | l]; 50 | } 51 | } 52 | } 53 | } 54 | output[i + (j * dim_im_out_x + k) * ch_im_out] = conv_out; 55 | } 56 | } 57 | } 58 | } 59 | 60 | float* conv2D_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name) 61 | { 62 | assert(graph != NULL && input != NULL && layer_name != "" ); 63 | 64 | Onnx__NodeProto* node = onnx_graph_get_node_by_name(graph, layer_name); 65 | if(node == NULL) 66 | { 67 | // layer not found 68 | return NULL; 69 | } 70 | const char* weight = node->input[1]; 71 | const char* bias = node->input[2]; 72 | 73 | // Get weight shape 74 | int64_t* shapeW = onnx_graph_get_dims_by_name(graph, weight); 75 | if(shapeW == NULL) 76 | { 77 | return NULL; 78 | } 79 | int64_t dimW = onnx_graph_get_dim_by_name(graph, weight); 80 | if(dimW < 0) 81 | { 82 | return NULL; 83 | } 84 | 85 | // Get weights 86 | // NCWH --> NWHC 87 | int64_t permW_t[] = { 0, 2, 3, 1}; 88 | float* W = onnx_graph_get_weights_by_name(graph, weight); 89 | if(W == NULL) 90 | { 91 | return NULL; 92 | } 93 | float* W_t = transpose(W, shapeW, dimW, permW_t); 94 | 95 | // Get bias 96 | float* B = onnx_graph_get_weights_by_name(graph, bias); 97 | if(B == NULL) 98 | { 99 | return NULL; 100 | } 101 | 102 | float* output = (float*) malloc(sizeof(float)*shapeW[0]*shapeInput[W_INDEX]*shapeInput[H_INDEX]); 103 | memset(output, 0, sizeof(sizeof(float)*shapeW[0]*shapeInput[W_INDEX]*shapeInput[H_INDEX])); 104 | conv2D(input, shapeInput[W_INDEX], shapeInput[H_INDEX], shapeW[1], W_t, shapeW[0], shapeW[2], shapeW[3], 1, 1, 1, 1, B, output, shapeInput[W_INDEX], shapeInput[H_INDEX]); 105 | 106 | shapeOutput[W_INDEX] = shapeInput[W_INDEX]; 107 | shapeOutput[H_INDEX] = shapeInput[H_INDEX]; 108 | shapeOutput[C_INDEX] = shapeW[0]; 109 | 110 | free(W_t); 111 | 112 | return output; 113 | } 114 | -------------------------------------------------------------------------------- /src/dense.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void dense(const float *input, // pointer to vector 4 | const float *weight, // pointer to matrix 5 | const uint16_t dim_vec, // length of the vector 6 | const uint16_t num_of_rows, // numCol of A 7 | const float *bias, 8 | float *output) // output operand 9 | { 10 | for (int i = 0; i < num_of_rows; i++) 11 
| { 12 | float ip_out = bias[i]; 13 | for (int j = 0; j < dim_vec; j++) 14 | { 15 | ip_out += input[j] * weight[i * dim_vec + j]; 16 | } 17 | output[i] = ip_out; 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/info.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void onnx_tensor_info(const float* A, int64_t* shape, int64_t dim) 4 | { 5 | int elem = 1; 6 | for(int i = 0; i < dim; i++) 7 | { 8 | elem = elem * shape[i]; 9 | } 10 | 11 | printf("Array size: %d\n", elem); 12 | for(int i = 0; i < elem; i++) 13 | { 14 | printf( "%f ", A[i] ); 15 | int split = 1; 16 | for(int j = dim-1; j > 0; j--) 17 | { 18 | split = split * shape[j]; 19 | if( (i+1) % split == 0) 20 | { 21 | printf("\n"); 22 | } 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/matmul.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void matmul(const float *input, // pointer to vector 4 | const float *weight, // pointer to matrix 5 | const uint16_t dim_vec, // length of the vector 6 | const uint16_t num_of_rows, // numCol of A 7 | float *output) 8 | { 9 | for (int i = 0; i < num_of_rows; i++) 10 | { 11 | float ip_out = 0; 12 | for (int j = 0; j < dim_vec; j++) 13 | { 14 | ip_out += input[j] * weight[i * dim_vec + j]; 15 | } 16 | output[i] = ip_out; 17 | } 18 | } 19 | 20 | float* matmul_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name) 21 | { 22 | assert(graph != NULL && input != NULL && layer_name != "" ); 23 | 24 | Onnx__NodeProto* node = onnx_graph_get_node_by_name(graph, layer_name); 25 | const char* weight = node->input[1]; 26 | 27 | int64_t* shapeW = onnx_graph_get_dims_by_name(graph, weight); 28 | if(shapeW == NULL) 29 | { 30 | return NULL; 31 | } 32 | int64_t dimW = onnx_graph_get_dim_by_name(graph, weight); 33 | if(dimW < 0) 34 | { 35 | return NULL; 36 | } 37 | 38 | assert(shapeW[0] == shapeInput[1]); 39 | 40 | int64_t permW_t[] = {1, 0}; 41 | float* W = onnx_graph_get_weights_by_name(graph, weight); 42 | if(W == NULL) 43 | { 44 | return NULL; 45 | } 46 | float* W_t = transpose(W, shapeW, dimW, permW_t); 47 | 48 | float* output = (float*) malloc(sizeof(float)*shapeW[1]); 49 | if(output == NULL) 50 | { 51 | // No memory 52 | return NULL; 53 | } 54 | memset(output, 0, sizeof(sizeof(float)*shapeW[1])); 55 | matmul(input, W_t, shapeW[0], shapeW[1], output); 56 | 57 | shapeOutput[0] = shapeInput[0]; 58 | shapeOutput[1] = shapeW[1]; 59 | 60 | free(W_t); 61 | 62 | return output; 63 | } 64 | -------------------------------------------------------------------------------- /src/maxpool.c: -------------------------------------------------------------------------------- 1 | #include "onnx.h" 2 | 3 | void maxpool(const float *input, 4 | const uint16_t dim_im_in_x, // input image dimension x or W 5 | const uint16_t dim_im_in_y, // input image dimension y or H 6 | const uint16_t ch_im_in, // number of input image channels 7 | const uint16_t dim_kernel_x, // window kernel size 8 | const uint16_t dim_kernel_y, // window kernel size 9 | const uint16_t padding_x, // padding sizes 10 | const uint16_t padding_y, // padding sizes 11 | const uint16_t stride_x, // stride 12 | const uint16_t stride_y, // stride 13 | const uint16_t dim_im_out_x, // output image dimension x or W 14 | const uint16_t dim_im_out_y, // output image dimension 
y or H 15 | float *output) 16 | { 17 | int16_t i_ch_in, i_x, i_y; 18 | int16_t k_x, k_y; 19 | 20 | for (i_ch_in = 0; i_ch_in < ch_im_in; i_ch_in++) 21 | { 22 | for (i_y = 0; i_y < dim_im_out_y; i_y++) 23 | { 24 | for (i_x = 0; i_x < dim_im_out_x; i_x++) 25 | { 26 | float max = FLT_MIN; 27 | for (k_y = i_y * stride_y - padding_y; k_y < i_y * stride_y - padding_y + dim_kernel_y; k_y++) 28 | { 29 | for (k_x = i_x * stride_x - padding_x; k_x < i_x * stride_x - padding_x + dim_kernel_x; k_x++) 30 | { 31 | if (k_y >= 0 && k_x >= 0 && k_y < dim_im_in_y && k_x < dim_im_in_x) 32 | { 33 | if (input[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)] > max) 34 | { 35 | max = input[i_ch_in + ch_im_in * (k_x + k_y * dim_im_in_x)]; 36 | } 37 | } 38 | } 39 | } 40 | output[i_ch_in + ch_im_in * (i_x + i_y * dim_im_out_x)] = max; 41 | } 42 | } 43 | } 44 | } 45 | 46 | float* maxpool_layer(Onnx__GraphProto* graph, float* input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name) 47 | { 48 | assert(graph != NULL && input != NULL && layer_name != "" ); 49 | 50 | Onnx__NodeProto* node = onnx_graph_get_node_by_name(graph, layer_name); 51 | if(node == NULL) 52 | { 53 | // layer not found 54 | return NULL; 55 | } 56 | 57 | uint16_t kernel_x = 1; 58 | uint16_t kernel_y = 1; 59 | uint16_t padding_x = 0; 60 | uint16_t padding_y = 0; 61 | uint16_t stride_x = 1; 62 | uint16_t stride_y = 1; 63 | 64 | for(int i = 0; i < node->n_attribute; i++) 65 | { 66 | if( strcmp(node->attribute[i]->name, "kernel_shape") == 0 ) 67 | { 68 | kernel_x = node->attribute[i]->ints[0]; 69 | kernel_y = node->attribute[i]->ints[1]; 70 | } 71 | if( strcmp(node->attribute[i]->name, "strides") == 0 ) 72 | { 73 | stride_x = node->attribute[i]->ints[0]; 74 | stride_y = node->attribute[i]->ints[1]; 75 | } 76 | } 77 | 78 | uint16_t out_x = (shapeInput[W_INDEX] - kernel_x + 2 * padding_x) / stride_x + 1; 79 | uint16_t out_y = (shapeInput[H_INDEX] - kernel_y + 2 * padding_y) / stride_y + 1; 80 | 81 | float* output = (float*) malloc(sizeof(float)*out_x*out_y*shapeInput[C_INDEX]); 82 | if(output == NULL) 83 | { 84 | // No memory 85 | return NULL; 86 | } 87 | memset(output, 0, sizeof(sizeof(float)*out_x*out_y*shapeInput[C_INDEX])); 88 | maxpool(input, shapeInput[W_INDEX], shapeInput[H_INDEX], shapeInput[C_INDEX], kernel_x, kernel_y, padding_x, padding_y, stride_x, stride_y, out_x, out_y, output); 89 | 90 | shapeOutput[W_INDEX] = out_x; 91 | shapeOutput[H_INDEX] = out_y; 92 | shapeOutput[C_INDEX] = shapeInput[C_INDEX]; 93 | 94 | return output; 95 | } 96 | -------------------------------------------------------------------------------- /src/model.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include "onnx.h" 3 | 4 | float* onnx_model_run(Onnx__ModelProto* model, float* input, int64_t* shapeInput) 5 | { 6 | int64_t* shapeOutput = (int64_t*) malloc(sizeof(int64_t)*3); 7 | shapeOutput[0] = -1; shapeOutput[1] = -1; shapeOutput[2] = -1; 8 | 9 | Onnx__NodeProto* node = onnx_graph_get_node_by_input(model->graph, model->graph->input[0]->name); 10 | 11 | int i = 0; 12 | float* output; 13 | while(node != NULL) 14 | { 15 | printf("[%2d] %-10s %-20s ", i++, node->op_type, node->name); 16 | if(strcmp(node->op_type, "Conv") == 0) 17 | { 18 | output = conv2D_layer(model->graph, input, shapeInput, shapeOutput, node->name); 19 | } 20 | else if(strcmp(node->op_type, "Relu") == 0) 21 | { 22 | output = relu_layer(model->graph, input, shapeInput, shapeOutput, node->name); 23 | } 24 | else if(strcmp(node->op_type, 
"MaxPool") == 0) 25 | { 26 | output = maxpool_layer(model->graph, input, shapeInput, shapeOutput, node->name); 27 | } 28 | else if(strcmp(node->op_type, "Softmax") == 0) 29 | { 30 | output = softmax_layer(model->graph, input, shapeInput, shapeOutput, node->name); 31 | } 32 | else if(strcmp(node->op_type, "MatMul") == 0) 33 | { 34 | output = matmul_layer(model->graph, input, shapeInput, shapeOutput, node->name); 35 | } 36 | else if(strcmp(node->op_type, "Add") == 0) 37 | { 38 | output = add_layer(model->graph, input, shapeInput, shapeOutput, node->name); 39 | } 40 | else if(strcmp(node->op_type, "Identity") == 0) 41 | { 42 | node = onnx_graph_get_node_by_input(model->graph, node->output[0]); 43 | printf("\n"); 44 | 45 | continue; 46 | } 47 | else if(strcmp(node->op_type, "Transpose") == 0) 48 | { 49 | node = onnx_graph_get_node_by_input(model->graph, node->output[0]); 50 | printf("\n"); 51 | 52 | continue; 53 | } 54 | else if(strcmp(node->op_type, "Reshape") == 0) 55 | { 56 | shapeOutput[1] = shapeOutput[0] * shapeOutput[1] * shapeOutput[2]; 57 | shapeOutput[2] = 1; 58 | shapeOutput[0] = 1; 59 | printf("[%2" PRId64 ", %2" PRId64 ", %2" PRId64 "] --> [%2" PRId64 ", %2" PRId64 ", %2" PRId64 "]\n", shapeInput[0], shapeInput[1], shapeInput[2], shapeOutput[0], shapeOutput[1], shapeOutput[2]); 60 | 61 | // free(input); 62 | // input = output; 63 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 64 | 65 | node = onnx_graph_get_node_by_input(model->graph, node->output[0]); 66 | continue; 67 | } 68 | else 69 | { 70 | printf("Unsupported operand: %s\n", node->op_type); 71 | } 72 | printf("[%2" PRId64 ", %2" PRId64 ", %2" PRId64 "] --> [%2" PRId64 ", %2" PRId64 ", %2" PRId64 "]\n", shapeInput[0], shapeInput[1], shapeInput[2], shapeOutput[0], shapeOutput[1], shapeOutput[2]); 73 | 74 | free(input); 75 | input = output; 76 | memcpy(shapeInput, shapeOutput, sizeof(int64_t)*3); 77 | 78 | node = onnx_graph_get_node_by_input(model->graph, node->output[0]); 79 | } 80 | output = input; 81 | free(shapeOutput); 82 | 83 | return output; 84 | } 85 | -------------------------------------------------------------------------------- /src/onnx.h: -------------------------------------------------------------------------------- 1 | #ifndef __ONNX_H__ 2 | #define __ONNX_H__ 3 | 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include 11 | 12 | #define ONNX_USE_NWHC 13 | 14 | #ifdef ONNX_USE_NWHC 15 | // NWHC 16 | #define W_INDEX 0 17 | #define H_INDEX 1 18 | #define C_INDEX 2 19 | #else 20 | // NCWH 21 | #define C_INDEX 0 22 | #define W_INDEX 1 23 | #define H_INDEX 2 24 | #endif 25 | 26 | // Model 27 | void onnx_tensor_info(const float* A, int64_t* shape, int64_t dim); 28 | float* onnx_model_run(Onnx__ModelProto* model, float* input, int64_t* shapeInput); 29 | 30 | // Layers 31 | float* conv2D_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name); 32 | float* relu_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name); 33 | float* maxpool_layer(Onnx__GraphProto* graph, float* input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name); 34 | float* matmul_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name); 35 | float* add_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name); 36 | float* softmax_layer(Onnx__GraphProto* graph, 
const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name);
37 | 
38 | // Operators
39 | float* transpose(const float* A, int64_t* shape, int64_t dim, int64_t* perm);
40 | 
41 | void conv2D(const float *input,             // input image
42 |             const uint16_t dim_im_in_x,     // input image dimension x
43 |             const uint16_t dim_im_in_y,     // input image dimension y
44 |             const uint16_t ch_im_in,        // number of input image channels
45 |             const float *weight,            // kernel weights
46 |             const uint16_t ch_im_out,       // number of filters, i.e., output image channels
47 |             const uint16_t dim_kernel_x,    // filter kernel size x
48 |             const uint16_t dim_kernel_y,    // filter kernel size y
49 |             const uint16_t padding_x,       // padding sizes x
50 |             const uint16_t padding_y,       // padding sizes y
51 |             const uint16_t stride_x,        // stride x
52 |             const uint16_t stride_y,        // stride y
53 |             const float *bias,              // bias
54 |             float *output,                  // output image
55 |             const uint16_t dim_im_out_x,    // output image dimension x
56 |             const uint16_t dim_im_out_y     // output image dimension y
57 |             );
58 | 
59 | void relu(const float *input, uint32_t size, float* output);
60 | 
61 | void maxpool(const float *input,
62 |              const uint16_t dim_im_in_x,    // input image dimension x or W
63 |              const uint16_t dim_im_in_y,    // input image dimension y or H
64 |              const uint16_t ch_im_in,       // number of input image channels
65 |              const uint16_t dim_kernel_x,   // window kernel size
66 |              const uint16_t dim_kernel_y,   // window kernel size
67 |              const uint16_t padding_x,      // padding sizes
68 |              const uint16_t padding_y,      // padding sizes
69 |              const uint16_t stride_x,       // stride
70 |              const uint16_t stride_y,       // stride
71 |              const uint16_t dim_im_out_x,   // output image dimension x or W
72 |              const uint16_t dim_im_out_y,   // output image dimension y or H
73 |              float *output);
74 | 
75 | void matmul(const float *input,             // pointer to vector
76 |             const float *weight,            // pointer to matrix
77 |             const uint16_t dim_vec,         // length of the vector
78 |             const uint16_t num_of_rows,     // numCol of A
79 |             float *output);
80 | 
81 | void add(const float *input,                // pointer to vector
82 |          const float *bias,                 // pointer to bias vector
83 |          const uint16_t dim_vec,            // length of the vector
84 |          float *output);
85 | 
86 | void dense(const float *input,              // pointer to vector
87 |            const float *weight,             // pointer to matrix
88 |            const uint16_t dim_vec,          // length of the vector
89 |            const uint16_t num_of_rows,      // numCol of A
90 |            const float *bias,
91 |            float *output);
92 | 
93 | void softmax(const float *input, const uint32_t dim_vec, float *output);
94 | 
95 | #endif // __ONNX_H__
96 | 
--------------------------------------------------------------------------------
/src/relu.c:
--------------------------------------------------------------------------------
1 | #include "onnx.h"
2 | 
3 | void relu(const float *input, uint32_t size, float* output)
4 | {
5 |     uint32_t i;
6 |     memcpy(output, input, sizeof(float) * size);
7 |     for (i = 0; i < size; i++)
8 |     {
9 |         if (output[i] < 0)
10 |             output[i] = 0;
11 |     }
12 | }
13 | 
14 | float* relu_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name)
15 | {
16 |     assert(graph != NULL && input != NULL && layer_name != NULL);
17 | 
18 |     int64_t len = shapeInput[0] * shapeInput[1] * shapeInput[2];
19 |     float* output = (float*) malloc(sizeof(float)*len);
20 |     memset(output, 0, sizeof(float)*len);
21 | 
22 |     relu(input, len, output);
23 | 
24 |     memcpy(shapeOutput, shapeInput, sizeof(int64_t)*3); // ReLU keeps the input shape
25 | 
26 |     return output;
27 | }
28 | 
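The MatMul → Add → Relu sequence that `onnx_model_run()` dispatches is the fully-connected path of the model. The sketch below is illustrative only and not part of the repository: it assumes `onnx.h` and its protobuf-c dependency are on the include path, and the sizes and weight values are made up for the example. It shows how the primitives `matmul()`, `add()` and `relu()` compose into one dense layer, and that `dense()` fuses the first two steps.

```c
/* toy_dense.c -- illustrative sketch, not part of the repository. */
#include <stdio.h>
#include "onnx.h"

int main(void)
{
    /* 3 inputs, 2 output neurons. matmul()/dense() expect the weight
     * matrix stored row-major as [num_of_rows][dim_vec]. */
    const float input[3]  = { 1.0f, 2.0f, 0.5f };
    const float weight[6] = {  0.1f, 0.2f,  0.3f,
                              -0.4f, 0.5f, -0.6f };
    const float bias[2]   = { 0.05f, -0.05f };

    float matmul_out[2], add_out[2], relu_out[2], dense_out[2];

    matmul(input, weight, 3, 2, matmul_out);      /* x . W            */
    add(matmul_out, bias, 2, add_out);            /* + b              */
    relu(add_out, 2, relu_out);                   /* max(0, .)        */

    dense(input, weight, 3, 2, bias, dense_out);  /* fused MatMul+Add */

    for (int i = 0; i < 2; i++)
        printf("matmul+add: %f  dense: %f  relu: %f\n",
               add_out[i], dense_out[i], relu_out[i]);

    return 0;
}
```

Because `matmul()` reads the weight matrix row by row, `matmul_layer()` first transposes the ONNX initializer (perm `{1, 0}`) before handing it to `matmul()`.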
--------------------------------------------------------------------------------
/src/softmax.c:
--------------------------------------------------------------------------------
1 | #include "onnx.h"
2 | 
3 | void softmax(const float *input, const uint32_t dim_vec, float *output)
4 | {
5 |     float sum = 0.0f;
6 | 
7 |     for(int i = 0; i < dim_vec; i++)
8 |     {
9 |         output[i] = expf(input[i]);
10 |         sum = sum + output[i];
11 |     }
12 | 
13 |     for(int i = 0; i < dim_vec; i++)
14 |     {
15 |         output[i] = output[i] / sum;
16 |     }
17 | }
18 | 
19 | float* softmax_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name)
20 | {
21 |     assert(graph != NULL && input != NULL && layer_name != NULL && shapeInput[1] > 0);
22 | 
23 |     float* output = (float*) malloc(sizeof(float)*shapeInput[1]);
24 |     memset(output, 0, sizeof(float)*shapeInput[1]);
25 |     softmax(input, shapeInput[1], output);
26 | 
27 |     memcpy(shapeOutput, shapeInput, sizeof(int64_t)*3); // Softmax keeps the input shape
28 | 
29 |     return output;
30 | }
31 | 
--------------------------------------------------------------------------------
/src/transpose.c:
--------------------------------------------------------------------------------
1 | #include "onnx.h"
2 | 
3 | float* transpose(const float* A, int64_t* shape, int64_t dim, int64_t* perm)
4 | {
5 |     // Get array size
6 |     int elem = 1;
7 |     for(int i = 0; i < dim; i++)
8 |     {
9 |         elem = elem * shape[i];
10 |     }
11 | 
12 |     // Malloc memory for B
13 |     float* B = malloc(sizeof(float) * elem);
14 |     if(B == NULL)
15 |     {
16 |         return NULL;
17 |     }
18 | 
19 |     // Malloc memory for shapeB
20 |     int* shapeB = malloc(sizeof(int) * dim);
21 |     if(shapeB == NULL)
22 |     {
23 |         free(B); return NULL;
24 |     }
25 |     for(int i = 0; i < dim; i++)
26 |     {
27 |         shapeB[i] = shape[perm[i]];
28 |     }
29 | 
30 |     // Transpose
31 |     for(int src = 0; src < elem; src++)
32 |     {
33 |         // Get transposed B array
34 |         // A[1][0][3] -> B[3][1][0]
35 |         int temp = src;
36 |         int* indexA = malloc(sizeof(int) * dim);
37 |         if(indexA == NULL)
38 |         {
39 |             free(B); free(shapeB); return NULL;
40 |         }
41 |         int* indexB = malloc(sizeof(int) * dim);
42 |         if(indexB == NULL)
43 |         {
44 |             free(indexA); free(B); free(shapeB); return NULL;
45 |         }
46 |         for(int i = dim-1; i >= 0; i--)
47 |         {
48 |             indexA[i] = temp % shape[i];
49 |             temp = temp / shape[i];
50 |         }
51 |         for(int i = 0; i < dim; i++)
52 |         {
53 |             indexB[i] = indexA[perm[i]];
54 |         }
55 | 
56 |         // Get transposed B index
57 |         // A[1][0][3] -> B[3][1][0]
58 |         int dst = 0;
59 |         temp = 1;
60 |         for(int i = dim - 1; i >= 0; i--)
61 |         {
62 |             dst = dst + indexB[i] * temp;
63 |             temp = temp * shapeB[i];
64 |         }
65 | 
66 |         B[dst] = A[src];
67 | 
68 |         free(indexA);
69 |         free(indexB);
70 |     }
71 | 
72 |     free(shapeB);
73 | 
74 |     return B;
75 | }
76 | 
77 | float* transpose_layer(Onnx__GraphProto* graph, const float *input, int64_t* shapeInput, int64_t* shapeOutput, const char* layer_name)
78 | {
79 |     assert(graph != NULL && input != NULL && layer_name != NULL);
80 | 
81 |     Onnx__NodeProto* node = onnx_graph_get_node_by_name(graph, layer_name);
82 |     if(node == NULL)
83 |     {
84 |         return NULL;
85 |     }
86 | 
87 |     int64_t perm_t[3]; // 3-D W/H/C perm: drop the batch axis of the 4-D ONNX perm
88 |     int64_t* perm = node->attribute[0]->ints;
89 |     perm_t[0] = perm[1] - 1;
90 |     perm_t[1] = perm[2] - 1;
91 |     perm_t[2] = perm[3] - 1;
92 | 
93 |     float* output = transpose(input, shapeInput, 3, perm_t);
94 | 
95 |     shapeOutput[0] = shapeInput[perm_t[0]];
96 |     shapeOutput[1] = shapeInput[perm_t[1]];
97 |     shapeOutput[2] = shapeInput[perm_t[2]];
98 | 
99 |     return output;
100 | }
101 | 
--------------------------------------------------------------------------------
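To see how the layer functions and `onnx_model_run()` fit together end to end, here is a minimal host-side sketch. It is illustrative only and not part of the repository: it assumes the protobuf-c generated ONNX bindings (the `Onnx__ModelProto` type and the conventionally named `onnx__model_proto__unpack` / `onnx__model_proto__free_unpacked` helpers) are pulled in through `onnx.h`, the model path is relative to the working directory, and the actual pixel data is left out.

```c
/* run_mnist.c -- illustrative sketch, not part of the repository. */
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include "onnx.h"

int main(void)
{
    /* Read the serialized model into memory. */
    FILE* f = fopen("mnist-sm.onnx", "rb");
    if (f == NULL) return 1;
    fseek(f, 0, SEEK_END);
    long len = ftell(f);
    fseek(f, 0, SEEK_SET);
    uint8_t* buf = malloc(len);
    fread(buf, 1, len, f);
    fclose(f);

    /* Decode the protobuf into an Onnx__ModelProto (protobuf-c naming assumed). */
    Onnx__ModelProto* model = onnx__model_proto__unpack(NULL, len, buf);
    free(buf);
    if (model == NULL) return 1;

    /* 28x28x1 input in the W, H, C order selected by ONNX_USE_NWHC.
     * The buffer must come from the heap: onnx_model_run() frees the
     * intermediate input of every layer as it walks the graph. */
    int64_t shapeInput[3] = { 28, 28, 1 };
    float* input = calloc(28 * 28, sizeof(float));
    /* ... fill input with a normalized grayscale digit here ... */

    float* output = onnx_model_run(model, input, shapeInput);

    /* Final Softmax layer: one probability per digit class. */
    for (int i = 0; i < 10; i++)
        printf("class %d: %f\n", i, output[i]);

    free(output);
    onnx__model_proto__free_unpacked(model, NULL);
    return 0;
}
```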