├── Diaplacement_run.ipynb
├── DisplacementNet_train.ipynb
├── README.md
├── StrainNet_run.ipynb
├── StrainNet_train.ipynb
├── requirements.txt
├── train_dataset.txt
└── validation_dataset.txt
/Diaplacement_run.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import torch.nn as nn\n",
12 | "from torch.autograd import Variable\n",
13 | "import torchvision\n",
14 | "from torch.nn import init\n",
15 | "\n",
16 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
17 | "from torchvision.transforms import ToTensor\n",
18 | "import io\n",
19 | "from torchvision import models, transforms\n",
20 | "import torch.utils.data as data_utils\n",
21 | "from PIL import Image\n",
22 | "import os\n",
23 | "\n",
24 | "import cv2\n",
25 | "import matplotlib.pyplot as plt\n",
26 | "import torch.nn.functional as F\n",
27 | "def default_loader(path):\n",
28 | " return Image.open(path) "
29 | ]
30 | },
31 | {
32 | "cell_type": "code",
33 | "execution_count": null,
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "\n"
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
47 | "from torch.nn import init\n",
48 | "\n",
49 | "def conv(in_planes, out_planes, kernel_size=3, stride=1, dilation=1, bias=False, transposed=False):\n",
50 | " if transposed:\n",
51 | " layer = nn.ConvTranspose2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=1, output_padding=1,\n",
52 | " dilation=dilation, bias=bias)\n",
53 | " else:\n",
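54 | "        # this padding preserves the spatial size for odd kernels at stride 1\n",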
54 | " padding = (kernel_size + 2 * (dilation - 1)) // 2\n",
55 | " layer = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, bias=bias)\n",
56 | " if bias:\n",
57 | "        init.constant_(layer.bias, 0)\n",
58 | " return layer\n",
59 | "\n",
60 | "# Returns 2D batch normalisation layer\n",
61 | "def bn(planes):\n",
62 | " layer = nn.BatchNorm2d(planes)\n",
63 | "    # initialise scale (weight) to 1 and shift (bias) to 0\n",
64 | "    init.constant_(layer.weight, 1)\n",
65 | "    init.constant_(layer.bias, 0)\n",
66 | " return layer\n",
67 | "\n",
68 | "\n",
69 | "class FeatureResNet(ResNet):\n",
70 | " def __init__(self):\n",
71 | " super().__init__(BasicBlock, [3, 14, 16, 3], 1000)\n",
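72 | "        # conv_f replaces the stock ResNet stem: 2 input channels, one per speckle image\n",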
72 | " self.conv_f = conv(2,64, kernel_size=3,stride = 1)\n",
73 | " self.ReLu_1 = nn.ReLU(inplace=True)\n",
74 | " self.conv_pre = conv(512, 1024, stride=2, transposed=False)\n",
75 | " self.bn_pre = bn(1024)\n",
76 | "\n",
77 | " def forward(self, x):\n",
78 | " x1 = self.conv_f(x)\n",
79 | " x = self.bn1(x1)\n",
80 | " x = self.relu(x)\n",
81 | " x2 = self.maxpool(x)\n",
82 | " x = self.layer1(x2)\n",
83 | " x3 = self.layer2(x)\n",
84 | " x4 = self.layer3(x3)\n",
85 | " x5 = self.layer4(x4)\n",
86 | " x6 = self.ReLu_1(self.bn_pre(self.conv_pre(x5)))\n",
87 | " return x1, x2, x3, x4, x5,x6\n",
88 | "\n",
89 | "\n",
90 | "class SegResNet(nn.Module):\n",
91 | " def __init__(self, num_classes, pretrained_net):\n",
92 | " super().__init__()\n",
93 | " self.pretrained_net = pretrained_net\n",
94 | " self.relu = nn.ReLU(inplace=True)\n",
95 | " #self.conv3 = conv(1024,1024, stride=1, transposed=False)\n",
96 | " #self.bn3 = bn(1024)\n",
97 | " self.conv3_2 = conv(1024, 512, stride=1, transposed=False)\n",
98 | " self.bn3_2 = bn(512)\n",
99 | " self.conv4 = conv(512,512, stride=2, transposed=True)\n",
100 | " self.bn4 = bn(512)\n",
101 | " self.conv5 = conv(512, 256, stride=2, transposed=True)\n",
102 | " self.bn5 = bn(256)\n",
103 | " self.conv6 = conv(256, 128, stride=2, transposed=True)\n",
104 | " self.bn6 = bn(128)\n",
105 | " self.conv7 = conv(128, 64, stride=2, transposed=True)\n",
106 | " self.bn7 = bn(64)\n",
107 | " self.conv8 = conv(64, 64, stride=2, transposed=True)\n",
108 | " self.bn8 = bn(64)\n",
109 | " self.conv9 = conv(64, 32, stride=2, transposed=True)\n",
110 | " self.bn9 = bn(32)\n",
111 | " self.convadd = conv(32, 16, stride=1, transposed=False)\n",
112 | " self.bnadd = bn(16)\n",
113 | " self.conv10 = conv(16, num_classes,stride=2, kernel_size=5)\n",
114 | "        init.constant_(self.conv10.weight, 0) # Zero init\n",
115 | "\n",
116 | " def forward(self, x):\n",
117 | " \n",
118 | " x1, x2, x3, x4, x5, x6 = self.pretrained_net(x)\n",
119 | " x = self.relu(self.bn3_2(self.conv3_2(x6)))\n",
120 | " x = self.relu(self.bn4(self.conv4(x)))\n",
121 | " x = self.relu(self.bn5(self.conv5(x)))\n",
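122 | "        # decoder with additive skip connections to the matching encoder stages\n",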
122 | " x = self.relu(self.bn6(self.conv6(x+x4 )))\n",
123 | " x = self.relu(self.bn7(self.conv7(x+x3 )))\n",
124 | " x = self.relu(self.bn8(self.conv8(x+x2 )))\n",
125 | " x = self.relu(self.bn9(self.conv9(x+x1 )))\n",
126 | " x = self.relu(self.bnadd(self.convadd(x)))\n",
127 | " x = self.conv10(x)\n",
128 | " return x\n"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": null,
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "fnet = FeatureResNet()\n",
138 | "fcn = SegResNet(2,fnet)\n",
139 | "fcn = fcn.cuda()\n",
140 | "fcn.load_state_dict(torch.load(model_result + 'pretrained_displacementnet'))"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": null,
146 | "metadata": {},
147 | "outputs": [],
148 | "source": [
149 | "from scipy.io import savemat\n",
150 | "x1, x2, y1, y2 = 440,806,25,215 #ylo,yhi,xlo,xhi\n",
151 | "x100, x200, y100, y200 = 440,806,25,215 #ylo,yhi,xlo,xhi\n",
152 | "dx1,dx2,dy1,dy2 = 0,0,0,0\n",
153 | "\n",
154 | "h0 = x2-x1\n",
155 | "w0 = y2-y1\n",
156 | "\n",
157 | "disp_1_x = np.zeros((128,128))\n",
158 | "disp_1_y = np.zeros((128,128))\n",
159 | "\n",
160 | "path_img = ''\n",
161 | "results_path = ''\n",
162 | "img_num = 0  # set to the number of images in the sequence\n",
163 | "for i in range(1,img_num):\n",
164 | " h0 = x2-x1\n",
165 | " w0 = y2-y1\n",
166 | " hnew = int((h0//32+1)*32)\n",
167 | " wnew = int((w0//32+1)*32)\n",
168 | " newsize = (wnew,hnew)\n",
169 | " \n",
170 | " img1 = default_loader('first_image')\n",
171 | " img1_c = img1.crop((y1,x1,y2,x2))\n",
172 | " img1_r = img1_c.resize(newsize)\n",
173 | " img2 = default_loader('next_img')\n",
174 | " img2_c = img2.crop((y1,x1,y2,x2))\n",
175 | " img2_r = img2_c.resize(newsize)\n",
176 | " \n",
177 | " img_1 = ToTensor()(img1_r)\n",
178 | " img_2 = ToTensor()(img2_r)\n",
179 | " imgs = torch.cat((img_1/np.max(img_1.numpy()), img_2/np.max(img_2.numpy())), 0)\n",
180 | " imgs = imgs.unsqueeze(0)\n",
181 | " imgs = imgs.type(torch.cuda.FloatTensor)\n",
182 | " imgs = Variable(imgs).cuda()\n",
183 | " \n",
184 | " predict = fcn(imgs)\n",
185 | " predict_np = predict.detach().cpu().numpy().squeeze(0)\n",
186 | " \n",
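187 | "    # adaptive ROI tracking: shift the crop window by the mean predicted\n",
188 | "    # displacement, rescaled from the resized input back to original-image pixels\n",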
187 | " dy1 = dy1+np.mean(predict_np[0,:,:])*(w0)/(wnew)/2.0\n",
188 | " dy2 = dy2+np.mean(predict_np[0,:,:])*(w0)/(wnew)/2.0\n",
189 | " dx1 = dx1+np.mean(predict_np[1,:,:])*(h0)/(hnew)/2.0\n",
190 | " dx2 = dx2+np.mean(predict_np[1,:,:])*(h0)/(hnew)/2.0\n",
191 | " \n",
192 | " \n",
193 | " \n",
194 | " x10,x20,y10,y20 = x1,x2,y1,y2\n",
195 | " h0 = x20-x10\n",
196 | " w0 = y20-y10\n",
197 | " \n",
198 | "    x1 = int(x100-dx1)\n",
199 | "    y1 = int(y100-dy1)\n",
200 | " x2 = round(x200-dx2)\n",
201 | " y2 = round(y200-dy2) ###new roi updated\n",
202 | " \n",
203 | " disp_1_x = predict_np/2\n",
204 | " \n",
205 | " position = [x1,x2,y1,y2,h0,w0]\n",
206 | " \n",
207 | " matname = results_path+'/result_'+str(i)+'_position1.mat'\n",
208 | " mdic = {\"position\": position, \"label\": \"position\"}\n",
209 | " savemat(matname,mdic)\n",
210 | " \n",
211 | " matname = results_path+'/result_'+str(i)+'_disp_0_x.mat'\n",
212 | " mdic = {\"disp_1_x\": disp_1_x, \"label\": \"disp_1_x\"}\n",
213 | " savemat(matname,mdic)\n",
214 | " "
215 | ]
216 | }
238 | ],
239 | "metadata": {
240 | "kernelspec": {
241 | "display_name": "Python 3",
242 | "language": "python",
243 | "name": "python3"
244 | },
245 | "language_info": {
246 | "codemirror_mode": {
247 | "name": "ipython",
248 | "version": 3
249 | },
250 | "file_extension": ".py",
251 | "mimetype": "text/x-python",
252 | "name": "python",
253 | "nbconvert_exporter": "python",
254 | "pygments_lexer": "ipython3",
255 | "version": "3.6.10"
256 | }
257 | },
258 | "nbformat": 4,
259 | "nbformat_minor": 4
260 | }
261 |
--------------------------------------------------------------------------------
/DisplacementNet_train.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import torch.nn as nn\n",
12 | "from torch.autograd import Variable\n",
13 | "import torchvision\n",
14 | "from torch.nn import init\n",
15 | "\n",
16 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
17 | "from torchvision.transforms import ToTensor"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": null,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "import io\n",
27 | "from torchvision import models, transforms\n",
28 | "import torch.utils.data as data_utils\n",
29 | "from PIL import Image\n",
30 | "import os\n",
31 | "\n",
32 | "import cv2\n",
33 | "import matplotlib.pyplot as plt\n",
34 | "import torch.nn.functional as F\n",
35 | "def default_loader(path):\n",
36 | " return Image.open(path) "
37 | ]
38 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": null,
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
53 | "from torch.nn import init\n",
54 | "\n",
55 | "def conv(in_planes, out_planes, kernel_size=3, stride=1, dilation=1, bias=False, transposed=False):\n",
56 | " if transposed:\n",
57 | " layer = nn.ConvTranspose2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=1, output_padding=1,\n",
58 | " dilation=dilation, bias=bias)\n",
59 | " else:\n",
60 | " padding = (kernel_size + 2 * (dilation - 1)) // 2\n",
61 | " layer = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, bias=bias)\n",
62 | " if bias:\n",
63 | "        init.constant_(layer.bias, 0)\n",
64 | " return layer\n",
65 | "\n",
66 | "# Returns 2D batch normalisation layer\n",
67 | "def bn(planes):\n",
68 | " layer = nn.BatchNorm2d(planes)\n",
69 | "    # initialise scale (weight) to 1 and shift (bias) to 0\n",
70 | "    init.constant_(layer.weight, 1)\n",
71 | "    init.constant_(layer.bias, 0)\n",
72 | " return layer\n",
73 | "\n",
74 | "\n",
75 | "class FeatureResNet(ResNet):\n",
76 | " def __init__(self):\n",
77 | " super().__init__(BasicBlock, [3, 14, 16, 3], 1000)\n",
78 | " self.conv_f = conv(2,64, kernel_size=3,stride = 1)\n",
79 | " self.ReLu_1 = nn.ReLU(inplace=True)\n",
80 | " self.conv_pre = conv(512, 1024, stride=2, transposed=False)\n",
81 | " self.bn_pre = bn(1024)\n",
82 | "\n",
83 | " def forward(self, x):\n",
84 | " x1 = self.conv_f(x)\n",
85 | " x = self.bn1(x1)\n",
86 | " x = self.relu(x)\n",
87 | " x2 = self.maxpool(x)\n",
88 | " x = self.layer1(x2)\n",
89 | " x3 = self.layer2(x)\n",
90 | " x4 = self.layer3(x3)\n",
91 | " x5 = self.layer4(x4)\n",
92 | " x6 = self.ReLu_1(self.bn_pre(self.conv_pre(x5)))\n",
93 | " return x1, x2, x3, x4, x5,x6\n",
94 | "\n",
95 | "\n",
96 | "class SegResNet(nn.Module):\n",
97 | " def __init__(self, num_classes, pretrained_net):\n",
98 | " super().__init__()\n",
99 | " self.pretrained_net = pretrained_net\n",
100 | " self.relu = nn.ReLU(inplace=True)\n",
101 | " self.conv3_2 = conv(1024, 512, stride=1, transposed=False)\n",
102 | " self.bn3_2 = bn(512)\n",
103 | " self.conv4 = conv(512,512, stride=2, transposed=True)\n",
104 | " self.bn4 = bn(512)\n",
105 | " self.conv5 = conv(512, 256, stride=2, transposed=True)\n",
106 | " self.bn5 = bn(256)\n",
107 | " self.conv6 = conv(256, 128, stride=2, transposed=True)\n",
108 | " self.bn6 = bn(128)\n",
109 | " self.conv7 = conv(128, 64, stride=2, transposed=True)\n",
110 | " self.bn7 = bn(64)\n",
111 | " self.conv8 = conv(64, 64, stride=2, transposed=True)\n",
112 | " self.bn8 = bn(64)\n",
113 | " self.conv9 = conv(64, 32, stride=2, transposed=True)\n",
114 | " self.bn9 = bn(32)\n",
115 | " self.convadd = conv(32, 16, stride=1, transposed=False)\n",
116 | " self.bnadd = bn(16)\n",
117 | " self.conv10 = conv(16, num_classes,stride=2, kernel_size=5)\n",
118 | "        init.constant_(self.conv10.weight, 0) # Zero init\n",
119 | "\n",
120 | " def forward(self, x):\n",
121 | " \n",
122 | " x1, x2, x3, x4, x5, x6 = self.pretrained_net(x)\n",
123 | " \n",
124 | " x = self.relu(self.bn3_2(self.conv3_2(x6)))\n",
125 | " \n",
126 | " x = self.relu(self.bn4(self.conv4(x)))\n",
127 | " x = self.relu(self.bn5(self.conv5(x)))\n",
128 | " #print(x.size())\n",
129 | " x = self.relu(self.bn6(self.conv6(x+x4 )))\n",
130 | " #print(x.size())\n",
131 | " x = self.relu(self.bn7(self.conv7(x+x3 )))\n",
132 | " #print(x.size())\n",
133 | " x = self.relu(self.bn8(self.conv8(x+x2 )))\n",
134 | " #print(x.size())\n",
135 | " x = self.relu(self.bn9(self.conv9(x+x1 )))\n",
136 | " #print(x.size())\n",
137 | " x = self.relu(self.bnadd(self.convadd(x)))\n",
138 | " x = self.conv10(x)\n",
139 | " return x\n"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "fnet = FeatureResNet()\n",
149 | "fcn = SegResNet(2,fnet)\n",
150 | "fcn = fcn.cuda()\n"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 54,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "dataset_path = ''\n",
160 | "filename = \"validation_dataset.txt\"\n",
161 | "mynumbers = []\n",
162 | "with open(filename) as f:\n",
163 | " for line in f:\n",
164 | " item = line.strip().split('\\n')\n",
165 | " for subitem in item:\n",
166 | " mynumbers.append(subitem)\n",
167 | " \n",
168 | "test_set = []\n",
169 | "for i in range(4000):\n",
170 | " test_set.append((dataset_path+'/imgs3/train_image_'+str(z+1)+'_1.png',\n",
171 | " dataset_path+'/imgs3/train_image_'+str(z+1)+'_2.png',\n",
172 | " dataset_path+'/gt3/train_image_'+str(z+1)+'.mat'))\n",
173 | "\n",
174 | "dataset_path = ''\n",
175 | "filename = \"train_dataset.txt\"\n",
176 | "mynumbers = []\n",
177 | "with open(filename) as f:\n",
178 | " for line in f:\n",
179 | " item = line.strip().split('\\n')\n",
180 | " for subitem in item:\n",
181 | " mynumbers.append(subitem)\n",
182 | " \n",
183 | "test_set = []\n",
184 | "for i in range(36000):\n",
185 | " train_set.append((dataset_path+'/imgs3/train_image_'+str(z+1)+'_1.png',\n",
186 | " dataset_path+'/imgs3/train_image_'+str(z+1)+'_2.png',\n",
187 | " dataset_path+'/gt3/train_image_'+str(z+1)+'.mat'))\n",
188 | " "
189 | ]
190 | },
191 | {
192 | "cell_type": "code",
193 | "execution_count": null,
194 | "metadata": {},
195 | "outputs": [],
196 | "source": [
197 | "import scipy.io as sio\n",
198 | "#light_index_2 = [2,7,15,8,4,22,13,57,54,40,91,21,29,84,71,25,28,51,67,62,34,46,93,87]\n",
199 | "class MyDataset(data_utils.Dataset):\n",
200 | " def __init__(self, dataset, transform=None, target_transform=None, loader=default_loader):\n",
201 | " '''\n",
202 | " fh = open(txt, 'r')\n",
203 | " imgs = []\n",
204 | " for line in fh:\n",
205 | " line = line.strip('\\n')\n",
206 | " line = line.rstrip()\n",
207 | " words = line.split()\n",
208 | " imgs.append((words[0],int(words[1])))\n",
209 | " \n",
210 | " '''\n",
211 | " \n",
212 | " self.imgs = dataset\n",
213 | " self.transform = transform\n",
214 | " self.target_transform = target_transform\n",
215 | " self.loader = loader\n",
216 | "\n",
217 | " def __getitem__(self, index):\n",
218 | " label_x, label_y, label_z = self.imgs[index]\n",
219 | " img1 = self.loader(label_x)\n",
220 | " img_1 = ToTensor()(img1.resize((128,128)))\n",
221 | " img2 = self.loader(label_y)\n",
222 | " img_2 = ToTensor()(img2.resize((128,128)))\n",
223 | " imgs = torch.cat((img_1, img_2), 0)\n",
224 | " try:\n",
225 | " gt = sio.loadmat(label_z)['Disp_field_1'].astype(float)\n",
226 | " \n",
227 | " except KeyError:\n",
228 | " gt = sio.loadmat(label_z)['Disp_field_2'].astype(float)\n",
229 | " \n",
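230 | "        # subsample the ground-truth field by 2 in each direction so it matches\n",
231 | "        # the 128x128 network output, then move the (u, v) axis to the front\n",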
230 | " gt = gt[::2,::2,:]\n",
231 | " gt = np.moveaxis(gt, -1, 0)\n",
232 | " \n",
233 | " return imgs,gt\n",
234 | "\n",
235 | " def __len__(self):\n",
236 | " return len(self.imgs)"
237 | ]
238 | },
239 | {
240 | "cell_type": "code",
241 | "execution_count": null,
242 | "metadata": {},
243 | "outputs": [],
244 | "source": [
245 | "EPOCH = 100 # train the training data n times, to save time, we just train 1 epoch\n",
246 | "BATCH_SIZE = 12\n",
247 | "print('BATCH_SIZE = ',BATCH_SIZE)\n",
248 | "LR = 0.001 # learning rate\n",
249 | "#root = './gdrive_northwestern/My Drive/dl_encoder/data/orig/orig'\n",
250 | "NUM_WORKERS = 0\n",
251 | "\n",
252 | "optimizer = torch.optim.Adam(fcn.parameters(), lr=LR) # optimize all cnn parameters\n",
253 | "#optimizer = torch.optim.SGD(cnn.parameters(), lr=LR, momentum=0.9) # optimize all cnn parameters\n",
254 | "loss_func = nn.MSELoss()\n",
255 | "\n",
256 | "\n",
257 | "train_data=MyDataset(dataset=train_set)\n",
258 | "train_loader = data_utils.DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True, num_workers=NUM_WORKERS)\n",
259 | "\n",
260 | "test_data=MyDataset(dataset=test_set)\n",
261 | "test_loader = data_utils.DataLoader(dataset=test_data, batch_size=1)"
262 | ]
263 | },
264 | {
265 | "cell_type": "code",
266 | "execution_count": null,
267 | "metadata": {},
268 | "outputs": [],
269 | "source": [
270 | "from datetime import datetime\n",
271 | "dataString = datetime.strftime(datetime.now(), '%Y_%m_%d_%H:%M:%S')"
272 | ]
273 | },
274 | {
275 | "cell_type": "code",
276 | "execution_count": null,
277 | "metadata": {},
278 | "outputs": [],
279 | "source": [
280 | "root_result = ''\n",
281 | "os.mkdir(root_result)\n",
282 | "model_result = root_result+'model/'\n",
283 | "log_result = root_result+'log/'\n",
284 | "os.mkdir(model_result)\n",
285 | "os.mkdir(log_result)"
286 | ]
287 | },
288 | {
289 | "cell_type": "code",
290 | "execution_count": null,
291 | "metadata": {},
292 | "outputs": [],
293 | "source": [
294 | "fileOut=open(log_result+'log'+dataString,'a')\n",
295 | "fileOut.write(dataString+'Epoch: Step: Loss: Val_Accu :\\n')\n",
296 | "fileOut.close()\n",
297 | "fileOut2 = open(log_result+'validation'+dataString, 'a')\n",
298 | "fileOut2.write('kernal_size of conv_f is 2')\n",
299 | "fileOut2.write(dataString+'Epoch: loss:')\n",
300 | "\n",
301 | "\n",
302 | "\n",
303 | "#fcn.load_state_dict(torch.load(model_result + 'param_all_1_99_1156'))\n",
304 | "for epoch in range(EPOCH):\n",
305 | " fcn.train()\n",
306 | " for step, (img,gt) in enumerate(train_loader): # gives batch data, normalize x when iterate train_loader\n",
307 | " \n",
308 | " img = Variable(img).cuda()\n",
309 | " #gt=gt.unsqueeze(1).float()# batch x\n",
310 | " gt=gt.float()\n",
311 | " gt = Variable(gt).cuda()\n",
312 | " #b_y = Variable(y)#.cuda() # batch y\n",
313 | " #print(img.size())\n",
314 | " #output = cnn(b_x)[0] # cnn output\n",
315 | " output = fcn(img) # cnn output\n",
316 | " #print(output.size())\n",
317 | " #print(gt.size())\n",
318 | " loss = loss_func(output, gt) # cross entropy loss\n",
319 | " optimizer.zero_grad() # clear gradients for this training step\n",
320 | " loss.backward() # backpropagation, compute gradients\n",
321 | " optimizer.step() # apply gradients\n",
322 | " print(epoch, step, loss.data.item())\n",
323 | " fileOut=open(log_result+'log'+dataString,'a')\n",
324 | " fileOut.write(str(epoch)+' '+str(step)+' '+str(loss.data.item())+'\\n')\n",
325 | " fileOut.close()\n",
326 | " if epoch%10 == 9:\n",
327 | " PATH = model_result + 'param_all_2_' + str(epoch) + '_' + str(step)\n",
328 | " torch.save(fcn.state_dict(), PATH)\n",
329 | " print('finished saving checkpoints')\n",
330 | " \n",
331 | " LOSS_VALIDATION = 0\n",
332 | " fcn.eval()\n",
333 | " with torch.no_grad():\n",
334 | " for step, (img,gt) in enumerate(test_loader):\n",
335 | "\n",
336 | " img = Variable(img).cuda()\n",
337 | "            gt = gt.float()\n",
338 | " gt = Variable(gt).cuda()\n",
339 | " output = fcn(img) \n",
340 | " LOSS_VALIDATION += loss_func(output, gt)\n",
341 | " #print(LOSS_VALIDATION.data.item())\n",
342 | "        LOSS_VALIDATION = LOSS_VALIDATION/(step+1)\n",
343 | " fileOut2 = open(log_result+'validation'+dataString, 'a')\n",
344 | " fileOut2.write(str(epoch)+' '+str(step)+' '+str(LOSS_VALIDATION.data.item())+'\\n')\n",
345 | " fileOut2.close()\n",
346 | " print('validation error epoch '+str(epoch)+': '+str(LOSS_VALIDATION)+'\\n'+str(step))\n"
347 | ]
348 | }
349 | ],
350 | "metadata": {
351 | "kernelspec": {
352 | "display_name": "Python 3",
353 | "language": "python",
354 | "name": "python3"
355 | },
356 | "language_info": {
357 | "codemirror_mode": {
358 | "name": "ipython",
359 | "version": 3
360 | },
361 | "file_extension": ".py",
362 | "mimetype": "text/x-python",
363 | "name": "python",
364 | "nbconvert_exporter": "python",
365 | "pygments_lexer": "ipython3",
366 | "version": "3.6.10"
367 | }
368 | },
369 | "nbformat": 4,
370 | "nbformat_minor": 4
371 | }
372 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Deep DIC: Deep Learning-Based Digital Image Correlation for End-to-End Displacement and Strain Measurement
2 |
3 | Note: We are still working on uploading more datasets.
4 |
5 | Digital image correlation (DIC) has become an industry standard for retrieving accurate displacement and strain measurements in tensile testing and other material characterization. Though traditional DIC offers a high-precision estimation of deformation for general tensile testing cases, the prediction becomes unstable at large deformation or when the speckle patterns start to tear. In addition, traditional DIC requires a long computation time and often produces a low-spatial-resolution output affected by filtering and speckle pattern quality. To address these challenges, we propose a new deep learning-based DIC approach – Deep DIC – in which two convolutional neural networks, DisplacementNet and StrainNet, are designed to work together for end-to-end prediction of displacements and strains. DisplacementNet predicts the displacement field and adaptively tracks the region of interest. StrainNet predicts the strain field directly from the image input without relying on the displacement prediction, which significantly improves the strain prediction accuracy. A new dataset generation method is developed to synthesize a realistic and comprehensive dataset, including generation of speckle patterns and deformation of the speckle images with synthetic displacement fields. Though trained on synthetic data only, Deep DIC is tested on both simulated and experimental data. Deep DIC gives predictions of displacement and strain that are highly consistent and comparable with those obtained from commercial DIC software, and it outperforms commercial software with very robust strain prediction even at large and localized deformation and varied pattern qualities. In addition, Deep DIC is capable of real-time prediction of deformation with a calculation time down to milliseconds.
6 | Please refer to our paper: https://www.sciencedirect.com/science/article/pii/S092401362100434
7 | and https://arxiv.org/ftp/arxiv/papers/2110/2110.13720.pdf
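8 | 
9 | At inference time the two networks run independently on the same image pair: DisplacementNet returns a two-channel displacement field, and StrainNet returns four channels of displacement gradients. Below is a minimal inference sketch, assuming the `FeatureResNet`/`SegResNet` definitions from the two run notebooks (their decoders differ slightly) and placeholder checkpoint and image paths:
10 | 
11 | ```python
12 | import torch
13 | from PIL import Image
14 | from torchvision.transforms import ToTensor
15 | 
16 | # SegResNet / FeatureResNet come from the run notebooks; paths are placeholders
17 | disp_net = SegResNet(2, FeatureResNet()).cuda().eval()
18 | disp_net.load_state_dict(torch.load('pretrained_displacementnet'))
19 | strain_net = SegResNet(4, FeatureResNet()).cuda().eval()
20 | strain_net.load_state_dict(torch.load('pretrained_strainnet'))
21 | 
22 | img_1 = ToTensor()(Image.open('img_1.png').resize((128, 128)))
23 | img_2 = ToTensor()(Image.open('img_2.png').resize((128, 128)))
24 | pair = torch.cat((img_1 / img_1.max(), img_2 / img_2.max()), 0).unsqueeze(0).cuda()
25 | 
26 | with torch.no_grad():
27 |     disp = disp_net(pair)              # (1, 2, 128, 128): u and v fields
28 |     strain = strain_net(pair) * 0.01   # (1, 4, 64, 64): du/dx, du/dy, dv/dx, dv/dy
29 | ```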
8 |
9 | ## Citation
10 | If you find this code or the provided data useful in your research, please consider citing:
11 | ```
12 | @article{yang2022deep,
13 |   title={Deep DIC: Deep learning-based digital image correlation for end-to-end displacement and strain measurement},
14 |   author={Yang, Ru and Li, Yang and Zeng, Danielle and Guo, Ping},
15 |   journal={Journal of Materials Processing Technology},
16 |   volume={302},
17 |   pages={117474},
18 |   year={2022},
19 |   publisher={Elsevier}
20 | }
21 | ```
20 |
21 | ## Dependencies
22 | Deep-DIC is implemented in [PyTorch](https://pytorch.org/) and tested on Ubuntu 20.04. Please install PyTorch first, following the official instructions.
23 | - Python 3.7
24 | - PyTorch (version = 1.6.0)
25 | - Torchvision (version = 0.7.0)
26 | - Pillow (version = 7.2)
27 | - numpy
28 | - scipy
29 | - CUDA
30 |
31 | ## Overview
32 | We provide the following via a shared [Google Drive folder](https://drive.google.com/drive/folders/13x_-cnnSzsmg1Eiuzg73X7FJSiH69WZE?usp=sharing):
33 | - Datasets: training dataset, validation dataset, and test dataset (expected folder layout sketched after this list).
35 | - Pre-trained models:
36 | - DisplacementNet
37 | - StrainNet
38 | - Code to test with a pair of speckle images.
39 | - Code to train the two CNNs with the dataset.
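40 | 
41 | The training notebooks expect the dataset root (the `dataset_path` placeholder in the dataloader cells) to be laid out as below, with image IDs taken from `train_dataset.txt` and `validation_dataset.txt`:
42 | 
43 | ```
44 | dataset_path/
45 | ├── imgs3/
46 | │   ├── train_image_<id>_1.png   (first image of the pair)
47 | │   └── train_image_<id>_2.png   (second image of the pair)
48 | └── gt3/
49 |     └── train_image_<id>.mat    (ground truth: 'Disp_field_1' or 'Disp_field_2')
50 | ```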
40 |
--------------------------------------------------------------------------------
/StrainNet_run.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import torch.nn as nn\n",
12 | "from torch.autograd import Variable\n",
13 | "import torchvision\n",
14 | "from torch.nn import init\n",
15 | "\n",
16 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
17 | "from torchvision.transforms import ToTensor\n",
18 | "from scipy.io import savemat,loadmat\n",
19 | "\n",
20 | "import io\n",
21 | "from torchvision import models, transforms\n",
22 | "import torch.utils.data as data_utils\n",
23 | "from PIL import Image\n",
24 | "import os\n",
25 | "\n",
26 | "import cv2\n",
27 | "import matplotlib.pyplot as plt\n",
28 | "import torch.nn.functional as F\n",
29 | "def default_loader(path):\n",
30 | " return Image.open(path) "
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": null,
36 | "metadata": {},
37 | "outputs": [],
38 | "source": [
39 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
40 | "from torch.nn import init\n",
41 | "\n",
42 | "def conv(in_planes, out_planes, kernel_size=3, stride=1, dilation=1, bias=False, transposed=False):\n",
43 | " if transposed:\n",
44 | " layer = nn.ConvTranspose2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=1, output_padding=1,\n",
45 | " dilation=dilation, bias=bias)\n",
46 | " else:\n",
47 | " padding = (kernel_size + 2 * (dilation - 1)) // 2\n",
48 | " layer = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, bias=bias)\n",
49 | " if bias:\n",
50 | "        init.constant_(layer.bias, 0)\n",
51 | " return layer\n",
52 | "\n",
53 | "# Returns 2D batch normalisation layer\n",
54 | "def bn(planes):\n",
55 | " layer = nn.BatchNorm2d(planes)\n",
56 | "    # initialise scale (weight) to 1 and shift (bias) to 0\n",
57 | "    init.constant_(layer.weight, 1)\n",
58 | "    init.constant_(layer.bias, 0)\n",
59 | " return layer\n",
60 | "\n",
61 | "\n",
62 | "class FeatureResNet(ResNet):\n",
63 | " def __init__(self):\n",
64 | " super().__init__(BasicBlock, [3, 14, 16, 3], 1000)\n",
65 | " self.conv_f = conv(2,64, kernel_size=3,stride = 1)\n",
66 | " self.ReLu_1 = nn.ReLU(inplace=True)\n",
67 | " self.conv_pre = conv(512, 1024, stride=2, transposed=False)\n",
68 | " self.bn_pre = bn(1024)\n",
69 | "\n",
70 | " def forward(self, x):\n",
71 | " x1 = self.conv_f(x)\n",
72 | " #print('x1',x1.size())\n",
73 | " x = self.bn1(x1)\n",
74 | " #print(x.size())\n",
75 | " x = self.relu(x)\n",
76 | " #print(x.size())\n",
77 | " x2 = self.maxpool(x)\n",
78 | " #print('x2',x2.size())\n",
79 | " x = self.layer1(x2)\n",
80 | " #print(x2.size())\n",
81 | " x3 = self.layer2(x)\n",
82 | " #print('x3',x3.size())\n",
83 | " x4 = self.layer3(x3)\n",
84 | " #print('x4',x4.size())\n",
85 | " x5 = self.layer4(x4)\n",
86 | " #print('x5',x5.size())\n",
87 | " x6 = self.ReLu_1(self.bn_pre(self.conv_pre(x5)))\n",
88 | " #print('x6',x6.size())\n",
89 | " return x1, x2, x3, x4, x5,x6\n",
90 | "\n",
91 | "\n",
92 | "class SegResNet(nn.Module):\n",
93 | " def __init__(self, num_classes, pretrained_net):\n",
94 | " super().__init__()\n",
95 | " self.pretrained_net = pretrained_net\n",
96 | " self.relu = nn.ReLU(inplace=True)\n",
97 | " #self.conv3 = conv(1024,1024, stride=1, transposed=False)\n",
98 | " #self.bn3 = bn(1024)\n",
99 | " self.conv3_2 = conv(1024, 512, stride=1, transposed=False)\n",
100 | " self.bn3_2 = bn(512)\n",
101 | " self.conv4 = conv(512,512, stride=2, transposed=True)\n",
102 | " self.bn4 = bn(512)\n",
103 | " self.conv5 = conv(512, 256, stride=2, transposed=True)\n",
104 | " self.bn5 = bn(256)\n",
105 | " self.conv6 = conv(256, 128, stride=2, transposed=True)\n",
106 | " self.bn6 = bn(128)\n",
107 | " self.conv7 = conv(128, 64, stride=2, transposed=True)\n",
108 | " self.bn7 = bn(64)\n",
109 | " self.conv8 = conv(64, 64, stride=2, transposed=True)\n",
110 | " self.bn8 = bn(64)\n",
111 | " self.conv9 = conv(64, 32, stride=1, transposed=False)\n",
112 | " self.bn9 = bn(32)\n",
113 | " self.convadd = conv(32, 16, stride=1, transposed=False)\n",
114 | " self.bnadd = bn(16)\n",
115 | " self.conv10 = conv(16, num_classes,stride=2, kernel_size=3)\n",
116 | "        init.constant_(self.conv10.weight, 0) # Zero init\n",
117 | "\n",
118 | " def forward(self, x):\n",
119 | " #b,c,w,h = x.size()\n",
120 | " #x = x.view(b,c,w,h)\n",
121 | " x1, x2, x3, x4, x5, x6 = self.pretrained_net(x)\n",
122 | " #x1 = x1.view(b,x1.size(1),x1.size(2),x1.size(3))\n",
123 | " #x2 = x2.view(b,x2.size(1),x2.size(2),x2.size(3))\n",
124 | " #x3 = x3.view(b,x3.size(1),x3.size(2),x3.size(3))\n",
125 | " #x4 = x4.view(b,x4.size(1),x4.size(2),x4.size(3))\n",
126 | " #x5 = x5.view(b,x5.size(1),x5.size(2),x5.size(3))\n",
127 | "\n",
128 | " #x1 = torch.max(x1,1)[0]\n",
129 | " #x2 = torch.max(x2,1)[0]\n",
130 | " #x3 = torch.max(x3,1)[0]\n",
131 | " #x4 = torch.max(x4,1)[0]\n",
132 | " #x5 = torch.max(x5,1)[0]\n",
133 | "\n",
134 | "\n",
135 | " #print(x5.size())\n",
136 | " #print(x4.size())\n",
137 | " #print(x1.size())\n",
138 | " #x = self.relu(self.bn3(self.conv3(x6)))\n",
139 | " x = self.relu(self.bn3_2(self.conv3_2(x6)))\n",
140 | " \n",
141 | " x = self.relu(self.bn4(self.conv4(x)))\n",
142 | " x = self.relu(self.bn5(self.conv5(x)))\n",
143 | " #print(x.size())\n",
144 | " x = self.relu(self.bn6(self.conv6(x+x4 )))\n",
145 | " #print(x.size())\n",
146 | " x = self.relu(self.bn7(self.conv7(x+x3 )))\n",
147 | " #print(x.size())\n",
148 | " x = self.relu(self.bn8(self.conv8(x+x2 )))\n",
149 | " #print(x.size())\n",
150 | " x = self.relu(self.bn9(self.conv9(x+x1 )))\n",
151 | " #print(x.size())\n",
152 | " x = self.relu(self.bnadd(self.convadd(x)))\n",
153 | " x = self.conv10(x)\n",
154 | " return x\n"
155 | ]
156 | },
157 | {
158 | "cell_type": "code",
159 | "execution_count": null,
160 | "metadata": {},
161 | "outputs": [],
162 | "source": [
163 | "fnet = FeatureResNet()\n",
164 | "fcn = SegResNet(4,fnet)\n",
165 | "fcn = fcn.cuda()\n",
166 | "fcn.load_state_dict(torch.load('PATH_TO_PRETRAINED' + 'pretrained_strainnet'))"
167 | ]
168 | },
169 | {
170 | "cell_type": "code",
171 | "execution_count": null,
172 | "metadata": {},
173 | "outputs": [],
174 | "source": [
175 | "displacement_result_path = ''\n",
176 | "speckle_img_path = ''\n",
177 | "result_path = ''\n",
178 | "num_img = 0  # set to the number of images in the sequence\n",
179 | "for i in range(1,num_img):\n",
180 | "    f = loadmat(displacement_result_path+'/result_'+str(i)+'_position1.mat') #load ROI saved by DisplacementNet\n",
181 | " [x1,x2,y1,y2] = f['position'][0,0:4]\n",
182 | " \n",
183 | " h0 = int((x2-x1))\n",
184 | " w0 = int((y2-y1))\n",
185 | " hnew = int((h0//32+1)*32)\n",
186 | " wnew = int((w0//32+1)*32)\n",
187 | " newsize = (wnew,hnew)\n",
188 | " \n",
189 | " \n",
190 | " img1 = default_loader('first_img')\n",
191 | " img1_c = img1.crop((y1,x1,y2,x2))\n",
192 | " img1_r = img1_c.resize(newsize)\n",
193 | " img2 = default_loader('second_img')\n",
194 | " img2_c = img2.crop((y1,x1,y2,x2))\n",
195 | " img2_r = img2_c.resize(newsize)\n",
196 | " \n",
197 | " img_1 = ToTensor()(img1_r)\n",
198 | " img_2 = ToTensor()(img2_r)\n",
199 | "    imgs2 = torch.cat((img_1/np.max(img_1.numpy()), img_2/np.max(img_2.numpy())), 0)\n",
200 | "    imgs2 = imgs2.unsqueeze(0)\n",
201 | "    imgs2 = imgs2.type(torch.cuda.FloatTensor)\n",
202 | "    imgs2 = Variable(imgs2).cuda()\n",
203 | "    \n",
204 | "    predict = fcn(imgs2)\n",
205 | " predict_np = predict.detach().cpu().numpy().squeeze(0)\n",
206 | " \n",
207 | " \n",
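208 | "    # training targets were displacement gradients scaled by 100 (see\n",
209 | "    # StrainNet_train), so rescale the prediction back by a factor of 0.01\n",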
208 | " strain = predict_np*0.01\n",
209 | "\n",
210 | " \n",
211 | " matname = result_path+str(i)+'strain.mat'\n",
212 | " mdic = {\"strain\": strain, \"label\": \"strain\"}\n",
213 | " savemat(matname,mdic)\n",
214 | " "
215 | ]
216 | }
245 | ],
246 | "metadata": {
247 | "kernelspec": {
248 | "display_name": "Python 3",
249 | "language": "python",
250 | "name": "python3"
251 | },
252 | "language_info": {
253 | "codemirror_mode": {
254 | "name": "ipython",
255 | "version": 3
256 | },
257 | "file_extension": ".py",
258 | "mimetype": "text/x-python",
259 | "name": "python",
260 | "nbconvert_exporter": "python",
261 | "pygments_lexer": "ipython3",
262 | "version": "3.6.10"
263 | }
264 | },
265 | "nbformat": 4,
266 | "nbformat_minor": 4
267 | }
268 |
--------------------------------------------------------------------------------
/StrainNet_train.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import numpy as np\n",
10 | "import torch\n",
11 | "import torch.nn as nn\n",
12 | "from torch.autograd import Variable\n",
13 | "import torchvision\n",
14 | "from torch.nn import init\n",
15 | "\n",
16 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
17 | "from torchvision.transforms import ToTensor"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": null,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "import io\n",
27 | "from torchvision import models, transforms\n",
28 | "import torch.utils.data as data_utils\n",
29 | "from PIL import Image\n",
30 | "import os\n",
31 | "import scipy.io as sio\n",
32 | "\n",
33 | "import cv2\n",
34 | "import matplotlib.pyplot as plt\n",
35 | "import torch.nn.functional as F\n",
36 | "def default_loader(path):\n",
37 | " return Image.open(path) "
38 | ]
39 | },
40 | {
41 | "cell_type": "code",
42 | "execution_count": null,
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "from torchvision.models.resnet import BasicBlock, ResNet\n",
47 | "from torch.nn import init\n",
48 | "\n",
49 | "def conv(in_planes, out_planes, kernel_size=3, stride=1, dilation=1, bias=False, transposed=False):\n",
50 | " if transposed:\n",
51 | " layer = nn.ConvTranspose2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=1, output_padding=1,\n",
52 | " dilation=dilation, bias=bias)\n",
53 | " else:\n",
54 | " padding = (kernel_size + 2 * (dilation - 1)) // 2\n",
55 | " layer = nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation, bias=bias)\n",
56 | " if bias:\n",
57 | "        init.constant_(layer.bias, 0)\n",
58 | " return layer\n",
59 | "\n",
60 | "# Returns 2D batch normalisation layer\n",
61 | "def bn(planes):\n",
62 | " layer = nn.BatchNorm2d(planes)\n",
63 | "    # initialise scale (weight) to 1 and shift (bias) to 0\n",
64 | "    init.constant_(layer.weight, 1)\n",
65 | "    init.constant_(layer.bias, 0)\n",
66 | " return layer\n",
67 | "\n",
68 | "\n",
69 | "class FeatureResNet(ResNet):\n",
70 | " def __init__(self):\n",
71 | " super().__init__(BasicBlock, [3, 14, 16, 3], 1000)\n",
72 | " self.conv_f = conv(2,64, kernel_size=3,stride = 1)\n",
73 | " self.ReLu_1 = nn.ReLU(inplace=True)\n",
74 | " self.conv_pre = conv(512, 1024, stride=2, transposed=False)\n",
75 | " self.bn_pre = bn(1024)\n",
76 | "\n",
77 | " def forward(self, x):\n",
78 | " x1 = self.conv_f(x)\n",
79 | " #print('x1',x1.size())\n",
80 | " x = self.bn1(x1)\n",
81 | " #print(x.size())\n",
82 | " x = self.relu(x)\n",
83 | " #print(x.size())\n",
84 | " x2 = self.maxpool(x)\n",
85 | " #print('x2',x2.size())\n",
86 | " x = self.layer1(x2)\n",
87 | " #print(x2.size())\n",
88 | " x3 = self.layer2(x)\n",
89 | " #print('x3',x3.size())\n",
90 | " x4 = self.layer3(x3)\n",
91 | " #print('x4',x4.size())\n",
92 | " x5 = self.layer4(x4)\n",
93 | " #print('x5',x5.size())\n",
94 | " x6 = self.ReLu_1(self.bn_pre(self.conv_pre(x5)))\n",
95 | " #print('x6',x6.size())\n",
96 | " return x1, x2, x3, x4, x5,x6\n",
97 | "\n",
98 | "\n",
99 | "class SegResNet(nn.Module):\n",
100 | " def __init__(self, num_classes, pretrained_net):\n",
101 | " super().__init__()\n",
102 | " self.pretrained_net = pretrained_net\n",
103 | " self.relu = nn.ReLU(inplace=True)\n",
104 | " #self.conv3 = conv(1024,1024, stride=1, transposed=False)\n",
105 | " #self.bn3 = bn(1024)\n",
106 | " self.conv3_2 = conv(1024, 512, stride=1, transposed=False)\n",
107 | " self.bn3_2 = bn(512)\n",
108 | " self.conv4 = conv(512,512, stride=2, transposed=True)\n",
109 | " self.bn4 = bn(512)\n",
110 | " self.conv5 = conv(512, 256, stride=2, transposed=True)\n",
111 | " self.bn5 = bn(256)\n",
112 | " self.conv6 = conv(256, 128, stride=2, transposed=True)\n",
113 | " self.bn6 = bn(128)\n",
114 | " self.conv7 = conv(128, 64, stride=2, transposed=True)\n",
115 | " self.bn7 = bn(64)\n",
116 | " self.conv8 = conv(64, 64, stride=2, transposed=True)\n",
117 | " self.bn8 = bn(64)\n",
118 | " self.conv9 = conv(64, 32, stride=1, transposed=False)\n",
119 | " self.bn9 = bn(32)\n",
120 | " self.convadd = conv(32, 16, stride=1, transposed=False)\n",
121 | " self.bnadd = bn(16)\n",
122 | " self.conv10 = conv(16, num_classes,stride=2, kernel_size=3)\n",
123 | "        init.constant_(self.conv10.weight, 0) # Zero init\n",
124 | "\n",
125 | " def forward(self, x):\n",
126 | " #b,c,w,h = x.size()\n",
127 | " #x = x.view(b,c,w,h)\n",
128 | " x1, x2, x3, x4, x5, x6 = self.pretrained_net(x)\n",
129 | " #x1 = x1.view(b,x1.size(1),x1.size(2),x1.size(3))\n",
130 | " #x2 = x2.view(b,x2.size(1),x2.size(2),x2.size(3))\n",
131 | " #x3 = x3.view(b,x3.size(1),x3.size(2),x3.size(3))\n",
132 | " #x4 = x4.view(b,x4.size(1),x4.size(2),x4.size(3))\n",
133 | " #x5 = x5.view(b,x5.size(1),x5.size(2),x5.size(3))\n",
134 | "\n",
135 | " #x1 = torch.max(x1,1)[0]\n",
136 | " #x2 = torch.max(x2,1)[0]\n",
137 | " #x3 = torch.max(x3,1)[0]\n",
138 | " #x4 = torch.max(x4,1)[0]\n",
139 | " #x5 = torch.max(x5,1)[0]\n",
140 | "\n",
141 | "\n",
142 | " #print(x5.size())\n",
143 | " #print(x4.size())\n",
144 | " #print(x1.size())\n",
145 | " #x = self.relu(self.bn3(self.conv3(x6)))\n",
146 | " x = self.relu(self.bn3_2(self.conv3_2(x6)))\n",
147 | " \n",
148 | " x = self.relu(self.bn4(self.conv4(x)))\n",
149 | " x = self.relu(self.bn5(self.conv5(x)))\n",
150 | " #print(x.size())\n",
151 | " x = self.relu(self.bn6(self.conv6(x+x4 )))\n",
152 | " #print(x.size())\n",
153 | " x = self.relu(self.bn7(self.conv7(x+x3 )))\n",
154 | " #print(x.size())\n",
155 | " x = self.relu(self.bn8(self.conv8(x+x2 )))\n",
156 | " #print(x.size())\n",
157 | " x = self.relu(self.bn9(self.conv9(x+x1 )))\n",
158 | " #print(x.size())\n",
159 | " x = self.relu(self.bnadd(self.convadd(x)))\n",
160 | " x = self.conv10(x)\n",
161 | " return x\n"
162 | ]
163 | },
164 | {
165 | "cell_type": "code",
166 | "execution_count": null,
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "fnet = FeatureResNet()\n",
171 | "fcn = SegResNet(4,fnet)\n",
172 | "fcn = fcn.cuda()\n"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": null,
178 | "metadata": {},
179 | "outputs": [],
180 | "source": [
181 | "dataset_path = ''\n",
182 | "filename = \"validation_dataset.txt\"\n",
183 | "mynumbers = []\n",
184 | "with open(filename) as f:\n",
185 | " for line in f:\n",
186 | " item = line.strip().split('\\n')\n",
187 | " for subitem in item:\n",
188 | " mynumbers.append(subitem)\n",
189 | " \n",
190 | "test_set = []\n",
191 | "for i in range(4000):\n",
192 | " test_set.append((dataset_path+'/imgs3/train_image_'+str(z+1)+'_1.png',\n",
193 | " dataset_path+'/imgs3/train_image_'+str(z+1)+'_2.png',\n",
194 | " dataset_path+'/gt3/train_image_'+str(z+1)+'.mat'))\n",
195 | "\n",
196 | "dataset_path = ''\n",
197 | "filename = \"train_dataset.txt\"\n",
198 | "mynumbers = []\n",
199 | "with open(filename) as f:\n",
200 | " for line in f:\n",
201 | " item = line.strip().split('\\n')\n",
202 | " for subitem in item:\n",
203 | " mynumbers.append(subitem)\n",
204 | " \n",
205 | "test_set = []\n",
206 | "for i in range(36000):\n",
207 | " train_set.append((dataset_path+'/imgs3/train_image_'+str(z+1)+'_1.png',\n",
208 | " dataset_path+'/imgs3/train_image_'+str(z+1)+'_2.png',\n",
209 | " dataset_path+'/gt3/train_image_'+str(z+1)+'.mat'))\n",
210 | " "
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": null,
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "import scipy.io as sio\n",
220 | "x = np.arange(0,256,1)\n",
221 | "y = np.arange(0,256,1)\n",
222 | "xnew = np.arange(1.5,257.5,4)\n",
223 | "ynew = np.arange(1.5,257.5,4)\n",
224 | "\n",
225 | "class MyDataset(data_utils.Dataset):\n",
226 | " def __init__(self, dataset, transform=None, target_transform=None, loader=default_loader):\n",
227 | " \n",
228 | " self.imgs = dataset\n",
229 | " self.transform = transform\n",
230 | " self.target_transform = target_transform\n",
231 | " self.loader = loader\n",
232 | "\n",
233 | " def __getitem__(self, index):\n",
234 | " label_x, label_y, label_z = self.imgs[index]\n",
235 | " img1 = self.loader(label_x)\n",
236 | " img_1 = ToTensor()(img1.resize((128,128)))\n",
237 | " img2 = self.loader(label_y)\n",
238 | " img_2 = ToTensor()(img2.resize((128,128)))\n",
239 | " imgs = torch.cat((img_1, img_2), 0)\n",
240 | " try:\n",
241 | " gt = sio.loadmat(label_z)['Disp_field_1'].astype(float)\n",
242 | " \n",
243 | " except KeyError:\n",
244 | " gt = sio.loadmat(label_z)['Disp_field_2'].astype(float)\n",
245 | " \n",
246 | " gt = np.asarray(gt)\n",
247 | " gt = gt*100\n",
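248 | "        # the x100 scaling above is undone at run time (StrainNet_run multiplies by 0.01)\n",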
248 | " [dudx, dudy]= np.gradient(gt[:,:,0])\n",
249 | " [dvdx, dvdy]= np.gradient(gt[:,:,1])\n",
250 | " \n",
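251 | "        # resample the four gradient maps onto the coarser 64-point grid\n",
252 | "        # (xnew, ynew) so the targets match the network output size\n",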
251 | " f = interpolate.interp2d(x, y, dudx, kind='cubic')\n",
252 | " dudx_ = f(xnew, ynew)\n",
253 | " f = interpolate.interp2d(x, y, dudy, kind='cubic')\n",
254 | " dudy_ = f(xnew, ynew)\n",
255 | " f = interpolate.interp2d(x, y, dvdx, kind='cubic')\n",
256 | " dvdx_ = f(xnew, ynew)\n",
257 | " f = interpolate.interp2d(x, y, dvdy, kind='cubic')\n",
258 | " dvdy_ = f(xnew, ynew)\n",
259 | " st = np.stack([dudx_, dudy_, dvdx_, dvdy_], axis=0)\n",
260 | " #st = np.stack([dudx, dudy, dvdx, dvdy], axis=0)\n",
261 | " \n",
262 | " \n",
263 | " return imgs,st\n",
264 | "\n",
265 | " def __len__(self):\n",
266 | " return len(self.imgs)"
267 | ]
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "metadata": {},
273 | "outputs": [],
274 | "source": [
275 | "EPOCH = 100 # train the training data n times, to save time, we just train 100 epoch\n",
276 | "BATCH_SIZE = 12\n",
277 | "print('BATCH_SIZE = ',BATCH_SIZE)\n",
278 | "LR = 0.001 # learning rate\n",
279 | "#root = './gdrive_northwestern/My Drive/dl_encoder/data/orig/orig'\n",
280 | "NUM_WORKERS = 0\n",
281 | "\n",
282 | "optimizer = torch.optim.Adam(fcn.parameters(), lr=LR) # optimize all cnn parameters\n",
283 | "#optimizer = torch.optim.SGD(cnn.parameters(), lr=LR, momentum=0.9) # optimize all cnn parameters\n",
284 | "loss_func = nn.MSELoss()\n",
285 | "\n",
286 | "\n",
287 | "train_data=MyDataset(dataset=train_set)\n",
288 | "train_loader = data_utils.DataLoader(dataset=train_data, batch_size=BATCH_SIZE, shuffle=True, num_workers=NUM_WORKERS)\n",
289 | "\n",
290 | "test_data=MyDataset(dataset=test_set)\n",
291 | "test_loader = data_utils.DataLoader(dataset=test_data, batch_size=1)"
292 | ]
293 | },
294 | {
295 | "cell_type": "code",
296 | "execution_count": null,
297 | "metadata": {},
298 | "outputs": [],
299 | "source": [
300 | "from datetime import datetime\n",
301 | "dataString = datetime.strftime(datetime.now(), '%Y_%m_%d_%H:%M:%S')"
302 | ]
303 | },
304 | {
305 | "cell_type": "code",
306 | "execution_count": null,
307 | "metadata": {},
308 | "outputs": [],
309 | "source": [
310 | "root_result = ''\n",
311 | "os.mkdir(root_result)\n",
312 | "model_result = root_result+'model/'\n",
313 | "log_result = root_result+'log/'\n",
314 | "os.mkdir(model_result)\n",
315 | "os.mkdir(log_result)"
316 | ]
317 | },
318 | {
319 | "cell_type": "code",
320 | "execution_count": null,
321 | "metadata": {},
322 | "outputs": [],
323 | "source": [
324 | "fileOut=open(log_result+'log'+dataString,'a')\n",
325 | "fileOut.write(dataString+'Epoch: Step: Loss: Val_Accu :\\n')\n",
326 | "fileOut.close()\n",
327 | "fileOut2 = open(log_result+'validation'+dataString, 'a')\n",
328 | "fileOut2.write('kernal_size of conv_f is 2')\n",
329 | "fileOut2.write(dataString+'Epoch: loss:')\n",
330 | "\n",
331 | "optimizer = torch.optim.Adam(fcn.parameters(), lr=LR ) # optimize all cnn parameters\n",
332 | "\n",
333 | "fcn.load_state_dict(torch.load(model_result + 'PATH_TO_PRETRAINED')) #comment this line if you start a new training\n",
334 | "for epoch in range(EPOCH):\n",
335 | " fcn.train()\n",
336 | " for step, (img,gt) in enumerate(train_loader): # gives batch data, normalize x when iterate train_loader\n",
337 | " \n",
338 | " img = Variable(img).cuda()\n",
339 | " gt=gt.float()\n",
340 | " gt = Variable(gt).cuda()\n",
341 | " output = fcn(img) # cnn output\n",
342 | " loss = loss_func(output, gt) # loss\n",
343 | " optimizer.zero_grad() # clear gradients for this training step\n",
344 | " loss.backward() # backpropagation, compute gradients\n",
345 | " optimizer.step() # apply gradients\n",
346 | " print(epoch, step, loss.data.item())\n",
347 | " fileOut=open(log_result+'log'+dataString,'a')\n",
348 | " fileOut.write(str(epoch)+' '+str(step)+' '+str(loss.data.item())+'\\n')\n",
349 | " fileOut.close()\n",
350 | " if epoch%10 == 9:\n",
351 | " PATH = model_result + 'param_all_strain2_' + str(epoch) + '_' + str(step)\n",
352 | " torch.save(fcn.state_dict(), PATH)\n",
353 | " print('finished saving checkpoints')\n",
354 | " \n",
355 | " LOSS_VALIDATION = 0\n",
356 | " fcn.eval()\n",
357 | " with torch.no_grad():\n",
358 | " for step, (img,gt) in enumerate(test_loader):\n",
359 | "\n",
360 | " img = Variable(img).cuda()\n",
361 | "            gt = gt.float()\n",
362 | " gt = Variable(gt).cuda()\n",
363 | " output = fcn(img) \n",
364 | " LOSS_VALIDATION += loss_func(output, gt)\n",
365 | "        LOSS_VALIDATION = LOSS_VALIDATION/(step+1)\n",
366 | " fileOut2 = open(log_result+'validation'+dataString, 'a')\n",
367 | " fileOut2.write(str(epoch)+' '+str(step)+' '+str(LOSS_VALIDATION.data.item())+'\\n')\n",
368 | " fileOut2.close()\n",
369 | " print('validation error epoch '+str(epoch)+': '+str(LOSS_VALIDATION)+'\\n'+str(step))\n",
370 | "\n"
371 | ]
372 | }
387 | ],
388 | "metadata": {
389 | "kernelspec": {
390 | "display_name": "Python 3",
391 | "language": "python",
392 | "name": "python3"
393 | },
394 | "language_info": {
395 | "codemirror_mode": {
396 | "name": "ipython",
397 | "version": 3
398 | },
399 | "file_extension": ".py",
400 | "mimetype": "text/x-python",
401 | "name": "python",
402 | "nbconvert_exporter": "python",
403 | "pygments_lexer": "ipython3",
404 | "version": "3.6.10"
405 | }
406 | },
407 | "nbformat": 4,
408 | "nbformat_minor": 4
409 | }
410 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | jupyter==1.0.0
2 | numpy==1.17.4
3 | Pillow==8.4.0
4 | scikit-image==0.18.3
5 | scikit-learn==1.0.1
6 | scikit-neuralnetwork==0.7
7 | scipy==1.6.3
9 | torch==1.8.1+cpu
10 | torchvision==0.9.1+cpu
11 | tqdm==4.60.0
--------------------------------------------------------------------------------
/validation_dataset.txt:
--------------------------------------------------------------------------------
1 | 21573
2 | 28334
3 | 21536
4 | 37795
5 | 37450
6 | 15745
7 | 27126
8 | 38116
9 | 9150
10 | 20552
11 | 18037
12 | 12952
13 | 16596
14 | 11992
15 | 25377
16 | 1401
17 | 4489
18 | 13228
19 | 26388
20 | 17008
21 | 16591
22 | 33735
23 | 15321
24 | 19870
25 | 592
26 | 8557
27 | 38415
28 | 34081
29 | 33222
30 | 1087
31 | 23010
32 | 35298
33 | 34113
34 | 37335
35 | 22912
36 | 10270
37 | 1293
38 | 20745
39 | 13345
40 | 18154
41 | 9419
42 | 26251
43 | 23751
44 | 22530
45 | 445
46 | 4799
47 | 6139
48 | 24517
49 | 7942
50 | 7539
51 | 35231
52 | 19551
53 | 4398
54 | 38111
55 | 21789
56 | 4654
57 | 19708
58 | 4969
59 | 9263
60 | 26448
61 | 39503
62 | 14636
63 | 9016
64 | 27237
65 | 13453
66 | 23285
67 | 33184
68 | 19951
69 | 25656
70 | 36267
71 | 10852
72 | 14266
73 | 10519
74 | 14700
75 | 19759
76 | 35063
77 | 20728
78 | 4171
79 | 27982
80 | 27236
81 | 32744
82 | 11385
83 | 24597
84 | 35073
85 | 33797
86 | 17585
87 | 19862
88 | 491
89 | 16154
90 | 19627
91 | 36587
92 | 414
93 | 19696
94 | 37876
95 | 15342
96 | 4242
97 | 23110
98 | 21415
99 | 28163
100 | 131
101 | 28669
102 | 9593
103 | 25231
104 | 1106
105 | 26130
106 | 8174
107 | 17755
108 | 5757
109 | 13512
110 | 26302
111 | 21248
112 | 39577
113 | 483
114 | 15155
115 | 7853
116 | 11295
117 | 29724
118 | 28958
119 | 28652
120 | 27618
121 | 36563
122 | 16316
123 | 1999
124 | 35783
125 | 36277
126 | 9909
127 | 4494
128 | 23648
129 | 7637
130 | 5184
131 | 14411
132 | 840
133 | 22450
134 | 12471
135 | 30669
136 | 20150
137 | 33947
138 | 31432
139 | 38997
140 | 28576
141 | 21300
142 | 3013
143 | 9732
144 | 8995
145 | 6450
146 | 20568
147 | 1935
148 | 599
149 | 37861
150 | 15063
151 | 22774
152 | 23407
153 | 13215
154 | 27745
155 | 1607
156 | 20308
157 | 5069
158 | 5701
159 | 20329
160 | 38928
161 | 29343
162 | 14891
163 | 25082
164 | 8053
165 | 26260
166 | 36798
167 | 16162
168 | 20700
169 | 6409
170 | 13882
171 | 3467
172 | 24093
173 | 4027
174 | 10464
175 | 2206
176 | 14753
177 | 38281
178 | 8137
179 | 7042
180 | 10521
181 | 5285
182 | 18461
183 | 4407
184 | 12084
185 | 17790
186 | 10456
187 | 23963
188 | 32704
189 | 26360
190 | 7516
191 | 30410
192 | 28015
193 | 8948
194 | 29717
195 | 33473
196 | 26046
197 | 34586
198 | 24134
199 | 14047
200 | 19240
201 | 7037
202 | 22327
203 | 16875
204 | 26233
205 | 18612
206 | 37990
207 | 6643
208 | 2831
209 | 23995
210 | 19052
211 | 5240
212 | 36977
213 | 8394
214 | 38984
215 | 7352
216 | 5172
217 | 4029
218 | 27887
219 | 20628
220 | 37439
221 | 11554
222 | 22627
223 | 28931
224 | 16893
225 | 39770
226 | 36014
227 | 34233
228 | 4968
229 | 22093
230 | 1413
231 | 37786
232 | 10465
233 | 18078
234 | 12812
235 | 11899
236 | 29370
237 | 26189
238 | 31271
239 | 3448
240 | 19982
241 | 5140
242 | 28357
243 | 36877
244 | 14259
245 | 12154
246 | 3795
247 | 15013
248 | 38248
249 | 5508
250 | 1023
251 | 37479
252 | 406
253 | 31389
254 | 8564
255 | 11772
256 | 19218
257 | 21058
258 | 12688
259 | 12851
260 | 36386
261 | 39174
262 | 14903
263 | 12758
264 | 16067
265 | 29307
266 | 29833
267 | 11285
268 | 28437
269 | 23142
270 | 36052
271 | 17977
272 | 38508
273 | 11694
274 | 39529
275 | 18767
276 | 22162
277 | 19187
278 | 8833
279 | 10435
280 | 35280
281 | 94
282 | 10527
283 | 21585
284 | 34354
285 | 29587
286 | 33256
287 | 10439
288 | 29160
289 | 25320
290 | 7987
291 | 20385
292 | 34249
293 | 1253
294 | 33519
295 | 730
296 | 15294
297 | 4728
298 | 36305
299 | 2122
300 | 11764
301 | 13257
302 | 13193
303 | 36682
304 | 8637
305 | 28035
306 | 21316
307 | 22514
308 | 35075
309 | 5557
310 | 6758
311 | 17335
312 | 21100
313 | 6477
314 | 9781
315 | 32976
316 | 39842
317 | 17989
318 | 1406
319 | 32564
320 | 3391
321 | 23521
322 | 33203
323 | 10953
324 | 9273
325 | 10074
326 | 25549
327 | 15366
328 | 24527
329 | 5223
330 | 6726
331 | 7878
332 | 34538
333 | 12977
334 | 2836
335 | 3840
336 | 349
337 | 21079
338 | 33038
339 | 36663
340 | 29618
341 | 31526
342 | 27290
343 | 14185
344 | 25819
345 | 19916
346 | 22208
347 | 29874
348 | 20582
349 | 32853
350 | 20769
351 | 12045
352 | 33769
353 | 26244
354 | 13034
355 | 20849
356 | 421
357 | 23494
358 | 24032
359 | 27738
360 | 1695
361 | 19281
362 | 1074
363 | 9457
364 | 25125
365 | 17807
366 | 25889
367 | 31248
368 | 12282
369 | 29201
370 | 24071
371 | 36864
372 | 20843
373 | 25887
374 | 6095
375 | 13546
376 | 16894
377 | 11151
378 | 21699
379 | 38027
380 | 32191
381 | 2417
382 | 34953
383 | 39725
384 | 22754
385 | 22931
386 | 5163
387 | 18104
388 | 37835
389 | 11125
390 | 18243
391 | 16829
392 | 17391
393 | 29710
394 | 12167
395 | 28721
396 | 6715
397 | 18842
398 | 19606
399 | 13782
400 | 10007
401 | 12541
402 | 33207
403 | 13777
404 | 35756
405 | 36498
406 | 35155
407 | 4596
408 | 22682
409 | 5186
410 | 8937
411 | 34338
412 | 20354
413 | 39106
414 | 857
415 | 31361
416 | 31123
417 | 35532
418 | 37748
419 | 20544
420 | 26882
421 | 21019
422 | 29097
423 | 509
424 | 6523
425 | 31608
426 | 24112
427 | 4503
428 | 14807
429 | 14752
430 | 24039
431 | 12631
432 | 13049
433 | 38022
434 | 2133
435 | 20749
436 | 33049
437 | 21384
438 | 22004
439 | 14533
440 | 30010
441 | 6485
442 | 7380
443 | 31562
444 | 29070
445 | 31453
446 | 32900
447 | 7082
448 | 32946
449 | 23268
450 | 33244
451 | 17181
452 | 30808
453 | 26085
454 | 17746
455 | 36768
456 | 13536
457 | 35123
458 | 31360
459 | 22036
460 | 3344
461 | 23314
462 | 21973
463 | 4169
464 | 28540
465 | 17591
466 | 34414
467 | 37419
468 | 5788
469 | 20975
470 | 28563
471 | 24436
472 | 16157
473 | 36752
474 | 25722
475 | 13822
476 | 37722
477 | 8634
478 | 14338
479 | 4482
480 | 20425
481 | 38955
482 | 1729
483 | 27015
484 | 4455
485 | 14529
486 | 3015
487 | 30599
488 | 32133
489 | 18841
490 | 25278
491 | 23782
492 | 24302
493 | 11437
494 | 29161
495 | 33134
496 | 34072
497 | 34852
498 | 33510
499 | 36102
500 | 3723
501 | 2700
502 | 37684
503 | 28935
504 | 14634
505 | 1539
506 | 15175
507 | 19130
508 | 20909
509 | 28927
510 | 9092
511 | 22465
512 | 15045
513 | 29384
514 | 35798
515 | 35331
516 | 6544
517 | 16653
518 | 36771
519 | 28280
520 | 21396
521 | 25147
522 | 13032
523 | 31107
524 | 34616
525 | 28178
526 | 6806
527 | 1558
528 | 39128
529 | 28630
530 | 20404
531 | 33501
532 | 2365
533 | 29563
534 | 33635
535 | 2760
536 | 13186
537 | 17430
538 | 25387
539 | 10618
540 | 27680
541 | 27981
542 | 24265
543 | 20474
544 | 4534
545 | 38914
546 | 19913
547 | 28238
548 | 6258
549 | 347
550 | 3171
551 | 24559
552 | 24212
553 | 14365
554 | 6393
555 | 31142
556 | 22330
557 | 29615
558 | 37681
559 | 11373
560 | 21160
561 | 12976
562 | 23135
563 | 31051
564 | 19494
565 | 16379
566 | 3746
567 | 23199
568 | 19954
569 | 20892
570 | 7476
571 | 14477
572 | 36467
573 | 28946
574 | 34729
575 | 34361
576 | 20815
577 | 18451
578 | 4010
579 | 7777
580 | 38157
581 | 31559
582 | 8687
583 | 26353
584 | 20050
585 | 38810
586 | 3947
587 | 33468
588 | 35719
589 | 6754
590 | 16799
591 | 18693
592 | 5480
593 | 1510
594 | 33674
595 | 30759
596 | 9322
597 | 21449
598 | 1600
599 | 7676
600 | 15728
601 | 23133
602 | 24862
603 | 22268
604 | 22280
605 | 5927
606 | 34959
607 | 6893
608 | 22725
609 | 29725
610 | 23429
611 | 21999
612 | 16287
613 | 5684
614 | 34088
615 | 4916
616 | 32451
617 | 17027
618 | 6282
619 | 30967
620 | 26898
621 | 35384
622 | 23793
623 | 25140
624 | 37888
625 | 16246
626 | 2833
627 | 11284
628 | 21609
629 | 26115
630 | 13100
631 | 13583
632 | 34642
633 | 21531
634 | 34242
635 | 20099
636 | 21582
637 | 28219
638 | 15082
639 | 6103
640 | 23676
641 | 36088
642 | 36496
643 | 14647
644 | 31367
645 | 26453
646 | 25954
647 | 18481
648 | 14208
649 | 38962
650 | 26252
651 | 661
652 | 23527
653 | 24521
654 | 8198
655 | 5370
656 | 21703
657 | 14855
658 | 1576
659 | 10555
660 | 17538
661 | 36838
662 | 25002
663 | 26781
664 | 15695
665 | 22308
666 | 7342
667 | 2098
668 | 19423
669 | 14319
670 | 5147
671 | 34834
672 | 19992
673 | 24210
674 | 27998
675 | 2411
676 | 39584
677 | 18891
678 | 9681
679 | 19595
680 | 26629
681 | 34076
682 | 16172
683 | 33245
684 | 13710
685 | 31748
686 | 4180
687 | 7289
688 | 28719
689 | 846
690 | 33573
691 | 24306
692 | 34312
693 | 14043
694 | 14766
695 | 29612
696 | 9618
697 | 33934
698 | 4932
699 | 14197
700 | 34973
701 | 11102
702 | 37804
703 | 19586
704 | 19029
705 | 13341
706 | 22172
707 | 8523
708 | 21325
709 | 17371
710 | 575
711 | 3508
712 | 1486
713 | 31095
714 | 8731
715 | 15944
716 | 31891
717 | 12788
718 | 28888
719 | 24882
720 | 16461
721 | 29419
722 | 1197
723 | 35462
724 | 26124
725 | 19938
726 | 15891
727 | 4587
728 | 2517
729 | 8452
730 | 18534
731 | 30247
732 | 7369
733 | 19391
734 | 15881
735 | 9557
736 | 34501
737 | 6360
738 | 18502
739 | 28668
740 | 38719
741 | 926
742 | 10248
743 | 10561
744 | 20005
745 | 1314
746 | 6220
747 | 11771
748 | 30887
749 | 7163
750 | 14025
751 | 23213
752 | 1416
753 | 39194
754 | 32993
755 | 19876
756 | 717
757 | 6980
758 | 28132
759 | 6510
760 | 2015
761 | 20824
762 | 31924
763 | 32166
764 | 23364
765 | 28826
766 | 26061
767 | 37071
768 | 30439
769 | 21032
770 | 34291
771 | 16749
772 | 24175
773 | 11598
774 | 20298
775 | 665
776 | 2313
777 | 12559
778 | 12610
779 | 39433
780 | 2280
781 | 8271
782 | 33994
783 | 34663
784 | 36112
785 | 5972
786 | 26410
787 | 763
788 | 13993
789 | 16683
790 | 5700
791 | 15451
792 | 9973
793 | 29309
794 | 12843
795 | 29407
796 | 18735
797 | 37507
798 | 30528
799 | 28711
800 | 14108
801 | 36562
802 | 3290
803 | 32815
804 | 33924
805 | 11330
806 | 1536
807 | 29136
808 | 32669
809 | 17359
810 | 5462
811 | 4815
812 | 5807
813 | 32786
814 | 5772
815 | 34533
816 | 37158
817 | 19645
818 | 31535
819 | 37365
820 | 15198
821 | 15327
822 | 16831
823 | 37740
824 | 5252
825 | 6788
826 | 3154
827 | 11265
828 | 13308
829 | 3283
830 | 19159
831 | 26003
832 | 20608
833 | 39908
834 | 14586
835 | 31357
836 | 24314
837 | 7917
838 | 6984
839 | 29214
840 | 12219
841 | 272
842 | 3697
843 | 10206
844 | 11721
845 | 24600
846 | 6463
847 | 19219
848 | 14404
849 | 27092
850 | 37037
851 | 9247
852 | 7939
853 | 21644
854 | 7726
855 | 12606
856 | 24880
857 | 37498
858 | 9424
859 | 33457
860 | 15136
861 | 14145
862 | 38470
863 | 31916
864 | 8496
865 | 16625
866 | 4043
867 | 20022
868 | 34713
869 | 17268
870 | 14310
871 | 35271
872 | 10792
873 | 21038
874 | 22177
875 | 13578
876 | 16173
877 | 10823
878 | 13408
879 | 17944
880 | 9808
881 | 16130
882 | 4771
883 | 5047
884 | 33699
885 | 19056
886 | 14593
887 | 17449
888 | 1447
889 | 20804
890 | 31285
891 | 19889
892 | 28508
893 | 36693
894 | 8832
895 | 25891
896 | 2205
897 | 3444
898 | 19008
899 | 8429
900 | 9232
901 | 20332
902 | 37075
903 | 33238
904 | 11497
905 | 17640
906 | 10148
907 | 9270
908 | 37568
909 | 6987
910 | 543
911 | 5802
912 | 29676
913 | 25347
914 | 1224
915 | 3829
916 | 36168
917 | 23281
918 | 12115
919 | 33739
920 | 15147
921 | 14607
922 | 20573
923 | 850
924 | 6138
925 | 623
926 | 12737
927 | 15215
928 | 39783
929 | 22549
930 | 5810
931 | 18413
932 | 29478
933 | 20437
934 | 10097
935 | 7014
936 | 11535
937 | 27731
938 | 21195
939 | 34265
940 | 31703
941 | 39969
942 | 38753
943 | 9691
944 | 27282
945 | 15384
946 | 37434
947 | 25424
948 | 15535
949 | 28699
950 | 11998
951 | 15591
952 | 17567
953 | 19431
954 | 25982
955 | 22994
956 | 30692
957 | 9327
958 | 1209
959 | 28934
960 | 39276
961 | 35821
962 | 23851
963 | 8574
964 | 8875
965 | 34543
966 | 22639
967 | 161
968 | 34446
969 | 259
970 | 11062
971 | 30929
972 | 12374
973 | 16209
974 | 6382
975 | 10911
976 | 16471
977 | 10480
978 | 12265
979 | 37605
980 | 4651
981 | 27540
982 | 17174
983 | 25264
984 | 38673
985 | 21692
986 | 30187
987 | 26175
988 | 14306
989 | 12533
990 | 18884
991 | 39837
992 | 37245
993 | 6418
994 | 10895
995 | 29019
996 | 24380
997 | 37126
998 | 30930
999 | 9345
1000 | 5945
1001 | 37422
1002 | 38959
1003 | 17192
1004 | 3688
1005 | 20083
1006 | 2809
1007 | 2312
1008 | 20246
1009 | 27081
1010 | 4452
1011 | 29816
1012 | 34089
1013 | 172
1014 | 2690
1015 | 1258
1016 | 30095
1017 | 25872
1018 | 18152
1019 | 37658
1020 | 16190
1021 | 15583
1022 | 24503
1023 | 25631
1024 | 22111
1025 | 32818
1026 | 12918
1027 | 29275
1028 | 27649
1029 | 19086
1030 | 38290
1031 | 22242
1032 | 31056
1033 | 24974
1034 | 20952
1035 | 1564
1036 | 37420
1037 | 35786
1038 | 26271
1039 | 3495
1040 | 29026
1041 | 38288
1042 | 26086
1043 | 21802
1044 | 11249
1045 | 16963
1046 | 16018
1047 | 38791
1048 | 3916
1049 | 6560
1050 | 14071
1051 | 20147
1052 | 26018
1053 | 36057
1054 | 15675
1055 | 37743
1056 | 16933
1057 | 25567
1058 | 12488
1059 | 10146
1060 | 16459
1061 | 33341
1062 | 31678
1063 | 3521
1064 | 31967
1065 | 28787
1066 | 18343
1067 | 5840
1068 | 22100
1069 | 1366
1070 | 19476
1071 | 11336
1072 | 19953
1073 | 39427
1074 | 39635
1075 | 21115
1076 | 5510
1077 | 4886
1078 | 27856
1079 | 21071
1080 | 15569
1081 | 27557
1082 | 1294
1083 | 13724
1084 | 15236
1085 | 25328
1086 | 3835
1087 | 30754
1088 | 35877
1089 | 28445
1090 | 2324
1091 | 10057
1092 | 38991
1093 | 9835
1094 | 14055
1095 | 37557
1096 | 8324
1097 | 37969
1098 | 890
1099 | 11259
1100 | 37987
1101 | 37060
1102 | 12174
1103 | 246
1104 | 3719
1105 | 35924
1106 | 38406
1107 | 25574
1108 | 14963
1109 | 37047
1110 | 8701
1111 | 19757
1112 | 26988
1113 | 34166
1114 | 1243
1115 | 34632
1116 | 27812
1117 | 4703
1118 | 14939
1119 | 21021
1120 | 25187
1121 | 26746
1122 | 10229
1123 | 19611
1124 | 39468
1125 | 27566
1126 | 11248
1127 | 24102
1128 | 35242
1129 | 19210
1130 | 27583
1131 | 34421
1132 | 25363
1133 | 32572
1134 | 20587
1135 | 12968
1136 | 20688
1137 | 24488
1138 | 36933
1139 | 11407
1140 | 33828
1141 | 1964
1142 | 6229
1143 | 16701
1144 | 35328
1145 | 13429
1146 | 14692
1147 | 36206
1148 | 37541
1149 | 38082
1150 | 15941
1151 | 36449
1152 | 16118
1153 | 26314
1154 | 27208
1155 | 26139
1156 | 14862
1157 | 13361
1158 | 3070
1159 | 10400
1160 | 10292
1161 | 7328
1162 | 29903
1163 | 4901
1164 | 2008
1165 | 36636
1166 | 12980
1167 | 13191
1168 | 35925
1169 | 9848
1170 | 4533
1171 | 39512
1172 | 403
1173 | 27150
1174 | 27275
1175 | 5519
1176 | 5572
1177 | 3447
1178 | 22302
1179 | 31238
1180 | 18332
1181 | 35903
1182 | 38859
1183 | 10757
1184 | 2856
1185 | 34279
1186 | 7100
1187 | 33350
1188 | 4775
1189 | 30109
1190 | 5408
1191 | 8041
1192 | 3281
1193 | 30511
1194 | 33267
1195 | 10865
1196 | 12162
1197 | 13198
1198 | 7710
1199 | 35845
1200 | 39944
1201 | 34960
1202 | 10969
1203 | 28877
1204 | 10861
1205 | 26719
1206 | 32045
1207 | 35302
1208 | 37695
1209 | 27756
1210 | 11190
1211 | 36259
1212 | 23800
1213 | 29578
1214 | 3757
1215 | 270
1216 | 18427
1217 | 28658
1218 | 24980
1219 | 15956
1220 | 17133
1221 | 33724
1222 | 19762
1223 | 8417
1224 | 10390
1225 | 26023
1226 | 31679
1227 | 19732
1228 | 26083
1229 | 3078
1230 | 8389
1231 | 28959
1232 | 6075
1233 | 37642
1234 | 33340
1235 | 19417
1236 | 17491
1237 | 7446
1238 | 26106
1239 | 25332
1240 | 17397
1241 | 12926
1242 | 30818
1243 | 26890
1244 | 1897
1245 | 38768
1246 | 38182
1247 | 34839
1248 | 26677
1249 | 30366
1250 | 21559
1251 | 29226
1252 | 36421
1253 | 10918
1254 | 32852
1255 | 11891
1256 | 35811
1257 | 5366
1258 | 39223
1259 | 13395
1260 | 29542
1261 | 25677
1262 | 2893
1263 | 29454
1264 | 32679
1265 | 11005
1266 | 38815
1267 | 16694
1268 | 33156
1269 | 28672
1270 | 25489
1271 | 3832
1272 | 19237
1273 | 37599
1274 | 9769
1275 | 38553
1276 | 5669
1277 | 3338
1278 | 37482
1279 | 25736
1280 | 19664
1281 | 9353
1282 | 12273
1283 | 38634
1284 | 5775
1285 | 2945
1286 | 32875
1287 | 15304
1288 | 2909
1289 | 13767
1290 | 21690
1291 | 27005
1292 | 4135
1293 | 20460
1294 | 35398
1295 | 29687
1296 | 39100
1297 | 21517
1298 | 30169
1299 | 20424
1300 | 23766
1301 | 36592
1302 | 12281
1303 | 19526
1304 | 13920
1305 | 29969
1306 | 26993
1307 | 10517
1308 | 37548
1309 | 36360
1310 | 35841
1311 | 21075
1312 | 39330
1313 | 7988
1314 | 16015
1315 | 13545
1316 | 37102
1317 | 2790
1318 | 28832
1319 | 11976
1320 | 13441
1321 | 32114
1322 | 5374
1323 | 25167
1324 | 39111
1325 | 18605
1326 | 18848
1327 | 39677
1328 | 25298
1329 | 17124
1330 | 2307
1331 | 10877
1332 | 2041
1333 | 15917
1334 | 39954
1335 | 21488
1336 | 36046
1337 | 2895
1338 | 22234
1339 | 15452
1340 | 15456
1341 | 5623
1342 | 16660
1343 | 14174
1344 | 19232
1345 | 36654
1346 | 21516
1347 | 716
1348 | 38546
1349 | 11547
1350 | 1591
1351 | 3873
1352 | 6605
1353 | 7350
1354 | 34689
1355 | 2819
1356 | 29804
1357 | 32963
1358 | 26074
1359 | 22764
1360 | 15075
1361 | 31404
1362 | 33434
1363 | 35217
1364 | 9685
1365 | 2519
1366 | 33418
1367 | 38344
1368 | 823
1369 | 2443
1370 | 10525
1371 | 32560
1372 | 33286
1373 | 27346
1374 | 7885
1375 | 18684
1376 | 28171
1377 | 23583
1378 | 36871
1379 | 22288
1380 | 35167
1381 | 38739
1382 | 13935
1383 | 35853
1384 | 26798
1385 | 4175
1386 | 21482
1387 | 34246
1388 | 26968
1389 | 9883
1390 | 9541
1391 | 2549
1392 | 34935
1393 | 27608
1394 | 4851
1395 | 2926
1396 | 6304
1397 | 5378
1398 | 24310
1399 | 24403
1400 | 5118
1401 | 18058
1402 | 20277
1403 | 33144
1404 | 24360
1405 | 35603
1406 | 12327
1407 | 37981
1408 | 18191
1409 | 35094
1410 | 7537
1411 | 16224
1412 | 37747
1413 | 28609
1414 | 30516
1415 | 17345
1416 | 37896
1417 | 19433
1418 | 3358
1419 | 18094
1420 | 18328
1421 | 29622
1422 | 17928
1423 | 6218
1424 | 5703
1425 | 22526
1426 | 22736
1427 | 3810
1428 | 26110
1429 | 36188
1430 | 31234
1431 | 4808
1432 | 29576
1433 | 3693
1434 | 13510
1435 | 35704
1436 | 39690
1437 | 3641
1438 | 13276
1439 | 14793
1440 | 10970
1441 | 28402
1442 | 25664
1443 | 31324
1444 | 21649
1445 | 28355
1446 | 33663
1447 | 39703
1448 | 12934
1449 | 28925
1450 | 35169
1451 | 36189
1452 | 22691
1453 | 15886
1454 | 29916
1455 | 35895
1456 | 33100
1457 | 18801
1458 | 15087
1459 | 3390
1460 | 27006
1461 | 37806
1462 | 12390
1463 | 30106
1464 | 18484
1465 | 34987
1466 | 38582
1467 | 434
1468 | 8454
1469 | 5379
1470 | 6486
1471 | 11325
1472 | 16585
1473 | 10280
1474 | 16391
1475 | 35327
1476 | 24515
1477 | 18172
1478 | 20880
1479 | 5602
1480 | 39642
1481 | 15742
1482 | 27901
1483 | 35440
1484 | 29364
1485 | 12329
1486 | 38756
1487 | 30225
1488 | 37069
1489 | 22937
1490 | 545
1491 | 34067
1492 | 493
1493 | 11924
1494 | 6170
1495 | 4250
1496 | 3485
1497 | 37386
1498 | 16610
1499 | 1385
1500 | 21288
1501 | 27175
1502 | 13585
1503 | 30504
1504 | 4481
1505 | 13676
1506 | 24106
1507 | 38916
1508 | 28196
1509 | 3043
1510 | 36077
1511 | 895
1512 | 2270
1513 | 19859
1514 | 27322
1515 | 14591
1516 | 3075
1517 | 10600
1518 | 5698
1519 | 9203
1520 | 38103
1521 | 29360
1522 | 8522
1523 | 25214
1524 | 32413
1525 | 889
1526 | 13944
1527 | 10330
1528 | 13081
1529 | 19578
1530 | 25697
1531 | 16324
1532 | 20601
1533 | 22936
1534 | 29892
1535 | 27027
1536 | 2432
1537 | 3610
1538 | 1219
1539 | 5826
1540 | 29236
1541 | 2239
1542 | 4327
1543 | 28816
1544 | 13721
1545 | 32604
1546 | 30336
1547 | 22136
1548 | 39572
1549 | 39946
1550 | 4282
1551 | 15765
1552 | 24606
1553 | 13674
1554 | 8567
1555 | 14930
1556 | 23713
1557 | 30632
1558 | 13716
1559 | 7604
1560 | 31731
1561 | 28419
1562 | 29915
1563 | 28461
1564 | 39449
1565 | 39064
1566 | 20467
1567 | 11197
1568 | 39884
1569 | 7097
1570 | 9907
1571 | 36619
1572 | 23113
1573 | 15859
1574 | 15678
1575 | 9830
1576 | 36841
1577 | 30359
1578 | 26869
1579 | 20686
1580 | 17242
1581 | 31886
1582 | 34185
1583 | 15389
1584 | 7193
1585 | 18734
1586 | 22353
1587 | 19049
1588 | 1115
1589 | 15197
1590 | 35356
1591 | 11745
1592 | 36006
1593 | 23678
1594 | 16422
1595 | 26533
1596 | 20829
1597 | 13686
1598 | 23410
1599 | 6607
1600 | 30957
1601 | 15024
1602 | 28212
1603 | 38900
1604 | 28533
1605 | 24204
1606 | 5358
1607 | 19885
1608 | 1026
1609 | 13123
1610 | 26547
1611 | 29712
1612 | 33547
1613 | 12213
1614 | 31600
1615 | 21824
1616 | 25760
1617 | 9562
1618 | 16453
1619 | 3155
1620 | 1004
1621 | 39952
1622 | 18322
1623 | 13804
1624 | 30241
1625 | 3682
1626 | 38640
1627 | 4832
1628 | 4740
1629 | 37847
1630 | 12481
1631 | 5144
1632 | 27214
1633 | 15914
1634 | 34300
1635 | 29090
1636 | 8318
1637 | 31939
1638 | 39054
1639 | 38551
1640 | 24376
1641 | 3301
1642 | 36304
1643 | 29809
1644 | 13662
1645 | 13102
1646 | 38419
1647 | 8774
1648 | 31598
1649 | 20154
1650 | 23734
1651 | 39559
1652 | 6321
1653 | 33163
1654 | 3855
1655 | 39493
1656 | 18270
1657 | 2776
1658 | 29184
1659 | 14617
1660 | 32801
1661 | 34275
1662 | 16791
1663 | 21770
1664 | 38822
1665 | 4310
1666 | 26669
1667 | 30651
1668 | 34832
1669 | 34855
1670 | 28842
1671 | 9739
1672 | 2579
1673 | 6851
1674 | 17091
1675 | 36412
1676 | 6029
1677 | 18825
1678 | 9821
1679 | 13262
1680 | 5900
1681 | 10349
1682 | 35229
1683 | 37724
1684 | 4159
1685 | 6037
1686 | 24282
1687 | 11582
1688 | 6927
1689 | 20480
1690 | 39023
1691 | 21744
1692 | 8661
1693 | 17686
1694 | 35268
1695 | 26424
1696 | 1027
1697 | 37049
1698 | 25495
1699 | 30478
1700 | 6502
1701 | 32991
1702 | 10374
1703 | 8534
1704 | 38578
1705 | 34884
1706 | 25983
1707 | 24598
1708 | 6955
1709 | 8735
1710 | 9005
1711 | 6121
1712 | 4979
1713 | 13252
1714 | 14405
1715 | 25587
1716 | 9368
1717 | 21860
1718 | 35187
1719 | 38812
1720 | 38481
1721 | 31895
1722 | 37802
1723 | 9018
1724 | 21072
1725 | 9444
1726 | 31911
1727 | 28975
1728 | 27269
1729 | 1187
1730 | 27355
1731 | 32232
1732 | 37319
1733 | 8422
1734 | 31061
1735 | 10790
1736 | 22668
1737 | 24624
1738 | 28156
1739 | 30262
1740 | 35969
1741 | 18560
1742 | 28549
1743 | 18176
1744 | 37890
1745 | 24735
1746 | 32504
1747 | 29387
1748 | 15422
1749 | 20378
1750 | 34677
1751 | 36598
1752 | 22159
1753 | 15408
1754 | 2608
1755 | 9753
1756 | 13598
1757 | 13385
1758 | 25586
1759 | 6232
1760 | 19516
1761 | 37020
1762 | 7994
1763 | 15256
1764 | 25442
1765 | 37366
1766 | 23742
1767 | 21891
1768 | 37010
1769 | 33858
1770 | 24809
1771 | 3539
1772 | 25248
1773 | 28323
1774 | 34571
1775 | 33667
1776 | 26212
1777 | 34712
1778 | 20609
1779 | 32479
1780 | 30128
1781 | 28819
1782 | 34896
1783 | 18125
1784 | 10078
1785 | 29272
1786 | 26490
1787 | 9051
1788 | 17081
1789 | 38818
1790 | 14159
1791 | 23019
1792 | 16876
1793 | 6338
1794 | 16258
1795 | 18470
1796 | 17052
1797 | 31309
1798 | 30737
1799 | 13048
1800 | 35735
1801 | 11481
1802 | 38856
1803 | 14812
1804 | 6315
1805 | 31336
1806 | 3733
1807 | 18555
1808 | 10602
1809 | 37189
1810 | 24080
1811 | 11390
1812 | 18029
1813 | 8134
1814 | 39463
1815 | 29353
1816 | 8023
1817 | 3174
1818 | 25046
1819 | 39845
1820 | 15623
1821 | 16143
1822 | 2648
1823 | 7727
1824 | 39213
1825 | 18418
1826 | 15507
1827 | 3436
1828 | 38101
1829 | 15332
1830 | 3515
1831 | 32200
1832 | 19684
1833 | 7647
1834 | 4406
1835 | 35843
1836 | 18991
1837 | 3291
1838 | 3575
1839 | 39461
1840 | 17113
1841 | 21529
1842 | 13307
1843 | 7470
1844 | 39184
1845 | 22780
1846 | 28857
1847 | 21845
1848 | 27363
1849 | 5097
1850 | 35042
1851 | 9160
1852 | 3122
1853 | 1685
1854 | 28022
1855 | 20370
1856 | 37915
1857 | 26509
1858 | 24105
1859 | 17626
1860 | 32599
1861 | 21862
1862 | 16692
1863 | 12263
1864 | 20206
1865 | 13350
1866 | 38332
1867 | 16946
1868 | 15454
1869 | 31427
1870 | 5171
1871 | 37800
1872 | 23567
1873 | 11177
1874 | 17560
1875 | 2090
1876 | 26418
1877 | 294
1878 | 14665
1879 | 17812
1880 | 39291
1881 | 3987
1882 | 18079
1883 | 5382
1884 | 8049
1885 | 30119
1886 | 4071
1887 | 19206
1888 | 1506
1889 | 961
1890 | 38162
1891 | 9946
1892 | 24563
1893 | 10101
1894 | 23200
1895 | 2263
1896 | 6871
1897 | 17171
1898 | 657
1899 | 28607
1900 | 17963
1901 | 3256
1902 | 2548
1903 | 18951
1904 | 22494
1905 | 19320
1906 | 15937
1907 | 879
1908 | 34150
1909 | 25805
1910 | 12031
1911 | 33145
1912 | 7132
1913 | 7395
1914 | 26154
1915 | 1024
1916 | 26998
1917 | 18656
1918 | 10726
1919 | 13706
1920 | 30571
1921 | 17741
1922 | 31015
1923 | 6703
1924 | 35335
1925 | 31180
1926 | 16976
1927 | 26877
1928 | 22078
1929 | 20180
1930 | 9425
1931 | 29140
1932 | 4971
1933 | 39961
1934 | 35066
1935 | 6010
1936 | 19650
1937 | 8911
1938 | 38558
1939 | 25280
1940 | 26007
1941 | 11001
1942 | 23123
1943 | 1207
1944 | 4509
1945 | 8390
1946 | 13077
1947 | 2860
1948 | 1496
1949 | 16074
1950 | 37083
1951 | 33130
1952 | 15833
1953 | 34514
1954 | 27205
1955 | 7718
1956 | 1934
1957 | 37256
1958 | 37968
1959 | 14975
1960 | 7580
1961 | 1437
1962 | 37006
1963 | 10107
1964 | 20771
1965 | 35032
1966 | 10631
1967 | 20676
1968 | 15502
1969 | 21745
1970 | 20660
1971 | 13713
1972 | 19730
1973 | 30910
1974 | 14675
1975 | 35984
1976 | 1732
1977 | 16057
1978 | 27448
1979 | 28056
1980 | 27185
1981 | 6646
1982 | 34737
1983 | 19701
1984 | 30890
1985 | 4155
1986 | 10608
1987 | 32455
1988 | 23386
1989 | 36705
1990 | 20455
1991 | 21567
1992 | 19407
1993 | 7856
1994 | 23943
1995 | 21515
1996 | 24247
1997 | 8437
1998 | 17108
1999 | 36612
2000 | 38564
2001 | 29171
2002 | 16597
2003 | 3469
2004 | 37329
2005 | 10251
2006 | 9918
2007 | 22328
2008 | 28321
2009 | 38884
2010 | 38795
2011 | 8902
2012 | 27364
2013 | 37906
2014 | 17357
2015 | 35634
2016 | 9475
2017 | 35116
2018 | 6884
2019 | 36843
2020 | 19923
2021 | 7368
2022 | 11539
2023 | 27591
2024 | 3086
2025 | 35395
2026 | 32243
2027 | 28710
2028 | 18831
2029 | 35646
2030 | 1267
2031 | 20717
2032 | 14154
2033 | 7731
2034 | 1513
2035 | 25045
2036 | 12719
2037 | 14158
2038 | 8597
2039 | 11261
2040 | 13889
2041 | 25919
2042 | 5515
2043 | 21509
2044 | 28184
2045 | 9400
2046 | 39891
2047 | 29729
2048 | 8470
2049 | 31299
2050 | 21139
2051 | 13741
2052 | 37156
2053 | 19461
2054 | 38714
2055 | 6792
2056 | 23982
2057 | 36642
2058 | 27869
2059 | 20960
2060 | 35185
2061 | 10893
2062 | 6147
2063 | 12176
2064 | 18124
2065 | 19483
2066 | 30898
2067 | 27924
2068 | 3978
2069 | 5871
2070 | 15053
2071 | 8711
2072 | 1232
2073 | 6595
2074 | 17558
2075 | 13381
2076 | 33731
2077 | 30422
2078 | 19636
2079 | 6036
2080 | 31009
2081 | 26151
2082 | 27966
2083 | 15058
2084 | 23129
2085 | 26956
2086 | 14760
2087 | 34576
2088 | 13823
2089 | 26559
2090 | 24292
2091 | 30989
2092 | 25797
2093 | 13623
2094 | 290
2095 | 16371
2096 | 33536
2097 | 13971
2098 | 23573
2099 | 13268
2100 | 5765
2101 | 2253
2102 | 26262
2103 | 6535
2104 | 33115
2105 | 5476
2106 | 9412
2107 | 5570
2108 | 28776
2109 | 16501
2110 | 26117
2111 | 1831
2112 | 14559
2113 | 38417
2114 | 35497
2115 | 24188
2116 | 4626
2117 | 20675
2118 | 12061
2119 | 24689
2120 | 14095
2121 | 27071
2122 | 6446
2123 | 27846
2124 | 28237
2125 | 5180
2126 | 21133
2127 | 21176
2128 | 14204
2129 | 16980
2130 | 39358
2131 | 30842
2132 | 15271
2133 | 5296
2134 | 16995
2135 | 28208
2136 | 28133
2137 | 2484
2138 | 19266
2139 | 29284
2140 | 24120
2141 | 28262
2142 | 11594
2143 | 2734
2144 | 1303
2145 | 1374
2146 | 20569
2147 | 7248
2148 | 14421
2149 | 3324
2150 | 31560
2151 | 31472
2152 | 14028
2153 | 35734
2154 | 23145
2155 | 3815
2156 | 36220
2157 | 21147
2158 | 16124
2159 | 31006
2160 | 19965
2161 | 8581
2162 | 30581
2163 | 6982
2164 | 24286
2165 | 32493
2166 | 35312
2167 | 36908
2168 | 4985
2169 | 9633
2170 | 33754
2171 | 11565
2172 | 1516
2173 | 26617
2174 | 16800
2175 | 22185
2176 | 1966
2177 | 5377
2178 | 21011
2179 | 34408
2180 | 27009
2181 | 27294
2182 | 39950
2183 | 12254
2184 | 37752
2185 | 32291
2186 | 38793
2187 | 1792
2188 | 19442
2189 | 5466
2190 | 22474
2191 | 2679
2192 | 4013
2193 | 11501
2194 | 11974
2195 | 31067
2196 | 26152
2197 | 26215
2198 | 20319
2199 | 34080
2200 | 14017
2201 | 19958
2202 | 2522
2203 | 36936
2204 | 28013
2205 | 23368
2206 | 30529
2207 | 38655
2208 | 35824
2209 | 35113
2210 | 24950
2211 | 32472
2212 | 38045
2213 | 7331
2214 | 2056
2215 | 34915
2216 | 24805
2217 | 18641
2218 | 36736
2219 | 34486
2220 | 19783
2221 | 21502
2222 | 28709
2223 | 6078
2224 | 551
2225 | 36432
2226 | 34686
2227 | 14548
2228 | 32000
2229 | 1255
2230 | 16220
2231 | 24021
2232 | 25733
2233 | 22390
2234 | 33580
2235 | 37145
2236 | 13939
2237 | 39361
2238 | 2801
2239 | 11734
2240 | 32260
2241 | 3342
2242 | 1167
2243 | 14206
2244 | 247
2245 | 383
2246 | 130
2247 | 15547
2248 | 22379
2249 | 28715
2250 | 14102
2251 | 17778
2252 | 25117
2253 | 30112
2254 | 26337
2255 | 33249
2256 | 34244
2257 | 17942
2258 | 16930
2259 | 14192
2260 | 27848
2261 | 13506
2262 | 37085
2263 | 1337
2264 | 15254
2265 | 1684
2266 | 18932
2267 | 10655
2268 | 1411
2269 | 28175
2270 | 2104
2271 | 26275
2272 | 29054
2273 | 15697
2274 | 17252
2275 | 24012
2276 | 24284
2277 | 32755
2278 | 37224
2279 | 32386
2280 | 16566
2281 | 7650
2282 | 12997
2283 | 7430
2284 | 5614
2285 | 25687
2286 | 11988
2287 | 9007
2288 | 37481
2289 | 39590
2290 | 25987
2291 | 24101
2292 | 24673
2293 | 8125
2294 | 34916
2295 | 36316
2296 | 29240
2297 | 7615
2298 | 3449
2299 | 7708
2300 | 13974
2301 | 16906
2302 | 18700
2303 | 17842
2304 | 35963
2305 | 35226
2306 | 32361
2307 | 39988
2308 | 5876
2309 | 9491
2310 | 5874
2311 | 34041
2312 | 10420
2313 | 23845
2314 | 17341
2315 | 470
2316 | 2559
2317 | 1528
2318 | 33684
2319 | 37624
2320 | 27519
2321 | 20724
2322 | 23300
2323 | 39371
2324 | 12800
2325 | 7211
2326 | 25365
2327 | 18148
2328 | 21620
2329 | 10903
2330 | 16956
2331 | 24014
2332 | 16249
2333 | 35852
2334 | 19574
2335 | 33830
2336 | 19165
2337 | 6660
2338 | 54
2339 | 15769
2340 | 36346
2341 | 7618
2342 | 2194
2343 | 9634
2344 | 11856
2345 | 17210
2346 | 32170
2347 | 24330
2348 | 15588
2349 | 35347
2350 | 5276
2351 | 631
2352 | 31707
2353 | 8844
2354 | 36911
2355 | 39473
2356 | 37504
2357 | 9395
2358 | 28896
2359 | 32707
2360 | 34218
2361 | 24363
2362 | 16623
2363 | 23590
2364 | 34215
2365 | 9554
2366 | 11780
2367 | 16825
2368 | 30378
2369 | 26071
2370 | 25383
2371 | 2185
2372 | 7157
2373 | 39009
2374 | 30351
2375 | 24427
2376 | 6824
2377 | 25263
2378 | 30757
2379 | 35617
2380 | 13164
2381 | 14507
2382 | 9884
2383 | 6314
2384 | 23804
2385 | 1310
2386 | 6437
2387 | 6022
2388 | 5809
2389 | 13999
2390 | 14865
2391 | 18237
2392 | 30140
2393 | 8465
2394 | 33642
2395 | 34750
2396 | 36174
2397 | 17141
2398 | 17752
2399 | 10334
2400 | 26985
2401 | 14318
2402 | 31768
2403 | 35184
2404 | 6554
2405 | 19339
2406 | 21793
2407 | 26372
2408 | 15699
2409 | 27156
2410 | 28550
2411 | 31134
2412 | 26732
2413 | 34366
2414 | 1845
2415 | 23936
2416 | 20619
2417 | 14064
2418 | 14229
2419 | 35665
2420 | 24261
2421 | 8989
2422 | 35701
2423 | 24595
2424 | 9919
2425 | 19957
2426 | 21273
2427 | 31305
2428 | 27921
2429 | 39901
2430 | 21894
2431 | 32935
2432 | 39966
2433 | 37554
2434 | 8026
2435 | 21181
2436 | 3296
2437 | 19075
2438 | 26840
2439 | 35084
2440 | 37143
2441 | 37952
2442 | 20483
2443 | 5237
2444 | 7409
2445 | 353
2446 | 3098
2447 | 35706
2448 | 2650
2449 | 18073
2450 | 34565
2451 | 26363
2452 | 11613
2453 | 12293
2454 | 32922
2455 | 12665
2456 | 1164
2457 | 17460
2458 | 23127
2459 | 11502
2460 | 27549
2461 | 15646
2462 | 24949
2463 | 22766
2464 | 26254
2465 | 8942
2466 | 19784
2467 | 10882
2468 | 8602
2469 | 1720
2470 | 33299
2471 | 34841
2472 | 5179
2473 | 32333
2474 | 19037
2475 | 16168
2476 | 39975
2477 | 21550
2478 | 10324
2479 | 39811
2480 | 39129
2481 | 16803
2482 | 35036
2483 | 39021
2484 | 7699
2485 | 38827
2486 | 36549
2487 | 14488
2488 | 29798
2489 | 7770
2490 | 22165
2491 | 17161
2492 | 29818
2493 | 26395
2494 | 36074
2495 | 39348
2496 | 58
2497 | 14736
2498 | 18875
2499 | 38731
2500 | 243
2501 | 26601
2502 | 3420
2503 | 19152
2504 | 14852
2505 | 3894
2506 | 12576
2507 | 10314
2508 | 4885
2509 | 29530
2510 | 14769
2511 | 10347
2512 | 14137
2513 | 24484
2514 | 18139
2515 | 36454
2516 | 33798
2517 | 10189
2518 | 17036
2519 | 10442
2520 | 2594
2521 | 8207
2522 | 28701
2523 | 5077
2524 | 36657
2525 | 3347
2526 | 36049
2527 | 6366
2528 | 2966
2529 | 37919
2530 | 29095
2531 | 8514
2532 | 2461
2533 | 35827
2534 | 12424
2535 | 38269
2536 | 34466
2537 | 24548
2538 | 30995
2539 | 30091
2540 | 28633
2541 | 6413
2542 | 36626
2543 | 31431
2544 | 28337
2545 | 36537
2546 | 39130
2547 | 35728
2548 | 38610
2549 | 2109
2550 | 22708
2551 | 3072
2552 | 2943
2553 | 18338
2554 | 16813
2555 | 5653
2556 | 2941
2557 | 26857
2558 | 19525
2559 | 6327
2560 | 35648
2561 | 23375
2562 | 13047
2563 | 25552
2564 | 22099
2565 | 23505
2566 | 39475
2567 | 22367
2568 | 10921
2569 | 18196
2570 | 32119
2571 | 11140
2572 | 17189
2573 | 29730
2574 | 39552
2575 | 11841
2576 | 8162
2577 | 8391
2578 | 34379
2579 | 35815
2580 | 11496
2581 | 17025
2582 | 12536
2583 | 14223
2584 | 31588
2585 | 19625
2586 | 39045
2587 | 7294
2588 | 23863
2589 | 8010
2590 | 15837
2591 | 16104
2592 | 1884
2593 | 32785
2594 | 10182
2595 | 18259
2596 | 15936
2597 | 29575
2598 | 30959
2599 | 3661
2600 | 3082
2601 | 25257
2602 | 19368
2603 | 11300
2604 | 24255
2605 | 14485
2606 | 22814
2607 | 14498
2608 | 20989
2609 | 9349
2610 | 9814
2611 | 5072
2612 | 10935
2613 | 28430
2614 | 20003
2615 | 37728
2616 | 20011
2617 | 18824
2618 | 8975
2619 | 39000
2620 | 38799
2621 | 33217
2622 | 32218
2623 | 16522
2624 | 37132
2625 | 3882
2626 | 9980
2627 | 32480
2628 | 29693
2629 | 22728
2630 | 9290
2631 | 20203
2632 | 32088
2633 | 24043
2634 | 32540
2635 | 33938
2636 | 39300
2637 | 30619
2638 | 1620
2639 | 11167
2640 | 25092
2641 | 18953
2642 | 23939
2643 | 8744
2644 | 22696
2645 | 13992
2646 | 1046
2647 | 11634
2648 | 4417
2649 | 12575
2650 | 18897
2651 | 10021
2652 | 29567
2653 | 34580
2654 | 28748
2655 | 14825
2656 | 26796
2657 | 8544
2658 | 32111
2659 | 15448
2660 | 16121
2661 | 14364
2662 | 20908
2663 | 37720
2664 | 20497
2665 | 31237
2666 | 32019
2667 | 881
2668 | 19789
2669 | 6667
2670 | 30280
2671 | 21313
2672 | 11876
2673 | 24372
2674 | 8329
2675 | 39262
2676 | 30374
2677 | 14905
2678 | 27421
2679 | 23553
2680 | 35015
2681 | 25133
2682 | 5760
2683 | 37381
2684 | 16553
2685 | 11915
2686 | 4955
2687 | 17960
2688 | 19580
2689 | 2387
2690 | 11736
2691 | 1869
2692 | 9954
2693 | 21864
2694 | 462
2695 | 17653
2696 | 16728
2697 | 24305
2698 | 34482
2699 | 7915
2700 | 30627
2701 | 3313
2702 | 38573
2703 | 2503
2704 | 17425
2705 | 19373
2706 | 26087
2707 | 36312
2708 | 2621
2709 | 21282
2710 | 34440
2711 | 4976
2712 | 4108
2713 | 24428
2714 | 2659
2715 | 25599
2716 | 5522
2717 | 39413
2718 | 10284
2719 | 22271
2720 | 997
2721 | 1632
2722 | 15517
2723 | 38750
2724 | 20149
2725 | 7001
2726 | 32542
2727 | 4846
2728 | 38556
2729 | 14560
2730 | 22044
2731 | 7925
2732 | 14540
2733 | 28557
2734 | 9008
2735 | 26458
2736 | 20488
2737 | 16099
2738 | 7998
2739 | 13041
2740 | 16192
2741 | 17473
2742 | 4840
2743 | 10357
2744 | 8547
2745 | 24757
2746 | 12888
2747 | 38017
2748 | 10327
2749 | 16367
2750 | 2739
2751 | 31484
2752 | 14691
2753 | 5535
2754 | 20641
2755 | 9719
2756 | 19716
2757 | 8195
2758 | 16059
2759 | 18498
2760 | 26364
2761 | 36416
2762 | 3255
2763 | 29358
2764 | 34253
2765 | 11649
2766 | 1970
2767 | 20336
2768 | 20549
2769 | 23308
2770 | 39308
2771 | 9319
2772 | 13896
2773 | 19594
2774 | 5648
2775 | 24156
2776 | 22816
2777 | 603
2778 | 14917
2779 | 7363
2780 | 16480
2781 | 31770
2782 | 36763
2783 | 28139
2784 | 33423
2785 | 26675
2786 | 3906
2787 | 13833
2788 | 32032
2789 | 8057
2790 | 19028
2791 | 29383
2792 | 17403
2793 | 37192
2794 | 31157
2795 | 29075
2796 | 18226
2797 | 33063
2798 | 23185
2799 | 35374
2800 | 20421
2801 | 34193
2802 | 35256
2803 | 24048
2804 | 36775
2805 | 25534
2806 | 4622
2807 | 18717
2808 | 28724
2809 | 21777
2810 | 9626
2811 | 7290
2812 | 28895
2813 | 5813
2814 | 8059
2815 | 6671
2816 | 13742
2817 | 15473
2818 | 31050
2819 | 26954
2820 | 7299
2821 | 38479
2822 | 10467
2823 | 25330
2824 | 35833
2825 | 39251
2826 | 7003
2827 | 22520
2828 | 35464
2829 | 37900
2830 | 33928
2831 | 4257
2832 | 31738
2833 | 35961
2834 | 11073
2835 | 10680
2836 | 7408
2837 | 7413
2838 | 14116
2839 | 1874
2840 | 31212
2841 | 26564
2842 | 11832
2843 | 37676
2844 | 9189
2845 | 6903
2846 | 6690
2847 | 4230
2848 | 10564
2849 | 32392
2850 | 36696
2851 | 3346
2852 | 36327
2853 | 3932
2854 | 24076
2855 | 36661
2856 | 19010
2857 | 5391
2858 | 20008
2859 | 4459
2860 | 19131
2861 | 6230
2862 | 1980
2863 | 12355
2864 | 33296
2865 | 9208
2866 | 27936
2867 | 503
2868 | 30028
2869 | 26785
2870 | 8489
2871 | 1370
2872 | 3562
2873 | 8210
2874 | 17668
2875 | 10904
2876 | 4251
2877 | 20726
2878 | 32766
2879 | 5521
2880 | 12160
2881 | 28057
2882 | 36138
2883 | 28769
2884 | 12731
2885 | 12671
2886 | 17532
2887 | 2952
2888 | 5787
2889 | 38586
2890 | 38690
2891 | 33709
2892 | 32030
2893 | 6306
2894 | 1780
2895 | 35386
2896 | 9084
2897 | 33017
2898 | 29682
2899 | 17889
2900 | 11083
2901 | 20433
2902 | 3724
2903 | 28495
2904 | 21017
2905 | 38230
2906 | 4637
2907 | 376
2908 | 10214
2909 | 3852
2910 | 9460
2911 | 14504
2912 | 21036
2913 | 17530
2914 | 15498
2915 | 18896
2916 | 23241
2917 | 19934
2918 | 27902
2919 | 4111
2920 | 31929
2921 | 11297
2922 | 29312
2923 | 31170
2924 | 13678
2925 | 21555
2926 | 29974
2927 | 3150
2928 | 28389
2929 | 19712
2930 | 6848
2931 | 14594
2932 | 38785
2933 | 39220
2934 | 5749
2935 | 17144
2936 | 32603
2937 | 14622
2938 | 32304
2939 | 3140
2940 | 39238
2941 | 29999
2942 | 11797
2943 | 33870
2944 | 32877
2945 | 8850
2946 | 17256
2947 | 12465
2948 | 28578
2949 | 6702
2950 | 36035
2951 | 18639
2952 | 34579
2953 | 26725
2954 | 25966
2955 | 39149
2956 | 1717
2957 | 5899
2958 | 24915
2959 | 30908
2960 | 34194
2961 | 9114
2962 | 15985
2963 | 4278
2964 | 12443
2965 | 11274
2966 | 15848
2967 | 36622
2968 | 36568
2969 | 36778
2970 | 19765
2971 | 4782
2972 | 1015
2973 | 33568
2974 | 35466
2975 | 37961
2976 | 12042
2977 | 27594
2978 | 35037
2979 | 29176
2980 | 1797
2981 | 29443
2982 | 23057
2983 | 11152
2984 | 892
2985 | 36893
2986 | 35453
2987 | 30843
2988 | 9233
2989 | 31995
2990 | 22841
2991 | 10053
2992 | 27357
2993 | 31121
2994 | 17150
2995 | 2473
2996 | 6617
2997 | 33278
2998 | 26846
2999 | 38204
3000 | 7078
3001 | 16971
3002 | 3330
3003 | 25809
3004 | 5803
3005 | 12147
3006 | 7181
3007 | 31013
3008 | 15685
3009 | 34777
3010 | 2174
3011 | 16160
3012 | 10209
3013 | 31508
3014 | 10766
3015 | 30900
3016 | 32929
3017 | 17856
3018 | 29181
3019 | 31951
3020 | 8133
3021 | 8165
3022 | 21163
3023 | 21545
3024 | 13475
3025 | 23615
3026 | 24405
3027 | 10455
3028 | 39563
3029 | 2340
3030 | 14932
3031 | 12290
3032 | 8381
3033 | 2444
3034 | 13129
3035 | 11301
3036 | 33812
3037 | 17644
3038 | 801
3039 | 13037
3040 | 180
3041 | 31433
3042 | 20458
3043 | 35565
3044 | 29432
3045 | 28199
3046 | 15199
3047 | 22784
3048 | 8230
3049 | 23207
3050 | 13653
3051 | 4558
3052 | 22677
3053 | 8920
3054 | 802
3055 | 16619
3056 | 17211
3057 | 22738
3058 | 33605
3059 | 9130
3060 | 36462
3061 | 20560
3062 | 27115
3063 | 35883
3064 | 37467
3065 | 27640
3066 | 29599
3067 | 29352
3068 | 38978
3069 | 22810
3070 | 17218
3071 | 17389
3072 | 639
3073 | 32905
3074 | 39874
3075 | 28439
3076 | 21998
3077 | 13203
3078 | 5591
3079 | 19478
3080 | 14967
3081 | 27547
3082 | 25041
3083 | 14526
3084 | 2046
3085 | 3525
3086 | 11278
3087 | 36729
3088 | 18805
3089 | 32891
3090 | 36849
3091 | 19700
3092 | 16096
3093 | 16824
3094 | 22057
3095 | 33665
3096 | 37478
3097 | 18339
3098 | 9184
3099 | 25774
3100 | 2782
3101 | 21052
3102 | 22803
3103 | 13409
3104 | 12373
3105 | 13010
3106 | 29674
3107 | 27249
3108 | 32441
3109 | 14809
3110 | 22807
3111 | 33352
3112 | 35898
3113 | 9182
3114 | 34031
3115 | 13260
3116 | 28980
3117 | 27639
3118 | 14840
3119 | 30574
3120 | 24565
3121 | 25192
3122 | 18382
3123 | 23692
3124 | 3828
3125 | 10916
3126 | 36020
3127 | 12372
3128 | 38711
3129 | 12383
3130 | 27499
3131 | 6098
3132 | 33695
3133 | 26977
3134 | 6949
3135 | 7083
3136 | 1173
3137 | 10663
3138 | 26477
3139 | 12049
3140 | 38382
3141 | 8141
3142 | 7542
3143 | 2750
3144 | 24505
3145 | 3178
3146 | 32868
3147 | 27051
3148 | 726
3149 | 24239
3150 | 22301
3151 | 12197
3152 | 20509
3153 | 25973
3154 | 32577
3155 | 17334
3156 | 8847
3157 | 3111
3158 | 9480
3159 | 25955
3160 | 14260
3161 | 1449
3162 | 241
3163 | 15172
3164 | 5671
3165 | 1444
3166 | 2189
3167 | 36889
3168 | 7771
3169 | 30703
3170 | 39301
3171 | 5894
3172 | 2692
3173 | 7457
3174 | 2676
3175 | 33962
3176 | 38644
3177 | 24411
3178 | 31677
3179 | 9416
3180 | 21481
3181 | 32789
3182 | 38833
3183 | 11707
3184 | 21292
3185 | 698
3186 | 18416
3187 | 28841
3188 | 3578
3189 | 7870
3190 | 13027
3191 | 20980
3192 | 11854
3193 | 38917
3194 | 5216
3195 | 35642
3196 | 33368
3197 | 39134
3198 | 16859
3199 | 34749
3200 | 9503
3201 | 32313
3202 | 13146
3203 | 9757
3204 | 9159
3205 | 923
3206 | 1
3207 | 8700
3208 | 20398
3209 | 38113
3210 | 18787
3211 | 24786
3212 | 25246
3213 | 28966
3214 | 38189
3215 | 11596
3216 | 32358
3217 | 2228
3218 | 8745
3219 | 23053
3220 | 5657
3221 | 21069
3222 | 36547
3223 | 39607
3224 | 18829
3225 | 24007
3226 | 32249
3227 | 36133
3228 | 10090
3229 | 25010
3230 | 13941
3231 | 20104
3232 | 11966
3233 | 39907
3234 | 2818
3235 | 8739
3236 | 17783
3237 | 35009
3238 | 55
3239 | 1305
3240 | 38414
3241 | 34054
3242 | 9837
3243 | 15665
3244 | 24211
3245 | 29366
3246 | 39960
3247 | 6521
3248 | 10190
3249 | 29870
3250 | 29936
3251 | 13115
3252 | 38416
3253 | 35967
3254 | 1990
3255 | 18477
3256 | 22687
3257 | 21485
3258 | 369
3259 | 20049
3260 | 24590
3261 | 2773
3262 | 13828
3263 | 12887
3264 | 11862
3265 | 9286
3266 | 5292
3267 | 17184
3268 | 30572
3269 | 8712
3270 | 22045
3271 | 7103
3272 | 31105
3273 | 10951
3274 | 17476
3275 | 638
3276 | 34509
3277 | 13423
3278 | 25756
3279 | 993
3280 | 6436
3281 | 5201
3282 | 28135
3283 | 13587
3284 | 30390
3285 | 14721
3286 | 17039
3287 | 25353
3288 | 34181
3289 | 10303
3290 | 14725
3291 | 15235
3292 | 31528
3293 | 5232
3294 | 39881
3295 | 3877
3296 | 36066
3297 | 25012
3298 | 33148
3299 | 3433
3300 | 14814
3301 | 24417
3302 | 10005
3303 | 22779
3304 | 4990
3305 | 3773
3306 | 8827
3307 | 1943
3308 | 25630
3309 | 32809
3310 | 18198
3311 | 6500
3312 | 36713
3313 | 39178
3314 | 30953
3315 | 25557
3316 | 3963
3317 | 15243
3318 | 27461
3319 | 6937
3320 | 7447
3321 | 24254
3322 | 14040
3323 | 34025
3324 | 24057
3325 | 36123
3326 | 17223
3327 | 26809
3328 | 26245
3329 | 24909
3330 | 9053
3331 | 2020
3332 | 3115
3333 | 23073
3334 | 28452
3335 | 21453
3336 | 5893
3337 | 28836
3338 | 33129
3339 | 32382
3340 | 21045
3341 | 19263
3342 | 906
3343 | 25627
3344 | 29515
3345 | 34921
3346 | 33826
3347 | 26616
3348 | 38839
3349 | 19857
3350 | 34221
3351 | 32372
3352 | 7831
3353 | 31551
3354 | 25416
3355 | 34984
3356 | 28473
3357 | 22583
3358 | 34432
3359 | 15281
3360 | 26313
3361 | 17100
3362 | 15581
3363 | 19288
3364 | 9429
3365 | 32492
3366 | 10836
3367 | 15847
3368 | 11012
3369 | 37725
3370 | 33190
3371 | 17723
3372 | 35640
3373 | 5934
3374 | 2575
3375 | 35047
3376 | 813
3377 | 33326
3378 | 38675
3379 | 3189
3380 | 19030
3381 | 19849
3382 | 17628
3383 | 33282
3384 | 25699
3385 | 14534
3386 | 34471
3387 | 4681
3388 | 12820
3389 | 20967
3390 | 20599
3391 | 29508
3392 | 29380
3393 | 31854
3394 | 27172
3395 | 37661
3396 | 24646
3397 | 15495
3398 | 20137
3399 | 35761
3400 | 10242
3401 | 1865
3402 | 20706
3403 | 12091
3404 | 26613
3405 | 11061
3406 | 9151
3407 | 12021
3408 | 16690
3409 | 11738
3410 | 15524
3411 | 11561
3412 | 3842
3413 | 19144
3414 | 20715
3415 | 28502
3416 | 10616
3417 | 6522
3418 | 3854
3419 | 5721
3420 | 20462
3421 | 31611
3422 | 10378
3423 | 17246
3424 | 4200
3425 | 19975
3426 | 231
3427 | 15163
3428 | 13421
3429 | 25739
3430 | 25472
3431 | 11934
3432 | 30384
3433 | 28867
3434 | 31570
3435 | 32918
3436 | 6309
3437 | 27634
3438 | 15825
3439 | 17048
3440 | 2063
3441 | 15297
3442 | 26167
3443 | 19865
3444 | 17053
3445 | 18813
3446 | 9234
3447 | 16982
3448 | 28665
3449 | 23460
3450 | 11828
3451 | 33555
3452 | 6204
3453 | 2472
3454 | 4287
3455 | 25769
3456 | 22558
3457 | 248
3458 | 35057
3459 | 28531
3460 | 12493
3461 | 4650
3462 | 21082
3463 | 2352
3464 | 34954
3465 | 13212
3466 | 32627
3467 | 28700
3468 | 10807
3469 | 12578
3470 | 29174
3471 | 39694
3472 | 16798
3473 | 7175
3474 | 21844
3475 | 4865
3476 | 18235
3477 | 27041
3478 | 37703
3479 | 23417
3480 | 32107
3481 | 38107
3482 | 15115
3483 | 34307
3484 | 27886
3485 | 34268
3486 | 26462
3487 | 30643
3488 | 11480
3489 | 37265
3490 | 23046
3491 | 1162
3492 | 10775
3493 | 12999
3494 | 7245
3495 | 3517
3496 | 15542
3497 | 4485
3498 | 3913
3499 | 35775
3500 | 26549
3501 | 28472
3502 | 28596
3503 | 16090
3504 | 32511
3505 | 6778
3506 | 32920
3507 | 12403
3508 | 20065
3509 | 35724
3510 | 38109
3511 | 37741
3512 | 37716
3513 | 6008
3514 | 38264
3515 | 23637
3516 | 32389
3517 | 29972
3518 | 193
3519 | 15126
3520 | 18381
3521 | 13416
3522 | 1051
3523 | 1642
3524 | 31027
3525 | 15142
3526 | 14755
3527 | 651
3528 | 15002
3529 | 28118
3530 | 9015
3531 | 30483
3532 | 571
3533 | 1950
3534 | 33234
3535 | 7926
3536 | 8996
3537 | 27744
3538 | 5457
3539 | 9082
3540 | 13537
3541 | 8834
3542 | 2866
3543 | 33209
3544 | 20823
3545 | 1135
3546 | 9427
3547 | 23025
3548 | 3473
3549 | 24202
3550 | 15912
3551 | 35243
3552 | 22051
3553 | 10750
3554 | 28173
3555 | 35456
3556 | 17941
3557 | 23833
3558 | 35059
3559 | 12212
3560 | 9731
3561 | 16544
3562 | 32491
3563 | 3468
3564 | 2426
3565 | 18640
3566 | 23949
3567 | 28001
3568 | 5259
3569 | 34883
3570 | 7135
3571 | 32276
3572 | 10835
3573 | 32227
3574 | 33766
3575 | 6047
3576 | 38853
3577 | 32663
3578 | 6168
3579 | 14612
3580 | 33463
3581 | 10315
3582 | 24918
3583 | 35322
3584 | 37048
3585 | 29838
3586 | 14233
3587 | 37019
3588 | 38491
3589 | 3439
3590 | 2516
3591 | 37852
3592 | 38263
3593 | 33980
3594 | 9357
3595 | 20456
3596 | 36837
3597 | 33753
3598 | 11868
3599 | 30716
3600 | 16047
3601 | 11870
3602 | 2766
3603 | 15835
3604 | 38258
3605 | 10274
3606 | 30733
3607 | 20923
3608 | 21586
3609 | 29122
3610 | 39281
3611 | 38376
3612 | 14093
3613 | 19393
3614 | 37993
3615 | 2985
3616 | 32897
3617 | 14495
3618 | 7948
3619 | 29862
3620 | 18744
3621 | 27967
3622 | 36264
3623 | 4281
3624 | 25496
3625 | 8268
3626 | 7892
3627 | 15401
3628 | 18898
3629 | 32640
3630 | 29421
3631 | 38843
3632 | 20184
3633 | 13954
3634 | 5506
3635 | 21462
3636 | 37484
3637 | 25341
3638 | 39211
3639 | 4751
3640 | 22669
3641 | 7478
3642 | 21122
3643 | 20657
3644 | 2719
3645 | 29628
3646 | 19276
3647 | 35285
3648 | 36974
3649 | 13246
3650 | 27785
3651 | 34159
3652 | 15722
3653 | 4860
3654 | 10627
3655 | 20554
3656 | 24713
3657 | 7762
3658 | 5645
3659 | 38618
3660 | 34603
3661 | 26367
3662 | 12990
3663 | 25896
3664 | 13106
3665 | 14795
3666 | 16028
3667 | 37749
3668 | 9921
3669 | 17719
3670 | 22449
3671 | 20718
3672 | 20004
3673 | 31796
3674 | 27394
3675 | 38166
3676 | 4998
3677 | 10129
3678 | 1223
3679 | 31537
3680 | 26939
3681 | 34998
3682 | 15361
3683 | 28260
3684 | 9715
3685 | 29175
3686 | 18827
3687 | 16463
3688 | 20875
3689 | 14890
3690 | 20534
3691 | 2808
3692 | 35501
3693 | 28828
3694 | 16146
3695 | 39088
3696 | 8024
3697 | 5288
3698 | 12722
3699 | 6619
3700 | 14900
3701 | 7617
3702 | 6962
3703 | 25904
3704 | 22025
3705 | 282
3706 | 30938
3707 | 26510
3708 | 12161
3709 | 35858
3710 | 19199
3711 | 1550
3712 | 27770
3713 | 8070
3714 | 8516
3715 | 5403
3716 | 35477
3717 | 28429
3718 | 25960
3719 | 8103
3720 | 22618
3721 | 24690
3722 | 28771
3723 | 8377
3724 | 20774
3725 | 4381
3726 | 17303
3727 | 31705
3728 | 13158
3729 | 32830
3730 | 18871
3731 | 32485
3732 | 24619
3733 | 32591
3734 | 36159
3735 | 27592
3736 | 15398
3737 | 2938
3738 | 31184
3739 | 2636
3740 | 10167
3741 | 36062
3742 | 17699
3743 | 20914
3744 | 6442
3745 | 16965
3746 | 34287
3747 | 21905
3748 | 37289
3749 | 32887
3750 | 4150
3751 | 27740
3752 | 28287
3753 | 28384
3754 | 33706
3755 | 30256
3756 | 18585
3757 | 35249
3758 | 314
3759 | 28561
3760 | 39180
3761 | 25859
3762 | 10377
3763 | 9849
3764 | 23544
3765 | 1055
3766 | 18588
3767 | 25884
3768 | 12925
3769 | 9070
3770 | 18962
3771 | 36707
3772 | 34449
3773 | 35952
3774 | 14506
3775 | 38620
3776 | 19318
3777 | 23961
3778 | 14773
3779 | 19947
3780 | 8755
3781 | 27470
3782 | 30087
3783 | 20772
3784 | 8653
3785 | 27047
3786 | 288
3787 | 32758
3788 | 17438
3789 | 13194
3790 | 28396
3791 | 20250
3792 | 32979
3793 | 29357
3794 | 18651
3795 | 30650
3796 | 35390
3797 | 9861
3798 | 1889
3799 | 30837
3800 | 31450
3801 | 4191
3802 | 30495
3803 | 17074
3804 | 758
3805 | 13870
3806 | 24419
3807 | 31972
3808 | 25493
3809 | 19950
3810 | 7392
3811 | 17363
3812 | 19617
3813 | 13631
3814 | 37974
3815 | 21816
3816 | 6074
3817 | 8968
3818 | 27943
3819 | 33196
3820 | 17577
3821 | 18800
3822 | 18353
3823 | 11683
3824 | 31098
3825 | 36087
3826 | 39416
3827 | 38940
3828 | 18137
3829 | 20620
3830 | 9960
3831 | 29852
3832 | 25423
3833 | 36287
3834 | 3076
3835 | 6329
3836 | 1144
3837 | 24129
3838 | 37570
3839 | 34142
3840 | 11233
3841 | 21892
3842 | 15515
3843 | 9913
3844 | 678
3845 | 6240
3846 | 39241
3847 | 12885
3848 | 9376
3849 | 20594
3850 | 24187
3851 | 11071
3852 | 7638
3853 | 20172
3854 | 13054
3855 | 16896
3856 | 21981
3857 | 19546
3858 | 2051
3859 | 13952
3860 | 6801
3861 | 955
3862 | 5607
3863 | 22769
3864 | 36811
3865 | 32439
3866 | 33089
3867 | 2571
3868 | 30664
3869 | 38567
3870 | 13035
3871 | 23870
3872 | 917
3873 | 31340
3874 | 7519
3875 | 34697
3876 | 2447
3877 | 35774
3878 | 35690
3879 | 11017
3880 | 17946
3881 | 23806
3882 | 36252
3883 | 6518
3884 | 3689
3885 | 22831
3886 | 38093
3887 | 23732
3888 | 32233
3889 | 22559
3890 | 27001
3891 | 22478
3892 | 9526
3893 | 12335
3894 | 2664
3895 | 15730
3896 | 30389
3897 | 6266
3898 | 19389
3899 | 37625
3900 | 27836
3901 | 16490
3902 | 14756
3903 | 12093
3904 | 36671
3905 | 13288
3906 | 2883
3907 | 12514
3908 | 20034
3909 | 27590
3910 | 19016
3911 | 1954
3912 | 5569
3913 | 11053
3914 | 31017
3915 | 18121
3916 | 17674
3917 | 17248
3918 | 37774
3919 | 23267
3920 | 8291
3921 | 27586
3922 | 28938
3923 | 17193
3924 | 30308
3925 | 3167
3926 | 6243
3927 | 31834
3928 | 21395
3929 | 1919
3930 | 21407
3931 | 27914
3932 | 13045
3933 | 39191
3934 | 14918
3935 | 127
3936 | 34585
3937 | 6967
3938 | 11039
3939 | 19139
3940 | 3839
3941 | 38170
3942 | 37647
3943 | 20314
3944 | 10961
3945 | 1795
3946 | 8776
3947 | 12864
3948 | 6537
3949 | 1577
3950 | 16199
3951 | 31060
3952 | 29648
3953 | 21980
3954 | 13611
3955 | 38724
3956 | 35332
3957 | 38192
3958 | 9252
3959 | 39351
3960 | 24578
3961 | 20130
3962 | 10304
3963 | 35115
3964 | 4802
3965 | 26323
3966 | 38849
3967 | 9583
3968 | 34044
3969 | 4410
3970 | 32350
3971 | 26101
3972 | 18096
3973 | 14264
3974 | 32757
3975 | 12519
3976 | 12707
3977 | 13685
3978 | 6171
3979 | 1333
3980 | 2538
3981 | 4038
3982 | 7837
3983 | 12190
3984 | 4051
3985 | 3169
3986 | 9931
3987 | 36414
3988 | 2496
3989 | 21347
3990 | 27569
3991 | 21285
3992 | 21846
3993 | 28155
3994 | 18751
3995 | 31555
3996 | 4338
3997 | 39406
3998 | 19720
3999 | 17018
4000 | 38688
4001 |
--------------------------------------------------------------------------------