├── .vs
│   ├── PythonSettings.json
│   ├── Pytorch-pixelRL
│   │   ├── FileContentIndex
│   │   │   ├── 59766ca0-a0b9-4cd4-bc3f-b93b4651b0b3.vsidx
│   │   │   └── read.lock
│   │   └── v17
│   │       └── .wsuo
│   ├── VSWorkspaceState.json
│   └── slnx.sqlite
├── BSD432.txt
├── BSD68.txt
├── BSD68
│   ├── test001.png
│   ├── test002.png
│   ├── test003.png
│   ├── test004.png
│   ├── test005.png
│   ├── test006.png
│   ├── test007.png
│   ├── test008.png
│   ├── test009.png
│   ├── test010.png
│   ├── test011.png
│   ├── test012.png
│   ├── test013.png
│   ├── test014.png
│   ├── test015.png
│   ├── test016.png
│   ├── test017.png
│   ├── test018.png
│   ├── test019.png
│   ├── test020.png
│   ├── test021.png
│   ├── test022.png
│   ├── test023.png
│   ├── test024.png
│   ├── test025.png
│   ├── test026.png
│   ├── test027.png
│   ├── test028.png
│   ├── test029.png
│   ├── test030.png
│   ├── test031.png
│   ├── test032.png
│   ├── test033.png
│   ├── test034.png
│   ├── test035.png
│   ├── test036.png
│   ├── test037.png
│   ├── test038.png
│   ├── test039.png
│   ├── test040.png
│   ├── test041.png
│   ├── test042.png
│   ├── test043.png
│   ├── test044.png
│   ├── test045.png
│   ├── test046.png
│   ├── test047.png
│   ├── test048.png
│   ├── test049.png
│   ├── test050.png
│   ├── test051.png
│   ├── test052.png
│   ├── test053.png
│   ├── test054.png
│   ├── test055.png
│   ├── test056.png
│   ├── test057.png
│   ├── test058.png
│   ├── test059.png
│   ├── test060.png
│   ├── test061.png
│   ├── test062.png
│   ├── test063.png
│   ├── test064.png
│   ├── test065.png
│   ├── test066.png
│   ├── test067.png
│   └── test068.png
├── FCN.py
├── LICENSE
├── README.md
├── State.py
├── Train_torch.py
├── Tst.py
├── action_map
│   ├── Figure_1.png
│   ├── Figure_2.png
│   ├── Figure_3.png
│   ├── Figure_4.png
│   └── Figure_5.png
├── caffe2pytorch.py
├── initial_weight
│   ├── zhang_cvpr17_denoise_15_gray.caffemodel
│   ├── zhang_cvpr17_denoise_25_gray.caffemodel
│   └── zhang_cvpr17_denoise_50_gray.caffemodel
├── mini_batch_loader.py
├── pixel_model
│   ├── chainer2pytorch.py
│   ├── pretrained_15.npz
│   ├── pretrained_25.npz
│   └── pretrained_50.npz
├── pixelwise_a3c.py
├── read_file.py
├── torch_initweight
│   ├── sig15_gray.pth
│   ├── sig25_gray.pth
│   └── sig50_gray.pth
└── torch_pixel_model
    ├── pixel_sig15_gray.pth
    ├── pixel_sig25_gray.pth
    └── pixel_sig50_gray.pth
/.vs/PythonSettings.json:
--------------------------------------------------------------------------------
1 | {
2 | "Interpreter": "CondaEnv|CondaEnv|pytorch"
3 | }
--------------------------------------------------------------------------------
/.vs/Pytorch-pixelRL/FileContentIndex/59766ca0-a0b9-4cd4-bc3f-b93b4651b0b3.vsidx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/.vs/Pytorch-pixelRL/FileContentIndex/59766ca0-a0b9-4cd4-bc3f-b93b4651b0b3.vsidx
--------------------------------------------------------------------------------
/.vs/Pytorch-pixelRL/FileContentIndex/read.lock:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/.vs/Pytorch-pixelRL/FileContentIndex/read.lock
--------------------------------------------------------------------------------
/.vs/Pytorch-pixelRL/v17/.wsuo:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/.vs/Pytorch-pixelRL/v17/.wsuo
--------------------------------------------------------------------------------
/.vs/VSWorkspaceState.json:
--------------------------------------------------------------------------------
1 | {
2 | "ExpandedNodes": [
3 | ""
4 | ],
5 | "SelectedNode": "\\State.py",
6 | "PreviewInSolutionExplorer": false
7 | }
--------------------------------------------------------------------------------
/.vs/slnx.sqlite:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/.vs/slnx.sqlite
--------------------------------------------------------------------------------
/BSD432.txt:
--------------------------------------------------------------------------------
1 | ./BSD432_color\100007.jpg
2 | ./BSD432_color\100039.jpg
3 | ./BSD432_color\100075.jpg
4 | ./BSD432_color\100080.jpg
5 | ./BSD432_color\100098.jpg
6 | ./BSD432_color\100099.jpg
7 | ./BSD432_color\10081.jpg
8 | ./BSD432_color\101027.jpg
9 | ./BSD432_color\101084.jpg
10 | ./BSD432_color\102062.jpg
11 | ./BSD432_color\103006.jpg
12 | ./BSD432_color\103029.jpg
13 | ./BSD432_color\103041.jpg
14 | ./BSD432_color\103078.jpg
15 | ./BSD432_color\104010.jpg
16 | ./BSD432_color\104022.jpg
17 | ./BSD432_color\104055.jpg
18 | ./BSD432_color\105019.jpg
19 | ./BSD432_color\105027.jpg
20 | ./BSD432_color\105053.jpg
21 | ./BSD432_color\106005.jpg
22 | ./BSD432_color\106020.jpg
23 | ./BSD432_color\106025.jpg
24 | ./BSD432_color\106047.jpg
25 | ./BSD432_color\107014.jpg
26 | ./BSD432_color\107045.jpg
27 | ./BSD432_color\107072.jpg
28 | ./BSD432_color\108004.jpg
29 | ./BSD432_color\108036.jpg
30 | ./BSD432_color\108041.jpg
31 | ./BSD432_color\108069.jpg
32 | ./BSD432_color\108073.jpg
33 | ./BSD432_color\109034.jpg
34 | ./BSD432_color\109055.jpg
35 | ./BSD432_color\112056.jpg
36 | ./BSD432_color\112082.jpg
37 | ./BSD432_color\112090.jpg
38 | ./BSD432_color\113009.jpg
39 | ./BSD432_color\113016.jpg
40 | ./BSD432_color\113044.jpg
41 | ./BSD432_color\117025.jpg
42 | ./BSD432_color\117054.jpg
43 | ./BSD432_color\118015.jpg
44 | ./BSD432_color\118020.jpg
45 | ./BSD432_color\118031.jpg
46 | ./BSD432_color\118035.jpg
47 | ./BSD432_color\118072.jpg
48 | ./BSD432_color\120003.jpg
49 | ./BSD432_color\12003.jpg
50 | ./BSD432_color\120093.jpg
51 | ./BSD432_color\12074.jpg
52 | ./BSD432_color\122048.jpg
53 | ./BSD432_color\123057.jpg
54 | ./BSD432_color\124084.jpg
55 | ./BSD432_color\126039.jpg
56 | ./BSD432_color\128035.jpg
57 | ./BSD432_color\130014.jpg
58 | ./BSD432_color\130034.jpg
59 | ./BSD432_color\130066.jpg
60 | ./BSD432_color\134008.jpg
61 | ./BSD432_color\134049.jpg
62 | ./BSD432_color\134052.jpg
63 | ./BSD432_color\134067.jpg
64 | ./BSD432_color\135037.jpg
65 | ./BSD432_color\135069.jpg
66 | ./BSD432_color\138032.jpg
67 | ./BSD432_color\138078.jpg
68 | ./BSD432_color\140006.jpg
69 | ./BSD432_color\140055.jpg
70 | ./BSD432_color\140075.jpg
71 | ./BSD432_color\140088.jpg
72 | ./BSD432_color\14085.jpg
73 | ./BSD432_color\14092.jpg
74 | ./BSD432_color\141012.jpg
75 | ./BSD432_color\141048.jpg
76 | ./BSD432_color\144067.jpg
77 | ./BSD432_color\145014.jpg
78 | ./BSD432_color\145053.jpg
79 | ./BSD432_color\145059.jpg
80 | ./BSD432_color\145079.jpg
81 | ./BSD432_color\146074.jpg
82 | ./BSD432_color\147021.jpg
83 | ./BSD432_color\147062.jpg
84 | ./BSD432_color\147077.jpg
85 | ./BSD432_color\147080.jpg
86 | ./BSD432_color\15004.jpg
87 | ./BSD432_color\15011.jpg
88 | ./BSD432_color\15062.jpg
89 | ./BSD432_color\15088.jpg
90 | ./BSD432_color\151087.jpg
91 | ./BSD432_color\153077.jpg
92 | ./BSD432_color\153093.jpg
93 | ./BSD432_color\155060.jpg
94 | ./BSD432_color\156054.jpg
95 | ./BSD432_color\156065.jpg
96 | ./BSD432_color\156079.jpg
97 | ./BSD432_color\157032.jpg
98 | ./BSD432_color\157036.jpg
99 | ./BSD432_color\157087.jpg
100 | ./BSD432_color\159002.jpg
101 | ./BSD432_color\159022.jpg
102 | ./BSD432_color\159029.jpg
103 | ./BSD432_color\159045.jpg
104 | ./BSD432_color\159091.jpg
105 | ./BSD432_color\160006.jpg
106 | ./BSD432_color\16004.jpg
107 | ./BSD432_color\160067.jpg
108 | ./BSD432_color\16052.jpg
109 | ./BSD432_color\16068.jpg
110 | ./BSD432_color\161045.jpg
111 | ./BSD432_color\161062.jpg
112 | ./BSD432_color\163004.jpg
113 | ./BSD432_color\163014.jpg
114 | ./BSD432_color\163062.jpg
115 | ./BSD432_color\163096.jpg
116 | ./BSD432_color\164046.jpg
117 | ./BSD432_color\164074.jpg
118 | ./BSD432_color\166081.jpg
119 | ./BSD432_color\168084.jpg
120 | ./BSD432_color\169012.jpg
121 | ./BSD432_color\170054.jpg
122 | ./BSD432_color\17067.jpg
123 | ./BSD432_color\172032.jpg
124 | ./BSD432_color\173036.jpg
125 | ./BSD432_color\175083.jpg
126 | ./BSD432_color\176019.jpg
127 | ./BSD432_color\176035.jpg
128 | ./BSD432_color\176039.jpg
129 | ./BSD432_color\176051.jpg
130 | ./BSD432_color\178054.jpg
131 | ./BSD432_color\179084.jpg
132 | ./BSD432_color\181018.jpg
133 | ./BSD432_color\181021.jpg
134 | ./BSD432_color\181079.jpg
135 | ./BSD432_color\181091.jpg
136 | ./BSD432_color\183055.jpg
137 | ./BSD432_color\183066.jpg
138 | ./BSD432_color\183087.jpg
139 | ./BSD432_color\185092.jpg
140 | ./BSD432_color\187003.jpg
141 | ./BSD432_color\187029.jpg
142 | ./BSD432_color\187039.jpg
143 | ./BSD432_color\187058.jpg
144 | ./BSD432_color\187071.jpg
145 | ./BSD432_color\187083.jpg
146 | ./BSD432_color\187099.jpg
147 | ./BSD432_color\188005.jpg
148 | ./BSD432_color\188025.jpg
149 | ./BSD432_color\188063.jpg
150 | ./BSD432_color\188091.jpg
151 | ./BSD432_color\189003.jpg
152 | ./BSD432_color\189006.jpg
153 | ./BSD432_color\189011.jpg
154 | ./BSD432_color\189013.jpg
155 | ./BSD432_color\189029.jpg
156 | ./BSD432_color\189096.jpg
157 | ./BSD432_color\196015.jpg
158 | ./BSD432_color\196027.jpg
159 | ./BSD432_color\196040.jpg
160 | ./BSD432_color\196062.jpg
161 | ./BSD432_color\196088.jpg
162 | ./BSD432_color\198004.jpg
163 | ./BSD432_color\198023.jpg
164 | ./BSD432_color\198054.jpg
165 | ./BSD432_color\198087.jpg
166 | ./BSD432_color\20008.jpg
167 | ./BSD432_color\20069.jpg
168 | ./BSD432_color\201080.jpg
169 | ./BSD432_color\2018.jpg
170 | ./BSD432_color\202000.jpg
171 | ./BSD432_color\202012.jpg
172 | ./BSD432_color\206062.jpg
173 | ./BSD432_color\206097.jpg
174 | ./BSD432_color\207038.jpg
175 | ./BSD432_color\207049.jpg
176 | ./BSD432_color\207056.jpg
177 | ./BSD432_color\208078.jpg
178 | ./BSD432_color\209021.jpg
179 | ./BSD432_color\209070.jpg
180 | ./BSD432_color\2092.jpg
181 | ./BSD432_color\216041.jpg
182 | ./BSD432_color\216053.jpg
183 | ./BSD432_color\216066.jpg
184 | ./BSD432_color\217013.jpg
185 | ./BSD432_color\217090.jpg
186 | ./BSD432_color\220003.jpg
187 | ./BSD432_color\22013.jpg
188 | ./BSD432_color\22090.jpg
189 | ./BSD432_color\22093.jpg
190 | ./BSD432_color\223004.jpg
191 | ./BSD432_color\223060.jpg
192 | ./BSD432_color\225017.jpg
193 | ./BSD432_color\225022.jpg
194 | ./BSD432_color\226022.jpg
195 | ./BSD432_color\226033.jpg
196 | ./BSD432_color\226043.jpg
197 | ./BSD432_color\226060.jpg
198 | ./BSD432_color\227040.jpg
199 | ./BSD432_color\227046.jpg
200 | ./BSD432_color\228076.jpg
201 | ./BSD432_color\230063.jpg
202 | ./BSD432_color\230098.jpg
203 | ./BSD432_color\23025.jpg
204 | ./BSD432_color\23050.jpg
205 | ./BSD432_color\23080.jpg
206 | ./BSD432_color\23084.jpg
207 | ./BSD432_color\231015.jpg
208 | ./BSD432_color\232038.jpg
209 | ./BSD432_color\232076.jpg
210 | ./BSD432_color\235098.jpg
211 | ./BSD432_color\236017.jpg
212 | ./BSD432_color\238011.jpg
213 | ./BSD432_color\238025.jpg
214 | ./BSD432_color\239007.jpg
215 | ./BSD432_color\239096.jpg
216 | ./BSD432_color\24004.jpg
217 | ./BSD432_color\24063.jpg
218 | ./BSD432_color\242078.jpg
219 | ./BSD432_color\243095.jpg
220 | ./BSD432_color\245051.jpg
221 | ./BSD432_color\246009.jpg
222 | ./BSD432_color\246016.jpg
223 | ./BSD432_color\246053.jpg
224 | ./BSD432_color\247003.jpg
225 | ./BSD432_color\247012.jpg
226 | ./BSD432_color\247085.jpg
227 | ./BSD432_color\249021.jpg
228 | ./BSD432_color\249061.jpg
229 | ./BSD432_color\249087.jpg
230 | ./BSD432_color\250047.jpg
231 | ./BSD432_color\250087.jpg
232 | ./BSD432_color\25098.jpg
233 | ./BSD432_color\253016.jpg
234 | ./BSD432_color\253036.jpg
235 | ./BSD432_color\253092.jpg
236 | ./BSD432_color\254033.jpg
237 | ./BSD432_color\254054.jpg
238 | ./BSD432_color\257098.jpg
239 | ./BSD432_color\258089.jpg
240 | ./BSD432_color\259060.jpg
241 | ./BSD432_color\260081.jpg
242 | ./BSD432_color\26031.jpg
243 | ./BSD432_color\267036.jpg
244 | ./BSD432_color\268002.jpg
245 | ./BSD432_color\268048.jpg
246 | ./BSD432_color\268074.jpg
247 | ./BSD432_color\27059.jpg
248 | ./BSD432_color\271008.jpg
249 | ./BSD432_color\271031.jpg
250 | ./BSD432_color\274007.jpg
251 | ./BSD432_color\277053.jpg
252 | ./BSD432_color\277095.jpg
253 | ./BSD432_color\279005.jpg
254 | ./BSD432_color\28075.jpg
255 | ./BSD432_color\28083.jpg
256 | ./BSD432_color\28096.jpg
257 | ./BSD432_color\281017.jpg
258 | ./BSD432_color\285022.jpg
259 | ./BSD432_color\285036.jpg
260 | ./BSD432_color\286092.jpg
261 | ./BSD432_color\288024.jpg
262 | ./BSD432_color\289011.jpg
263 | ./BSD432_color\290035.jpg
264 | ./BSD432_color\29030.jpg
265 | ./BSD432_color\292066.jpg
266 | ./BSD432_color\293029.jpg
267 | ./BSD432_color\296028.jpg
268 | ./BSD432_color\296058.jpg
269 | ./BSD432_color\299091.jpg
270 | ./BSD432_color\301007.jpg
271 | ./BSD432_color\302003.jpg
272 | ./BSD432_color\302022.jpg
273 | ./BSD432_color\306051.jpg
274 | ./BSD432_color\306052.jpg
275 | ./BSD432_color\3063.jpg
276 | ./BSD432_color\309004.jpg
277 | ./BSD432_color\309040.jpg
278 | ./BSD432_color\310007.jpg
279 | ./BSD432_color\311068.jpg
280 | ./BSD432_color\311081.jpg
281 | ./BSD432_color\314016.jpg
282 | ./BSD432_color\317043.jpg
283 | ./BSD432_color\317080.jpg
284 | ./BSD432_color\323016.jpg
285 | ./BSD432_color\326025.jpg
286 | ./BSD432_color\326038.jpg
287 | ./BSD432_color\326085.jpg
288 | ./BSD432_color\33044.jpg
289 | ./BSD432_color\33066.jpg
290 | ./BSD432_color\334025.jpg
291 | ./BSD432_color\335088.jpg
292 | ./BSD432_color\335094.jpg
293 | ./BSD432_color\344010.jpg
294 | ./BSD432_color\346016.jpg
295 | ./BSD432_color\347031.jpg
296 | ./BSD432_color\35008.jpg
297 | ./BSD432_color\35010.jpg
298 | ./BSD432_color\35028.jpg
299 | ./BSD432_color\35049.jpg
300 | ./BSD432_color\35058.jpg
301 | ./BSD432_color\35070.jpg
302 | ./BSD432_color\35091.jpg
303 | ./BSD432_color\353013.jpg
304 | ./BSD432_color\36046.jpg
305 | ./BSD432_color\361010.jpg
306 | ./BSD432_color\361084.jpg
307 | ./BSD432_color\365025.jpg
308 | ./BSD432_color\365072.jpg
309 | ./BSD432_color\365073.jpg
310 | ./BSD432_color\368016.jpg
311 | ./BSD432_color\368037.jpg
312 | ./BSD432_color\368078.jpg
313 | ./BSD432_color\370036.jpg
314 | ./BSD432_color\37073.jpg
315 | ./BSD432_color\372019.jpg
316 | ./BSD432_color\372047.jpg
317 | ./BSD432_color\374020.jpg
318 | ./BSD432_color\374067.jpg
319 | ./BSD432_color\376001.jpg
320 | ./BSD432_color\376020.jpg
321 | ./BSD432_color\376043.jpg
322 | ./BSD432_color\376086.jpg
323 | ./BSD432_color\38082.jpg
324 | ./BSD432_color\38092.jpg
325 | ./BSD432_color\384022.jpg
326 | ./BSD432_color\384089.jpg
327 | ./BSD432_color\385022.jpg
328 | ./BSD432_color\385028.jpg
329 | ./BSD432_color\385039.jpg
330 | ./BSD432_color\388006.jpg
331 | ./BSD432_color\388016.jpg
332 | ./BSD432_color\388018.jpg
333 | ./BSD432_color\388067.jpg
334 | ./BSD432_color\393035.jpg
335 | ./BSD432_color\41004.jpg
336 | ./BSD432_color\41006.jpg
337 | ./BSD432_color\41025.jpg
338 | ./BSD432_color\41029.jpg
339 | ./BSD432_color\41033.jpg
340 | ./BSD432_color\41069.jpg
341 | ./BSD432_color\41085.jpg
342 | ./BSD432_color\41096.jpg
343 | ./BSD432_color\42012.jpg
344 | ./BSD432_color\42044.jpg
345 | ./BSD432_color\42049.jpg
346 | ./BSD432_color\42078.jpg
347 | ./BSD432_color\43033.jpg
348 | ./BSD432_color\43051.jpg
349 | ./BSD432_color\43070.jpg
350 | ./BSD432_color\43074.jpg
351 | ./BSD432_color\43083.jpg
352 | ./BSD432_color\45000.jpg
353 | ./BSD432_color\45077.jpg
354 | ./BSD432_color\45096.jpg
355 | ./BSD432_color\46076.jpg
356 | ./BSD432_color\48017.jpg
357 | ./BSD432_color\48025.jpg
358 | ./BSD432_color\48055.jpg
359 | ./BSD432_color\49024.jpg
360 | ./BSD432_color\5096.jpg
361 | ./BSD432_color\51084.jpg
362 | ./BSD432_color\54005.jpg
363 | ./BSD432_color\54082.jpg
364 | ./BSD432_color\55067.jpg
365 | ./BSD432_color\55073.jpg
366 | ./BSD432_color\55075.jpg
367 | ./BSD432_color\56028.jpg
368 | ./BSD432_color\58060.jpg
369 | ./BSD432_color\59078.jpg
370 | ./BSD432_color\60079.jpg
371 | ./BSD432_color\6046.jpg
372 | ./BSD432_color\61034.jpg
373 | ./BSD432_color\61060.jpg
374 | ./BSD432_color\61086.jpg
375 | ./BSD432_color\62096.jpg
376 | ./BSD432_color\64061.jpg
377 | ./BSD432_color\65010.jpg
378 | ./BSD432_color\65019.jpg
379 | ./BSD432_color\65033.jpg
380 | ./BSD432_color\65074.jpg
381 | ./BSD432_color\65084.jpg
382 | ./BSD432_color\65132.jpg
383 | ./BSD432_color\66039.jpg
384 | ./BSD432_color\66053.jpg
385 | ./BSD432_color\66075.jpg
386 | ./BSD432_color\67079.jpg
387 | ./BSD432_color\68077.jpg
388 | ./BSD432_color\69000.jpg
389 | ./BSD432_color\69007.jpg
390 | ./BSD432_color\69015.jpg
391 | ./BSD432_color\69020.jpg
392 | ./BSD432_color\69022.jpg
393 | ./BSD432_color\69040.jpg
394 | ./BSD432_color\70011.jpg
395 | ./BSD432_color\70090.jpg
396 | ./BSD432_color\71046.jpg
397 | ./BSD432_color\71076.jpg
398 | ./BSD432_color\71099.jpg
399 | ./BSD432_color\76002.jpg
400 | ./BSD432_color\76053.jpg
401 | ./BSD432_color\77062.jpg
402 | ./BSD432_color\78004.jpg
403 | ./BSD432_color\78019.jpg
404 | ./BSD432_color\78098.jpg
405 | ./BSD432_color\79073.jpg
406 | ./BSD432_color\80085.jpg
407 | ./BSD432_color\80090.jpg
408 | ./BSD432_color\80099.jpg
409 | ./BSD432_color\8023.jpg
410 | ./BSD432_color\8049.jpg
411 | ./BSD432_color\8068.jpg
412 | ./BSD432_color\81066.jpg
413 | ./BSD432_color\81090.jpg
414 | ./BSD432_color\81095.jpg
415 | ./BSD432_color\8143.jpg
416 | ./BSD432_color\85048.jpg
417 | ./BSD432_color\86000.jpg
418 | ./BSD432_color\86016.jpg
419 | ./BSD432_color\86068.jpg
420 | ./BSD432_color\87015.jpg
421 | ./BSD432_color\87046.jpg
422 | ./BSD432_color\87065.jpg
423 | ./BSD432_color\89072.jpg
424 | ./BSD432_color\90076.jpg
425 | ./BSD432_color\92014.jpg
426 | ./BSD432_color\92059.jpg
427 | ./BSD432_color\94079.jpg
428 | ./BSD432_color\94095.jpg
429 | ./BSD432_color\95006.jpg
430 | ./BSD432_color\97010.jpg
431 | ./BSD432_color\97017.jpg
432 | ./BSD432_color\97033.jpg
433 |
--------------------------------------------------------------------------------
/BSD68.txt:
--------------------------------------------------------------------------------
1 | ./BSD68\test001.png
2 | ./BSD68\test002.png
3 | ./BSD68\test003.png
4 | ./BSD68\test004.png
5 | ./BSD68\test005.png
6 | ./BSD68\test006.png
7 | ./BSD68\test007.png
8 | ./BSD68\test008.png
9 | ./BSD68\test009.png
10 | ./BSD68\test010.png
11 | ./BSD68\test011.png
12 | ./BSD68\test012.png
13 | ./BSD68\test013.png
14 | ./BSD68\test014.png
15 | ./BSD68\test015.png
16 | ./BSD68\test016.png
17 | ./BSD68\test017.png
18 | ./BSD68\test018.png
19 | ./BSD68\test019.png
20 | ./BSD68\test020.png
21 | ./BSD68\test021.png
22 | ./BSD68\test022.png
23 | ./BSD68\test023.png
24 | ./BSD68\test024.png
25 | ./BSD68\test025.png
26 | ./BSD68\test026.png
27 | ./BSD68\test027.png
28 | ./BSD68\test028.png
29 | ./BSD68\test029.png
30 | ./BSD68\test030.png
31 | ./BSD68\test031.png
32 | ./BSD68\test032.png
33 | ./BSD68\test033.png
34 | ./BSD68\test034.png
35 | ./BSD68\test035.png
36 | ./BSD68\test036.png
37 | ./BSD68\test037.png
38 | ./BSD68\test038.png
39 | ./BSD68\test039.png
40 | ./BSD68\test040.png
41 | ./BSD68\test041.png
42 | ./BSD68\test042.png
43 | ./BSD68\test043.png
44 | ./BSD68\test044.png
45 | ./BSD68\test045.png
46 | ./BSD68\test046.png
47 | ./BSD68\test047.png
48 | ./BSD68\test048.png
49 | ./BSD68\test049.png
50 | ./BSD68\test050.png
51 | ./BSD68\test051.png
52 | ./BSD68\test052.png
53 | ./BSD68\test053.png
54 | ./BSD68\test054.png
55 | ./BSD68\test055.png
56 | ./BSD68\test056.png
57 | ./BSD68\test057.png
58 | ./BSD68\test058.png
59 | ./BSD68\test059.png
60 | ./BSD68\test060.png
61 | ./BSD68\test061.png
62 | ./BSD68\test062.png
63 | ./BSD68\test063.png
64 | ./BSD68\test064.png
65 | ./BSD68\test065.png
66 | ./BSD68\test066.png
67 | ./BSD68\test067.png
68 | ./BSD68\test068.png
69 |
--------------------------------------------------------------------------------
/BSD68/test001.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test001.png
--------------------------------------------------------------------------------
/BSD68/test002.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test002.png
--------------------------------------------------------------------------------
/BSD68/test003.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test003.png
--------------------------------------------------------------------------------
/BSD68/test004.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test004.png
--------------------------------------------------------------------------------
/BSD68/test005.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test005.png
--------------------------------------------------------------------------------
/BSD68/test006.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test006.png
--------------------------------------------------------------------------------
/BSD68/test007.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test007.png
--------------------------------------------------------------------------------
/BSD68/test008.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test008.png
--------------------------------------------------------------------------------
/BSD68/test009.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test009.png
--------------------------------------------------------------------------------
/BSD68/test010.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test010.png
--------------------------------------------------------------------------------
/BSD68/test011.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test011.png
--------------------------------------------------------------------------------
/BSD68/test012.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test012.png
--------------------------------------------------------------------------------
/BSD68/test013.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test013.png
--------------------------------------------------------------------------------
/BSD68/test014.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test014.png
--------------------------------------------------------------------------------
/BSD68/test015.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test015.png
--------------------------------------------------------------------------------
/BSD68/test016.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test016.png
--------------------------------------------------------------------------------
/BSD68/test017.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test017.png
--------------------------------------------------------------------------------
/BSD68/test018.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test018.png
--------------------------------------------------------------------------------
/BSD68/test019.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test019.png
--------------------------------------------------------------------------------
/BSD68/test020.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test020.png
--------------------------------------------------------------------------------
/BSD68/test021.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test021.png
--------------------------------------------------------------------------------
/BSD68/test022.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test022.png
--------------------------------------------------------------------------------
/BSD68/test023.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test023.png
--------------------------------------------------------------------------------
/BSD68/test024.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test024.png
--------------------------------------------------------------------------------
/BSD68/test025.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test025.png
--------------------------------------------------------------------------------
/BSD68/test026.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test026.png
--------------------------------------------------------------------------------
/BSD68/test027.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test027.png
--------------------------------------------------------------------------------
/BSD68/test028.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test028.png
--------------------------------------------------------------------------------
/BSD68/test029.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test029.png
--------------------------------------------------------------------------------
/BSD68/test030.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test030.png
--------------------------------------------------------------------------------
/BSD68/test031.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test031.png
--------------------------------------------------------------------------------
/BSD68/test032.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test032.png
--------------------------------------------------------------------------------
/BSD68/test033.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test033.png
--------------------------------------------------------------------------------
/BSD68/test034.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test034.png
--------------------------------------------------------------------------------
/BSD68/test035.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test035.png
--------------------------------------------------------------------------------
/BSD68/test036.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test036.png
--------------------------------------------------------------------------------
/BSD68/test037.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test037.png
--------------------------------------------------------------------------------
/BSD68/test038.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test038.png
--------------------------------------------------------------------------------
/BSD68/test039.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test039.png
--------------------------------------------------------------------------------
/BSD68/test040.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test040.png
--------------------------------------------------------------------------------
/BSD68/test041.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test041.png
--------------------------------------------------------------------------------
/BSD68/test042.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test042.png
--------------------------------------------------------------------------------
/BSD68/test043.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test043.png
--------------------------------------------------------------------------------
/BSD68/test044.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test044.png
--------------------------------------------------------------------------------
/BSD68/test045.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test045.png
--------------------------------------------------------------------------------
/BSD68/test046.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test046.png
--------------------------------------------------------------------------------
/BSD68/test047.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test047.png
--------------------------------------------------------------------------------
/BSD68/test048.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test048.png
--------------------------------------------------------------------------------
/BSD68/test049.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test049.png
--------------------------------------------------------------------------------
/BSD68/test050.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test050.png
--------------------------------------------------------------------------------
/BSD68/test051.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test051.png
--------------------------------------------------------------------------------
/BSD68/test052.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test052.png
--------------------------------------------------------------------------------
/BSD68/test053.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test053.png
--------------------------------------------------------------------------------
/BSD68/test054.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test054.png
--------------------------------------------------------------------------------
/BSD68/test055.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test055.png
--------------------------------------------------------------------------------
/BSD68/test056.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test056.png
--------------------------------------------------------------------------------
/BSD68/test057.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test057.png
--------------------------------------------------------------------------------
/BSD68/test058.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test058.png
--------------------------------------------------------------------------------
/BSD68/test059.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test059.png
--------------------------------------------------------------------------------
/BSD68/test060.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test060.png
--------------------------------------------------------------------------------
/BSD68/test061.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test061.png
--------------------------------------------------------------------------------
/BSD68/test062.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test062.png
--------------------------------------------------------------------------------
/BSD68/test063.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test063.png
--------------------------------------------------------------------------------
/BSD68/test064.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test064.png
--------------------------------------------------------------------------------
/BSD68/test065.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test065.png
--------------------------------------------------------------------------------
/BSD68/test066.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test066.png
--------------------------------------------------------------------------------
/BSD68/test067.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test067.png
--------------------------------------------------------------------------------
/BSD68/test068.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/BSD68/test068.png
--------------------------------------------------------------------------------
/FCN.py:
--------------------------------------------------------------------------------
1 |
2 | import torch.nn.functional as F
3 | import torch
4 | import torch.nn as nn
5 | import math
6 | import torch.optim as optim
7 | torch.manual_seed(1)
8 |
class PPO(nn.Module):
    """Fully convolutional actor-critic network for pixelRL with a ConvGRU.

    The input tensor stacks the grayscale image in channel 0 with the 64
    recurrent hidden-state planes in the last 64 channels (see ``pi_and_v``).
    """
    def __init__(self, Action_N):
        # Action_N: number of per-pixel actions emitted by the policy head.
        super(PPO, self).__init__()
        self.action_n = Action_N
        # Shared encoder: a plain 3x3 conv followed by dilated 3x3 convs
        # (dilation 2, 3, 4) to enlarge the receptive field without pooling.
        self.conv = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=True),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(2, 2), dilation=2, bias=True),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(3, 3), dilation=3, bias=True),
            nn.ReLU(),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(4, 4), dilation=4, bias=True),
            nn.ReLU(),
        )
        # Policy branch: two dilated convs, then an action_n-channel head.
        self.diconv1_p = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(3, 3), dilation=3,
                                   bias=True)
        self.diconv2_p = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(2, 2), dilation=2,
                                   bias=True)
        self.policy = nn.Conv2d(in_channels=64, out_channels=self.action_n, kernel_size=3, stride=1, padding=(1, 1), bias=True)

        # Value branch: mirrors the policy branch, with a 1-channel head.
        self.diconv1_v = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(3, 3), dilation=3,
                                   bias=True)
        self.diconv2_v = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(2, 2), dilation=2,
                                   bias=True)
        self.value = nn.Conv2d(in_channels=64, out_channels=1, kernel_size=3, stride=1, padding=(1, 1), bias=True)

        # ConvGRU gates: W* act on the new features, U* on the previous hidden
        # state (z = update gate, r = reset gate, W/U = candidate state).
        self.conv7_Wz = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)
        self.conv7_Uz = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)
        self.conv7_Wr = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)
        self.conv7_Ur = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)
        self.conv7_W = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)
        self.conv7_U = nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=(1, 1), bias=False)

        # NOTE: the order of these .apply calls determines the RNG draw order
        # and therefore the exact initial weights — do not reorder.
        self.conv.apply(self.weight_init)
        self.diconv1_p.apply(self.weight_init)
        self.diconv2_p.apply(self.weight_init)
        self.policy.apply(self.weight_init)
        self.diconv1_v.apply(self.weight_init)
        self.diconv2_v.apply(self.weight_init)
        self.value.apply(self.weight_init)
        self.conv7_Wz.apply(self.weight_init)
        self.conv7_Uz.apply(self.weight_init)
        self.conv7_Wr.apply(self.weight_init)
        self.conv7_Ur.apply(self.weight_init)
        self.conv7_W.apply(self.weight_init)
        self.conv7_U.apply(self.weight_init)

    def weight_init(self, m):
        """He-style (fan-out) normal init for convs; zeroes conv biases.

        Linear layers get N(0, 0.01) weights and all-ones biases (the ones
        bias is unusual — presumably intentional; confirm before changing).
        """
        classname = m.__class__.__name__
        if classname.find("Conv2d") != -1:
            # n = fan-out of the conv; std = sqrt(2/n) as in He et al. (2015).
            n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
            m.weight.data.normal_(0, math.sqrt(2. / n))
            if m.bias is not None:
                m.bias.data.zero_()
        elif classname.find('Linear') != -1:
            m.weight.data.normal_(0, 0.01)
            m.bias.data = torch.ones(m.bias.data.size())

    def pi_and_v(self, x):
        """Compute the per-pixel policy, value map and new hidden state.

        x: (B, 1 + 64, H, W) — channel 0 is the image, the trailing 64
        channels are the previous ConvGRU hidden state.
        Returns (policy, value, h_t): softmax probabilities
        (B, action_n, H, W), values (B, 1, H, W), hidden state (B, 64, H, W).
        """
        conv = self.conv(x[:,0:1,:,:])
        p = self.diconv1_p(conv)
        p = F.relu(p)
        p = self.diconv2_p(p)
        p = F.relu(p)
        GRU_in = p
        ht = x[:, -64:, :, :]
        # ConvGRU cell; h_title_t is the candidate state ("h tilde").
        z_t = torch.sigmoid(self.conv7_Wz(GRU_in) + self.conv7_Uz(ht))
        r_t = torch.sigmoid(self.conv7_Wr(GRU_in) + self.conv7_Ur(ht))
        h_title_t = torch.tanh(self.conv7_W(GRU_in) + self.conv7_U(r_t * ht))
        # Convention used here: z_t gates the candidate,
        # h_t = (1 - z_t) * h_prev + z_t * h_tilde.
        h_t = (1 - z_t) * ht + z_t * h_title_t
        policy = F.softmax(self.policy(h_t), dim=1)

        v = self.diconv1_v(conv)
        v = F.relu(v)
        v = self.diconv2_v(v)
        v = F.relu(v)
        value = self.value(v)
        return policy, value, h_t
87 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Srain
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # [PixelRL](https://arxiv.org/abs/1811.04323)
2 |
3 |
4 | `Pytorch reproduction of pixelRL.`
5 |
6 | `If it helps you with your research, please star it. 🎈`
7 |
8 | # Requirements and Dependencies
9 | - pytorch 0.4.1+
10 | - python 3.5+
11 |
12 |
13 | The PixelRL Denoiser
14 | ----------
15 | * Grayscale image denoising
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 | `Just run Train_torch.py.`
24 |
25 |
26 | # Citation
27 |
 28 | Please cite the original paper.
29 |
30 | ```
31 | @inproceedings{aaai_furuta_2019,
32 | author={Ryosuke Furuta and Naoto Inoue and Toshihiko Yamasaki},
33 | title={Fully Convolutional Network with Multi-Step Reinforcement Learning for Image Processing},
34 | booktitle={AAAI Conference on Artificial Intelligence (AAAI)},
35 | year={2019}
36 | }
37 | @article{furuta2020pixelrl,
38 | title={PixelRL: Fully Convolutional Network with Reinforcement Learning for Image Processing},
39 | author={Ryosuke Furuta and Naoto Inoue and Toshihiko Yamasaki},
40 | journal={IEEE Transactions on Multimedia (TMM)},
41 | year={2020},
42 | volume={22},
43 | number={7},
44 | pages={1704-1719}
45 | }
46 | ```
47 |
48 |
49 |
50 |
--------------------------------------------------------------------------------
/State.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import sys
3 | import cv2
4 |
class State():
    """Environment state for pixelRL denoising.

    Holds the working image (B, 1, H, W) in [0, 1] and a (B, 1 + 64, H, W)
    tensor that stacks the image with 64 recurrent hidden-state planes.
    """
    def __init__(self, size, move_range):
        # move_range: number of pixel-shift actions; filter actions follow.
        self.image = np.zeros(size, dtype=np.float32)
        self.move_range = move_range

    def reset(self, x, n):
        """Start an episode from clean images x plus noise n, clipped to [0, 1]."""
        self.image = np.clip(x + n, a_min=0., a_max=1.)
        b, _, h, w = self.image.shape
        hidden = np.zeros((b, 64, h, w), dtype=np.float32)
        self.tensor = np.concatenate([self.image, hidden], axis=1)

    def set(self, x):
        """Overwrite the working image (hidden planes are left untouched)."""
        self.image = x
        self.tensor[:, :self.image.shape[1], :, :] = self.image

    def step(self, act, inner_state):
        """Apply the chosen per-pixel actions and refresh the state tensor.

        act: CPU torch tensor of action ids, shape (B, H, W). Ids below
        move_range shift the pixel value; ids move_range..move_range+5 select
        one of six classical filters. inner_state replaces the hidden planes.
        """
        act = act.numpy()
        neutral = (self.move_range - 1)/2
        # Pixel-shift actions: move every pixel by (action - neutral)/255.
        delta = (act.astype(np.float32) - neutral)/255.
        shifted = self.image + delta[:,np.newaxis,:,:]

        # Filter actions, indexed by offset from move_range. Order matters:
        # it must match the action ids used below.
        filters = [
            lambda img: cv2.GaussianBlur(img, ksize=(5, 5), sigmaX=0.5),
            lambda img: cv2.bilateralFilter(img, d=5, sigmaColor=0.1, sigmaSpace=5),
            lambda img: cv2.medianBlur(img, ksize=5),
            lambda img: cv2.GaussianBlur(img, ksize=(5, 5), sigmaX=1.5),
            lambda img: cv2.bilateralFilter(img, d=5, sigmaColor=1.0, sigmaSpace=5),
            lambda img: cv2.boxFilter(img, ddepth=-1, ksize=(5, 5)),
        ]
        candidates = [np.zeros(self.image.shape, self.image.dtype) for _ in filters]

        for b in range(self.image.shape[0]):
            for k, filt in enumerate(filters):
                # Run a filter only if at least one pixel in the sample chose it.
                if np.sum(act[b] == self.move_range + k) > 0:
                    candidates[k][b] = np.expand_dims(
                        filt(self.image[b].squeeze().astype(np.float32)), 0)

        # Start from the shifted image, then substitute each filter's output
        # at exactly the pixels that selected that filter.
        self.image = shifted
        act_ch = act[:, np.newaxis, :, :]
        for k, cand in enumerate(candidates):
            self.image = np.where(act_ch == self.move_range + k, cand, self.image)

        self.image = np.clip(self.image, a_min=0., a_max=1.)
        self.tensor[:, :self.image.shape[1], :, :] = self.image
        self.tensor[:, -64:, :, :] = inner_state
66 |
--------------------------------------------------------------------------------
/Train_torch.py:
--------------------------------------------------------------------------------
1 |
2 | import torch
3 | import numpy as np
4 | import cv2
5 | import copy
6 | from tqdm import tqdm
7 | import State as State
8 | from pixelwise_a3c import *
9 | from FCN import *
10 | from mini_batch_loader import MiniBatchLoader
11 | import matplotlib.pyplot as plt
12 | import torch.optim as optim
13 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
14 | torch.manual_seed(1)
15 |
16 | MOVE_RANGE = 3
17 | EPISODE_LEN = 5
18 | MAX_EPISODE = 100000
19 | GAMMA = 0.95
20 | N_ACTIONS = 9
21 | BATCH_SIZE = 22
22 | DIS_LR = 3e-4
23 | LR = 1e-4
24 | img_size = 63
25 | sigma = 25
26 |
27 | # TRAINING_DATA_PATH = "./train.txt"
28 | # TESTING_DATA_PATH = "./train.txt"
29 | TRAINING_DATA_PATH = "./BSD68.txt"
30 | TESTING_DATA_PATH = "./BSD68.txt"
31 | IMAGE_DIR_PATH = ".//"
32 |
def main():
    """Train the pixelRL denoising agent with pixel-wise A3C on random crops."""
    # Actor-critic network, warm-started from a pretrained denoiser checkpoint
    # (presumably converted from the zhang_cvpr17 caffemodel by
    # caffe2pytorch.py — confirm).
    model = PPO(N_ACTIONS).to(device)
    model.load_state_dict(torch.load("./torch_initweight/sig25_gray.pth"))
    optimizer = optim.Adam(model.parameters(), lr=LR)
    i_index = 0  # cursor into the shuffled training-index permutation

    mini_batch_loader = MiniBatchLoader(
        TRAINING_DATA_PATH,
        TESTING_DATA_PATH,
        IMAGE_DIR_PATH,
        img_size)

    # State carries the noisy image plus 64 recurrent hidden planes per sample.
    current_state = State.State((BATCH_SIZE, 1, 63, 63), MOVE_RANGE)
    agent = PixelWiseA3C_InnerState(model, optimizer, BATCH_SIZE, EPISODE_LEN, GAMMA)

    train_data_size = MiniBatchLoader.count_paths(TRAINING_DATA_PATH)
    indices = np.random.permutation(train_data_size)

    # NOTE(review): loops for 10000 episodes although MAX_EPISODE is 100000 —
    # confirm which limit is intended.
    for n_epi in tqdm(range(0, 10000), ncols=70, initial=0):

        r = indices[i_index: i_index + BATCH_SIZE]
        raw_x = mini_batch_loader.load_training_data(r)

        label = copy.deepcopy(raw_x)
        # Gaussian noise with pixel-scale std `sigma`, rescaled to [0,1] images.
        raw_n = np.random.normal(0, sigma, label.shape).astype(label.dtype) / 255.
        current_state.reset(raw_x, raw_n)
        reward = np.zeros(label.shape, label.dtype)
        sum_reward = 0

        # Debug view of one clean crop every 10 episodes
        # (indexing sample 10 assumes BATCH_SIZE > 10).
        if n_epi % 10 == 0:
            image = np.asanyarray(label[10].transpose(1, 2, 0) * 255, dtype=np.uint8)
            image = np.squeeze(image)
            cv2.imshow("rerr", image)
            cv2.waitKey(1)

        for t in range(EPISODE_LEN):

            if n_epi % 10 == 0:
                # # cv2.imwrite('./test_img/'+'ori%2d' % (t+c)+'.jpg', current_state.image[20].transpose(1, 2, 0) * 255)
                image = np.asanyarray(current_state.image[10].transpose(1, 2, 0) * 255, dtype=np.uint8)
                image = np.squeeze(image)
                cv2.imshow("temp", image)
                cv2.waitKey(1)

            previous_image = np.clip(current_state.image.copy(), a_min=0., a_max=1.)
            action, inner_state, action_prob = agent.act_and_train(current_state.tensor, reward)

            if n_epi % 10 == 0:
                print(action[10])
                print(action_prob[10])
                paint_amap(action[10])

            current_state.step(action, inner_state)
            # Per-pixel reward: reduction of squared error against the clean
            # label between consecutive steps, scaled by 255.
            reward = np.square(label - previous_image) * 255 - np.square(label - current_state.image) * 255
            sum_reward += np.mean(reward) * np.power(GAMMA, t)

        agent.stop_episode_and_train(current_state.tensor, reward, True)

        # Advance the batch cursor; reshuffle when the permutation is used up,
        # and clamp so the final batch of the pass is always full-sized.
        if i_index + BATCH_SIZE >= train_data_size:
            i_index = 0
            indices = np.random.permutation(train_data_size)
        else:
            i_index += BATCH_SIZE

        if i_index + 2 * BATCH_SIZE >= train_data_size:
            i_index = train_data_size - BATCH_SIZE

        print("train total reward {a}".format(a=sum_reward * 255))
101 |
def paint_amap(acmap):
    # Debug helper: briefly render the per-pixel action map (ids 1..9)
    # with a colorbar, then close the figure so training can continue.
    action_image = np.asanyarray(acmap.squeeze(), dtype=np.uint8)
    plt.imshow(action_image, vmin=1, vmax=9)
    plt.colorbar()
    plt.pause(1)
    plt.close()
110 | if __name__ == '__main__':
111 | main()
112 |
--------------------------------------------------------------------------------
/Tst.py:
--------------------------------------------------------------------------------
1 |
2 | import torch
3 | import numpy as np
4 | import cv2
5 | from FCN import PPO
6 | import State as State
7 | import matplotlib.pyplot as plt
8 | from torch.distributions import Categorical
9 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
10 |
11 | model = PPO(9).to(device)
12 | model.load_state_dict(torch.load('./torch_pixel_model/pixel_sig25_gray.pth'))
13 | simga = 25
def tst(model):
    """Run the trained pixelRL denoiser on one grayscale image, showing each step.

    Fixes against the current FCN.py/State.py: the original called the
    `State` module as if it were the class, passed an extra argument to
    `State.set`, called a nonexistent `State.steps`, and invoked
    `model(state, 1)` although `PPO` only exposes `pi_and_v(x)`.
    """
    model.eval()
    img_path = "./test002.png"
    raw_x = cv2.imread(img_path).astype(np.float32)
    raw_x = cv2.cvtColor(raw_x, cv2.COLOR_RGB2GRAY)
    raw_n = np.random.normal(0, simga, raw_x.shape).astype(raw_x.dtype)/255
    raw_x = np.expand_dims(raw_x, 0)
    raw_x = np.array([raw_x]) / 255
    # State.State (the class, not the module) tracks the working image plus
    # the 64 recurrent hidden planes in its `.tensor` attribute.
    step_test = State.State((raw_x.shape[0], 1, raw_x.shape[2], raw_x.shape[3]), move_range=3)
    step_test.reset(raw_x, raw_n)

    # Show the clean input, then the noisy starting point.
    image = np.asanyarray(raw_x[0, 0:1, :, :].transpose(1, 2, 0) * 255, dtype=np.uint8)
    image = np.squeeze(image)
    cv2.imshow("rerr", image)
    cv2.waitKey(0)

    image = np.asanyarray(step_test.image[0, 0:1, :, :].transpose(1, 2, 0) * 255, dtype=np.uint8)
    image = np.squeeze(image)
    cv2.imshow("rerr", image)
    cv2.waitKey(0)
    for t in range(5):
        action_map, action_map_prob, ht_ = select_action(
            torch.FloatTensor(step_test.tensor).to(device), test=True)
        paint_amap(action_map[0])
        print(action_map[0])
        print(action_map_prob[0])
        # State.step calls act.numpy(), so hand it a CPU tensor.
        step_test.step(torch.from_numpy(action_map), ht_)
        image = np.asanyarray(step_test.image[0, 0:1, :, :].transpose(1, 2, 0) * 255, dtype=np.uint8)
        image = np.squeeze(image)
        cv2.imshow("rerr", image)
        cv2.waitKey(0)

def select_action(state, test=False):
    """Pick a per-pixel action from the policy.

    Returns numpy arrays: actions (B, H, W), their probabilities (B, H, W),
    and the new hidden state (B, 64, H, W).
    """
    with torch.no_grad():
        # PPO exposes pi_and_v(x) -> (policy, value, hidden); it is not
        # callable with a second positional argument.
        pout, _, ht_ = model.pi_and_v(state)
        pout = torch.clamp(pout, min=0, max=1)
        p_trans = pout.permute([0, 2, 3, 1])
        dist = Categorical(p_trans)
        if test:
            # Greedy action at test time.
            _, action = torch.max(pout, dim=1)
        else:
            # Sample an action per pixel.
            action = dist.sample().detach()

        action_prob = pout.gather(1, action.unsqueeze(1)).squeeze(1)
    return action.detach().cpu().numpy(), action_prob.detach().cpu().numpy(), ht_.detach().cpu().numpy()

def paint_amap(acmap):
    """Display an action map (ids 1..9) with a colorbar; blocks until closed."""
    image = np.asanyarray(acmap.squeeze(), dtype=np.uint8)
    plt.imshow(image, vmin=1, vmax=9)
    plt.colorbar()
    plt.show()

tst(model)
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
--------------------------------------------------------------------------------
/action_map/Figure_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/action_map/Figure_1.png
--------------------------------------------------------------------------------
/action_map/Figure_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/action_map/Figure_2.png
--------------------------------------------------------------------------------
/action_map/Figure_3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/action_map/Figure_3.png
--------------------------------------------------------------------------------
/action_map/Figure_4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/action_map/Figure_4.png
--------------------------------------------------------------------------------
/action_map/Figure_5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/action_map/Figure_5.png
--------------------------------------------------------------------------------
/caffe2pytorch.py:
--------------------------------------------------------------------------------
from chainer.links.caffe import CaffeFunction
from FCN import PPO
import torch
import numpy as np

# One-shot conversion script: copy the convolutional weights of the
# zhang_cvpr17 grayscale denoiser (Caffe) into this repo's PPO network and
# save them as the PyTorch warm-start checkpoint.
net = CaffeFunction('./initial_weight/zhang_cvpr17_denoise_50_gray.caffemodel')
print(net.layer1.W.data.shape)
# FCN.PPO takes a single constructor argument (the number of actions);
# the original call `PPO(9, 1)` raised a TypeError.
model = PPO(9)
model_dict = model.state_dict()
print(model_dict['conv.0.weight'].size())
print(model_dict.keys())
# Shared encoder layers.
model_dict['conv.0.weight'] = torch.FloatTensor(net.layer1.W.data)
model_dict['conv.0.bias'] = torch.FloatTensor(net.layer1.b.data)
model_dict['conv.2.weight'] = torch.FloatTensor(net.layer3.W.data)
model_dict['conv.2.bias'] = torch.FloatTensor(net.layer3.b.data)
model_dict['conv.4.weight'] = torch.FloatTensor(net.layer6.W.data)
model_dict['conv.4.bias'] = torch.FloatTensor(net.layer6.b.data)
model_dict['conv.6.weight'] = torch.FloatTensor(net.layer9.W.data)
model_dict['conv.6.bias'] = torch.FloatTensor(net.layer9.b.data)

# Policy branch (the policy/value output heads keep their random init).
model_dict['diconv1_p.weight'] = torch.FloatTensor(net.layer12.W.data)
model_dict['diconv1_p.bias'] = torch.FloatTensor(net.layer12.b.data)
model_dict['diconv2_p.weight'] = torch.FloatTensor(net.layer15.W.data)
model_dict['diconv2_p.bias'] = torch.FloatTensor(net.layer15.b.data)

# Value branch reuses the same pretrained layers as the policy branch.
model_dict['diconv1_v.weight'] = torch.FloatTensor(net.layer12.W.data)
model_dict['diconv1_v.bias'] = torch.FloatTensor(net.layer12.b.data)
model_dict['diconv2_v.weight'] = torch.FloatTensor(net.layer15.W.data)
model_dict['diconv2_v.bias'] = torch.FloatTensor(net.layer15.b.data)
model.load_state_dict(model_dict)
torch.save(model.state_dict(), "./torch_initweight/sig50_gray.pth")
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/initial_weight/zhang_cvpr17_denoise_15_gray.caffemodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/initial_weight/zhang_cvpr17_denoise_15_gray.caffemodel
--------------------------------------------------------------------------------
/initial_weight/zhang_cvpr17_denoise_25_gray.caffemodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/initial_weight/zhang_cvpr17_denoise_25_gray.caffemodel
--------------------------------------------------------------------------------
/initial_weight/zhang_cvpr17_denoise_50_gray.caffemodel:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/initial_weight/zhang_cvpr17_denoise_50_gray.caffemodel
--------------------------------------------------------------------------------
/mini_batch_loader.py:
--------------------------------------------------------------------------------
1 | import os
2 | import numpy as np
3 | import cv2
4 |
5 |
class MiniBatchLoader(object):
    """Loads grayscale images listed in text files.

    Training batches are random crops of size `crop_size` with flip/rotation
    augmentation; testing loads one full-size image at a time.
    """

    def __init__(self, train_path, test_path, image_dir_path, crop_size):
        # Resolve every relative path from the two list files once, up front.
        self.training_path_infos = self.read_paths(train_path, image_dir_path)
        self.testing_path_infos = self.read_paths(test_path, image_dir_path)

        self.crop_size = crop_size

    # test ok
    @staticmethod
    def path_label_generator(txt_path, src_path):
        # Yield the full path of every listed file that actually exists on disk.
        for line in open(txt_path):
            line = line.strip()
            src_full_path = os.path.join(src_path, line)
            if os.path.isfile(src_full_path):
                yield src_full_path

    # test ok
    @staticmethod
    def count_paths(path):
        # Number of lines in the list file (== nominal dataset size).
        c = 0
        for _ in open(path):
            c += 1
        return c

    # test ok
    @staticmethod
    def read_paths(txt_path, src_path):
        # Materialize path_label_generator into a list.
        cs = []
        for pair in MiniBatchLoader.path_label_generator(txt_path, src_path):
            cs.append(pair)
        return cs

    def load_training_data(self, indices):
        return self.load_data(self.training_path_infos, indices, augment=True)

    def load_testing_data(self, indices):
        return self.load_data(self.testing_path_infos, indices)

    # test ok
    def load_data(self, path_infos, indices, augment=False):
        """Load a batch as a float32 array in [0, 1], shape (B, 1, H, W).

        augment=True: random h-flip, random small rotation (|angle| < 10 deg)
        and a random crop of crop_size x crop_size.
        augment=False: requires exactly one index; returns the full image.
        Raises RuntimeError on unreadable or too-small images.
        """
        mini_batch_size = len(indices)
        in_channels = 1

        if augment:
            xs = np.zeros((mini_batch_size, in_channels, self.crop_size, self.crop_size)).astype(np.float32)

            for i, index in enumerate(indices):
                path = path_infos[index]

                img = cv2.imread(path, 0)  # 0 -> load as grayscale
                if img is None:
                    raise RuntimeError("invalid image: {i}".format(i=path))
                h, w = img.shape
                if h < self.crop_size or w < self.crop_size:
                    raise RuntimeError(
                        "image {p} ({h}x{w}) is smaller than crop size {c}".format(
                            p=path, h=h, w=w, c=self.crop_size))

                if np.random.rand() > 0.5:
                    img = np.fliplr(img)

                if np.random.rand() > 0.5:
                    angle = 10 * np.random.rand()
                    if np.random.rand() > 0.5:
                        angle *= -1
                    M = cv2.getRotationMatrix2D((w / 2, h / 2), angle, 1)
                    img = cv2.warpAffine(img, M, (w, h))

                # `+ 1` so the maximal valid offset can be drawn and so an
                # image whose side equals crop_size does not crash
                # np.random.randint(0) (high bound is exclusive).
                x_offset = np.random.randint(w - self.crop_size + 1)
                y_offset = np.random.randint(h - self.crop_size + 1)
                img = img[y_offset:y_offset + self.crop_size, x_offset:x_offset + self.crop_size]
                xs[i, 0, :, :] = (img / 255).astype(np.float32)

        elif mini_batch_size == 1:
            for i, index in enumerate(indices):
                path = path_infos[index]

                img = cv2.imread(path, 0)
                if img is None:
                    raise RuntimeError("invalid image: {i}".format(i=path))

                h, w = img.shape
                xs = np.zeros((mini_batch_size, in_channels, h, w)).astype(np.float32)
                xs[0, 0, :, :] = (img / 255).astype(np.float32)

        else:
            raise RuntimeError("mini batch size must be 1 when testing")

        return xs
96 |
--------------------------------------------------------------------------------
/pixel_model/chainer2pytorch.py:
--------------------------------------------------------------------------------

import numpy as np
from RL_model.Attention_FCN import PPO
import torch

# Conversion script: load Chainer-trained pixelRL weights (.npz) and copy
# them into the PyTorch PPO model's state dict, then save it as a .pth file.
# The hand-written one-assignment-per-parameter block was replaced by a
# data-driven mapping so weight/bias pairs cannot drift out of sync.

NPZ_PATH = './pretrained_50.npz'
OUT_PATH = 'pixel_sig50_gray.pth'

# state_dict key prefix -> npz parameter-name prefix.  Each entry copies
# both '<prefix>/W' (weight) and '<prefix>/b' (bias).
PARAM_MAP = {
    'conv.0': 'conv1',
    'conv.2': 'diconv2/diconv',
    'conv.4': 'diconv3/diconv',
    'conv.6': 'diconv4/diconv',
    # policy head
    'diconv1_p': 'diconv5_pi/diconv',
    'diconv2_p': 'diconv6_pi/diconv',
    'policy': 'conv8_pi/model',
    # value head
    'diconv1_v': 'diconv5_V/diconv',
    'diconv2_v': 'diconv6_V/diconv',
    'value': 'conv7_V',
}

# ConvGRU gate convolutions: same name on both sides, weight only (no bias).
GRU_WEIGHTS = ('conv7_Wz', 'conv7_Uz', 'conv7_Wr', 'conv7_Ur',
               'conv7_W', 'conv7_U')

d = np.load(NPZ_PATH)
print(d.files)
print(d['diconv6_pi/diconv/W'].shape)
model = PPO(9, 1)
model_dict = model.state_dict()

for torch_key, npz_prefix in PARAM_MAP.items():
    model_dict[torch_key + '.weight'] = torch.FloatTensor(d[npz_prefix + '/W'])
    model_dict[torch_key + '.bias'] = torch.FloatTensor(d[npz_prefix + '/b'])

for name in GRU_WEIGHTS:
    model_dict[name + '.weight'] = torch.FloatTensor(d[name + '/W'])

model.load_state_dict(model_dict)
torch.save(model.state_dict(), OUT_PATH)
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/pixel_model/pretrained_15.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/pixel_model/pretrained_15.npz
--------------------------------------------------------------------------------
/pixel_model/pretrained_25.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/pixel_model/pretrained_25.npz
--------------------------------------------------------------------------------
/pixel_model/pretrained_50.npz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/pixel_model/pretrained_50.npz
--------------------------------------------------------------------------------
/pixelwise_a3c.py:
--------------------------------------------------------------------------------
1 |
2 | import copy
3 | import numpy as np
4 | from torch.autograd import Variable
5 | import torch.nn.functional as F
6 | from torch import autograd
7 | from torch.distributions import Categorical
8 | import torch
9 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
10 | torch.manual_seed(1)
11 |
class PixelWiseA3C_InnerState():
    """Pixel-wise A3C (asynchronous advantage actor-critic) trainer.

    Every pixel is treated as an agent: the policy and value outputs are
    per-pixel maps, and n-step returns and losses are accumulated
    pixel-wise.  A deep copy of ``shared_model`` is used for rollouts and
    gradient computation; gradients are copied onto the shared model and
    parameters are synced back after each update (A3C-style training).
    """

    def __init__(self, model, optimizer, batch_size, t_max, gamma, beta=1e-2,
                 phi=lambda x: x,
                 pi_loss_coef=1.0, v_loss_coef=0.5,
                 average_reward_tau=1e-2,
                 act_deterministically=False,
                 average_entropy_decay=0.999,
                 average_value_decay=0.999):
        # model: network exposing pi_and_v(state) -> (pout, vout, inner_state).
        # optimizer: optimizer stepped in update().
        # t_max: steps between updates (n-step return horizon).
        # gamma: discount factor; beta: entropy regularization weight.
        # phi: state preprocessing function (identity by default).
        # pi_loss_coef / v_loss_coef: loss weights for the two heads.

        self.shared_model = model
        # Local working copy; gradients are computed here, then pushed to
        # shared_model via update_grad().
        self.model = copy.deepcopy(self.shared_model)

        self.optimizer = optimizer
        self.batch_size = batch_size

        self.t_max = t_max
        self.gamma = gamma
        self.beta = beta
        self.phi = phi
        self.pi_loss_coef = pi_loss_coef
        self.v_loss_coef = v_loss_coef
        self.average_reward_tau = average_reward_tau
        self.act_deterministically = act_deterministically
        self.average_value_decay = average_value_decay
        self.average_entropy_decay = average_entropy_decay
        # self.batch_states = batch_states

        # Rollout bookkeeping: t counts environment steps, t_start marks the
        # start of the current n-step segment, and the dicts below cache
        # per-step quantities keyed by t.
        self.t = 0
        self.t_start = 0
        self.past_action_log_prob = {}
        self.past_action_entropy = {}
        self.past_states = {}
        self.past_rewards = {}
        self.past_values = {}
        self.average_reward = 0

        self.explorer = None

        # Stats
        self.average_value = 0
        self.average_entropy = 0

    """
    Synchronize parameters: copy the shared model's buffers and parameters
    into the local working copy.
    """
    def sync_parameters(self):
        for m1, m2 in zip(self.model.modules(), self.shared_model.modules()):
            m1._buffers = m2._buffers.copy()
        for target_param, param in zip(self.model.parameters(), self.shared_model.parameters()):
            target_param.detach().copy_(param.detach())
    """
    Copy gradients from the local (source) model onto the shared (target)
    model's same-named parameters.
    """
    def update_grad(self, target, source):
        target_params = dict(target.named_parameters())
        # print(target_params)
        # Handle the four None / not-None gradient combinations explicitly;
        # an in-place copy ([...]) is used when both sides already have grads.
        for param_name, param in source.named_parameters():
            if target_params[param_name].grad is None:
                if param.grad is None:
                    pass
                else:
                    target_params[param_name].grad = param.grad
            else:
                if param.grad is None:
                    target_params[param_name].grad = None
                else:
                    target_params[param_name].grad[...] = param.grad

    def update(self, statevar):
        # Run one A3C update over the cached rollout segment [t_start, t).
        # statevar: bootstrap state for the n-step return, or None for a
        # terminal state (zero bootstrap value).
        assert self.t_start < self.t
        if statevar is None:
            # NOTE(review): the spatial size (63, 63) is hard-coded here;
            # confirm it matches the training crop size used elsewhere.
            R = torch.zeros(self.batch_size, 1, 63, 63).cuda()
        else:
            _, vout, _ = self.model.pi_and_v(statevar)
            R = vout.detach()
        pi_loss = 0
        v_loss = 0

        # Walk the segment backwards, accumulating the discounted return R
        # and the policy / value losses (standard n-step A3C).
        for i in reversed(range(self.t_start, self.t)):
            R *= self.gamma
            R += self.past_rewards[i]
            v = self.past_values[i]
            advantage = R - v.detach() # (32, 3, 63, 63)
            log_prob = self.past_action_log_prob[i]
            entropy = self.past_action_entropy[i]

            # Policy gradient with entropy bonus; advantage is detached so
            # this term only trains the policy head.
            pi_loss -= log_prob * advantage.detach()
            pi_loss -= self.beta * entropy
            # Squared-error value loss.
            v_loss += (v - R) ** 2 / 2.

        if self.pi_loss_coef != 1.0:
            pi_loss *= self.pi_loss_coef

        if self.v_loss_coef != 1.0:
            v_loss *= self.v_loss_coef

        print(pi_loss.mean())
        print(v_loss.mean())
        print("==========")
        total_loss = (pi_loss + v_loss).mean()

        print("loss:", total_loss)

        # NOTE(review): optimizer.step() runs *before* update_grad() copies
        # the local gradients onto shared_model — verify the optimizer is
        # bound to the parameters it is meant to update, otherwise the
        # shared model is never stepped with fresh gradients.
        self.optimizer.zero_grad()
        total_loss.backward()
        self.optimizer.step()
        self.update_grad(self.shared_model, self.model)
        self.sync_parameters()

        # Clear the rollout caches and begin a new segment.
        self.past_action_log_prob = {}
        self.past_action_entropy = {}
        self.past_states = {}
        self.past_rewards = {}
        self.past_values = {}

        self.t_start = self.t

    def act_and_train(self, state, reward):
        # Sample per-pixel actions for ``state`` and record training data.
        # ``reward`` is the reward for the *previous* action (stored at t-1).
        # Returns (actions, inner_state, action probabilities), all detached
        # and moved to CPU.
        statevar = torch.Tensor(state).cuda()
        self.past_rewards[self.t - 1] = torch.Tensor(reward).cuda()

        # Flush an update every t_max steps.
        if self.t - self.t_start == self.t_max:
            self.update(statevar)

        self.past_states[self.t] = statevar
        pout, vout, inner_state = self.model.pi_and_v(statevar)
        n, num_actions, h, w = pout.shape

        # Flatten the per-pixel distributions to (n*h*w, num_actions) so a
        # single Categorical samples every pixel at once.
        p_trans = pout.permute([0, 2, 3, 1]).contiguous().view(-1, pout.shape[1])
        dist = Categorical(p_trans)
        action = dist.sample()
        # Clamp probabilities away from 0 and 1 before log() for numerical
        # stability, then gather the log-prob of each sampled action.
        log_p = torch.log(torch.clamp(p_trans, min=1e-9, max=1-1e-9))
        log_action_prob = torch.gather(log_p, 1, Variable(action.unsqueeze(-1))).view(n, 1, h, w)
        entropy = -torch.sum(p_trans * log_p, dim=-1).view(n, 1, h, w)


        self.past_action_log_prob[self.t] = log_action_prob.cuda()
        self.past_action_entropy[self.t] = entropy.cuda()
        self.past_values[self.t] = vout
        self.t += 1

        return action.squeeze(1).detach().cpu(), inner_state.detach().cpu(), torch.exp(log_action_prob).squeeze(1).detach().cpu()


    def stop_episode_and_train(self, state, reward, done=False):
        # Record the final reward and flush a terminal (done) or bootstrap
        # update for the episode.
        # NOTE(review): in the non-done branch, ``state`` is passed to
        # update() without the torch.Tensor(...).cuda() conversion used in
        # act_and_train — confirm callers pass a CUDA tensor here.
        self.past_rewards[self.t - 1] = torch.Tensor(reward).cuda()
        if done:
            self.update(None)
        else:
            statevar = state
            self.update(statevar)
164 |
165 |
166 |
167 |
168 |
--------------------------------------------------------------------------------
/read_file.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
def readname():
    """List the training-image directory.

    Returns:
        tuple: (list of entry names in the folder, the folder path).
    """
    # filePath = './exploration_database_and_code/pristine_images'
    folder = './BSD432_color'
    entries = os.listdir(folder)
    return entries, folder
9 |
10 |
if __name__ == "__main__":
    # Write one relative image path per line to BSD432_test.txt.
    name, filePath = readname()
    print(name)
    # ``with`` ensures the list file is flushed and closed; the original
    # never closed it, so trailing writes could be lost at interpreter exit.
    with open("BSD432_test.txt", 'w') as txt:
        for i in name:
            # print(filePath + "/" + i)
            # image_dir = os.path.join('./exploration_database_and_code/pristine_images/', str(i))
            image_dir = os.path.join('./BSD432_color', str(i))
            txt.write(image_dir + "\n")
21 |
--------------------------------------------------------------------------------
/torch_initweight/sig15_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_initweight/sig15_gray.pth
--------------------------------------------------------------------------------
/torch_initweight/sig25_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_initweight/sig25_gray.pth
--------------------------------------------------------------------------------
/torch_initweight/sig50_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_initweight/sig50_gray.pth
--------------------------------------------------------------------------------
/torch_pixel_model/pixel_sig15_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_pixel_model/pixel_sig15_gray.pth
--------------------------------------------------------------------------------
/torch_pixel_model/pixel_sig25_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_pixel_model/pixel_sig25_gray.pth
--------------------------------------------------------------------------------
/torch_pixel_model/pixel_sig50_gray.pth:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FightingSrain/Pytorch-pixelRL/72d548153ea43e1ae668143eb1f657648ac30697/torch_pixel_model/pixel_sig50_gray.pth
--------------------------------------------------------------------------------