├── ICRA_2021_RL.csv
├── README.md
├── LICENSE
├── rlpapers.txt
└── ICRA_area_summary.ipynb
/ICRA_2021_RL.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tsaoyu/ICRA_2021_RL/HEAD/ICRA_2021_RL.csv
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ICRA_2021_RL
2 | A list of reinforcement learning papers at ICRA 2021.
3 |
4 |
5 | You can also use the ICRA area summary notebook to generate a list like this for any area.
6 | The notebook should be self-explanatory; the only thing you need to change is `rlpapers.txt`.
7 |
8 |
9 |
10 |
11 | Simply copy all the `<a>` HTML tags of the papers you are interested in into `rlpapers.txt`; the notebook will do the rest for you.
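
For example, one pasted entry might look like this (hypothetical href; copy the real `<a>` tags from the ICRA program pages):

```
<a href="ICRA21_ContentListWeb_1.html#ThAT1.4">ThAT1.4</a>
```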
12 | This project relies on the following dependencies:
13 |
14 | ```
15 | pandas
16 | beautifulsoup4
17 | requests
18 | ```
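
They can be installed with pip (note that BeautifulSoup is published on PyPI as `beautifulsoup4`, imported as `bs4`):

```
pip install pandas beautifulsoup4 requests
```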
19 | If something fails, it is more likely that a dependency is missing than that the code is wrong.
20 |
21 | Thanks for using this project, and happy hacking.
22 |
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Tony Yu Cao
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/rlpapers.txt:
--------------------------------------------------------------------------------
1 | | ThAT1.4, ThAT14.1, ThBT20.3, ThBT7.1, ThBT7.4, ThDT1.2, ThDT1.4, ThDT14.1, ThDT14.3, ThDT7.1, ThDT8.3, ThET12.1, ThET17.2, ThFT16.3, ThFT6.3, ThHT12.2, ThHT12.3, ThHT12.4, ThHT16.3, ThHT2.1, ThHT20.4, ThHT6.3, ThHT9.2, ThHT9.3, ThIT12.1, ThIT12.2, ThIT12.3, ThIT12.4, ThIT18.2, ThIT2.1, ThIT2.2, ThIT21.2, ThIT5.1, ThIT5.2, ThJT10.3, ThJT13.1, ThJT15.3, ThJT19.2, ThJT6.2, ThJT6.4, ThKT12.4, ThKT2.2, ThKT20.1, ThKT7.1, ThKT7.4, TuAT13.1, TuAT13.2, TuAT13.3, TuAT13.4, TuAT14.1, TuAT14.2, TuAT15.2, TuAT19.1, TuAT23.2, TuAT4.4, TuAT9.2, TuBT16.1, TuBT19.1, TuBT23.2, TuBT5.3, TuCT14.2, TuCT15.3, TuCT16.2, TuCT16.3, TuCT18.1, TuCT3.1, TuCT7.2, TuDT10.2, TuDT11.2, TuDT11.3, TuDT12.1, TuDT12.4, TuDT2.4, TuDT20.2, TuDT5.2, TuDT7.4, TuET10.1, TuET10.3, TuET10.4, TuET12.1, TuET12.2, TuET12.4, TuET21.1, TuET4.3, TuFT18.3, TuFT20.2, TuGT11.1, TuGT12.4, TuGT13.2, TuGT19.1, TuGT2.1, TuGT2.2, TuGT2.3, TuGT2.4, TuGT2.5, TuGT22.1, TuGT22.2, TuGT5.1, TuGT8.1, TuGT8.3, TuHT1.1, TuHT1.3, TuHT10.4, TuHT11.2, TuHT13.4, TuHT18.2, TuHT8.4, TuIT16.2, TuIT16.3, TuIT24.3, TuIT5.2, TuIT5.4, TuIT9.3, TuJT10.2, TuJT12.2, TuJT12.4, TuJT15.3, TuJT16.1, TuJT16.3, TuJT2.1, TuJT2.2, TuJT2.3, TuJT20.1, TuJT20.2, TuJT20.4, TuJT21.1, TuJT22.1, TuJT22.2, TuJT5.2, TuJT5.3, TuJT6.3, TuKT10.2, TuKT13.1, TuKT13.2, TuKT13.4, TuKT18.2, TuKT18.3, TuKT2.2, TuKT2.3, TuKT2.4, TuKT20.3, TuKT21.4, TuKT3.2, TuKT5.1, TuKT6.2, TuKT7.1, TuKT7.3, WeAT10.2, WeAT3.3, WeBT1.3, WeBT1.4, WeBT10.3, WeBT15.3, WeBT17.1, WeBT4.1, WeBT5.2, WeBT7.3, WeCT11.1, WeCT9.3 |
--------------------------------------------------------------------------------
/ICRA_area_summary.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import pandas as pd"
10 | ]
11 | },
12 | {
13 | "cell_type": "markdown",
14 | "metadata": {},
15 | "source": [
16 | "It is easy to generate papers summary for any area. See more at README\n",
17 | "\n",
18 | "\n"
19 | ]
20 | },
21 | {
22 | "cell_type": "code",
23 | "execution_count": 2,
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "from html.parser import HTMLParser\n",
28 | "\n",
29 | "paper_session = []\n",
30 | "class MyHTMLParser(HTMLParser):\n",
31 | " \n",
32 | " def handle_starttag(self, tag, attrs):\n",
33 | " self.data = attrs\n",
34 | "\n",
35 | " def handle_endtag(self, tag):\n",
36 | " pass\n",
37 | "\n",
38 | " def handle_data(self, data):\n",
39 | " if len(data) > 2:\n",
40 | " #print(\"Encountered some data :\", data)\n",
41 | " paper_session.append([self.data, data])\n",
42 | " \n",
43 | "\n",
44 | "parser = MyHTMLParser()\n",
45 | "\n",
46 | "\n",
47 | "file = open('./rlpapers.txt')\n",
48 | "lines = file.read()\n",
49 | "\n",
50 | "parser.feed(lines)"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 3,
56 | "metadata": {},
57 | "outputs": [],
58 | "source": [
59 | "full_list = []\n",
60 | "def session_processing(s):\n",
61 | " return [s[1],s[0][0][1]]\n",
62 | "for s in paper_session:\n",
63 | " full_list.append(session_processing(s))"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": 4,
69 | "metadata": {},
70 | "outputs": [
71 | {
72 | "name": "stdout",
73 | "output_type": "stream",
74 | "text": [
75 | "There are 159 papers in your selection.\n"
76 | ]
77 | }
78 | ],
79 | "source": [
80 | "print(\"There are {} papers in your selection.\".format(len(full_list)))"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 5,
86 | "metadata": {},
87 | "outputs": [],
88 | "source": [
89 | "import requests"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": 6,
95 | "metadata": {},
96 | "outputs": [],
97 | "source": [
98 | "request1 = requests.get('https://ras.papercept.net/conferences/conferences/ICRA21/program/ICRA21_ContentListWeb_1.html')\n",
99 | "request2 = requests.get('https://ras.papercept.net/conferences/conferences/ICRA21/program/ICRA21_ContentListWeb_2.html')\n",
100 | "request3 = requests.get('https://ras.papercept.net/conferences/conferences/ICRA21/program/ICRA21_ContentListWeb_3.html')"
101 | ]
102 | },
103 | {
104 | "cell_type": "code",
105 | "execution_count": 7,
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "data1 = request1.content\n",
110 | "data2 = request2.content\n",
111 | "data3 = request3.content"
112 | ]
113 | },
114 | {
115 | "cell_type": "code",
116 | "execution_count": 8,
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "[data1, data2, data3];"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": 9,
126 | "metadata": {},
127 | "outputs": [],
128 | "source": [
129 | "from bs4 import BeautifulSoup\n",
130 | "soup = BeautifulSoup(data1, 'html.parser')"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": 10,
136 | "metadata": {},
137 | "outputs": [],
138 | "source": [
139 | "soup1, soup2, soup3 = [BeautifulSoup(data1, 'html.parser'), \n",
140 | " BeautifulSoup(data2, 'html.parser'),\n",
141 | " BeautifulSoup(data3, 'html.parser')]"
142 | ]
143 | },
144 | {
145 | "cell_type": "code",
146 | "execution_count": 11,
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "soups = [soup1, soup2, soup3]"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": 12,
156 | "metadata": {},
157 | "outputs": [],
158 | "source": [
159 | "def summary_data(paper_info):\n",
160 | " web_page,paper_id = paper_info[-1].rsplit('#')\n",
161 | " soup = soups[int(web_page.split('.')[0][-1])-1]\n",
162 | " a = soup.find('a', {\"name\":paper_id})\n",
163 | " title = a.find_next('a', {\"title\":\"Click to show or hide the keywords and abstract (text summary)\"}).string\n",
164 | " abstract = a.find_next('div').text.rsplit('Abstract: ')[-1]\n",
165 | " author_list = []\n",
166 | " for name in a.find_all_next('a'):\n",
167 | " try:\n",
168 | " name[\"name\"] != None\n",
169 | " break\n",
170 | " except:\n",
171 | " if name[\"title\"] == \"Click to go to the Author Index\":\n",
172 | " author_list.append(name.text)\n",
173 | " aff_list = []\n",
174 | " td_counter = 0\n",
175 | " for name in a.find_all_next(\"td\", {\"class\":\"r\"}):\n",
176 | " if \"Add to\" in name.text:\n",
177 | " if td_counter == 1:\n",
178 | " break\n",
179 | " td_counter += 1\n",
180 | " else:\n",
181 | " aff_list.append(name.text)\n",
182 | " \n",
183 | " return title, abstract, author_list, aff_list"
184 | ]
185 | },
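{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical sanity check (not part of the original run): scrape one\n",
"# paper and eyeball the (title, abstract, authors, affiliations) tuple.\n",
"summary_data(full_list[0])"
]
},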
186 | {
187 | "cell_type": "code",
188 | "execution_count": 13,
189 | "metadata": {},
190 | "outputs": [],
191 | "source": [
192 | "rl_icra = []"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 14,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "for s in full_list:\n",
202 | " rl_icra.append(summary_data(s))"
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 15,
208 | "metadata": {},
209 | "outputs": [],
210 | "source": [
211 | "df = pd.DataFrame(data=rl_icra)"
212 | ]
213 | },
214 | {
215 | "cell_type": "code",
216 | "execution_count": 16,
217 | "metadata": {},
218 | "outputs": [],
219 | "source": [
220 | "df.columns = [\"Title\", \"Abstract\",\"Authors\", \"Affiliations\"]"
221 | ]
222 | },
223 | {
224 | "cell_type": "code",
225 | "execution_count": 17,
226 | "metadata": {},
227 | "outputs": [],
228 | "source": [
229 | "df.to_csv(\"ICRA_long.csv\")"
230 | ]
231 | },
232 | {
233 | "cell_type": "code",
234 | "execution_count": 18,
235 | "metadata": {},
236 | "outputs": [],
237 | "source": [
238 | "au_af = []"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": 19,
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "for i, row in df.iterrows():\n",
248 | " j = 0 \n",
249 | " for r in row[\"Authors\"]:\n",
250 | " au_af.append(r + \"_\" +row[\"Affiliations\"][j])\n",
251 | " j += 1"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": 20,
257 | "metadata": {},
258 | "outputs": [],
259 | "source": [
260 | "from collections import Counter"
261 | ]
262 | },
263 | {
264 | "cell_type": "code",
265 | "execution_count": 21,
266 | "metadata": {},
267 | "outputs": [],
268 | "source": [
269 | "ct = Counter(au_af)"
270 | ]
271 | },
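{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Hypothetical sketch (not part of the original run): peek at the most\n",
"# prolific author-affiliation pairs before ranking the papers by them.\n",
"ct.most_common(5)"
]
},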
272 | {
273 | "cell_type": "code",
274 | "execution_count": 22,
275 | "metadata": {},
276 | "outputs": [],
277 | "source": [
278 | "for i, row in df.iterrows():\n",
279 | " j = 0\n",
280 | " max_p = 0\n",
281 | " for r in row[\"Authors\"]:\n",
282 | " a = r + \"_\" +row[\"Affiliations\"][j]\n",
283 | " if ct[a] > max_p:\n",
284 | " max_p = ct[a]\n",
285 | " au_af.append(a)\n",
286 | " j += 1\n",
287 | " df.at[i,\"max_p\"] = max_p\n",
288 | " "
289 | ]
290 | },
291 | {
292 | "cell_type": "code",
293 | "execution_count": 23,
294 | "metadata": {},
295 | "outputs": [
296 | {
297 | "data": {
298 | "text/html": [
299 | "\n",
300 | "\n",
313 | "
\n",
314 | " \n",
315 | " \n",
316 | " | \n",
317 | " Title | \n",
318 | " Abstract | \n",
319 | " Authors | \n",
320 | " Affiliations | \n",
321 | " max_p | \n",
322 | "
\n",
323 | " \n",
324 | " \n",
325 | " \n",
326 | " | 0 | \n",
327 | " Towards Efficient Multiview Object Detection w... | \n",
328 | " Active vision is a desirable perceptual featur... | \n",
329 | " [Xu, Qianli, Fang, Fen, Gauthier, Nicolas, Lia... | \n",
330 | " [Institute for Infocomm Research, I2R, Institu... | \n",
331 | " 1.0 | \n",
332 | "
\n",
333 | " \n",
334 | " | 1 | \n",
335 | " Deep Reinforcement Learning Framework for Unde... | \n",
336 | " Soft robotics is an emerging technology with e... | \n",
337 | " [Li, Guanda, Shintake, Jun, Hayashibe, Mitsuhiro] | \n",
338 | " [Tohoku University, University of Electro-Comm... | \n",
339 | " 2.0 | \n",
340 | "
\n",
341 | " \n",
342 | " | 2 | \n",
343 | " Robotic Imitation of Human Assembly Skills Usi... | \n",
344 | " Robotic assembly tasks involve complex and low... | \n",
345 | " [Wang, Yan, Beltran-Hernandez, Cristian Camilo... | \n",
346 | " [Osaka University, Osaka University, Osaka Uni... | \n",
347 | " 1.0 | \n",
348 | "
\n",
349 | " \n",
350 | " | 3 | \n",
351 | " Context-Aware Safe Reinforcement Learning for ... | \n",
352 | " Safety is a critical concern when deploying re... | \n",
353 | " [Chen, Baiming, Liu, Zuxin, Zhu, Jiacheng, Xu,... | \n",
354 | " [Tsinghua University, Carnegie Mellon Universi... | \n",
355 | " 2.0 | \n",
356 | "
\n",
357 | " \n",
358 | " | 4 | \n",
359 | " Quantification of Joint Redundancy Considering... | \n",
360 | " The robotic joint redundancy for executing a t... | \n",
361 | " [Chai, Jiazheng, Hayashibe, Mitsuhiro] | \n",
362 | " [Tohoku University, Tohoku University] | \n",
363 | " 2.0 | \n",
364 | "
\n",
365 | " \n",
366 | " | ... | \n",
367 | " ... | \n",
368 | " ... | \n",
369 | " ... | \n",
370 | " ... | \n",
371 | " ... | \n",
372 | "
\n",
373 | " \n",
374 | " | 154 | \n",
375 | " Multi-Target Coverage with Connectivity Mainte... | \n",
376 | " This paper considers a multi-target coverage p... | \n",
377 | " [Wu, Shiguang, Pu, Zhiqiang, Liu, Zhen, Qiu, T... | \n",
378 | " [Chinese Academy of Sciences Beijing, China, U... | \n",
379 | " 1.0 | \n",
380 | "
\n",
381 | " \n",
382 | " | 155 | \n",
383 | " Remote-Center-Of-Motion Recommendation Toward ... | \n",
384 | " Brain needle intervention is a typical diagnos... | \n",
385 | " [Gao, Huxin, Xiao, Xiao, Qiu, Liang, Meng, Max... | \n",
386 | " [National University of Singapore, Southern Un... | \n",
387 | " 1.0 | \n",
388 | "
\n",
389 | " \n",
390 | " | 156 | \n",
391 | " A General Approach for the Automation of Hydra... | \n",
392 | " This article presents a general approach to de... | \n",
393 | " [Egli, Pascal Arturo, Hutter, Marco] | \n",
394 | " [RSL, ETHZ, ETH Zurich] | \n",
395 | " 1.0 | \n",
396 | "
\n",
397 | " \n",
398 | " | 157 | \n",
399 | " Relational Navigation Learning in Continuous A... | \n",
400 | " In this paper, a novel navigation learning met... | \n",
401 | " [Zhang, Xueyou, Xi, Wei, Guo, Xian, Fang, Yong... | \n",
402 | " [Nankai University, Nankai University, Nankai ... | \n",
403 | " 1.0 | \n",
404 | "
\n",
405 | " \n",
406 | " | 158 | \n",
407 | " Autonomous Overtaking in Gran Turismo Sport Us... | \n",
408 | " Professional race-car drivers can execute extr... | \n",
409 | " [Song, Yunlong, Lin, HaoChih, Kaufmann, Elia, ... | \n",
410 | " [University of Zurich, ETH Zurich, University ... | \n",
411 | " 2.0 | \n",
412 | "
\n",
413 | " \n",
414 | "
\n",
415 | "
159 rows × 5 columns
\n",
416 | "
"
417 | ],
418 | "text/plain": [
419 | " Title \\\n",
420 | "0 Towards Efficient Multiview Object Detection w... \n",
421 | "1 Deep Reinforcement Learning Framework for Unde... \n",
422 | "2 Robotic Imitation of Human Assembly Skills Usi... \n",
423 | "3 Context-Aware Safe Reinforcement Learning for ... \n",
424 | "4 Quantification of Joint Redundancy Considering... \n",
425 | ".. ... \n",
426 | "154 Multi-Target Coverage with Connectivity Mainte... \n",
427 | "155 Remote-Center-Of-Motion Recommendation Toward ... \n",
428 | "156 A General Approach for the Automation of Hydra... \n",
429 | "157 Relational Navigation Learning in Continuous A... \n",
430 | "158 Autonomous Overtaking in Gran Turismo Sport Us... \n",
431 | "\n",
432 | " Abstract \\\n",
433 | "0 Active vision is a desirable perceptual featur... \n",
434 | "1 Soft robotics is an emerging technology with e... \n",
435 | "2 Robotic assembly tasks involve complex and low... \n",
436 | "3 Safety is a critical concern when deploying re... \n",
437 | "4 The robotic joint redundancy for executing a t... \n",
438 | ".. ... \n",
439 | "154 This paper considers a multi-target coverage p... \n",
440 | "155 Brain needle intervention is a typical diagnos... \n",
441 | "156 This article presents a general approach to de... \n",
442 | "157 In this paper, a novel navigation learning met... \n",
443 | "158 Professional race-car drivers can execute extr... \n",
444 | "\n",
445 | " Authors \\\n",
446 | "0 [Xu, Qianli, Fang, Fen, Gauthier, Nicolas, Lia... \n",
447 | "1 [Li, Guanda, Shintake, Jun, Hayashibe, Mitsuhiro] \n",
448 | "2 [Wang, Yan, Beltran-Hernandez, Cristian Camilo... \n",
449 | "3 [Chen, Baiming, Liu, Zuxin, Zhu, Jiacheng, Xu,... \n",
450 | "4 [Chai, Jiazheng, Hayashibe, Mitsuhiro] \n",
451 | ".. ... \n",
452 | "154 [Wu, Shiguang, Pu, Zhiqiang, Liu, Zhen, Qiu, T... \n",
453 | "155 [Gao, Huxin, Xiao, Xiao, Qiu, Liang, Meng, Max... \n",
454 | "156 [Egli, Pascal Arturo, Hutter, Marco] \n",
455 | "157 [Zhang, Xueyou, Xi, Wei, Guo, Xian, Fang, Yong... \n",
456 | "158 [Song, Yunlong, Lin, HaoChih, Kaufmann, Elia, ... \n",
457 | "\n",
458 | " Affiliations max_p \n",
459 | "0 [Institute for Infocomm Research, I2R, Institu... 1.0 \n",
460 | "1 [Tohoku University, University of Electro-Comm... 2.0 \n",
461 | "2 [Osaka University, Osaka University, Osaka Uni... 1.0 \n",
462 | "3 [Tsinghua University, Carnegie Mellon Universi... 2.0 \n",
463 | "4 [Tohoku University, Tohoku University] 2.0 \n",
464 | ".. ... ... \n",
465 | "154 [Chinese Academy of Sciences Beijing, China, U... 1.0 \n",
466 | "155 [National University of Singapore, Southern Un... 1.0 \n",
467 | "156 [RSL, ETHZ, ETH Zurich] 1.0 \n",
468 | "157 [Nankai University, Nankai University, Nankai ... 1.0 \n",
469 | "158 [University of Zurich, ETH Zurich, University ... 2.0 \n",
470 | "\n",
471 | "[159 rows x 5 columns]"
472 | ]
473 | },
474 | "execution_count": 23,
475 | "metadata": {},
476 | "output_type": "execute_result"
477 | }
478 | ],
479 | "source": [
480 | "df"
481 | ]
482 | },
483 | {
484 | "cell_type": "code",
485 | "execution_count": 24,
486 | "metadata": {},
487 | "outputs": [],
488 | "source": [
489 | "df = df.sort_values(by=[\"max_p\"],ascending=False)"
490 | ]
491 | },
492 | {
493 | "cell_type": "code",
494 | "execution_count": 25,
495 | "metadata": {},
496 | "outputs": [
497 | {
498 | "data": {
499 | "text/html": [
500 | "\n",
501 | "\n",
514 | "
\n",
515 | " \n",
516 | " \n",
517 | " | \n",
518 | " Title | \n",
519 | " Abstract | \n",
520 | " Authors | \n",
521 | " Affiliations | \n",
522 | " max_p | \n",
523 | "
\n",
524 | " \n",
525 | " \n",
526 | " \n",
527 | " | 137 | \n",
528 | " Reset-Free Reinforcement Learning Via Multi-Ta... | \n",
529 | " Reinforcement Learning (RL) algorithms can in ... | \n",
530 | " [Gupta, Abhishek, Yu, Justin, Zhao, Zihao, Kum... | \n",
531 | " [UC Berkeley, UC Berkeley, UC Berkeley, Univ. ... | \n",
532 | " 9.0 | \n",
533 | "
\n",
534 | " \n",
535 | " | 120 | \n",
536 | " DisCo RL: Distribution-Conditioned Reinforceme... | \n",
537 | " Can we use reinforcement learning to instead l... | \n",
538 | " [Nasiriany, Soroush, Pong, Vitchyr, Nair, Ashv... | \n",
539 | " [UC Berkeley, UC Berkeley, UC Berkeley, UC Ber... | \n",
540 | " 9.0 | \n",
541 | "
\n",
542 | " \n",
543 | " | 33 | \n",
544 | " What Can I Do Here? Learning New Skills by Ima... | \n",
545 | " A generalist robot equipped with learned skill... | \n",
546 | " [Khazatsky, Alexander, Nair, Ashvin, Jing, Dan... | \n",
547 | " [UC Berkeley, UC Berkeley, University of Calif... | \n",
548 | " 9.0 | \n",
549 | "
\n",
550 | " \n",
551 | " | 134 | \n",
552 | " Reinforcement Learning for Robust Parameterize... | \n",
553 | " Developing robust walking controllers for bipe... | \n",
554 | " [Li, Zhongyu, Cheng, Xuxin, Peng, Xue Bin, Abb... | \n",
555 | " [University of California, Berkeley, Universit... | \n",
556 | " 9.0 | \n",
557 | "
\n",
558 | " \n",
559 | " | 26 | \n",
560 | " Model-Based Meta-Reinforcement Learning for Fl... | \n",
561 | " Transporting suspended payloads is challenging... | \n",
562 | " [Belkhale, Suneel, Kahn, Gregory, McAllister, ... | \n",
563 | " [Stanford University, University of California... | \n",
564 | " 9.0 | \n",
565 | "
\n",
566 | " \n",
567 | " | ... | \n",
568 | " ... | \n",
569 | " ... | \n",
570 | " ... | \n",
571 | " ... | \n",
572 | " ... | \n",
573 | "
\n",
574 | " \n",
575 | " | 52 | \n",
576 | " A Peg-In-Hole Task Strategy for Holes in Concrete | \n",
577 | " A method that enables an industrial robot to a... | \n",
578 | " [Yasutomi, André Yuji, Mori, Hiroki, Ogata, Te... | \n",
579 | " [Hitachi Ltd, Waseda University, Waseda Univer... | \n",
580 | " 1.0 | \n",
581 | "
\n",
582 | " \n",
583 | " | 51 | \n",
584 | " Learning from Demonstration without Demonstrat... | \n",
585 | " State-of-the-art reinforcement learning (RL) a... | \n",
586 | " [Blau, Tom, Morere, Philippe, Francis, Gilad] | \n",
587 | " [University of Sydney, University of Sydney, T... | \n",
588 | " 1.0 | \n",
589 | "
\n",
590 | " \n",
591 | " | 50 | \n",
592 | " Dreaming: Model-Based Reinforcement Learning b... | \n",
593 | " In the present paper, we propose a decoder-fre... | \n",
594 | " [Okada, Masashi, Taniguchi, Tadahiro] | \n",
595 | " [Panasonic Corporation, Ritsumeikan University] | \n",
596 | " 1.0 | \n",
597 | "
\n",
598 | " \n",
599 | " | 49 | \n",
600 | " Sample Efficient Reinforcement Learning Via Mo... | \n",
601 | " Model-based deep reinforcement learning has ac... | \n",
602 | " [Yao, Yao, Xiao, Li, An, Zhicheng, Zhang, Wanp... | \n",
603 | " [Tsinghua-Berkeley Shenzhen Institute, Tsinghu... | \n",
604 | " 1.0 | \n",
605 | "
\n",
606 | " \n",
607 | " | 79 | \n",
608 | " Sample-Efficient Reinforcement Learning in Rob... | \n",
609 | " Reinforcement learning (RL) has recently shown... | \n",
610 | " [Tebbe, Jonas, Krauch, Lukas, Gao, Yapeng, Zel... | \n",
611 | " [University of Tübingen, University of Tübinge... | \n",
612 | " 1.0 | \n",
613 | "
\n",
614 | " \n",
615 | "
\n",
616 | "
159 rows × 5 columns
\n",
617 | "
"
618 | ],
619 | "text/plain": [
620 | " Title \\\n",
621 | "137 Reset-Free Reinforcement Learning Via Multi-Ta... \n",
622 | "120 DisCo RL: Distribution-Conditioned Reinforceme... \n",
623 | "33 What Can I Do Here? Learning New Skills by Ima... \n",
624 | "134 Reinforcement Learning for Robust Parameterize... \n",
625 | "26 Model-Based Meta-Reinforcement Learning for Fl... \n",
626 | ".. ... \n",
627 | "52 A Peg-In-Hole Task Strategy for Holes in Concrete \n",
628 | "51 Learning from Demonstration without Demonstrat... \n",
629 | "50 Dreaming: Model-Based Reinforcement Learning b... \n",
630 | "49 Sample Efficient Reinforcement Learning Via Mo... \n",
631 | "79 Sample-Efficient Reinforcement Learning in Rob... \n",
632 | "\n",
633 | " Abstract \\\n",
634 | "137 Reinforcement Learning (RL) algorithms can in ... \n",
635 | "120 Can we use reinforcement learning to instead l... \n",
636 | "33 A generalist robot equipped with learned skill... \n",
637 | "134 Developing robust walking controllers for bipe... \n",
638 | "26 Transporting suspended payloads is challenging... \n",
639 | ".. ... \n",
640 | "52 A method that enables an industrial robot to a... \n",
641 | "51 State-of-the-art reinforcement learning (RL) a... \n",
642 | "50 In the present paper, we propose a decoder-fre... \n",
643 | "49 Model-based deep reinforcement learning has ac... \n",
644 | "79 Reinforcement learning (RL) has recently shown... \n",
645 | "\n",
646 | " Authors \\\n",
647 | "137 [Gupta, Abhishek, Yu, Justin, Zhao, Zihao, Kum... \n",
648 | "120 [Nasiriany, Soroush, Pong, Vitchyr, Nair, Ashv... \n",
649 | "33 [Khazatsky, Alexander, Nair, Ashvin, Jing, Dan... \n",
650 | "134 [Li, Zhongyu, Cheng, Xuxin, Peng, Xue Bin, Abb... \n",
651 | "26 [Belkhale, Suneel, Kahn, Gregory, McAllister, ... \n",
652 | ".. ... \n",
653 | "52 [Yasutomi, André Yuji, Mori, Hiroki, Ogata, Te... \n",
654 | "51 [Blau, Tom, Morere, Philippe, Francis, Gilad] \n",
655 | "50 [Okada, Masashi, Taniguchi, Tadahiro] \n",
656 | "49 [Yao, Yao, Xiao, Li, An, Zhicheng, Zhang, Wanp... \n",
657 | "79 [Tebbe, Jonas, Krauch, Lukas, Gao, Yapeng, Zel... \n",
658 | "\n",
659 | " Affiliations max_p \n",
660 | "137 [UC Berkeley, UC Berkeley, UC Berkeley, Univ. ... 9.0 \n",
661 | "120 [UC Berkeley, UC Berkeley, UC Berkeley, UC Ber... 9.0 \n",
662 | "33 [UC Berkeley, UC Berkeley, University of Calif... 9.0 \n",
663 | "134 [University of California, Berkeley, Universit... 9.0 \n",
664 | "26 [Stanford University, University of California... 9.0 \n",
665 | ".. ... ... \n",
666 | "52 [Hitachi Ltd, Waseda University, Waseda Univer... 1.0 \n",
667 | "51 [University of Sydney, University of Sydney, T... 1.0 \n",
668 | "50 [Panasonic Corporation, Ritsumeikan University] 1.0 \n",
669 | "49 [Tsinghua-Berkeley Shenzhen Institute, Tsinghu... 1.0 \n",
670 | "79 [University of Tübingen, University of Tübinge... 1.0 \n",
671 | "\n",
672 | "[159 rows x 5 columns]"
673 | ]
674 | },
675 | "execution_count": 25,
676 | "metadata": {},
677 | "output_type": "execute_result"
678 | }
679 | ],
680 | "source": [
681 | "df"
682 | ]
683 | },
684 | {
685 | "cell_type": "code",
686 | "execution_count": 26,
687 | "metadata": {},
688 | "outputs": [],
689 | "source": [
690 | "df.to_csv(\"ICRA_long_sorted.csv\")"
691 | ]
692 | },
693 | {
694 | "cell_type": "code",
695 | "execution_count": 27,
696 | "metadata": {},
697 | "outputs": [],
698 | "source": [
699 | "short_info = []"
700 | ]
701 | },
702 | {
703 | "cell_type": "code",
704 | "execution_count": 28,
705 | "metadata": {},
706 | "outputs": [],
707 | "source": [
708 | "for i, row in df.iterrows():\n",
709 | " short_info.append([row[\"Title\"], row[\"Authors\"][0], row[\"Affiliations\"][0]])"
710 | ]
711 | },
712 | {
713 | "cell_type": "code",
714 | "execution_count": 29,
715 | "metadata": {},
716 | "outputs": [],
717 | "source": [
718 | "pd.DataFrame(short_info).to_csv(\"ICRA_short.csv\")"
719 | ]
720 | },
721 | {
722 | "cell_type": "code",
723 | "execution_count": null,
724 | "metadata": {},
725 | "outputs": [],
726 | "source": []
727 | }
728 | ],
729 | "metadata": {
730 | "kernelspec": {
731 | "display_name": "Python 3",
732 | "language": "python",
733 | "name": "python3"
734 | },
735 | "language_info": {
736 | "codemirror_mode": {
737 | "name": "ipython",
738 | "version": 3
739 | },
740 | "file_extension": ".py",
741 | "mimetype": "text/x-python",
742 | "name": "python",
743 | "nbconvert_exporter": "python",
744 | "pygments_lexer": "ipython3",
745 | "version": "3.7.6"
746 | }
747 | },
748 | "nbformat": 4,
749 | "nbformat_minor": 5
750 | }
751 |
--------------------------------------------------------------------------------