├── FasterRCNN
│   ├── weights
│   │   └── .empty
│   ├── config
│   │   └── ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.yaml
│   └── tools
│       └── detect_video.py
├── FEATEXT
│   ├── data.tar
│   ├── scripts
│   │   └── download_models.sh
│   ├── extract_example_obj.py
│   ├── extract_example_rgb.py
│   └── extract_example_flow.py
├── RULSTM
│   ├── requirements.txt
│   ├── data
│   │   ├── ek55
│   │   │   ├── EPIC_many_shot_verbs.csv
│   │   │   ├── validation_videos.csv
│   │   │   ├── EPIC_many_shot_nouns.csv
│   │   │   └── training_videos.csv
│   │   ├── ek100
│   │   │   ├── validation_videos.csv
│   │   │   ├── training_videos.csv
│   │   │   ├── validation_unseen_participants_ids.csv
│   │   │   ├── validation_tail_verbs_ids.csv
│   │   │   └── validation_tail_nouns_ids.csv
│   │   └── egtea
│   │       └── actions.csv
│   ├── scripts
│   │   ├── download_data_ek55.sh
│   │   ├── download_data_ek55_full.sh
│   │   ├── download_data_ek100_full.sh
│   │   ├── download_models_ek100.sh
│   │   ├── train_anticipation_ek55.sh
│   │   ├── train_early_recognition_ek55.sh
│   │   ├── download_models_ek55.sh
│   │   └── train_anticipation_ek100.sh
│   ├── environment.yml
│   ├── utils.py
│   ├── models.py
│   ├── dataset.py
│   └── main.py
└── README.md
/FasterRCNN/weights/.empty: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /FEATEXT/data.tar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fpv-iplab/rulstm/HEAD/FEATEXT/data.tar -------------------------------------------------------------------------------- /FEATEXT/scripts/download_models.sh: -------------------------------------------------------------------------------- 1 | mkdir models 2 | wget -P models/ http://iplab.dmi.unict.it/sharing/rulstm/TSN-rgb.pth.tar 3 | wget -P models/ http://iplab.dmi.unict.it/sharing/rulstm/TSN-flow.pth.tar 4 | -------------------------------------------------------------------------------- /RULSTM/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2019.6.16 2 | cffi==1.12.3 3 | lmdb==0.96 4 | mkl-fft==1.0.12 5 | mkl-random==1.0.2 6 | numpy==1.17.0 7 | pandas==0.25.0 8 | pycparser==2.19 9 | python-dateutil==2.8.0 10 | pytz==2019.2 11 | six==1.12.0 12 | torch==1.0.1 13 | tqdm==4.32.2 14 | -------------------------------------------------------------------------------- /RULSTM/data/ek55/EPIC_many_shot_verbs.csv: -------------------------------------------------------------------------------- 1 | verb_class,verb 2 | 1,put 3 | 0,take 4 | 4,wash 5 | 2,open 6 | 3,close 7 | 5,cut 8 | 6,mix 9 | 7,pour 10 | 9,move 11 | 12,turn-on 12 | 10,remove 13 | 15,turn-off 14 | 8,throw 15 | 11,dry 16 | 16,peel 17 | 22,insert 18 | 13,turn 19 | 14,shake 20 | 21,squeeze 21 | 23,press 22 | 20,check 23 | 19,scoop 24 | 18,empty 25 | 17,adjust 26 | 24,fill 27 | 32,flip 28 | -------------------------------------------------------------------------------- /RULSTM/scripts/download_data_ek55.sh: -------------------------------------------------------------------------------- 1 | mkdir -p data/ek55/rgb 2 | mkdir -p data/ek55/flow 3 | mkdir -p data/ek55/obj 4 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/rgb/data.mdb -o data/ek55/rgb/data.mdb 5 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/flow/data.mdb -o data/ek55/flow/data.mdb 6 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/obj/data.mdb -o data/ek55/obj/data.mdb 7 | -------------------------------------------------------------------------------- /RULSTM/scripts/download_data_ek55_full.sh: 
-------------------------------------------------------------------------------- 1 | mkdir data/ek55/rgb 2 | mkdir data/ek55/flow 3 | mkdir data/ek55/obj 4 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/rgb_full/data.mdb -o data/ek55/rgb/data.mdb 5 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/flow_full/data.mdb -o data/ek55/flow/data.mdb 6 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/obj_full/data.mdb -o data/ek55/obj/data.mdb 7 | -------------------------------------------------------------------------------- /RULSTM/scripts/download_data_ek100_full.sh: -------------------------------------------------------------------------------- 1 | mkdir data/ek100/rgb 2 | mkdir data/ek100/flow 3 | mkdir data/ek100/obj 4 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/ek100/rgb_full/data.mdb -o data/ek100/rgb/data.mdb 5 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/ek100/flow_full/data.mdb -o data/ek100/flow/data.mdb 6 | curl https://iplab.dmi.unict.it/sharing/rulstm/features/ek100/obj_full/data.mdb -o data/ek100/obj/data.mdb 7 | -------------------------------------------------------------------------------- /RULSTM/data/ek55/validation_videos.csv: -------------------------------------------------------------------------------- 1 | P01_01 2 | P01_10 3 | P02_03 4 | P02_05 5 | P03_06 6 | P03_11 7 | P04_09 8 | P06_05 9 | P07_02 10 | P07_08 11 | P07_10 12 | P08_01 13 | P08_05 14 | P08_12 15 | P10_01 16 | P13_04 17 | P13_06 18 | P13_09 19 | P14_01 20 | P14_02 21 | P20_03 22 | P20_04 23 | P22_08 24 | P22_10 25 | P22_11 26 | P22_13 27 | P23_03 28 | P24_08 29 | P25_11 30 | P26_02 31 | P26_11 32 | P26_16 33 | P27_03 34 | P28_05 35 | P28_12 36 | P28_13 37 | P30_01 38 | P30_03 39 | P31_01 40 | P31_08 41 | -------------------------------------------------------------------------------- /RULSTM/scripts/download_models_ek100.sh: -------------------------------------------------------------------------------- 1 | mkdir -p models/ek100 2 | cd models/ek100 3 | curl https://iplab.dmi.unict.it/sharing/rulstm/ek100_models/RULSTM-anticipation_0.25_6_8_rgb_mt5r_best.pth.tar -O 4 | curl https://iplab.dmi.unict.it/sharing/rulstm/ek100_models/RULSTM-anticipation_0.25_6_8_flow_mt5r_best.pth.tar -O 5 | curl https://iplab.dmi.unict.it/sharing/rulstm/ek100_models/RULSTM-anticipation_0.25_6_8_obj_mt5r_best.pth.tar -O 6 | curl https://iplab.dmi.unict.it/sharing/rulstm/ek100_models/RULSTM-anticipation_0.25_6_8_fusion_mt5r_best.pth.tar -O 7 | -------------------------------------------------------------------------------- /RULSTM/scripts/train_anticipation_ek55.sh: -------------------------------------------------------------------------------- 1 | mkdir -p models/ek55 2 | 3 | # RGB branch 4 | python main.py train data/ek55 models/ek55 --modality rgb --sequence_completion 5 | python main.py train data/ek55 models/ek55 --modality rgb 6 | 7 | # Optical Flow branch 8 | python main.py train data/ek55 models/ek55 --modality flow --sequence_completion 9 | python main.py train data/ek55 models/ek55 --modality flow 10 | 11 | # Object branch 12 | python main.py train data/ek55 models/ek55 --modality obj --feat_in 352 --sequence_completion 13 | python main.py train data/ek55 models/ek55 --modality obj --feat_in 352 14 | 15 | # Complete architecture with MATT 16 | python main.py train data/ek55 models/ek55 --modality fusion --feat_in 352 17 | -------------------------------------------------------------------------------- /FEATEXT/extract_example_obj.py: 
-------------------------------------------------------------------------------- 1 | import torch 2 | import numpy as np 3 | from torch import nn 4 | from pretrainedmodels import bninception 5 | from torchvision import transforms 6 | from glob import glob 7 | from PIL import Image 8 | import lmdb 9 | from tqdm import tqdm 10 | from os.path import basename 11 | 12 | env = lmdb.open('features/obj', map_size=1099511627776) 13 | video_name = 'P01_01_frame_{:010d}.jpg' 14 | detections = np.load('data/sample_obj.npy', allow_pickle=True, encoding='bytes') 15 | 16 | for i, dets in enumerate(tqdm(detections,'Extracting features')): 17 | feat = np.zeros(352, dtype='float32') # bag-of-objects: one confidence bin per object class 18 | for d in dets: 19 | feat[int(d[0])]+=d[5] 20 | key = video_name.format(i+1) 21 | with env.begin(write=True) as txn: 22 | txn.put(key.encode(),feat.tobytes()) 23 | 24 | 25 | -------------------------------------------------------------------------------- /RULSTM/scripts/train_early_recognition_ek55.sh: -------------------------------------------------------------------------------- 1 | mkdir -p models/ek55 2 | 3 | # RGB Branch 4 | python main.py train data/ek55 models/ek55 --modality rgb --task early_recognition --sequence_completion --epochs 200 5 | python main.py train data/ek55 models/ek55 --modality rgb --task early_recognition --epochs 200 6 | 7 | # Optical Flow Branch 8 | python main.py train data/ek55 models/ek55 --modality flow --task early_recognition --sequence_completion --epochs 200 9 | python main.py train data/ek55 models/ek55 --modality flow --task early_recognition --epochs 200 10 | 11 | # Object Branch 12 | python main.py train data/ek55 models/ek55 --modality obj --task early_recognition --sequence_completion --epochs 200 --feat_in 352 13 | python main.py train data/ek55 models/ek55 --modality obj --task early_recognition --epochs 200 --feat_in 352 14 | 15 | -------------------------------------------------------------------------------- /RULSTM/scripts/download_models_ek55.sh: -------------------------------------------------------------------------------- 1 | mkdir -p models/ek55 2 | cd models/ek55 3 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-anticipation_0.25_6_8_rgb_best.pth.tar -O 4 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-anticipation_0.25_6_8_flow_best.pth.tar -O 5 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-anticipation_0.25_6_8_obj_best.pth.tar -O 6 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-anticipation_0.25_6_8_fusion_best.pth.tar -O 7 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-early_recognition_0.25_8_rgb_best.pth.tar -O 8 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-early_recognition_0.25_8_flow_best.pth.tar -O 9 | curl https://iplab.dmi.unict.it/sharing/rulstm/iccv_models/RULSTM-early_recognition_0.25_8_obj_best.pth.tar -O -------------------------------------------------------------------------------- /RULSTM/scripts/train_anticipation_ek100.sh: -------------------------------------------------------------------------------- 1 | mkdir -p models/ek100 2 | 3 | # RGB branch 4 | python main.py train data/ek100 models/ek100 --modality rgb --sequence_completion --mt5r --num_class 3806 5 | python main.py train data/ek100 models/ek100 --modality rgb --mt5r --num_class 3806 6 | 7 | # Optical Flow branch 8 | python main.py train data/ek100 models/ek100 --modality flow --sequence_completion --mt5r --num_class 3806 9 | python main.py train data/ek100 models/ek100
--modality flow --mt5r --num_class 3806 10 | 11 | # Object branch 12 | python main.py train data/ek100 models/ek100 --modality obj --feat_in 352 --sequence_completion --mt5r --num_class 3806 13 | python main.py train data/ek100 models/ek100 --modality obj --feat_in 352 --mt5r --num_class 3806 14 | 15 | # Complete architecture with MATT 16 | python main.py train data/ek100 models/ek100 --modality fusion --feat_in 352 --mt5r --num_class 3806 17 | -------------------------------------------------------------------------------- /RULSTM/data/ek55/EPIC_many_shot_nouns.csv: -------------------------------------------------------------------------------- 1 | noun_class,noun 2 | 3,tap 3 | 4,plate 4 | 8,cupboard 5 | 1,pan 6 | 7,spoon 7 | 5,knife 8 | 9,drawer 9 | 10,fridge 10 | 6,bowl 11 | 12,hand 12 | 11,lid 13 | 13,onion 14 | 16,glass 15 | 23,cup 16 | 17,water 17 | 19,board:chopping 18 | 21,sponge 19 | 18,fork 20 | 32,cloth 21 | 20,bag 22 | 28,bottle 23 | 15,pot 24 | 22,spatula 25 | 39,box 26 | 26,meat 27 | 24,oil 28 | 30,tomato 29 | 31,salt 30 | 29,container 31 | 27,potato 32 | 77,package 33 | 37,food 34 | 47,hob 35 | 35,pasta 36 | 78,top 37 | 40,carrot 38 | 45,garlic 39 | 68,skin 40 | 44,rice 41 | 25,bin 42 | 38,kettle 43 | 46,pepper 44 | 33,sink 45 | 51,cheese 46 | 56,oven 47 | 70,liquid:washing 48 | 58,coffee 49 | 52,bread 50 | 108,rubbish 51 | 67,peach 52 | 42,colander 53 | 41,sauce 54 | 54,salad 55 | 126,maker:coffee 56 | 60,jar 57 | 84,sausage 58 | 75,cutlery 59 | 43,milk 60 | 62,chicken 61 | 50,egg 62 | 59,filter 63 | 55,microwave 64 | 49,dishwasher 65 | 87,can 66 | 48,dough 67 | 63,tray 68 | 72,leaf 69 | 105,jug 70 | 106,heat 71 | 79,spice 72 | 111,stock 73 | -------------------------------------------------------------------------------- /FasterRCNN/config/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.yaml: -------------------------------------------------------------------------------- 1 | MODEL: 2 | TYPE: generalized_rcnn 3 | CONV_BODY: FPN.add_fpn_ResNet101_conv5_body 4 | NUM_CLASSES: 353 5 | FASTER_RCNN: True 6 | NUM_GPUS: 2 7 | SOLVER: 8 | WEIGHT_DECAY: 0.0001 9 | LR_POLICY: steps_with_decay 10 | BASE_LR: 0.005 11 | GAMMA: 0.1 12 | MAX_ITER: 504531 13 | STEPS: [0, 336354, 454077] 14 | FPN: 15 | FPN_ON: True 16 | MULTILEVEL_ROIS: True 17 | MULTILEVEL_RPN: True 18 | FAST_RCNN: 19 | ROI_BOX_HEAD: fast_rcnn_heads.add_roi_2mlp_head 20 | ROI_XFORM_METHOD: RoIAlign 21 | ROI_XFORM_RESOLUTION: 7 22 | ROI_XFORM_SAMPLING_RATIO: 2 23 | TRAIN: 24 | #WEIGHTS: https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/MSRA/R-101.pkl 25 | WEIGHTS: ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl 26 | DATASETS: ('ek18_train',) 27 | SCALES: (800,) 28 | MAX_SIZE: 1333 29 | AUTO_RESUME: True 30 | BATCH_SIZE_PER_IM: 512 31 | RPN_PRE_NMS_TOP_N: 2000 # Per FPN level 32 | TEST: 33 | DATASETS: ('ek18_test',) 34 | SCALE: 800 35 | MAX_SIZE: 1333 36 | NMS: 0.5 37 | RPN_PRE_NMS_TOP_N: 1000 # Per FPN level 38 | RPN_POST_NMS_TOP_N: 1000 39 | OUTPUT_DIR: . 
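# Usage note (a sketch, not part of the original config): this file is consumed by
# tools/detect_video.py via its --cfg flag; the weights path below is an assumption
# (the empty FasterRCNN/weights/ folder suggests the released .pkl goes there):
#   python tools/detect_video.py \
#       --cfg config/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.yaml \
#       --wts weights/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl \
#       /path/to/video.MP4
# The script runs the detector on every frame and saves the per-frame boxes to
# /path/to/video.MP4_detections.npy (see /FasterRCNN/tools/detect_video.py below).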
40 | -------------------------------------------------------------------------------- /RULSTM/environment.yml: -------------------------------------------------------------------------------- 1 | name: rulstm 2 | channels: 3 | - defaults 4 | dependencies: 5 | - _libgcc_mutex=0.1=main 6 | - blas=1.0=mkl 7 | - ca-certificates=2019.5.15=0 8 | - certifi=2019.6.16=py37_1 9 | - cffi=1.12.3=py37h2e261b9_0 10 | - cudatoolkit=10.0.130=0 11 | - cudnn=7.6.0=cuda10.0_0 12 | - intel-openmp=2019.4=243 13 | - libedit=3.1.20181209=hc058e9b_0 14 | - libffi=3.2.1=hd88cf55_4 15 | - libgcc-ng=9.1.0=hdf63c60_0 16 | - libgfortran-ng=7.3.0=hdf63c60_0 17 | - libstdcxx-ng=9.1.0=hdf63c60_0 18 | - mkl=2019.4=243 19 | - mkl_fft=1.0.12=py37ha843d7b_0 20 | - mkl_random=1.0.2=py37hd81dba3_0 21 | - ncurses=6.1=he6710b0_1 22 | - ninja=1.9.0=py37hfd86e86_0 23 | - numpy=1.16.4=py37h7e9f1db_0 24 | - numpy-base=1.16.4=py37hde5b4d6_0 25 | - openssl=1.1.1c=h7b6447c_1 26 | - pip=19.1.1=py37_0 27 | - pycparser=2.19=py37_0 28 | - python=3.7.3=h0371630_0 29 | - pytorch=1.0.1=cuda100py37he554f03_0 30 | - readline=7.0=h7b6447c_5 31 | - setuptools=41.0.1=py37_0 32 | - sqlite=3.29.0=h7b6447c_0 33 | - tk=8.6.8=hbc83047_0 34 | - wheel=0.33.4=py37_0 35 | - xz=5.2.4=h14c3975_4 36 | - zlib=1.2.11=h7b6447c_3 37 | - pip: 38 | - lmdb==0.96 39 | - pandas==0.25.0 40 | - python-dateutil==2.8.0 41 | - pytz==2019.2 42 | - six==1.12.0 43 | - torch==1.0.1 44 | - tqdm==4.32.2 45 | prefix: /home/furnari/anaconda3/envs/rulstm 46 | 47 | -------------------------------------------------------------------------------- /FEATEXT/extract_example_rgb.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from pretrainedmodels import bninception 4 | from torchvision import transforms 5 | from glob import glob 6 | from PIL import Image 7 | import lmdb 8 | from tqdm import tqdm 9 | from os.path import basename 10 | from argparse import ArgumentParser 11 | 12 | env = lmdb.open('features/rgb', map_size=1099511627776) 13 | 14 | device = 'cuda' if torch.cuda.is_available() else 'cpu' 15 | 16 | model = bninception(pretrained=None) 17 | state_dict = torch.load('models/TSN-rgb.pth.tar')['state_dict'] 18 | state_dict = {k.replace('module.base_model.','') : v for k,v in state_dict.items()} 19 | model.load_state_dict(state_dict, strict=False) 20 | 21 | 22 | model.last_linear = nn.Identity() 23 | model.global_pool = nn.AdaptiveAvgPool2d(1) 24 | 25 | model.to(device) 26 | 27 | transform = transforms.Compose([ 28 | transforms.Resize([256, 454]), 29 | transforms.ToTensor(), 30 | transforms.Lambda(lambda x: x[[2,1,0],...]*255), #to BGR 31 | transforms.Normalize(mean=[104, 117, 128], 32 | std=[1, 1, 1]), 33 | ]) 34 | 35 | imgs = sorted(glob('data/sample_rgb/*.jpg')) 36 | 37 | model.eval() 38 | for im in tqdm(imgs,'Extracting features'): 39 | key = basename(im) 40 | img = Image.open(im) 41 | data = transform(img).unsqueeze(0).to(device) 42 | feat = model(data).squeeze().detach().cpu().numpy() 43 | with env.begin(write=True) as txn: 44 | txn.put(key.encode(),feat.tobytes()) 45 | 46 | 47 | -------------------------------------------------------------------------------- /RULSTM/data/ek100/validation_videos.csv: -------------------------------------------------------------------------------- 1 | P01_11 2 | P01_12 3 | P01_13 4 | P01_14 5 | P01_15 6 | P02_12 7 | P02_13 8 | P02_14 9 | P02_15 10 | P03_21 11 | P03_22 12 | P03_23 13 | P03_24 14 | P03_25 15 | P03_26 16 | P04_24 17 | P04_25 18 | P04_26 19 | P04_27 20 
| P04_28 21 | P04_29 22 | P04_30 23 | P04_31 24 | P04_32 25 | P04_33 26 | P05_07 27 | P05_09 28 | P06_10 29 | P06_11 30 | P06_12 31 | P06_13 32 | P06_14 33 | P07_12 34 | P07_13 35 | P07_14 36 | P07_15 37 | P07_16 38 | P07_17 39 | P07_18 40 | P08_09 41 | P08_10 42 | P08_14 43 | P08_15 44 | P08_16 45 | P08_17 46 | P09_07 47 | P09_08 48 | P10_03 49 | P11_17 50 | P11_18 51 | P11_19 52 | P11_20 53 | P11_21 54 | P11_22 55 | P11_23 56 | P11_24 57 | P12_03 58 | P12_08 59 | P13_01 60 | P13_02 61 | P13_03 62 | P14_06 63 | P14_08 64 | P15_04 65 | P15_05 66 | P15_06 67 | P16_04 68 | P17_02 69 | P18_01 70 | P18_02 71 | P18_03 72 | P18_04 73 | P18_05 74 | P18_06 75 | P18_07 76 | P18_08 77 | P18_09 78 | P18_10 79 | P18_11 80 | P18_12 81 | P19_05 82 | P19_06 83 | P20_05 84 | P20_06 85 | P20_07 86 | P21_02 87 | P22_01 88 | P22_02 89 | P22_03 90 | P22_04 91 | P23_05 92 | P24_09 93 | P25_06 94 | P25_07 95 | P25_08 96 | P26_30 97 | P26_31 98 | P26_32 99 | P26_33 100 | P26_34 101 | P26_35 102 | P26_36 103 | P26_37 104 | P26_38 105 | P26_39 106 | P26_40 107 | P26_41 108 | P27_05 109 | P28_15 110 | P28_16 111 | P28_17 112 | P28_18 113 | P28_19 114 | P28_20 115 | P28_21 116 | P28_22 117 | P28_23 118 | P28_24 119 | P28_25 120 | P28_26 121 | P29_05 122 | P29_06 123 | P30_07 124 | P30_08 125 | P30_09 126 | P31_10 127 | P31_11 128 | P31_12 129 | P32_01 130 | P32_02 131 | P32_03 132 | P32_04 133 | P32_05 134 | P32_06 135 | P32_07 136 | P32_08 137 | P32_09 138 | P32_10 139 | -------------------------------------------------------------------------------- /FEATEXT/extract_example_flow.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from pretrainedmodels import bninception 4 | from torchvision import transforms 5 | from glob import glob 6 | from PIL import Image 7 | import lmdb 8 | from tqdm import tqdm 9 | from os.path import basename 10 | from argparse import ArgumentParser 11 | 12 | env = lmdb.open('features/flow', map_size=1099511627776) 13 | 14 | device = 'cuda' if torch.cuda.is_available() else 'cpu' 15 | 16 | model = bninception(pretrained=None) 17 | model.conv1_7x7_s2 = nn.Conv2d(10, 64,kernel_size=(7,7), stride=(2,2), padding=(3,3)) 18 | state_dict = torch.load('models/TSN-flow.pth.tar')['state_dict'] 19 | state_dict = {k.replace('module.base_model.','') : v for k,v in state_dict.items()} 20 | model.load_state_dict(state_dict, strict=False) 21 | 22 | model.last_linear = nn.Identity() 23 | model.global_pool = nn.AdaptiveAvgPool2d(1) 24 | 25 | model.to(device) 26 | 27 | transform = transforms.Compose([ 28 | transforms.Resize([256, 454]), 29 | transforms.ToTensor(), 30 | transforms.Lambda(lambda x: x*255), 31 | transforms.Normalize(mean=[128], 32 | std=[1]), 33 | ]) 34 | 35 | imgs = sorted(glob('data/sample_flow/*_u_*.jpg')) 36 | 37 | flow_buffer = [] 38 | 39 | model.eval() 40 | for im in tqdm(imgs,'Extracting features'): 41 | key = basename(im).replace('flow_u_','frame_') 42 | img_u = Image.open(im).convert('L') 43 | img_v = Image.open(im.replace('_u_','_v_')).convert('L') 44 | #repeat the first five frames 45 | for _ in range(1 if len(flow_buffer)>0 else 5): 46 | flow_buffer.append(transform(img_u)) 47 | flow_buffer.append(transform(img_v)) 48 | if len(flow_buffer)>10: 49 | del flow_buffer[0] 50 | del flow_buffer[0] 51 | if len(flow_buffer)==10: 52 | data = torch.cat(flow_buffer[-10:],0).unsqueeze(0).to(device) 53 | feat = model(data).squeeze().detach().cpu().numpy() 54 | with env.begin(write=True) as txn: 55 | 
txn.put(key.encode(),feat.tobytes()) 56 | 57 | 58 | -------------------------------------------------------------------------------- /RULSTM/data/ek55/training_videos.csv: -------------------------------------------------------------------------------- 1 | P01_02 2 | P01_03 3 | P01_04 4 | P01_05 5 | P01_06 6 | P01_07 7 | P01_08 8 | P01_09 9 | P01_16 10 | P01_17 11 | P01_18 12 | P01_19 13 | P02_01 14 | P02_02 15 | P02_04 16 | P02_06 17 | P02_07 18 | P02_08 19 | P02_09 20 | P02_10 21 | P02_11 22 | P03_02 23 | P03_03 24 | P03_04 25 | P03_05 26 | P03_07 27 | P03_08 28 | P03_09 29 | P03_10 30 | P03_12 31 | P03_13 32 | P03_14 33 | P03_15 34 | P03_16 35 | P03_17 36 | P03_18 37 | P03_19 38 | P03_20 39 | P03_27 40 | P03_28 41 | P04_01 42 | P04_02 43 | P04_03 44 | P04_04 45 | P04_05 46 | P04_06 47 | P04_07 48 | P04_08 49 | P04_10 50 | P04_11 51 | P04_12 52 | P04_13 53 | P04_14 54 | P04_15 55 | P04_16 56 | P04_17 57 | P04_18 58 | P04_19 59 | P04_20 60 | P04_21 61 | P04_22 62 | P04_23 63 | P05_01 64 | P05_02 65 | P05_03 66 | P05_04 67 | P05_05 68 | P05_06 69 | P05_08 70 | P06_01 71 | P06_02 72 | P06_03 73 | P06_07 74 | P06_08 75 | P06_09 76 | P07_01 77 | P07_03 78 | P07_04 79 | P07_05 80 | P07_06 81 | P07_07 82 | P07_09 83 | P07_11 84 | P08_02 85 | P08_03 86 | P08_04 87 | P08_06 88 | P08_07 89 | P08_08 90 | P08_11 91 | P08_13 92 | P08_18 93 | P08_19 94 | P08_20 95 | P08_21 96 | P08_22 97 | P08_23 98 | P08_24 99 | P08_25 100 | P08_26 101 | P08_27 102 | P08_28 103 | P10_02 104 | P10_04 105 | P12_01 106 | P12_02 107 | P12_04 108 | P12_05 109 | P12_06 110 | P12_07 111 | P13_05 112 | P13_07 113 | P13_08 114 | P13_10 115 | P14_03 116 | P14_04 117 | P14_05 118 | P14_07 119 | P14_09 120 | P15_01 121 | P15_02 122 | P15_03 123 | P15_07 124 | P15_08 125 | P15_09 126 | P15_10 127 | P15_11 128 | P15_12 129 | P15_13 130 | P16_01 131 | P16_02 132 | P16_03 133 | P17_01 134 | P17_03 135 | P17_04 136 | P19_01 137 | P19_02 138 | P19_03 139 | P19_04 140 | P20_01 141 | P20_02 142 | P21_01 143 | P21_03 144 | P21_04 145 | P22_05 146 | P22_06 147 | P22_07 148 | P22_09 149 | P22_12 150 | P22_14 151 | P22_15 152 | P22_16 153 | P22_17 154 | P23_01 155 | P23_02 156 | P23_04 157 | P24_01 158 | P24_02 159 | P24_03 160 | P24_04 161 | P24_05 162 | P24_06 163 | P24_07 164 | P25_01 165 | P25_02 166 | P25_03 167 | P25_04 168 | P25_05 169 | P25_09 170 | P25_10 171 | P25_12 172 | P26_01 173 | P26_03 174 | P26_04 175 | P26_05 176 | P26_06 177 | P26_07 178 | P26_08 179 | P26_09 180 | P26_10 181 | P26_12 182 | P26_13 183 | P26_14 184 | P26_15 185 | P26_17 186 | P26_18 187 | P26_19 188 | P26_20 189 | P26_21 190 | P26_22 191 | P26_23 192 | P26_24 193 | P26_25 194 | P26_26 195 | P26_27 196 | P26_28 197 | P26_29 198 | P27_01 199 | P27_02 200 | P27_04 201 | P27_06 202 | P27_07 203 | P28_01 204 | P28_02 205 | P28_03 206 | P28_04 207 | P28_06 208 | P28_07 209 | P28_08 210 | P28_09 211 | P28_10 212 | P28_11 213 | P28_14 214 | P29_01 215 | P29_02 216 | P29_03 217 | P29_04 218 | P30_02 219 | P30_04 220 | P30_05 221 | P30_06 222 | P30_10 223 | P30_11 224 | P31_02 225 | P31_03 226 | P31_04 227 | P31_05 228 | P31_06 229 | P31_07 230 | P31_09 231 | P31_13 232 | P31_14 233 | -------------------------------------------------------------------------------- /RULSTM/data/egtea/actions.csv: -------------------------------------------------------------------------------- 1 | 0, 0_0, Inspect/Read_recipe 2 | 1, 1_1, Open_fridge 3 | 2, 2_2, Take_eating:utensil 4 | 3, 3_3, Cut_tomato 5 | 4, 4_4, Turn on_faucet 6 | 5, 5_2, Put_eating:utensil 7 | 6, 1_5, Open_cabinet 
8 | 7, 2_6, Take_condiment:container 9 | 8, 3_7, Cut_cucumber 10 | 9, 6_8, Operate_stove 11 | 10, 7_1, Close_fridge 12 | 11, 3_9, Cut_carrot 13 | 12, 5_6, Put_condiment:container 14 | 13, 3_10, Cut_onion 15 | 14, 1_11, Open_drawer 16 | 15, 2_12, Take_plate 17 | 16, 2_13, Take_bowl 18 | 17, 5_13, Put_bowl 19 | 18, 5_14, Put_trash 20 | 19, 5_12, Put_plate 21 | 20, 3_15, Cut_bell:pepper 22 | 21, 5_16, Put_cooking:utensil 23 | 22, 2_17, Take_paper:towel 24 | 23, 8_18, Move Around_bacon 25 | 24, 1_6, Open_condiment:container 26 | 25, 9_2, Wash_eating:utensil 27 | 26, 10_19, Spread_condiment 28 | 27, 11_4, Turn off_faucet 29 | 28, 5_20, Put_pan 30 | 29, 2_16, Take_cooking:utensil 31 | 30, 5_21, Put_lettuce 32 | 31, 8_22, Move Around_patty 33 | 32, 5_23, Put_pot 34 | 33, 7_5, Close_cabinet 35 | 34, 5_24, Put_bread 36 | 35, 2_24, Take_bread 37 | 36, 7_6, Close_condiment:container 38 | 37, 1_25, Open_fridge:drawer 39 | 38, 9_26, Wash_hand 40 | 39, 5_3, Put_tomato 41 | 40, 2_27, Take_seasoning:container 42 | 41, 2_28, Take_cup 43 | 42, 12_21, Divide/Pull Apart_lettuce 44 | 43, 5_28, Put_cup 45 | 44, 2_23, Take_pot 46 | 45, 13_29, Clean/Wipe_counter 47 | 46, 2_30, Take_bread:container 48 | 47, 2_3, Take_tomato 49 | 48, 2_20, Take_pan 50 | 49, 8_20, Move Around_pan 51 | 50, 9_31, Wash_cutting:board 52 | 51, 5_30, Put_bread:container 53 | 52, 2_32, Take_sponge 54 | 53, 2_21, Take_lettuce 55 | 54, 2_10, Take_onion 56 | 55, 5_32, Put_sponge 57 | 56, 12_17, Divide/Pull Apart_paper:towel 58 | 57, 1_33, Open_dishwasher 59 | 58, 2_34, Take_cheese:container 60 | 59, 2_35, Take_oil:container 61 | 60, 5_27, Put_seasoning:container 62 | 61, 2_7, Take_cucumber 63 | 62, 9_20, Wash_pan 64 | 63, 2_15, Take_bell:pepper 65 | 64, 12_10, Divide/Pull Apart_onion 66 | 65, 5_31, Put_cutting:board 67 | 66, 14_36, Mix_mixture 68 | 67, 2_37, Take_tomato:container 69 | 68, 5_38, Put_cheese 70 | 69, 8_2, Move Around_eating:utensil 71 | 70, 5_15, Put_bell:pepper 72 | 71, 15_39, Pour_oil 73 | 72, 2_40, Take_pasta:container 74 | 73, 3_21, Cut_lettuce 75 | 74, 5_37, Put_tomato:container 76 | 75, 9_13, Wash_bowl 77 | 76, 3_41, Cut_olive 78 | 77, 7_11, Close_drawer 79 | 78, 15_19, Pour_condiment 80 | 79, 9_23, Wash_pot 81 | 80, 14_42, Mix_pasta 82 | 81, 1_30, Open_bread:container 83 | 82, 2_43, Take_grocery:bag 84 | 83, 2_38, Take_cheese 85 | 84, 15_44, Pour_seasoning 86 | 85, 14_45, Mix_egg 87 | 86, 15_46, Pour_water 88 | 87, 5_17, Put_paper:towel 89 | 88, 5_7, Put_cucumber 90 | 89, 16_47, Compress_sandwich 91 | 90, 5_34, Put_cheese:container 92 | 91, 5_10, Put_onion 93 | 92, 17_45, Crack_egg 94 | 93, 2_31, Take_cutting:board 95 | 94, 1_35, Open_oil:container 96 | 95, 18_48, Squeeze_washing:liquid 97 | 96, 6_49, Operate_microwave 98 | 97, 7_25, Close_fridge:drawer 99 | 98, 9_50, Wash_strainer 100 | 99, 8_13, Move Around_bowl 101 | 100, 8_23, Move Around_pot 102 | 101, 5_43, Put_grocery:bag 103 | 102, 2_45, Take_egg 104 | 103, 1_34, Open_cheese:container 105 | 104, 7_35, Close_oil:container 106 | 105, 5_35, Put_oil:container 107 | -------------------------------------------------------------------------------- /FasterRCNN/tools/detect_video.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright (c) 2017-present, Facebook, Inc. 4 | # 5 | # Licensed under the Apache License, Version 2.0 (the "License"); 6 | # you may not use this file except in compliance with the License. 
7 | # You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | ############################################################################## 17 | 18 | """Perform inference on all the frames of a video 19 | """ 20 | 21 | from __future__ import absolute_import 22 | from __future__ import division 23 | from __future__ import print_function 24 | from __future__ import unicode_literals 25 | 26 | from collections import defaultdict 27 | import argparse 28 | import cv2 # NOQA (Must import before importing caffe2 due to bug in cv2) 29 | import glob 30 | import logging 31 | import os 32 | import sys 33 | import time 34 | import numpy as np 35 | 36 | from caffe2.python import workspace 37 | 38 | from detectron.core.config import assert_and_infer_cfg 39 | from detectron.core.config import cfg 40 | from detectron.core.config import merge_cfg_from_file 41 | from detectron.utils.io import cache_url 42 | from detectron.utils.logging import setup_logging 43 | from detectron.utils.timer import Timer 44 | import detectron.core.test_engine as infer_engine 45 | import detectron.datasets.dummy_datasets as dummy_datasets 46 | import detectron.utils.c2 as c2_utils 47 | import detectron.utils.vis as vis_utils 48 | 49 | c2_utils.import_detectron_ops() 50 | 51 | # OpenCL may be enabled by default in OpenCV3; disable it because it's not 52 | # thread safe and causes unwanted GPU memory allocations. 
53 | cv2.ocl.setUseOpenCL(False) 54 | 55 | 56 | def parse_args(): 57 | parser = argparse.ArgumentParser(description='End-to-end inference') 58 | parser.add_argument( 59 | '--cfg', 60 | dest='cfg', 61 | help='cfg model file (/path/to/model_config.yaml)', 62 | default=None, 63 | type=str 64 | ) 65 | parser.add_argument( 66 | '--wts', 67 | dest='weights', 68 | help='weights model file (/path/to/model_weights.pkl)', 69 | default=None, 70 | type=str 71 | ) 72 | parser.add_argument( 73 | '--top_predictions', 74 | dest='top_predictions', 75 | help='Number of predictions to store', 76 | default=100, 77 | type=int 78 | ) 79 | parser.add_argument( 80 | 'path_to_video', help='path_to_video', default=None 81 | ) 82 | if len(sys.argv) == 1: 83 | parser.print_help() 84 | sys.exit(1) 85 | return parser.parse_args() 86 | 87 | def format_dets(boxes): 88 | all_boxes = [] 89 | for i,b in enumerate(boxes): 90 | if len(b)>0: 91 | b=np.array(b) 92 | ii = np.ones((len(b),1))*i-1 93 | b=np.hstack([ii,b]) 94 | all_boxes.append(b) 95 | if len(all_boxes)>0: 96 | all_boxes = np.concatenate(all_boxes) 97 | else: 98 | all_boxes = np.zeros((0, 6)) 99 | return all_boxes 100 | 101 | 102 | def main(args): 103 | logger = logging.getLogger(__name__) 104 | 105 | merge_cfg_from_file(args.cfg) 106 | cfg.NUM_GPUS = 1 107 | args.weights = cache_url(args.weights, cfg.DOWNLOAD_CACHE) 108 | assert_and_infer_cfg(cache_urls=False) 109 | 110 | if os.path.isfile(args.path_to_video+'_detections.npy'): 111 | return 112 | 113 | assert not cfg.MODEL.RPN_ONLY, \ 114 | 'RPN models are not supported' 115 | assert not cfg.TEST.PRECOMPUTED_PROPOSALS, \ 116 | 'Models that require precomputed proposals are not supported' 117 | 118 | model = infer_engine.initialize_model_from_cfg(args.weights) 119 | dummy_coco_dataset = dummy_datasets.get_coco_dataset() 120 | 121 | vid = cv2.VideoCapture(args.path_to_video) 122 | 123 | ret, im = vid.read() 124 | 125 | all_boxes = [] 126 | 127 | while ret: 128 | timers = defaultdict(Timer) 129 | t = time.time() 130 | with c2_utils.NamedCudaScope(0): 131 | cls_boxes, cls_segms, cls_keyps = infer_engine.im_detect_all( 132 | model, im, None, timers=timers 133 | ) 134 | all_boxes.append(format_dets(cls_boxes)) 135 | logger.info('Inference time: {:.3f}s'.format(time.time() - t)) 136 | for k, v in timers.items(): 137 | logger.info(' | {}: {:.3f}s'.format(k, v.average_time)) 138 | ret, im = vid.read() 139 | 140 | np.save(args.path_to_video+'_detections',all_boxes) 141 | 142 | if __name__ == '__main__': 143 | workspace.GlobalInit(['caffe2', '--caffe2_log_level=0']) 144 | setup_logging(__name__) 145 | args = parse_args() 146 | main(args) 147 | -------------------------------------------------------------------------------- /RULSTM/data/ek100/training_videos.csv: -------------------------------------------------------------------------------- 1 | P01_01 2 | P01_02 3 | P01_03 4 | P01_04 5 | P01_05 6 | P01_06 7 | P01_07 8 | P01_08 9 | P01_09 10 | P01_102 11 | P01_103 12 | P01_104 13 | P01_105 14 | P01_106 15 | P01_107 16 | P01_108 17 | P01_109 18 | P01_10 19 | P01_16 20 | P01_17 21 | P01_18 22 | P01_19 23 | P02_01 24 | P02_02 25 | P02_03 26 | P02_04 27 | P02_05 28 | P02_06 29 | P02_07 30 | P02_08 31 | P02_09 32 | P02_101 33 | P02_102 34 | P02_103 35 | P02_104 36 | P02_105 37 | P02_107 38 | P02_108 39 | P02_109 40 | P02_10 41 | P02_110 42 | P02_111 43 | P02_112 44 | P02_113 45 | P02_114 46 | P02_115 47 | P02_116 48 | P02_118 49 | P02_119 50 | P02_11 51 | P02_120 52 | P02_121 53 | P02_122 54 | P02_123 55 | P02_124 56 | P02_126 57 
| P02_127 58 | P02_128 59 | P02_129 60 | P02_130 61 | P02_131 62 | P02_132 63 | P02_133 64 | P02_134 65 | P02_135 66 | P03_02 67 | P03_03 68 | P03_04 69 | P03_05 70 | P03_06 71 | P03_07 72 | P03_08 73 | P03_09 74 | P03_101 75 | P03_102 76 | P03_106 77 | P03_107 78 | P03_108 79 | P03_109 80 | P03_10 81 | P03_110 82 | P03_111 83 | P03_112 84 | P03_113 85 | P03_114 86 | P03_115 87 | P03_116 88 | P03_117 89 | P03_118 90 | P03_119 91 | P03_11 92 | P03_120 93 | P03_121 94 | P03_122 95 | P03_123 96 | P03_12 97 | P03_13 98 | P03_14 99 | P03_15 100 | P03_16 101 | P03_17 102 | P03_18 103 | P03_19 104 | P03_20 105 | P03_27 106 | P03_28 107 | P04_01 108 | P04_02 109 | P04_03 110 | P04_04 111 | P04_05 112 | P04_06 113 | P04_07 114 | P04_08 115 | P04_09 116 | P04_101 117 | P04_102 118 | P04_103 119 | P04_104 120 | P04_106 121 | P04_107 122 | P04_108 123 | P04_109 124 | P04_10 125 | P04_110 126 | P04_111 127 | P04_112 128 | P04_113 129 | P04_114 130 | P04_115 131 | P04_116 132 | P04_117 133 | P04_118 134 | P04_119 135 | P04_11 136 | P04_120 137 | P04_121 138 | P04_12 139 | P04_13 140 | P04_14 141 | P04_15 142 | P04_16 143 | P04_17 144 | P04_18 145 | P04_19 146 | P04_20 147 | P04_21 148 | P04_22 149 | P04_23 150 | P05_01 151 | P05_02 152 | P05_03 153 | P05_04 154 | P05_05 155 | P05_06 156 | P05_08 157 | P06_01 158 | P06_02 159 | P06_03 160 | P06_05 161 | P06_07 162 | P06_08 163 | P06_09 164 | P06_101 165 | P06_102 166 | P06_103 167 | P06_104 168 | P06_105 169 | P06_106 170 | P06_107 171 | P06_108 172 | P06_109 173 | P06_110 174 | P06_113 175 | P07_01 176 | P07_02 177 | P07_03 178 | P07_04 179 | P07_05 180 | P07_06 181 | P07_07 182 | P07_08 183 | P07_09 184 | P07_101 185 | P07_102 186 | P07_103 187 | P07_106 188 | P07_107 189 | P07_10 190 | P07_110 191 | P07_111 192 | P07_112 193 | P07_113 194 | P07_114 195 | P07_115 196 | P07_116 197 | P07_117 198 | P07_11 199 | P08_01 200 | P08_02 201 | P08_03 202 | P08_04 203 | P08_05 204 | P08_06 205 | P08_07 206 | P08_08 207 | P08_11 208 | P08_12 209 | P08_13 210 | P08_18 211 | P08_19 212 | P08_20 213 | P08_21 214 | P08_22 215 | P08_23 216 | P08_24 217 | P08_25 218 | P08_26 219 | P08_27 220 | P08_28 221 | P09_01 222 | P09_02 223 | P09_03 224 | P09_04 225 | P09_05 226 | P09_06 227 | P09_103 228 | P09_104 229 | P09_105 230 | P09_106 231 | P10_01 232 | P10_02 233 | P10_04 234 | P11_01 235 | P11_02 236 | P11_03 237 | P11_04 238 | P11_05 239 | P11_06 240 | P11_07 241 | P11_08 242 | P11_09 243 | P11_101 244 | P11_102 245 | P11_103 246 | P11_104 247 | P11_105 248 | P11_107 249 | P11_109 250 | P11_10 251 | P11_11 252 | P11_12 253 | P11_13 254 | P11_14 255 | P11_15 256 | P11_16 257 | P12_01 258 | P12_02 259 | P12_04 260 | P12_05 261 | P12_06 262 | P12_07 263 | P12_101 264 | P12_103 265 | P12_104 266 | P12_105 267 | P13_04 268 | P13_05 269 | P13_06 270 | P13_07 271 | P13_08 272 | P13_09 273 | P13_10 274 | P14_01 275 | P14_02 276 | P14_03 277 | P14_04 278 | P14_05 279 | P14_07 280 | P14_09 281 | P15_01 282 | P15_02 283 | P15_03 284 | P15_07 285 | P15_08 286 | P15_09 287 | P15_10 288 | P15_11 289 | P15_12 290 | P15_13 291 | P16_01 292 | P16_02 293 | P16_03 294 | P17_01 295 | P17_03 296 | P17_04 297 | P19_01 298 | P19_02 299 | P19_03 300 | P19_04 301 | P20_01 302 | P20_02 303 | P20_03 304 | P20_04 305 | P21_01 306 | P21_03 307 | P21_04 308 | P22_05 309 | P22_06 310 | P22_07 311 | P22_08 312 | P22_09 313 | P22_101 314 | P22_102 315 | P22_103 316 | P22_104 317 | P22_105 318 | P22_106 319 | P22_107 320 | P22_108 321 | P22_109 322 | P22_10 323 | P22_110 324 | P22_111 325 | P22_112 326 
| P22_113 327 | P22_115 328 | P22_116 329 | P22_117 330 | P22_11 331 | P22_12 332 | P22_13 333 | P22_14 334 | P22_15 335 | P22_16 336 | P22_17 337 | P23_01 338 | P23_02 339 | P23_03 340 | P23_04 341 | P23_101 342 | P23_102 343 | P24_01 344 | P24_02 345 | P24_03 346 | P24_04 347 | P24_05 348 | P24_06 349 | P24_07 350 | P24_08 351 | P25_01 352 | P25_02 353 | P25_03 354 | P25_04 355 | P25_05 356 | P25_09 357 | P25_101 358 | P25_102 359 | P25_103 360 | P25_104 361 | P25_106 362 | P25_107 363 | P25_10 364 | P25_11 365 | P25_12 366 | P26_01 367 | P26_02 368 | P26_03 369 | P26_04 370 | P26_05 371 | P26_06 372 | P26_07 373 | P26_08 374 | P26_09 375 | P26_101 376 | P26_102 377 | P26_103 378 | P26_104 379 | P26_105 380 | P26_106 381 | P26_107 382 | P26_108 383 | P26_109 384 | P26_10 385 | P26_110 386 | P26_111 387 | P26_112 388 | P26_113 389 | P26_114 390 | P26_115 391 | P26_116 392 | P26_117 393 | P26_118 394 | P26_119 395 | P26_11 396 | P26_124 397 | P26_12 398 | P26_13 399 | P26_14 400 | P26_15 401 | P26_16 402 | P26_17 403 | P26_18 404 | P26_19 405 | P26_20 406 | P26_21 407 | P26_22 408 | P26_23 409 | P26_24 410 | P26_25 411 | P26_26 412 | P26_27 413 | P26_28 414 | P26_29 415 | P27_01 416 | P27_02 417 | P27_03 418 | P27_04 419 | P27_06 420 | P27_07 421 | P27_101 422 | P27_103 423 | P27_104 424 | P27_105 425 | P28_01 426 | P28_02 427 | P28_03 428 | P28_04 429 | P28_05 430 | P28_06 431 | P28_07 432 | P28_08 433 | P28_09 434 | P28_101 435 | P28_102 436 | P28_103 437 | P28_104 438 | P28_105 439 | P28_106 440 | P28_107 441 | P28_108 442 | P28_109 443 | P28_10 444 | P28_110 445 | P28_111 446 | P28_112 447 | P28_113 448 | P28_11 449 | P28_12 450 | P28_13 451 | P28_14 452 | P29_01 453 | P29_02 454 | P29_03 455 | P29_04 456 | P30_01 457 | P30_02 458 | P30_03 459 | P30_04 460 | P30_05 461 | P30_06 462 | P30_101 463 | P30_103 464 | P30_104 465 | P30_107 466 | P30_108 467 | P30_109 468 | P30_10 469 | P30_110 470 | P30_111 471 | P30_112 472 | P30_113 473 | P30_114 474 | P30_11 475 | P31_01 476 | P31_02 477 | P31_03 478 | P31_04 479 | P31_05 480 | P31_06 481 | P31_07 482 | P31_08 483 | P31_09 484 | P31_13 485 | P31_14 486 | P35_101 487 | P35_103 488 | P35_104 489 | P35_105 490 | P35_107 491 | P35_108 492 | P35_109 493 | P37_101 494 | P37_102 495 | P37_103 496 | -------------------------------------------------------------------------------- /RULSTM/utils.py: -------------------------------------------------------------------------------- 1 | """ Set of utilities """ 2 | import numpy as np 3 | 4 | 5 | class MeanTopKRecallMeter(object): 6 | def __init__(self, num_classes, k=5): 7 | self.num_classes = num_classes 8 | self.k = k 9 | self.reset() 10 | 11 | def reset(self): 12 | self.tps = np.zeros(self.num_classes) 13 | self.nums = np.zeros(self.num_classes) 14 | 15 | def add(self, scores, labels): 16 | tp = (np.argsort(scores, axis=1)[:, -self.k:] == labels.reshape(-1, 1)).max(1) 17 | for l in np.unique(labels): 18 | self.tps[l]+=tp[labels==l].sum() 19 | self.nums[l]+=(labels==l).sum() 20 | 21 | def value(self): 22 | recalls = (self.tps/self.nums)[self.nums>0] 23 | if len(recalls)>0: 24 | return recalls.mean()*100 25 | else: 26 | return None 27 | class ValueMeter(object): 28 | def __init__(self): 29 | self.sum = 0 30 | self.total = 0 31 | 32 | def add(self, value, n): 33 | self.sum += value*n 34 | self.total += n 35 | 36 | def value(self): 37 | return self.sum/self.total 38 | 39 | 40 | class ArrayValueMeter(object): 41 | def __init__(self, dim=1): 42 | self.sum = np.zeros(dim) 43 | self.total = 0 44 | 45 | def 
add(self, arr, n): 46 | self.sum += arr*n 47 | self.total += n 48 | 49 | def value(self): 50 | val = self.sum/self.total 51 | if len(val) == 1: 52 | return val[0] 53 | else: 54 | return val 55 | 56 | 57 | def topk_accuracy(scores, labels, ks, selected_class=None): 58 | """Computes TOP-K accuracies for different values of k 59 | Args: 60 | rankings: numpy ndarray, shape = (instance_count, label_count) 61 | labels: numpy ndarray, shape = (instance_count,) 62 | ks: tuple of integers 63 | 64 | Returns: 65 | list of float: TOP-K accuracy for each k in ks 66 | """ 67 | if selected_class is not None: 68 | idx = labels == selected_class 69 | scores = scores[idx] 70 | labels = labels[idx] 71 | rankings = scores.argsort()[:, ::-1] 72 | # trim to max k to avoid extra computation 73 | maxk = np.max(ks) 74 | 75 | # compute true positives in the top-maxk predictions 76 | tp = rankings[:, :maxk] == labels.reshape(-1, 1) 77 | 78 | # trim to selected ks and compute accuracies 79 | return [tp[:, :k].max(1).mean() for k in ks] 80 | 81 | 82 | def topk_accuracy_multiple_timesteps(preds, labels, ks=(1, 5)): 83 | accs = np.array(list( 84 | zip(*[topk_accuracy(preds[:, t, :], labels, ks) for t in range(preds.shape[1])]))) 85 | return accs 86 | 87 | 88 | def get_marginal_indexes(actions, mode): 89 | """For each verb/noun retrieve the list of actions containing that verb/name 90 | Input: 91 | mode: "verb" or "noun" 92 | Output: 93 | a list of numpy array of indexes. If verb/noun 3 is contained in actions 2,8,19, 94 | then output[3] will be np.array([2,8,19]) 95 | """ 96 | vi = [] 97 | for v in range(actions[mode].max()+1): 98 | vals = actions[actions[mode] == v].index.values 99 | if len(vals) > 0: 100 | vi.append(vals) 101 | else: 102 | vi.append(np.array([0])) 103 | return vi 104 | 105 | 106 | def marginalize(probs, indexes): 107 | mprobs = [] 108 | for ilist in indexes: 109 | mprobs.append(probs[:, ilist].sum(1)) 110 | return np.array(mprobs).T 111 | 112 | 113 | def softmax(x): 114 | """Compute softmax values for each sets of scores in x.""" 115 | xx = x 116 | x = x.reshape((-1, x.shape[-1])) 117 | e_x = np.exp(x - np.max(x, 1).reshape(-1, 1)) 118 | res = e_x / e_x.sum(axis=1).reshape(-1, 1) 119 | return res.reshape(xx.shape) 120 | 121 | 122 | def topk_recall(scores, labels, k=5, classes=None): 123 | unique = np.unique(labels) 124 | if classes is None: 125 | classes = unique 126 | else: 127 | classes = np.intersect1d(classes, unique) 128 | recalls = 0 129 | #np.zeros((scores.shape[0], scores.shape[1])) 130 | for c in classes: 131 | recalls += topk_accuracy(scores, labels, ks=(k,), selected_class=c)[0] 132 | return recalls/len(classes) 133 | 134 | 135 | def topk_recall_multiple_timesteps(preds, labels, k=5, classes=None): 136 | accs = np.array([topk_recall(preds[:, t, :], labels, k, classes) 137 | for t in range(preds.shape[1])]) 138 | return accs.reshape(1, -1) 139 | 140 | 141 | def tta(scores, labels): 142 | """Implementation of time to action curve""" 143 | rankings = scores.argsort()[..., ::-1] 144 | comparisons = rankings == labels.reshape(rankings.shape[0], 1, 1) 145 | cum_comparisons = np.cumsum(comparisons, 2) 146 | cum_comparisons = np.concatenate([cum_comparisons, np.ones( 147 | (cum_comparisons.shape[0], 1, cum_comparisons.shape[2]))], 1) 148 | time_stamps = np.array([2.0, 1.75, 1.5, 1.25, 1.0, 0.75, 0.5, 0.25, 0]) 149 | return np.nanmean(time_stamps[np.argmax(cum_comparisons, 1)], 0)[4] 150 | 151 | 152 | def predictions_to_json(verb_scores, noun_scores, action_scores, action_ids, a_to_vn, 
top_actions=100, version='0.1', sls=None): 153 | """Save verb, noun and action predictions to json for submitting them to the EPIC-Kitchens leaderboard""" 154 | predictions = {'version': version, 155 | 'challenge': 'action_anticipation', 'results': {}} 156 | 157 | if sls is not None: 158 | predictions['sls_pt'] = 1 159 | predictions['sls_tl'] = 4 160 | predictions['sls_td'] = 3 161 | 162 | 163 | row_idxs = np.argsort(action_scores)[:, ::-1] 164 | top_100_idxs = row_idxs[:, :top_actions] 165 | 166 | action_scores = action_scores[np.arange( 167 | len(action_scores)).reshape(-1, 1), top_100_idxs] 168 | 169 | for i, v, n, a, ai in zip(action_ids, verb_scores, noun_scores, action_scores, top_100_idxs): 170 | predictions['results'][str(i)] = {} 171 | predictions['results'][str(i)]['verb'] = {str( 172 | ii): float(vv) for ii, vv in enumerate(v)} 173 | predictions['results'][str(i)]['noun'] = {str( 174 | ii): float(nn) for ii, nn in enumerate(n)} 175 | predictions['results'][str(i)]['action'] = { 176 | "%d,%d" % a_to_vn[ii]: float(aa) for ii, aa in zip(ai, a)} 177 | return predictions 178 | -------------------------------------------------------------------------------- /RULSTM/models.py: -------------------------------------------------------------------------------- 1 | from torch import nn 2 | import torch 3 | from torch.nn.init import normal, constant 4 | import numpy as np 5 | from torch.nn import functional as F 6 | 7 | class OpenLSTM(nn.Module): 8 | """An LSTM implementation that returns the intermediate hidden and cell states. 9 | The native PyTorch implementation only returns the last cell vector. 10 | For RULSTM, we want all cell vectors computed at intermediate steps""" 11 | def __init__(self, feat_in, feat_out, num_layers=1, dropout=0): 12 | """ 13 | feat_in: input feature size 14 | feat_out: output feature size 15 | num_layers: number of layers 16 | dropout: dropout probability 17 | """ 18 | super(OpenLSTM, self).__init__() 19 | 20 | # simply create an LSTM with the given parameters 21 | self.lstm = nn.LSTM(feat_in, feat_out, num_layers=num_layers, dropout=dropout) 22 | 23 | def forward(self, seq): 24 | # manually iterate over each input to save the individual cell vectors 25 | last_cell=None 26 | last_hid=None 27 | hid = [] 28 | cell = [] 29 | for i in range(seq.shape[0]): 30 | el = seq[i,...].unsqueeze(0) 31 | if last_cell is not None: 32 | _, (last_hid, last_cell) = self.lstm(el, (last_hid,last_cell)) 33 | else: 34 | _, (last_hid, last_cell) = self.lstm(el) 35 | hid.append(last_hid) 36 | cell.append(last_cell) 37 | 38 | return torch.stack(hid, 0), torch.stack(cell, 0) 39 | 40 | class RULSTM(nn.Module): 41 | def __init__(self, num_class, feat_in, hidden, dropout=0.8, depth=1, 42 | sequence_completion=False, return_context=False): 43 | """ 44 | num_class: number of classes 45 | feat_in: number of input features 46 | hidden: number of hidden units 47 | dropout: dropout probability 48 | depth: number of LSTM layers 49 | sequence_completion: if the network should be arranged for sequence completion pre-training 50 | return_context: whether to return the Rolling LSTM hidden and cell state (useful for MATT) during forward 51 | """ 52 | super(RULSTM, self).__init__() 53 | self.feat_in = feat_in 54 | self.dropout = nn.Dropout(dropout) 55 | self.hidden=hidden 56 | self.rolling_lstm = OpenLSTM(feat_in, hidden, num_layers=depth, dropout=dropout if depth>1 else 0) 57 | self.unrolling_lstm = nn.LSTM(feat_in, hidden, num_layers=depth, dropout=dropout if depth>1 else 0) 58 | self.classifier
= nn.Sequential(nn.Dropout(dropout), nn.Linear(hidden, num_class)) 59 | self.sequence_completion = sequence_completion 60 | self.return_context = return_context 61 | 62 | def forward(self, inputs): 63 | # permute the inputs for compatibility with the LSTM 64 | inputs=inputs.permute(1,0,2) 65 | 66 | # pass the frames through the rolling LSTM 67 | # and get the hidden (x) and cell (c) states at each time-step 68 | x, c = self.rolling_lstm(self.dropout(inputs)) 69 | x = x.contiguous() # batchsize x timesteps x hidden 70 | c = c.contiguous() # batchsize x timesteps x hidden 71 | 72 | # accumulate the predictions in a list 73 | predictions = [] # accumulate the predictions in a list 74 | 75 | # for each time-step 76 | for t in range(x.shape[0]): 77 | # get the hidden and cell states at current time-step 78 | hid = x[t,...] 79 | cel = c[t,...] 80 | 81 | if self.sequence_completion: 82 | # take current + future inputs (looks into the future) 83 | ins = inputs[t:,...] 84 | else: 85 | # replicate the current input for the correct number of times (time-steps remaining to the beginning of the action) 86 | ins = inputs[t,...].unsqueeze(0).expand(inputs.shape[0]-t+1,inputs.shape[1],inputs.shape[2]).to(inputs.device) 87 | 88 | # initialize the LSTM and iterate over the inputs 89 | h_t, (_,_) = self.unrolling_lstm(self.dropout(ins), (hid.contiguous(), cel.contiguous())) 90 | # get last hidden state 91 | h_n = h_t[-1,...] 92 | 93 | # append the last hidden state to the list 94 | predictions.append(h_n) 95 | 96 | # obtain the final prediction tensor by concatenating along dimension 1 97 | x = torch.stack(predictions,1) 98 | 99 | # apply the classifier to each output feature vector (independently) 100 | y = self.classifier(x.view(-1,x.size(2))).view(x.size(0), x.size(1), -1) 101 | 102 | if self.return_context: 103 | # return y and the concatenation of hidden and cell states 104 | c=c.squeeze().permute(1,0,2) 105 | return y, torch.cat([x, c],2) 106 | else: 107 | return y 108 | 109 | class RULSTMFusion(nn.Module): 110 | def __init__(self, branches, hidden, dropout=0.8): 111 | """ 112 | branches: list of pre-trained branches. 
Each branch should have the "return_context" property set to True 113 | hidden: size of hidden vectors of the branches 114 | dropout: dropout probability 115 | """ 116 | super(RULSTMFusion, self).__init__() 117 | self.branches = nn.ModuleList(branches) 118 | 119 | # input size for the MATT network 120 | # given by 2 (hidden and cell state) * num_branches * hidden_size 121 | in_size = 2*len(self.branches)*hidden 122 | 123 | # MATT network: an MLP with 3 layers 124 | self.MATT = nn.Sequential(nn.Linear(in_size,int(in_size/4)), 125 | nn.ReLU(), 126 | nn.Dropout(dropout), 127 | nn.Linear(int(in_size/4), int(in_size/8)), 128 | nn.ReLU(), 129 | nn.Dropout(dropout), 130 | nn.Linear(int(in_size/8), len(self.branches))) 131 | 132 | 133 | def forward(self, inputs): 134 | """inputs: tuple containing the inputs to the individual branches""" 135 | scores, contexts = [], [] 136 | 137 | # for each branch 138 | for i in range(len(inputs)): 139 | # feed the inputs to the LSTM and get the scores and context vectors 140 | s, c = self.branches[i](inputs[i]) 141 | scores.append(s) 142 | contexts.append(c) 143 | 144 | context = torch.cat(contexts, 2) 145 | context = context.view(-1, context.shape[-1]) 146 | 147 | # Apply the MATT network to the context vectors 148 | # and normalize the outputs using softmax 149 | a = F.softmax(self.MATT(context),1) 150 | 151 | # array to contain the fused scores 152 | sc = torch.zeros_like(scores[0]) 153 | 154 | # fuse all scores multiplying by the weights 155 | for i in range(len(inputs)): 156 | s = (scores[i].view(-1,scores[i].shape[-1])*a[:,i].unsqueeze(1)).view(sc.shape) 157 | sc += s 158 | 159 | # return the fused scores 160 | return sc 161 | -------------------------------------------------------------------------------- /RULSTM/dataset.py: -------------------------------------------------------------------------------- 1 | """ Implements a dataset object which allows to read representations from LMDB datasets in a multi-modal fashion 2 | The dataset can sample frames for both the anticipation and early recognition tasks.""" 3 | 4 | import numpy as np 5 | import lmdb 6 | from tqdm import tqdm 7 | from torch.utils import data 8 | import pandas as pd 9 | 10 | def read_representations(frames, env, tran=None): 11 | """ Reads a set of representations, given their frame names and an LMDB environment. 12 | Applies a transformation to the features if provided""" 13 | features = [] 14 | # for each frame 15 | for f in frames: 16 | # read the current frame 17 | with env.begin() as e: 18 | dd = e.get(f.strip().encode('utf-8')) 19 | if dd is None: 20 | print(f) 21 | # convert to numpy array 22 | data = np.frombuffer(dd, 'float32') 23 | # append to list 24 | features.append(data) 25 | # convert list to numpy array 26 | features=np.array(features) 27 | # apply transform if provided 28 | if tran: 29 | features=tran(features) 30 | return features 31 | 32 | def read_data(frames, env, tran=None): 33 | """A wrapper around read_representations to handle loading from multiple environments. 34 | This is used for multimodal data loading (e.g., RGB + Flow)""" 35 | # if env is a list 36 | if isinstance(env, list): 37 | # read the representations from all environments 38 | l = [read_representations(frames, e, tran) for e in env] 39 | return l 40 | else: 41 | # otherwise, just read the representations 42 | return read_representations(frames, env, tran) 43 | 44 | class SequenceDataset(data.Dataset): 45 | def __init__(self, path_to_lmdb, path_to_csv, label_type = 'action', 46 | time_step = 0.25, sequence_length = 14, fps = 30, 47 | img_tmpl = "frame_{:010d}.jpg", 48 | transform = None, 49 | challenge = False, 50 | past_features = True, 51 | action_samples = None): 52 | """ 53 | Inputs: 54 | path_to_lmdb: path to the folder containing the LMDB dataset 55 | path_to_csv: path to training/validation csv 56 | label_type: which label to return (verb, noun, or action) 57 | time_step: in seconds 58 | sequence_length: in time steps 59 | fps: framerate 60 | img_tmpl: image template to load the features 61 | transform: transformation to apply to each sample 62 | challenge: allows loading csvs containing only time-stamps for the challenge 63 | past_features: if past features should be returned 64 | action_samples: number of frames to be evenly sampled from each action 65 | """ 66 | 67 | # read the csv file 68 | if challenge: 69 | self.annotations = pd.read_csv(path_to_csv, header=None, names=['video','start','end']) 70 | else: 71 | self.annotations = pd.read_csv(path_to_csv, header=None, names=['video','start','end','verb','noun','action']) 72 | 73 | 74 | self.challenge=challenge 75 | self.path_to_lmdb = path_to_lmdb 76 | self.time_step = time_step 77 | self.past_features = past_features 78 | self.action_samples = action_samples 79 | self.fps=fps 80 | self.transform = transform 81 | self.label_type = label_type 82 | self.sequence_length = sequence_length 83 | self.img_tmpl = img_tmpl 84 | self.action_samples = action_samples 85 | 86 | # initialize some lists 87 | self.ids = [] # action ids 88 | self.discarded_ids = [] # list of ids discarded (e.g., if there were not enough frames before the beginning of the action) 89 | self.discarded_labels = [] # list of labels discarded (e.g., if there were not enough frames before the beginning of the action) 90 | self.past_frames = [] # names of frames sampled before each action 91 | self.action_frames = [] # names of frames sampled from each action 92 | self.labels = [] # labels of each action 93 | 94 | # populate them 95 | self.__populate_lists() 96 | 97 | # if a list of datasets has been provided, load all of them 98 | if isinstance(self.path_to_lmdb, list): 99 | self.env = [lmdb.open(l, readonly=True, lock=False) for l in self.path_to_lmdb] 100 | else: 101 | # otherwise, just load the single LMDB dataset 102 | self.env = lmdb.open(self.path_to_lmdb, readonly=True, lock=False) 103 | 104 | def __get_frames(self, frames, video): 105 | """ format file names using the image template """ 106 | frames = np.array(list(map(lambda x: video+"_"+self.img_tmpl.format(x), frames))) 107 | return frames 108 | 109 | def __populate_lists(self): 110 | """ Samples a sequence for each action and populates the lists. 
""" 111 | for _, a in tqdm(self.annotations.iterrows(), 'Populating Dataset', total = len(self.annotations)): 112 | 113 | # sample frames before the beginning of the action 114 | frames = self.__sample_frames_past(a.start) 115 | 116 | if self.action_samples: 117 | # sample frames from the action 118 | # to sample n frames, we first sample n+1 frames with linspace, then discard the first one 119 | action_frames = np.linspace(a.start, a.end, self.action_samples+1, dtype=int)[1:] 120 | 121 | # check if there were enough frames before the beginning of the action 122 | if frames.min()>=1: #if the smaller frame is at least 1, the sequence is valid 123 | self.past_frames.append(self.__get_frames(frames, a.video)) 124 | self.ids.append(a.name) 125 | # handle whether a list of labels is required (e.g., [verb, noun]), rather than a single action 126 | if isinstance(self.label_type, list): 127 | if self.challenge: # if sampling for the challenge, there are no labels, just add -1 128 | self.labels.append(-1) 129 | else: 130 | # otherwise get the required labels 131 | self.labels.append(a[self.label_type].values.astype(int)) 132 | else: #single label version 133 | if self.challenge: 134 | self.labels.append(-1) 135 | else: 136 | self.labels.append(a[self.label_type]) 137 | if self.action_samples: 138 | self.action_frames.append(self.__get_frames(action_frames, a.video)) 139 | else: 140 | #if the sequence is invalid, do nothing, but add the id to the discarded_ids list 141 | self.discarded_ids.append(a.name) 142 | if isinstance(self.label_type, list): 143 | if self.challenge: # if sampling for the challenge, there are no labels, just add -1 144 | self.discarded_labels.append(-1) 145 | else: 146 | # otherwise get the required labels 147 | self.discarded_labels.append(a[self.label_type].values.astype(int)) 148 | else: #single label version 149 | if self.challenge: 150 | self.discarded_labels.append(-1) 151 | else: 152 | self.discarded_labels.append(a[self.label_type]) 153 | 154 | def __sample_frames_past(self, point): 155 | """Samples frames before the beginning of the action "point" """ 156 | # generate the relative timestamps, depending on the requested sequence_length 157 | # e.g., 2. , 1.75, 1.5 , 1.25, 1. , 0.75, 0.5 , 0.25 158 | # in this case "2" means, sample 2s before the beginning of the action 159 | time_stamps = np.arange(self.time_step,self.time_step*(self.sequence_length+1),self.time_step)[::-1] 160 | 161 | # compute the time stamp corresponding to the beginning of the action 162 | end_time_stamp = point/self.fps 163 | 164 | # subtract time stamps to the timestamp of the last frame 165 | time_stamps = end_time_stamp-time_stamps 166 | 167 | # convert timestamps to frames 168 | # use floor to be sure to consider the last frame before the timestamp (important for anticipation!) 
169 | # and never sample any frame after that time stamp 170 | frames = np.floor(time_stamps*self.fps).astype(int) 171 | 172 | # sometimes there are not enough frames before the beginning of the action 173 | # in this case, we just pad the sequence with the first frame 174 | # this is done by replacing all frames smaller than 1 175 | # with the first frame of the sequence 176 | if frames.max()>=1: 177 | frames[frames<1]=frames[frames>=1].min() 178 | 179 | return frames 180 | 181 | def __len__(self): 182 | return len(self.ids) 183 | 184 | def __getitem__(self, index): 185 | """ sample a given sequence """ 186 | # get past frames 187 | past_frames = self.past_frames[index] 188 | 189 | if self.action_samples: 190 | # get action frames 191 | action_frames = self.action_frames[index] 192 | 193 | # return a dictionary containing the id of the current sequence 194 | # this is useful to produce the jsons for the challenge 195 | out = {'id':self.ids[index]} 196 | 197 | if self.past_features: 198 | # read representations for past frames 199 | out['past_features'] = read_data(past_frames, self.env, self.transform) 200 | 201 | # get the label of the current sequence 202 | label = self.labels[index] 203 | out['label'] = label 204 | 205 | if self.action_samples: 206 | # read representations for the action samples 207 | out['action_features'] = read_data(action_frames, self.env, self.transform) 208 | 209 | return out 210 | 211 | -------------------------------------------------------------------------------- /RULSTM/data/ek100/validation_unseen_participants_ids.csv: -------------------------------------------------------------------------------- 1 | P18_01_0 2 | P18_01_1 3 | P18_01_10 4 | P18_01_11 5 | P18_01_12 6 | P18_01_13 7 | P18_01_14 8 | P18_01_15 9 | P18_01_16 10 | P18_01_17 11 | P18_01_18 12 | P18_01_19 13 | P18_01_2 14 | P18_01_20 15 | P18_01_21 16 | P18_01_22 17 | P18_01_23 18 | P18_01_24 19 | P18_01_25 20 | P18_01_26 21 | P18_01_27 22 | P18_01_28 23 | P18_01_29 24 | P18_01_3 25 | P18_01_30 26 | P18_01_31 27 | P18_01_32 28 | P18_01_33 29 | P18_01_34 30 | P18_01_35 31 | P18_01_36 32 | P18_01_37 33 | P18_01_38 34 | P18_01_39 35 | P18_01_4 36 | P18_01_40 37 | P18_01_41 38 | P18_01_42 39 | P18_01_5 40 | P18_01_6 41 | P18_01_7 42 | P18_01_8 43 | P18_01_9 44 | P18_02_0 45 | P18_02_1 46 | P18_02_10 47 | P18_02_11 48 | P18_02_12 49 | P18_02_13 50 | P18_02_14 51 | P18_02_15 52 | P18_02_16 53 | P18_02_17 54 | P18_02_18 55 | P18_02_2 56 | P18_02_20 57 | P18_02_21 58 | P18_02_22 59 | P18_02_23 60 | P18_02_25 61 | P18_02_26 62 | P18_02_27 63 | P18_02_28 64 | P18_02_3 65 | P18_02_4 66 | P18_02_5 67 | P18_02_6 68 | P18_02_7 69 | P18_02_8 70 | P18_02_9 71 | P18_03_0 72 | P18_03_1 73 | P18_03_10 74 | P18_03_100 75 | P18_03_101 76 | P18_03_102 77 | P18_03_103 78 | P18_03_104 79 | P18_03_105 80 | P18_03_106 81 | P18_03_107 82 | P18_03_108 83 | P18_03_109 84 | P18_03_11 85 | P18_03_110 86 | P18_03_111 87 | P18_03_112 88 | P18_03_12 89 | P18_03_13 90 | P18_03_14 91 | P18_03_15 92 | P18_03_16 93 | P18_03_17 94 | P18_03_18 95 | P18_03_19 96 | P18_03_2 97 | P18_03_20 98 | P18_03_21 99 | P18_03_22 100 | P18_03_23 101 | P18_03_24 102 | P18_03_25 103 | P18_03_26 104 | P18_03_27 105 | P18_03_28 106 | P18_03_29 107 | P18_03_3 108 | P18_03_30 109 | P18_03_31 110 | P18_03_32 111 | P18_03_33 112 | P18_03_34 113 | P18_03_35 114 | P18_03_36 115 | P18_03_37 116 | P18_03_38 117 | P18_03_39 118 | P18_03_4 119 | P18_03_40 120 | P18_03_41 121 | P18_03_42 122 | P18_03_43 123 | P18_03_44 124 | P18_03_45 125 | P18_03_46 126 | P18_03_47 
127 | P18_03_48 128 | P18_03_49 129 | P18_03_5 130 | P18_03_50 131 | P18_03_51 132 | P18_03_52 133 | P18_03_53 134 | P18_03_54 135 | P18_03_55 136 | P18_03_56 137 | P18_03_57 138 | P18_03_58 139 | P18_03_59 140 | P18_03_6 141 | P18_03_60 142 | P18_03_61 143 | P18_03_62 144 | P18_03_63 145 | P18_03_64 146 | P18_03_65 147 | P18_03_66 148 | P18_03_67 149 | P18_03_68 150 | P18_03_69 151 | P18_03_7 152 | P18_03_70 153 | P18_03_71 154 | P18_03_72 155 | P18_03_73 156 | P18_03_74 157 | P18_03_75 158 | P18_03_76 159 | P18_03_77 160 | P18_03_78 161 | P18_03_79 162 | P18_03_8 163 | P18_03_80 164 | P18_03_81 165 | P18_03_82 166 | P18_03_83 167 | P18_03_84 168 | P18_03_85 169 | P18_03_86 170 | P18_03_87 171 | P18_03_88 172 | P18_03_89 173 | P18_03_9 174 | P18_03_90 175 | P18_03_91 176 | P18_03_92 177 | P18_03_93 178 | P18_03_94 179 | P18_03_95 180 | P18_03_96 181 | P18_03_97 182 | P18_03_98 183 | P18_03_99 184 | P18_04_0 185 | P18_04_1 186 | P18_04_10 187 | P18_04_11 188 | P18_04_12 189 | P18_04_13 190 | P18_04_14 191 | P18_04_15 192 | P18_04_16 193 | P18_04_17 194 | P18_04_18 195 | P18_04_19 196 | P18_04_2 197 | P18_04_20 198 | P18_04_21 199 | P18_04_22 200 | P18_04_23 201 | P18_04_24 202 | P18_04_25 203 | P18_04_26 204 | P18_04_27 205 | P18_04_28 206 | P18_04_29 207 | P18_04_3 208 | P18_04_30 209 | P18_04_31 210 | P18_04_32 211 | P18_04_33 212 | P18_04_34 213 | P18_04_35 214 | P18_04_36 215 | P18_04_37 216 | P18_04_4 217 | P18_04_5 218 | P18_04_6 219 | P18_04_7 220 | P18_04_8 221 | P18_04_9 222 | P18_05_0 223 | P18_05_1 224 | P18_05_10 225 | P18_05_100 226 | P18_05_101 227 | P18_05_102 228 | P18_05_103 229 | P18_05_104 230 | P18_05_105 231 | P18_05_106 232 | P18_05_107 233 | P18_05_108 234 | P18_05_109 235 | P18_05_11 236 | P18_05_110 237 | P18_05_111 238 | P18_05_112 239 | P18_05_113 240 | P18_05_114 241 | P18_05_115 242 | P18_05_116 243 | P18_05_117 244 | P18_05_118 245 | P18_05_119 246 | P18_05_12 247 | P18_05_120 248 | P18_05_121 249 | P18_05_122 250 | P18_05_123 251 | P18_05_124 252 | P18_05_125 253 | P18_05_126 254 | P18_05_127 255 | P18_05_128 256 | P18_05_129 257 | P18_05_13 258 | P18_05_130 259 | P18_05_131 260 | P18_05_132 261 | P18_05_133 262 | P18_05_134 263 | P18_05_14 264 | P18_05_15 265 | P18_05_16 266 | P18_05_17 267 | P18_05_18 268 | P18_05_19 269 | P18_05_2 270 | P18_05_20 271 | P18_05_21 272 | P18_05_22 273 | P18_05_23 274 | P18_05_24 275 | P18_05_25 276 | P18_05_26 277 | P18_05_27 278 | P18_05_28 279 | P18_05_29 280 | P18_05_3 281 | P18_05_30 282 | P18_05_31 283 | P18_05_32 284 | P18_05_33 285 | P18_05_34 286 | P18_05_35 287 | P18_05_36 288 | P18_05_37 289 | P18_05_38 290 | P18_05_39 291 | P18_05_4 292 | P18_05_40 293 | P18_05_41 294 | P18_05_42 295 | P18_05_43 296 | P18_05_44 297 | P18_05_45 298 | P18_05_46 299 | P18_05_47 300 | P18_05_48 301 | P18_05_49 302 | P18_05_5 303 | P18_05_50 304 | P18_05_51 305 | P18_05_52 306 | P18_05_53 307 | P18_05_54 308 | P18_05_55 309 | P18_05_56 310 | P18_05_57 311 | P18_05_58 312 | P18_05_59 313 | P18_05_6 314 | P18_05_60 315 | P18_05_61 316 | P18_05_62 317 | P18_05_63 318 | P18_05_64 319 | P18_05_65 320 | P18_05_66 321 | P18_05_67 322 | P18_05_68 323 | P18_05_69 324 | P18_05_7 325 | P18_05_70 326 | P18_05_71 327 | P18_05_72 328 | P18_05_73 329 | P18_05_74 330 | P18_05_75 331 | P18_05_76 332 | P18_05_77 333 | P18_05_78 334 | P18_05_79 335 | P18_05_8 336 | P18_05_80 337 | P18_05_81 338 | P18_05_82 339 | P18_05_83 340 | P18_05_84 341 | P18_05_85 342 | P18_05_86 343 | P18_05_87 344 | P18_05_88 345 | P18_05_89 346 | P18_05_9 347 | P18_05_90 348 | 
P18_05_91 349 | P18_05_92 350 | P18_05_93 351 | P18_05_94 352 | P18_05_95 353 | P18_05_96 354 | P18_05_97 355 | P18_05_98 356 | P18_05_99 357 | P18_06_0 358 | P18_06_1 359 | P18_06_10 360 | P18_06_11 361 | P18_06_12 362 | P18_06_13 363 | P18_06_14 364 | P18_06_15 365 | P18_06_16 366 | P18_06_17 367 | P18_06_18 368 | P18_06_19 369 | P18_06_2 370 | P18_06_20 371 | P18_06_21 372 | P18_06_22 373 | P18_06_23 374 | P18_06_24 375 | P18_06_25 376 | P18_06_26 377 | P18_06_27 378 | P18_06_28 379 | P18_06_29 380 | P18_06_3 381 | P18_06_30 382 | P18_06_31 383 | P18_06_32 384 | P18_06_33 385 | P18_06_34 386 | P18_06_35 387 | P18_06_36 388 | P18_06_37 389 | P18_06_38 390 | P18_06_39 391 | P18_06_4 392 | P18_06_40 393 | P18_06_41 394 | P18_06_42 395 | P18_06_43 396 | P18_06_44 397 | P18_06_45 398 | P18_06_46 399 | P18_06_47 400 | P18_06_48 401 | P18_06_49 402 | P18_06_5 403 | P18_06_50 404 | P18_06_51 405 | P18_06_52 406 | P18_06_53 407 | P18_06_54 408 | P18_06_55 409 | P18_06_56 410 | P18_06_57 411 | P18_06_58 412 | P18_06_59 413 | P18_06_6 414 | P18_06_60 415 | P18_06_61 416 | P18_06_62 417 | P18_06_63 418 | P18_06_64 419 | P18_06_65 420 | P18_06_66 421 | P18_06_67 422 | P18_06_68 423 | P18_06_69 424 | P18_06_7 425 | P18_06_70 426 | P18_06_71 427 | P18_06_72 428 | P18_06_73 429 | P18_06_74 430 | P18_06_75 431 | P18_06_76 432 | P18_06_77 433 | P18_06_78 434 | P18_06_79 435 | P18_06_8 436 | P18_06_80 437 | P18_06_81 438 | P18_06_82 439 | P18_06_83 440 | P18_06_84 441 | P18_06_85 442 | P18_06_86 443 | P18_06_87 444 | P18_06_88 445 | P18_06_89 446 | P18_06_9 447 | P18_06_90 448 | P18_07_0 449 | P18_07_1 450 | P18_07_10 451 | P18_07_11 452 | P18_07_12 453 | P18_07_13 454 | P18_07_14 455 | P18_07_15 456 | P18_07_16 457 | P18_07_17 458 | P18_07_18 459 | P18_07_19 460 | P18_07_2 461 | P18_07_20 462 | P18_07_21 463 | P18_07_22 464 | P18_07_23 465 | P18_07_24 466 | P18_07_25 467 | P18_07_26 468 | P18_07_27 469 | P18_07_28 470 | P18_07_29 471 | P18_07_3 472 | P18_07_30 473 | P18_07_31 474 | P18_07_32 475 | P18_07_33 476 | P18_07_34 477 | P18_07_35 478 | P18_07_36 479 | P18_07_37 480 | P18_07_38 481 | P18_07_39 482 | P18_07_4 483 | P18_07_40 484 | P18_07_41 485 | P18_07_42 486 | P18_07_43 487 | P18_07_44 488 | P18_07_45 489 | P18_07_46 490 | P18_07_47 491 | P18_07_48 492 | P18_07_49 493 | P18_07_5 494 | P18_07_50 495 | P18_07_51 496 | P18_07_52 497 | P18_07_6 498 | P18_07_7 499 | P18_07_8 500 | P18_07_9 501 | P18_08_0 502 | P18_08_1 503 | P18_08_10 504 | P18_08_11 505 | P18_08_12 506 | P18_08_13 507 | P18_08_14 508 | P18_08_15 509 | P18_08_16 510 | P18_08_17 511 | P18_08_18 512 | P18_08_2 513 | P18_08_3 514 | P18_08_4 515 | P18_08_5 516 | P18_08_6 517 | P18_08_7 518 | P18_08_8 519 | P18_08_9 520 | P18_09_0 521 | P18_09_1 522 | P18_09_10 523 | P18_09_11 524 | P18_09_12 525 | P18_09_13 526 | P18_09_14 527 | P18_09_15 528 | P18_09_16 529 | P18_09_17 530 | P18_09_18 531 | P18_09_19 532 | P18_09_2 533 | P18_09_20 534 | P18_09_21 535 | P18_09_22 536 | P18_09_23 537 | P18_09_24 538 | P18_09_25 539 | P18_09_26 540 | P18_09_27 541 | P18_09_28 542 | P18_09_29 543 | P18_09_3 544 | P18_09_30 545 | P18_09_31 546 | P18_09_32 547 | P18_09_33 548 | P18_09_34 549 | P18_09_35 550 | P18_09_36 551 | P18_09_37 552 | P18_09_38 553 | P18_09_39 554 | P18_09_4 555 | P18_09_40 556 | P18_09_41 557 | P18_09_5 558 | P18_09_6 559 | P18_09_7 560 | P18_09_8 561 | P18_09_9 562 | P18_10_0 563 | P18_10_1 564 | P18_10_10 565 | P18_10_11 566 | P18_10_12 567 | P18_10_13 568 | P18_10_14 569 | P18_10_15 570 | P18_10_16 571 | P18_10_17 572 | P18_10_18 
573 | P18_10_19 574 | P18_10_2 575 | P18_10_20 576 | P18_10_21 577 | P18_10_22 578 | P18_10_23 579 | P18_10_24 580 | P18_10_25 581 | P18_10_26 582 | P18_10_27 583 | P18_10_28 584 | P18_10_29 585 | P18_10_3 586 | P18_10_30 587 | P18_10_31 588 | P18_10_32 589 | P18_10_33 590 | P18_10_34 591 | P18_10_35 592 | P18_10_36 593 | P18_10_37 594 | P18_10_38 595 | P18_10_39 596 | P18_10_4 597 | P18_10_40 598 | P18_10_41 599 | P18_10_42 600 | P18_10_43 601 | P18_10_44 602 | P18_10_45 603 | P18_10_5 604 | P18_10_6 605 | P18_10_7 606 | P18_10_8 607 | P18_10_9 608 | P18_11_0 609 | P18_11_1 610 | P18_11_10 611 | P18_11_11 612 | P18_11_12 613 | P18_11_13 614 | P18_11_14 615 | P18_11_15 616 | P18_11_16 617 | P18_11_17 618 | P18_11_18 619 | P18_11_19 620 | P18_11_2 621 | P18_11_20 622 | P18_11_21 623 | P18_11_22 624 | P18_11_23 625 | P18_11_24 626 | P18_11_25 627 | P18_11_26 628 | P18_11_27 629 | P18_11_28 630 | P18_11_29 631 | P18_11_3 632 | P18_11_30 633 | P18_11_31 634 | P18_11_32 635 | P18_11_33 636 | P18_11_34 637 | P18_11_35 638 | P18_11_36 639 | P18_11_37 640 | P18_11_38 641 | P18_11_39 642 | P18_11_4 643 | P18_11_40 644 | P18_11_41 645 | P18_11_42 646 | P18_11_43 647 | P18_11_44 648 | P18_11_45 649 | P18_11_46 650 | P18_11_47 651 | P18_11_48 652 | P18_11_49 653 | P18_11_5 654 | P18_11_50 655 | P18_11_51 656 | P18_11_52 657 | P18_11_53 658 | P18_11_54 659 | P18_11_55 660 | P18_11_56 661 | P18_11_57 662 | P18_11_59 663 | P18_11_6 664 | P18_11_60 665 | P18_11_61 666 | P18_11_62 667 | P18_11_63 668 | P18_11_64 669 | P18_11_65 670 | P18_11_66 671 | P18_11_67 672 | P18_11_68 673 | P18_11_69 674 | P18_11_7 675 | P18_11_70 676 | P18_11_71 677 | P18_11_72 678 | P18_11_73 679 | P18_11_74 680 | P18_11_75 681 | P18_11_76 682 | P18_11_77 683 | P18_11_78 684 | P18_11_79 685 | P18_11_8 686 | P18_11_80 687 | P18_11_81 688 | P18_11_82 689 | P18_11_83 690 | P18_11_84 691 | P18_11_85 692 | P18_11_86 693 | P18_11_87 694 | P18_11_88 695 | P18_11_89 696 | P18_11_9 697 | P18_11_90 698 | P18_11_91 699 | P18_11_92 700 | P18_11_93 701 | P18_11_94 702 | P18_11_95 703 | P18_11_96 704 | P18_11_97 705 | P18_11_98 706 | P18_12_0 707 | P18_12_1 708 | P18_12_10 709 | P18_12_11 710 | P18_12_12 711 | P18_12_13 712 | P18_12_14 713 | P18_12_15 714 | P18_12_16 715 | P18_12_17 716 | P18_12_18 717 | P18_12_19 718 | P18_12_2 719 | P18_12_20 720 | P18_12_21 721 | P18_12_22 722 | P18_12_23 723 | P18_12_24 724 | P18_12_25 725 | P18_12_26 726 | P18_12_27 727 | P18_12_28 728 | P18_12_29 729 | P18_12_3 730 | P18_12_30 731 | P18_12_31 732 | P18_12_32 733 | P18_12_33 734 | P18_12_4 735 | P18_12_5 736 | P18_12_6 737 | P18_12_7 738 | P18_12_8 739 | P18_12_9 740 | P32_01_0 741 | P32_01_1 742 | P32_01_10 743 | P32_01_11 744 | P32_01_12 745 | P32_01_13 746 | P32_01_14 747 | P32_01_15 748 | P32_01_16 749 | P32_01_17 750 | P32_01_18 751 | P32_01_19 752 | P32_01_2 753 | P32_01_20 754 | P32_01_21 755 | P32_01_22 756 | P32_01_23 757 | P32_01_24 758 | P32_01_25 759 | P32_01_26 760 | P32_01_27 761 | P32_01_28 762 | P32_01_29 763 | P32_01_3 764 | P32_01_30 765 | P32_01_31 766 | P32_01_32 767 | P32_01_33 768 | P32_01_34 769 | P32_01_35 770 | P32_01_36 771 | P32_01_37 772 | P32_01_38 773 | P32_01_39 774 | P32_01_4 775 | P32_01_40 776 | P32_01_41 777 | P32_01_42 778 | P32_01_43 779 | P32_01_44 780 | P32_01_45 781 | P32_01_46 782 | P32_01_47 783 | P32_01_48 784 | P32_01_49 785 | P32_01_5 786 | P32_01_50 787 | P32_01_51 788 | P32_01_52 789 | P32_01_53 790 | P32_01_54 791 | P32_01_55 792 | P32_01_56 793 | P32_01_57 794 | P32_01_58 795 | P32_01_59 796 | P32_01_6 797 | 
P32_01_7 798 | P32_01_8 799 | P32_01_9 800 | P32_02_0 801 | P32_02_1 802 | P32_02_10 803 | P32_02_11 804 | P32_02_12 805 | P32_02_13 806 | P32_02_14 807 | P32_02_15 808 | P32_02_16 809 | P32_02_17 810 | P32_02_18 811 | P32_02_19 812 | P32_02_2 813 | P32_02_20 814 | P32_02_21 815 | P32_02_22 816 | P32_02_23 817 | P32_02_24 818 | P32_02_25 819 | P32_02_26 820 | P32_02_3 821 | P32_02_4 822 | P32_02_5 823 | P32_02_6 824 | P32_02_7 825 | P32_02_8 826 | P32_02_9 827 | P32_03_0 828 | P32_03_1 829 | P32_03_10 830 | P32_03_11 831 | P32_03_2 832 | P32_03_3 833 | P32_03_4 834 | P32_03_5 835 | P32_03_6 836 | P32_03_7 837 | P32_03_8 838 | P32_03_9 839 | P32_04_0 840 | P32_04_1 841 | P32_04_10 842 | P32_04_11 843 | P32_04_12 844 | P32_04_13 845 | P32_04_14 846 | P32_04_15 847 | P32_04_16 848 | P32_04_17 849 | P32_04_18 850 | P32_04_19 851 | P32_04_2 852 | P32_04_20 853 | P32_04_21 854 | P32_04_22 855 | P32_04_23 856 | P32_04_24 857 | P32_04_25 858 | P32_04_26 859 | P32_04_27 860 | P32_04_28 861 | P32_04_29 862 | P32_04_3 863 | P32_04_30 864 | P32_04_31 865 | P32_04_32 866 | P32_04_33 867 | P32_04_34 868 | P32_04_35 869 | P32_04_36 870 | P32_04_37 871 | P32_04_38 872 | P32_04_39 873 | P32_04_4 874 | P32_04_5 875 | P32_04_6 876 | P32_04_7 877 | P32_04_8 878 | P32_04_9 879 | P32_05_0 880 | P32_05_1 881 | P32_05_10 882 | P32_05_11 883 | P32_05_12 884 | P32_05_13 885 | P32_05_14 886 | P32_05_15 887 | P32_05_16 888 | P32_05_17 889 | P32_05_18 890 | P32_05_19 891 | P32_05_2 892 | P32_05_20 893 | P32_05_21 894 | P32_05_22 895 | P32_05_23 896 | P32_05_24 897 | P32_05_25 898 | P32_05_26 899 | P32_05_27 900 | P32_05_28 901 | P32_05_29 902 | P32_05_3 903 | P32_05_30 904 | P32_05_31 905 | P32_05_32 906 | P32_05_33 907 | P32_05_34 908 | P32_05_35 909 | P32_05_36 910 | P32_05_4 911 | P32_05_5 912 | P32_05_6 913 | P32_05_7 914 | P32_05_8 915 | P32_05_9 916 | P32_06_0 917 | P32_06_1 918 | P32_06_10 919 | P32_06_11 920 | P32_06_12 921 | P32_06_13 922 | P32_06_14 923 | P32_06_15 924 | P32_06_16 925 | P32_06_17 926 | P32_06_18 927 | P32_06_19 928 | P32_06_2 929 | P32_06_20 930 | P32_06_21 931 | P32_06_22 932 | P32_06_23 933 | P32_06_24 934 | P32_06_25 935 | P32_06_26 936 | P32_06_27 937 | P32_06_28 938 | P32_06_29 939 | P32_06_3 940 | P32_06_30 941 | P32_06_31 942 | P32_06_32 943 | P32_06_33 944 | P32_06_34 945 | P32_06_35 946 | P32_06_36 947 | P32_06_37 948 | P32_06_38 949 | P32_06_39 950 | P32_06_4 951 | P32_06_40 952 | P32_06_41 953 | P32_06_42 954 | P32_06_43 955 | P32_06_44 956 | P32_06_45 957 | P32_06_46 958 | P32_06_47 959 | P32_06_48 960 | P32_06_6 961 | P32_06_7 962 | P32_06_8 963 | P32_06_9 964 | P32_07_0 965 | P32_07_1 966 | P32_07_2 967 | P32_08_0 968 | P32_08_1 969 | P32_08_10 970 | P32_08_11 971 | P32_08_12 972 | P32_08_13 973 | P32_08_14 974 | P32_08_15 975 | P32_08_2 976 | P32_08_3 977 | P32_08_4 978 | P32_08_5 979 | P32_08_6 980 | P32_08_7 981 | P32_08_8 982 | P32_08_9 983 | P32_09_0 984 | P32_09_1 985 | P32_09_10 986 | P32_09_11 987 | P32_09_12 988 | P32_09_13 989 | P32_09_14 990 | P32_09_15 991 | P32_09_16 992 | P32_09_17 993 | P32_09_18 994 | P32_09_19 995 | P32_09_2 996 | P32_09_20 997 | P32_09_21 998 | P32_09_22 999 | P32_09_23 1000 | P32_09_24 1001 | P32_09_25 1002 | P32_09_26 1003 | P32_09_27 1004 | P32_09_28 1005 | P32_09_29 1006 | P32_09_3 1007 | P32_09_30 1008 | P32_09_31 1009 | P32_09_32 1010 | P32_09_33 1011 | P32_09_34 1012 | P32_09_35 1013 | P32_09_36 1014 | P32_09_37 1015 | P32_09_38 1016 | P32_09_4 1017 | P32_09_5 1018 | P32_09_6 1019 | P32_09_7 1020 | P32_09_8 1021 | P32_09_9 1022 | 
P32_10_0 1023 | P32_10_1 1024 | P32_10_10 1025 | P32_10_11 1026 | P32_10_12 1027 | P32_10_13 1028 | P32_10_14 1029 | P32_10_15 1030 | P32_10_16 1031 | P32_10_17 1032 | P32_10_18 1033 | P32_10_19 1034 | P32_10_2 1035 | P32_10_20 1036 | P32_10_21 1037 | P32_10_22 1038 | P32_10_23 1039 | P32_10_24 1040 | P32_10_25 1041 | P32_10_26 1042 | P32_10_27 1043 | P32_10_28 1044 | P32_10_29 1045 | P32_10_3 1046 | P32_10_30 1047 | P32_10_31 1048 | P32_10_32 1049 | P32_10_33 1050 | P32_10_34 1051 | P32_10_35 1052 | P32_10_36 1053 | P32_10_37 1054 | P32_10_38 1055 | P32_10_39 1056 | P32_10_4 1057 | P32_10_40 1058 | P32_10_41 1059 | P32_10_42 1060 | P32_10_43 1061 | P32_10_5 1062 | P32_10_6 1063 | P32_10_7 1064 | P32_10_8 1065 | P32_10_9 1066 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # What Would You Expect? Anticipating Egocentric Actions with Rolling-Unrolling LSTMs and Modality Attention 2 | See the quickstart here 👉 [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/fpv-iplab/rulstm/blob/master/RULSTM/Rolling-Unrolling-LSTM-Quickstart.ipynb) 3 | 4 | This repository hosts the code related to the following papers: 5 | 6 | Antonino Furnari and Giovanni Maria Farinella, Rolling-Unrolling LSTMs for Action Anticipation from First-Person Video. IEEE Transactions on Pattern Analysis and Machine Intelligence (PAMI). 2020. [Download](http://arxiv.org/pdf/2005.02190.pdf) 7 | 8 | Antonino Furnari and Giovanni Maria Farinella, What Would You Expect? Anticipating Egocentric Actions with Rolling-Unrolling LSTMs and Modality Attention. International Conference on Computer Vision, 2019. [Download](https://arxiv.org/pdf/1905.09035.pdf) 9 | 10 | Please also see the project web page at [http://iplab.dmi.unict.it/rulstm](http://iplab.dmi.unict.it/rulstm). 11 | 12 | If you use the code/models hosted in this repository, please cite the following papers: 13 | 14 | ``` 15 | @article{furnari2020rulstm, 16 | author = {Antonino Furnari and Giovanni Maria Farinella}, 17 | journal = {IEEE Transactions on Pattern Analysis and Machine Intelligence (PAMI)}, 18 | title = {Rolling-Unrolling LSTMs for Action Anticipation from First-Person Video}, 19 | year = {2020} 20 | } 21 | ``` 22 | 23 | ``` 24 | @inproceedings{furnari2019rulstm, 25 | title = { What Would You Expect? Anticipating Egocentric Actions with Rolling-Unrolling LSTMs and Modality Attention. }, 26 | author = { Antonino Furnari and Giovanni Maria Farinella }, 27 | year = { 2019 }, 28 | booktitle = { International Conference on Computer Vision (ICCV) }, 29 | } 30 | ``` 31 | ## Updates: 32 | * **23/08/2023** A quickstart notebook is available [here](RULSTM/Rolling-Unrolling-LSTM-Quickstart.ipynb). You can also open it directly in Colab clicking on the badge: [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/fpv-iplab/rulstm/blob/master/RULSTM/Rolling-Unrolling-LSTM-Quickstart.ipynb) 33 | * **28/06/2021** We are now providing object detections on all frames of EPIC-KITCHENS-100. Please see this README (below) for more information; 34 | * **11/01/2021** We have updated the archive providing the EGTEA Gaze+ pre-extracted features. 
Please see this README (below) for more information; 35 | * **01/10/2020** We are now sharing the rgb/flow/obj EPIC-KITCHENS-100 features and pre-trained models used to report baseline results in the [Rescaling Egocentric Vision](https://arxiv.org/abs/2006.13256) paper; 36 | * **04/05/2020** We have now published an extended version of this work on PAMI. Please check the text above for the updated references; 37 | * **23/03/2020** We are now providing pre-extracted features for EGTEA Gaze+. See README for more information; 38 | * **23/10/2019** Added some scripts to show how to extract features from videos. The scripts can be found under `FEATEXT` and are documented in this README; 39 | * **11/10/2019** We are now also providing TSN and object-based features extracted for **each frame of EPIC-KITCHENS**. They can be downloaded using the `download_data_full.sh` script rather than `download_data.sh`. 40 | 41 | ## Overview 42 | This repository provides the following components: 43 | * The official PyTorch implementation of the proposed Rolling-Unrolling LSTM approach, including Sequence-Completion Pre-Training and Modality ATTention (MATT); 44 | * A program to train, validate and test the proposed method on the EPIC-KITCHENS-55 and EPIC-KITCHENS-100 datasets; 45 | * Pre-extracted features for EPIC-KITCHENS-55 and EPIC-KITCHENS-100. Specifically, we include: 46 | * RGB features: extracted from RGB images using a BNInception CNN trained for the task of *egocentric action recognition* using [Temporal Segment Networks](https://github.com/yjxiong/tsn-pytorch); 47 | * Flow features: similar to RGB features, but extracted with a BNInception CNN trained on optical flow; 48 | * OBJ features: object-based features obtained by running a Faster R-CNN object detector trained on EPIC-KITCHENS-55; 49 | * The checkpoints of the RGB/Flow/OBJ/Fusion models trained for both tasks: egocentric action anticipation and early action recognition; 50 | * The checkpoints of the TSN models (to be used with the [official PyTorch implementation of TSN](https://github.com/yjxiong/tsn-pytorch)); 51 | * The checkpoint of the Faster R-CNN object detector trained on EPIC-KITCHENS-55; 52 | * The training/validation split used for the experiments. Note that the TSN and Faster R-CNN models have been trained on the training set of this split. 53 | 54 | Please refer to the paper for more technical details. The following sections document the released material. 55 | 56 | ## RU-LSTM Implementation and main training/validation/test program 57 | The provided implementation and training/validation/test program can be found in the `RULSTM` directory. In order to proceed with training, it is necessary to retrieve the pre-extracted features from our website. To save space and bandwidth, we provide features extracted only on the subset of frames used for the experiments (we sampled frames at about 4fps; please see the paper). These features are sufficient to train/validate/test the methods on the whole EPIC-KITCHENS-55 dataset following the settings reported in the paper. 58 | 59 | ### Requirements 60 | To run the code, you will need a Python3 interpreter and some libraries (including PyTorch). 61 | 62 | #### Anaconda 63 | An Anaconda environment file with a minimal set of requirements is provided in `environment.yml`.
If you are using Anaconda, you can create a suitable environment with: 64 | 65 | `conda env create -f environment.yml` 66 | 67 | To activate the environment, type: 68 | 69 | `conda activate rulstm` 70 | 71 | #### Pip 72 | If you are not using Anaconda, we provide a list of libraries in `requirements.txt`. You can install these libraries with: 73 | 74 | `pip install -r requirements.txt` 75 | 76 | ### Dataset, training/validation splits, and features 77 | We provide CSVs for training, validation, and testing on EPIC-KITCHENS-55 in the `data/ek55` directory. A brief description of each csv follows: 78 | * `actions.csv`: maps action ids to (verb,noun) pairs; 79 | * `EPIC_many_shot_nouns.csv`: contains the list of many shot nouns for class-aware metrics (please refer to [the EPIC-KITCHENS-55 paper](https://arxiv.org/abs/1804.02748) for more details); 80 | * `EPIC_many_shot_verbs.csv`: similar to the previous one, but related to verbs; 81 | * `test_seen.csv`: contains the timestamps (expressed in number of frames) of the "seen" test set (S1); 82 | * `test_unseen.csv`: contains the timestamps (expressed in number of frames) of the "unseen" test set (S2); 83 | * `training.csv`: contains annotations for the training set in our training/validation split; 84 | * `validation.csv`: contains annotations for the validation set in our training/validation split; 85 | * `training_videos.csv`: contains the list of training videos in our training/validation split; 86 | * `validation_videos.csv`: contains the list of validation videos in our training/validation split. 87 | We also provide CSVs for training/validation/testing on EPIC-KITCHENS-100 in the `data/ek100` directory. 88 | 89 | Training and validation CSVs report the following columns: 90 | * Annotation ID; 91 | * Video name (without extension); 92 | * Start frame; 93 | * End frame; 94 | * Verb ID; 95 | * Noun ID; 96 | * Action ID. 97 | 98 | The test CSVs do not report the last three columns since test annotations are not public. These CSVs are provided to allow producing predictions in JSON format to be submitted to the leaderboard. 99 | 100 | Please note that time-stamps are reported in terms of frame numbers in the csvs. This has been done by assuming a fixed framerate of 30fps. Since the original videos have been collected at different framerates, we first converted all videos to 30fps using ffmpeg. 101 | 102 | We provide pre-extracted features. The features are stored in LMDB datasets. To download them, run the following command: 103 | 104 | * EPIC-KITCHENS-55: `./scripts/download_data_ek55.sh`; 105 | 106 | Alternatively, you can download features extracted from each frame by using the scripts: 107 | 108 | * EPIC-KITCHENS-55: `./scripts/download_data_ek55_full.sh`; 109 | * EPIC-KITCHENS-100: `./scripts/download_data_ek100_full.sh`; 110 | 111 | Please note that this download is significantly heavier and that it is not required to run the training with default parameters on EPIC-KITCHENS-55. 112 | 113 | This should populate three directories `data/ek{55|100}/rgb`, `data/ek{55|100}/flow`, and `data/ek{55|100}/obj` with the LMDB datasets.
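Each LMDB database maps frame names to per-frame feature vectors. As a quick sanity check after the download, the following minimal sketch reads one feature vector. It assumes keys follow the `{video}_frame_{:010d}.jpg` template used in `dataset.py`; the raw float32 value layout is an assumption (see `read_representations` in `dataset.py` for the exact decoding used at training time), and with the default (non-full) download only the ~4fps subset of frames is present, so some keys may be missing:

```python
import lmdb
import numpy as np

# open the RGB feature store downloaded by scripts/download_data_ek55.sh
env = lmdb.open('data/ek55/rgb', readonly=True, lock=False)

# keys follow the "{video}_frame_{:010d}.jpg" template used in dataset.py
key = 'P01_01_' + 'frame_{:010d}.jpg'.format(1)

with env.begin() as txn:
    value = txn.get(key.encode())

if value is not None:
    # assumption: the value is a raw float32 feature buffer
    feat = np.frombuffer(value, dtype=np.float32)
    print(key, feat.shape)
else:
    print('key not found - not all frames are stored in the non-full download')
```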
114 | 115 | ### Training 116 | Models can be trained using the `main.py` program. For instance, to train the RGB branch for the action anticipation task, use the following commands: 117 | 118 | #### EPIC-KITCHENS-55 119 | * `mkdir models/` 120 | * `python main.py train data/ek55 models/ek55 --modality rgb --task anticipation --sequence_completion` 121 | * `python main.py train data/ek55 models/ek55 --modality rgb --task anticipation` 122 | 123 | #### EPIC-KITCHENS-100 124 | * `mkdir models/` 125 | * `python main.py train data/ek100 models/ek100 --modality rgb --task anticipation --sequence_completion --num_class 3806 --mt5r` 126 | * `python main.py train data/ek100 models/ek100 --modality rgb --task anticipation --num_class 3806 --mt5r` 127 | 128 | This will first pre-train using sequence completion, then fine-tune to the main anticipation task. All models will be stored in the `models/ek{55|100}` directory. 129 | 130 | Optionally, a `--visdom` flag can be passed to the training program in order to enable logging using visdom. To enable this, it is necessary to install visdom with: 131 | 132 | `pip install visdom` 133 | 134 | and run it with: 135 | 136 | `python -m visdom.server` 137 | 138 | Similar commands can be used to train all models. The following scripts contain all commands required to train the models for egocentric action anticipation and early action recognition: 139 | * `scripts/train_anticipation_ek{55|100}.sh`; 140 | * `scripts/train_early_recognition_ek55.sh`. 141 | 142 | ### Validation 143 | The anticipation models can be validated using the following commands: 144 | 145 | #### Action Anticipation 146 | 147 | ##### EPIC-KITCHENS-55 148 | * RGB branch: `python main.py validate data/ek55 models/ek55 --modality rgb --task anticipation`; 149 | * Optical Flow branch: `python main.py validate data/ek55 models/ek55 --modality flow --task anticipation`; 150 | * Object branch: `python main.py validate data/ek55 models/ek55 --modality obj --task anticipation --feat_in 352`; 151 | * Complete architecture with MATT: `python main.py validate data/ek55 models/ek55 --modality fusion --task anticipation`. 152 | 153 | ##### EPIC-KITCHENS-100 154 | * RGB branch: `python main.py validate data/ek100 models/ek100 --modality rgb --task anticipation --num_class 3806 --mt5r --ek100`; 155 | * Optical Flow branch: `python main.py validate data/ek100 models/ek100 --modality flow --task anticipation --num_class 3806 --mt5r --ek100`; 156 | * Object branch: `python main.py validate data/ek100 models/ek100 --modality obj --task anticipation --feat_in 352 --num_class 3806 --mt5r --ek100`; 157 | * Complete architecture with MATT: `python main.py validate data/ek100 models/ek100 --modality fusion --task anticipation --num_class 3806 --mt5r --ek100`. 158 | 159 | These instructions will evaluate the models using the official measures of the EPIC-KITCHENS-100 dataset for the action anticipation challenge (Mean Top-5 Recall; see the sketch at the end of this section). 160 | 161 | ###### Validation Jsons 162 | You can produce validation jsons as follows: 163 | 164 | * `mkdir -p jsons/ek100`; 165 | * Anticipation: `python main.py validate_json data/ek100 models/ek100 --modality fusion --task anticipation --json_directory jsons/ek100 --ek100 --num_class 3806 --mt5r`; 166 | * Early recognition: `python main.py validate_json data/ek100 models/ek100 --modality fusion --task early_recognition --json_directory jsons/ek100 --ek100 --num_class 3806 --mt5r`.
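For reference, Mean Top-5 Recall (the measure selected by `--mt5r`) averages, over classes, the fraction of ground-truth instances whose label appears among the top-5 predictions. The following is a minimal numpy sketch of the measure, not the repository's implementation (the official evaluation also restricts the average to specific class subsets, e.g., tail classes):

```python
import numpy as np

def mean_top5_recall(scores, labels):
    """scores: (n_samples, n_classes) prediction scores;
    labels: (n_samples,) ground-truth class ids.
    Returns the top-5 recall averaged over the classes present in labels."""
    # indices of the 5 highest-scoring classes for each sample
    top5 = np.argsort(scores, axis=1)[:, -5:]
    # whether the ground-truth label is among the top-5 predictions
    hits = np.any(top5 == labels[:, None], axis=1)
    # per-class recall, then average over classes
    recalls = [hits[labels == c].mean() for c in np.unique(labels)]
    return float(np.mean(recalls))

# toy example: 4 samples, 8 classes
rng = np.random.default_rng(0)
scores = rng.random((4, 8))
labels = np.array([0, 3, 3, 7])
print(mean_top5_recall(scores, labels))
```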
167 | 168 | #### Early Action Recognition 169 | Similarly, for early action recognition: 170 | 171 | ##### EPIC-KITCHENS-55 172 | * RGB branch: `python main.py validate data/ek55 models/ek55 --modality rgb --task early_recognition`; 173 | * Optical Flow branch: `python main.py validate data/ek55 models/ek55 --modality flow --task early_recognition`; 174 | * Object branch: `python main.py validate data/ek55 models/ek55 --modality obj --task early_recognition --feat_in 352`; 175 | * Late fusion model: `python main.py validate data/ek55 models/ek55 --modality fusion --task early_recognition`. 176 | 177 | ### Test 178 | The `main.py` program also allows running the models on the EPIC-KITCHENS-55 and EPIC-KITCHENS-100 test sets and producing jsons to be sent to the leaderboard (see [http://epic-kitchens.github.io/](http://epic-kitchens.github.io/)). To test models, you can use the following commands: 179 | 180 | #### EPIC-KITCHENS-55 181 | * `mkdir -p jsons/ek55`; 182 | * Anticipation: `python main.py test data/ek55 models/ek55 --modality fusion --task anticipation --json_directory jsons/ek55`; 183 | * Early recognition: `python main.py test data/ek55 models/ek55 --modality fusion --task early_recognition --json_directory jsons/ek55`. 184 | 185 | #### EPIC-KITCHENS-100 186 | * `mkdir -p jsons/ek100`; 187 | * Anticipation: `python main.py test data/ek100 models/ek100 --modality fusion --task anticipation --json_directory jsons/ek100 --ek100 --num_class 3806 --mt5r`; 188 | * Early recognition: `python main.py test data/ek100 models/ek100 --modality fusion --task early_recognition --json_directory jsons/ek100 --ek100 --num_class 3806 --mt5r`. 189 | 190 | ### Pretrained Models 191 | 192 | #### EPIC-KITCHENS-55 193 | We provide the official checkpoints used to report the results on EPIC-KITCHENS-55 in our ICCV paper. These can be downloaded using the script: 194 | 195 | `./scripts/download_models_ek55.sh` 196 | 197 | The models will be downloaded to `models/ek55`. You can test the models and obtain the results reported in the paper using the same `main.py` program. For instance: 198 | 199 | `python main.py test data/ek55 models/ek55 --modality fusion --task anticipation --json_directory jsons` 200 | 201 | #### EPIC-KITCHENS-100 202 | We provide the checkpoints used to report the results in the EPIC-KITCHENS-100 paper (https://arxiv.org/abs/2006.13256). These can be downloaded using the script: 203 | 204 | `./scripts/download_models_ek100.sh` 205 | 206 | The models will be downloaded to `models/ek100`. You can produce the validation and test jsons replicating the results of the paper as follows: 207 | 208 | * `python main.py test data/ek100 models/ek100 --modality fusion --task anticipation --json_directory jsons --ek100 --mt5r` 209 | * `python main.py validate_json data/ek100 models/ek100 --modality fusion --task anticipation --json_directory jsons --ek100 --mt5r` 210 | 211 | ## TSN models 212 | They can be downloaded from the following URLs: 213 | 214 | ### EPIC-KITCHENS-55 215 | * RGB: `http://iplab.dmi.unict.it/sharing/rulstm/TSN-rgb.pth.tar`; 216 | * Flow: `http://iplab.dmi.unict.it/sharing/rulstm/TSN-flow.pth.tar`. 217 | 218 | ### EPIC-KITCHENS-100 219 | * RGB: `http://iplab.dmi.unict.it/sharing/rulstm/TSN-rgb-ek100.pth.tar`; 220 | * Flow: `http://iplab.dmi.unict.it/sharing/rulstm/TSN-flow-ek100.pth.tar`.
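If you want to inspect one of the downloaded checkpoints before plugging it into the TSN codebase, a minimal sketch follows (the local filename and the exact dictionary layout are assumptions; print the keys to check):

```python
import torch

# load the checkpoint on CPU to inspect its contents
ckpt = torch.load('TSN-rgb.pth.tar', map_location='cpu')
print(type(ckpt))
if isinstance(ckpt, dict):
    # typical torch.save checkpoints are dictionaries; list the top-level keys
    print(list(ckpt.keys()))
```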
221 | 222 | ## Faster-RCNN Model Trained on EPIC-KITCHENS-55 223 | We release the Faster-RCNN object detector trained on EPIC-KITCHENS-55 that we used for our experiments. The detector has been trained using the [detectron](https://github.com/facebookresearch/Detectron) library. The `yaml` configuration file used to train the model is available in the `FasterRCNN` directory of this repository. The weights can be downloaded from [this link](http://iplab.dmi.unict.it/rulstm/downloads/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl). 224 | 225 | ### Usage 226 | Make sure the detectron library is installed and available in the system path. A good idea might be to use a Docker container. Please refer to [https://github.com/facebookresearch/Detectron/blob/master/INSTALL.md](https://github.com/facebookresearch/Detectron/blob/master/INSTALL.md) for more details. 227 | 228 | Sample usage: 229 | * clone the repository with `git clone https://github.com/antoninofurnari/rulstm.git`; 230 | * move to the FasterRCNN directory with `cd rulstm/FasterRCNN/`; 231 | * download the weights with `curl -o weights/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl http://iplab.dmi.unict.it/rulstm/downloads/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl`; 232 | * run the sample detection script on a video with `python tools/detect_video.py --cfg config/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.yaml --wts weights/ek18-2gpu-e2e-faster-rcnn-R-101-FPN_1x.pkl path/to/video.mp4`. 233 | 234 | A new file `path/to/video.mp4_detections.npy` will be created. The file will contain a list of arrays reporting the coordinates of the objects detected in each frame of the video. Specifically, the detections of a given frame will be contained in a tensor of shape `N x 6`, where: 235 | * `N` is the number of objects detected in the frame; 236 | * the first column contains the IDs of the detected objects; 237 | * columns 2-5 contain the coordinates of the detected bounding boxes in the format `[xmin, ymin, xmax, ymax]`; 238 | * the last (sixth) column contains the detection confidence scores. 239 | Please refer to [https://github.com/epic-kitchens/annotations/blob/master/EPIC_noun_classes.csv](https://github.com/epic-kitchens/annotations/blob/master/EPIC_noun_classes.csv) for the list of object ids.
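As a minimal example of consuming this output (assuming a `video.mp4_detections.npy` file produced as above), the following sketch keeps only the detections of the first frame with confidence above 0.5:

```python
import numpy as np

# list of per-frame (N, 6) arrays saved by detect_video.py
detections = np.load('video.mp4_detections.npy', allow_pickle=True, encoding='latin1')

frame_dets = detections[0]       # detections of the first frame
object_ids = frame_dets[:, 0]    # column 1: object class ids
boxes = frame_dets[:, 1:5]       # columns 2-5: [xmin, ymin, xmax, ymax]
scores = frame_dets[:, -1]       # column 6: confidence scores

confident = frame_dets[scores > 0.5]
print('%d/%d detections above threshold' % (len(confident), len(frame_dets)))
```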
240 | 241 | ## Feature Extraction 242 | A few example scripts showing how we performed feature extraction from video can be found in the `FEATEXT` directory. 243 | 244 | To extract features using the TSN models, it is necessary to install the `pretrainedmodels` package through `pip install pretrainedmodels`. 245 | 246 | To run the examples, follow these steps: 247 | * `cd FEATEXT`; 248 | * `./scripts/download_models.sh`; 249 | * Extract sample data with `tar xvf data.tar`. This will extract a few files in the `data` folder. These include: 250 | * RGB frames. In general, these can be extracted using programs such as `ffmpeg`; 251 | * Optical flows. These have been extracted using the TVL1 algorithm. Different programs can be used to extract the optical flow on GPU. For instance, https://github.com/feichtenhofer/gpu_flow; 252 | * Object bounding boxes. These can be extracted using the Faster R-CNN model provided with this repository (see the `detect_video.py` script in `FasterRCNN/tools`); 253 | * Create the destination directory for the features with `mkdir features`; 254 | * Run the example scripts: 255 | * `python extract_example_rgb.py`; 256 | * `python extract_example_flow.py`; 257 | * `python extract_example_obj.py`; 258 | * The scripts will create sample LMDB dbs in `features`. 259 | 260 | ## EGTEA Gaze+ Pre-Extracted Features 261 | We provide the EGTEA Gaze+ features used for the experiments (see the paper for details) at [https://iplab.dmi.unict.it/sharing/rulstm/features/egtea.zip](https://iplab.dmi.unict.it/sharing/rulstm/features/egtea.zip). The features have been extracted using three different TSN models trained following the official splits proposed by the authors of EGTEA Gaze+ (see [http://cbs.ic.gatech.edu/fpv/](http://cbs.ic.gatech.edu/fpv/)). The annotations, formatted to be directly usable with this repository, can be found in `RULSTM/data/egtea`. 262 | 263 | **Note**: a previous version of the zip file contained the following LMDB databases: 264 | * `TSN-C_3_egtea_action_CE_flow_model_best_fcfull_hd`; 265 | * `TSN-C_3_egtea_action_CE_rgb_model_best_fcfull_hd`; 266 | * `TSN-C_3_egtea_action_CE_s1_rgb_model_best_fcfull_hd`; 267 | * `TSN-C_3_egtea_action_CE_s1_flow_model_best_fcfull_hd`; 268 | * `TSN-C_3_egtea_action_CE_s2_rgb_model_best_fcfull_hd`; 269 | * `TSN-C_3_egtea_action_CE_s2_flow_model_best_fcfull_hd`; 270 | * `TSN-C_3_egtea_action_CE_s3_rgb_model_best_fcfull_hd`; 271 | * `TSN-C_3_egtea_action_CE_s3_flow_model_best_fcfull_hd`. 272 | 273 | The first two databases had been included **by mistake and should be ignored**; the remaining six databases should be used for the experiments when the standard evaluation protocol based on three splits is adopted. The following list explains in detail how they have been created: 274 | * `TSN-C_3_egtea_action_CE_s1_rgb_model_best_fcfull_hd`: features extracted using an RGB TSN model trained using s2 and s3 as the training set; 275 | * `TSN-C_3_egtea_action_CE_s1_flow_model_best_fcfull_hd`: features extracted using a Flow TSN model trained using s2 and s3 as the training set; 276 | * `TSN-C_3_egtea_action_CE_s2_rgb_model_best_fcfull_hd`: features extracted using an RGB TSN model trained using s1 and s3 as the training set; 277 | * `TSN-C_3_egtea_action_CE_s2_flow_model_best_fcfull_hd`: features extracted using a Flow TSN model trained using s1 and s3 as the training set; 278 | * `TSN-C_3_egtea_action_CE_s3_rgb_model_best_fcfull_hd`: features extracted using an RGB TSN model trained using s1 and s2 as the training set; 279 | * `TSN-C_3_egtea_action_CE_s3_flow_model_best_fcfull_hd`: features extracted using a Flow TSN model trained using s1 and s2 as the training set. 280 | 281 | An updated version of the zip file including only the correct databases is available at [https://iplab.dmi.unict.it/sharing/rulstm/features/egtea.zip](https://iplab.dmi.unict.it/sharing/rulstm/features/egtea.zip). 282 | 283 | ## Object detections on EPIC-KITCHENS-100 284 | We provide object detections obtained on each frame of EPIC-KITCHENS-100. The detections have been obtained by running the Faster RCNN model trained on EPIC-KITCHENS-55 described above and included in this repository. You can download a zip file containing all detections through this link: [https://iplab.dmi.unict.it/sharing/rulstm/detected_objects.zip](https://iplab.dmi.unict.it/sharing/rulstm/detected_objects.zip).
285 | 286 | **Note**: these detections are a superset of the ones used for the original experiments on EPIC-KITCHENS-55. If you are experimenting with EK-55, you can just discard the extra videos not belonging to EK-55. 287 | 288 | The zip file contains a `npy` file for each video in EPIC-KITCHENS-100. For example: 289 | 290 | ``` 291 | P01_01.MP4_detections.npy 292 | P01_02.MP4_detections.npy 293 | P01_03.MP4_detections.npy 294 | P01_04.MP4_detections.npy 295 | P01_05.MP4_detections.npy 296 | P01_06.MP4_detections.npy 297 | ... 298 | ``` 299 | 300 | Each file contains all object detections obtained in the video referenced in the filename. You can load these `npy` files as in this example code: 301 | 302 | ```python 303 | import numpy as np 304 | data=np.load('P04_101.MP4_detections.npy', allow_pickle=True, encoding='latin1') 305 | ``` 306 | 307 | `data` will be a 1-dimensional numpy ndarray containing `n` entries, where `n` is the number of frames in the video. Each entry of the array will be an array of shape `m x 6`, where `m` is the number of objects detected in the corresponding frame. The six columns contain respectively: 308 | * The class id. Please note that the background class is specified as `0`, so it is necessary to subtract `1` in order to match the noun class IDs reported in https://github.com/epic-kitchens/epic-kitchens-55-annotations/blob/master/EPIC_noun_classes.csv; 309 | * The `x1`, `y1`, `x2`, `y2` bounding box coordinates; 310 | * The detection confidence score. 311 | 312 | The following example code separates the class ids, box coordinates and confidence scores of the detections of a given frame (since `data` is an array of per-frame arrays, it has to be indexed frame by frame, e.g., `data[0]` for the first frame): 313 | 314 | ```python 315 | object_classes = data[0][:,0]-1 316 | object_boxes = data[0][:,1:5] 317 | detection_scores = data[0][:,-1] 318 | ``` 319 | 320 | ## Related Works 321 | * A. Furnari, S. Battiato, K. Grauman, G. M. Farinella, Next-Active-Object Prediction from Egocentric Videos, Journal of Visual Communication and Image Representation, 2017. [Download](https://arxiv.org/pdf/1904.05250.pdf); 322 | * A. Furnari, S. Battiato, G. M. Farinella (2018). Leveraging Uncertainty to Rethink Loss Functions and Evaluation Measures for Egocentric Action Anticipation. In International Workshop on Egocentric Perception, Interaction and Computing (EPIC) in conjunction with ECCV. [code](https://github.com/fpv-iplab/action-anticipation-losses); 323 | * More related works at [http://iplab.dmi.unict.it/fpv/](http://iplab.dmi.unict.it/fpv/) and [https://github.com/fpv-iplab](https://github.com/fpv-iplab).
324 | -------------------------------------------------------------------------------- /RULSTM/data/ek100/validation_tail_verbs_ids.csv: -------------------------------------------------------------------------------- 1 | P01_11_102 2 | P01_11_106 3 | P01_11_111 4 | P01_11_115 5 | P01_11_118 6 | P01_11_119 7 | P01_11_12 8 | P01_11_121 9 | P01_11_128 10 | P01_11_132 11 | P01_11_133 12 | P01_11_134 13 | P01_11_20 14 | P01_11_24 15 | P01_11_43 16 | P01_11_9 17 | P01_12_22 18 | P01_12_25 19 | P01_12_40 20 | P01_12_41 21 | P01_12_50 22 | P01_12_59 23 | P01_13_1 24 | P01_13_10 25 | P01_13_21 26 | P01_13_28 27 | P01_14_12 28 | P01_14_120 29 | P01_14_127 30 | P01_14_148 31 | P01_14_168 32 | P01_14_169 33 | P01_14_17 34 | P01_14_170 35 | P01_14_176 36 | P01_14_180 37 | P01_14_181 38 | P01_14_182 39 | P01_14_190 40 | P01_14_195 41 | P01_14_20 42 | P01_14_218 43 | P01_14_219 44 | P01_14_22 45 | P01_14_226 46 | P01_14_227 47 | P01_14_235 48 | P01_14_238 49 | P01_14_241 50 | P01_14_244 51 | P01_14_258 52 | P01_14_261 53 | P01_14_279 54 | P01_14_282 55 | P01_14_283 56 | P01_14_286 57 | P01_14_287 58 | P01_14_288 59 | P01_14_291 60 | P01_14_296 61 | P01_14_297 62 | P01_14_298 63 | P01_14_3 64 | P01_14_303 65 | P01_14_306 66 | P01_14_307 67 | P01_14_308 68 | P01_14_310 69 | P01_14_311 70 | P01_14_312 71 | P01_14_313 72 | P01_14_314 73 | P01_14_334 74 | P01_14_340 75 | P01_14_341 76 | P01_14_343 77 | P01_14_37 78 | P01_14_48 79 | P01_14_59 80 | P01_14_98 81 | P01_14_99 82 | P01_15_109 83 | P01_15_114 84 | P01_15_15 85 | P01_15_161 86 | P01_15_165 87 | P01_15_169 88 | P01_15_170 89 | P01_15_172 90 | P01_15_18 91 | P01_15_204 92 | P01_15_212 93 | P01_15_216 94 | P01_15_22 95 | P01_15_223 96 | P01_15_225 97 | P01_15_228 98 | P01_15_230 99 | P01_15_234 100 | P01_15_245 101 | P01_15_249 102 | P01_15_255 103 | P01_15_264 104 | P01_15_270 105 | P01_15_274 106 | P01_15_284 107 | P01_15_285 108 | P01_15_31 109 | P01_15_5 110 | P01_15_99 111 | P02_12_0 112 | P02_12_100 113 | P02_12_105 114 | P02_12_106 115 | P02_12_107 116 | P02_12_108 117 | P02_12_109 118 | P02_12_110 119 | P02_12_122 120 | P02_12_123 121 | P02_12_130 122 | P02_12_131 123 | P02_12_135 124 | P02_12_137 125 | P02_12_139 126 | P02_12_141 127 | P02_12_142 128 | P02_12_145 129 | P02_12_147 130 | P02_12_148 131 | P02_12_151 132 | P02_12_153 133 | P02_12_154 134 | P02_12_156 135 | P02_12_160 136 | P02_12_161 137 | P02_12_163 138 | P02_12_166 139 | P02_12_168 140 | P02_12_170 141 | P02_12_171 142 | P02_12_172 143 | P02_12_177 144 | P02_12_178 145 | P02_12_179 146 | P02_12_18 147 | P02_12_182 148 | P02_12_184 149 | P02_12_185 150 | P02_12_19 151 | P02_12_193 152 | P02_12_20 153 | P02_12_21 154 | P02_12_224 155 | P02_12_227 156 | P02_12_229 157 | P02_12_230 158 | P02_12_234 159 | P02_12_235 160 | P02_12_239 161 | P02_12_240 162 | P02_12_243 163 | P02_12_245 164 | P02_12_246 165 | P02_12_249 166 | P02_12_251 167 | P02_12_253 168 | P02_12_256 169 | P02_12_259 170 | P02_12_260 171 | P02_12_261 172 | P02_12_279 173 | P02_12_28 174 | P02_12_287 175 | P02_12_289 176 | P02_12_290 177 | P02_12_293 178 | P02_12_294 179 | P02_12_295 180 | P02_12_303 181 | P02_12_306 182 | P02_12_307 183 | P02_12_309 184 | P02_12_316 185 | P02_12_318 186 | P02_12_320 187 | P02_12_321 188 | P02_12_323 189 | P02_12_324 190 | P02_12_327 191 | P02_12_328 192 | P02_12_33 193 | P02_12_331 194 | P02_12_332 195 | P02_12_338 196 | P02_12_34 197 | P02_12_343 198 | P02_12_350 199 | P02_12_351 200 | P02_12_352 201 | P02_12_353 202 | P02_12_354 203 | P02_12_358 204 | P02_12_359 205 | P02_12_368 206 
| P02_12_369 207 | P02_12_371 208 | P02_12_5 209 | P02_12_52 210 | P02_12_53 211 | P02_12_54 212 | P02_12_57 213 | P02_12_6 214 | P02_12_61 215 | P02_12_62 216 | P02_12_63 217 | P02_12_64 218 | P02_12_65 219 | P02_12_80 220 | P02_12_83 221 | P02_12_91 222 | P02_12_93 223 | P02_12_95 224 | P02_12_98 225 | P02_13_1 226 | P02_13_12 227 | P02_13_13 228 | P02_14_0 229 | P02_14_1 230 | P02_14_12 231 | P02_14_13 232 | P02_14_4 233 | P02_14_6 234 | P02_15_21 235 | P02_15_22 236 | P02_15_23 237 | P02_15_36 238 | P02_15_37 239 | P02_15_42 240 | P03_23_114 241 | P03_23_115 242 | P03_23_116 243 | P03_23_44 244 | P03_23_45 245 | P03_23_56 246 | P03_23_59 247 | P03_23_6 248 | P03_23_60 249 | P03_23_61 250 | P03_23_66 251 | P03_23_67 252 | P03_23_77 253 | P03_23_78 254 | P03_23_79 255 | P03_23_82 256 | P03_23_84 257 | P03_23_91 258 | P03_24_0 259 | P03_24_103 260 | P03_24_107 261 | P03_24_110 262 | P03_24_125 263 | P03_24_126 264 | P03_24_28 265 | P03_24_40 266 | P03_24_41 267 | P03_24_42 268 | P03_24_43 269 | P03_24_44 270 | P03_24_45 271 | P03_24_46 272 | P03_24_48 273 | P03_24_53 274 | P03_24_54 275 | P03_24_55 276 | P03_24_56 277 | P03_24_59 278 | P03_24_62 279 | P03_24_65 280 | P03_24_66 281 | P03_24_67 282 | P03_24_68 283 | P03_24_69 284 | P03_24_70 285 | P03_24_80 286 | P03_24_81 287 | P03_24_83 288 | P03_24_84 289 | P03_24_91 290 | P03_24_92 291 | P03_24_93 292 | P03_24_94 293 | P03_24_95 294 | P03_24_97 295 | P03_24_98 296 | P03_25_1 297 | P03_25_13 298 | P04_24_2 299 | P04_24_20 300 | P04_24_27 301 | P04_24_28 302 | P04_24_29 303 | P04_24_30 304 | P04_24_32 305 | P04_24_33 306 | P04_24_38 307 | P04_24_39 308 | P04_24_41 309 | P04_24_42 310 | P04_24_44 311 | P04_24_45 312 | P04_24_46 313 | P04_24_47 314 | P04_24_48 315 | P04_24_50 316 | P04_24_51 317 | P04_24_60 318 | P04_24_63 319 | P04_25_0 320 | P04_25_1 321 | P04_25_10 322 | P04_25_12 323 | P04_25_13 324 | P04_25_14 325 | P04_25_15 326 | P04_25_16 327 | P04_25_17 328 | P04_25_19 329 | P04_25_20 330 | P04_25_26 331 | P04_25_4 332 | P04_25_6 333 | P04_26_2 334 | P04_27_0 335 | P04_27_3 336 | P04_27_7 337 | P04_27_8 338 | P04_28_0 339 | P04_28_10 340 | P04_28_13 341 | P04_28_14 342 | P04_28_19 343 | P04_28_2 344 | P04_28_20 345 | P04_28_23 346 | P04_28_7 347 | P04_28_8 348 | P04_29_0 349 | P04_29_1 350 | P04_29_14 351 | P04_29_15 352 | P04_29_2 353 | P04_29_20 354 | P04_29_28 355 | P04_29_3 356 | P04_29_46 357 | P04_29_49 358 | P04_29_56 359 | P04_29_7 360 | P04_29_8 361 | P04_30_10 362 | P04_30_33 363 | P04_30_48 364 | P04_31_103 365 | P04_31_109 366 | P04_31_110 367 | P04_31_112 368 | P04_31_12 369 | P04_31_23 370 | P04_31_26 371 | P04_31_30 372 | P04_31_34 373 | P04_31_35 374 | P04_31_37 375 | P04_31_38 376 | P04_31_4 377 | P04_31_41 378 | P04_31_49 379 | P04_31_50 380 | P04_31_55 381 | P04_31_56 382 | P04_31_61 383 | P04_31_63 384 | P04_31_65 385 | P04_31_70 386 | P04_31_71 387 | P04_31_74 388 | P04_31_77 389 | P04_31_81 390 | P04_31_87 391 | P04_31_91 392 | P04_31_92 393 | P04_31_94 394 | P04_31_95 395 | P04_31_96 396 | P04_31_97 397 | P04_31_98 398 | P04_32_1 399 | P04_32_2 400 | P04_32_6 401 | P04_33_1 402 | P04_33_11 403 | P04_33_12 404 | P04_33_16 405 | P04_33_2 406 | P04_33_5 407 | P04_33_7 408 | P05_07_100 409 | P05_07_104 410 | P05_07_107 411 | P05_07_108 412 | P05_07_15 413 | P05_07_16 414 | P05_07_17 415 | P05_07_20 416 | P05_07_22 417 | P05_07_24 418 | P05_07_33 419 | P05_07_34 420 | P05_07_42 421 | P05_07_44 422 | P05_07_49 423 | P05_07_5 424 | P05_07_55 425 | P05_07_6 426 | P05_07_60 427 | P05_07_61 428 | P05_07_63 429 | 
P05_07_65 430 | P05_07_68 431 | P05_07_71 432 | P05_07_72 433 | P05_07_73 434 | P05_07_76 435 | P05_07_77 436 | P05_07_78 437 | P05_07_81 438 | P05_07_83 439 | P05_07_85 440 | P05_07_96 441 | P05_07_97 442 | P05_07_98 443 | P05_09_10 444 | P05_09_11 445 | P05_09_25 446 | P05_09_31 447 | P05_09_35 448 | P05_09_55 449 | P05_09_61 450 | P05_09_64 451 | P05_09_65 452 | P06_10_1 453 | P06_10_12 454 | P06_10_13 455 | P06_10_15 456 | P06_10_22 457 | P06_10_3 458 | P06_10_6 459 | P06_11_1 460 | P06_11_12 461 | P06_11_19 462 | P06_11_3 463 | P06_11_5 464 | P06_11_8 465 | P06_11_9 466 | P06_12_11 467 | P06_12_18 468 | P06_12_20 469 | P06_12_22 470 | P06_12_25 471 | P06_12_27 472 | P06_12_28 473 | P06_12_29 474 | P06_12_7 475 | P06_12_8 476 | P06_13_11 477 | P06_13_12 478 | P06_13_15 479 | P06_13_16 480 | P06_13_18 481 | P06_13_31 482 | P06_13_33 483 | P06_13_34 484 | P06_13_35 485 | P06_13_38 486 | P06_13_39 487 | P06_13_41 488 | P06_13_42 489 | P06_13_5 490 | P06_13_51 491 | P06_13_52 492 | P06_13_53 493 | P06_13_54 494 | P06_13_55 495 | P06_13_6 496 | P06_13_8 497 | P06_14_20 498 | P07_12_10 499 | P07_12_9 500 | P07_14_12 501 | P07_14_7 502 | P07_14_8 503 | P07_15_2 504 | P07_15_3 505 | P07_15_4 506 | P07_16_1 507 | P07_16_11 508 | P07_16_20 509 | P07_16_27 510 | P07_17_1 511 | P07_17_10 512 | P07_17_17 513 | P07_17_2 514 | P07_17_21 515 | P07_17_4 516 | P07_17_5 517 | P07_17_6 518 | P07_17_7 519 | P08_09_0 520 | P08_09_108 521 | P08_09_118 522 | P08_09_119 523 | P08_09_125 524 | P08_09_129 525 | P08_09_130 526 | P08_09_19 527 | P08_09_2 528 | P08_09_20 529 | P08_09_25 530 | P08_09_35 531 | P08_09_37 532 | P08_09_45 533 | P08_09_49 534 | P08_09_56 535 | P08_09_62 536 | P08_09_63 537 | P08_09_7 538 | P08_09_71 539 | P08_09_79 540 | P08_09_81 541 | P08_09_83 542 | P08_09_9 543 | P08_09_90 544 | P08_09_93 545 | P08_09_98 546 | P08_10_0 547 | P08_10_17 548 | P08_10_20 549 | P08_10_40 550 | P08_10_45 551 | P08_10_49 552 | P08_10_51 553 | P08_10_53 554 | P08_10_60 555 | P08_10_61 556 | P08_14_0 557 | P08_14_16 558 | P08_14_21 559 | P08_14_30 560 | P08_14_35 561 | P08_14_36 562 | P08_14_38 563 | P08_14_4 564 | P08_15_0 565 | P08_15_1 566 | P08_15_104 567 | P08_15_110 568 | P08_15_113 569 | P08_15_115 570 | P08_15_119 571 | P08_15_120 572 | P08_15_124 573 | P08_15_125 574 | P08_15_126 575 | P08_15_128 576 | P08_15_131 577 | P08_15_132 578 | P08_15_2 579 | P08_15_23 580 | P08_15_24 581 | P08_15_30 582 | P08_15_34 583 | P08_15_45 584 | P08_15_47 585 | P08_15_64 586 | P08_15_67 587 | P08_15_72 588 | P08_15_75 589 | P08_15_79 590 | P08_15_87 591 | P08_15_88 592 | P08_15_92 593 | P08_15_96 594 | P08_15_97 595 | P08_15_99 596 | P08_16_0 597 | P08_16_10 598 | P08_16_100 599 | P08_16_101 600 | P08_16_102 601 | P08_16_103 602 | P08_16_108 603 | P08_16_112 604 | P08_16_116 605 | P08_16_119 606 | P08_16_120 607 | P08_16_14 608 | P08_16_17 609 | P08_16_18 610 | P08_16_19 611 | P08_16_22 612 | P08_16_24 613 | P08_16_25 614 | P08_16_26 615 | P08_16_29 616 | P08_16_30 617 | P08_16_35 618 | P08_16_36 619 | P08_16_37 620 | P08_16_38 621 | P08_16_40 622 | P08_16_45 623 | P08_16_47 624 | P08_16_48 625 | P08_16_49 626 | P08_16_53 627 | P08_16_55 628 | P08_16_66 629 | P08_16_67 630 | P08_16_73 631 | P08_16_79 632 | P08_16_80 633 | P08_16_85 634 | P08_16_86 635 | P08_16_87 636 | P08_16_88 637 | P08_16_89 638 | P08_16_96 639 | P08_17_0 640 | P08_17_1 641 | P08_17_105 642 | P08_17_11 643 | P08_17_21 644 | P08_17_24 645 | P08_17_26 646 | P08_17_27 647 | P08_17_34 648 | P08_17_35 649 | P08_17_39 650 | P08_17_4 651 | P08_17_43 652 
| P08_17_47 653 | P08_17_48 654 | P08_17_49 655 | P08_17_50 656 | P08_17_52 657 | P08_17_53 658 | P08_17_54 659 | P08_17_58 660 | P08_17_59 661 | P08_17_6 662 | P08_17_60 663 | P08_17_63 664 | P08_17_70 665 | P08_17_71 666 | P08_17_72 667 | P08_17_73 668 | P08_17_74 669 | P08_17_75 670 | P08_17_76 671 | P08_17_77 672 | P08_17_78 673 | P08_17_81 674 | P08_17_91 675 | P08_17_93 676 | P08_17_94 677 | P08_17_95 678 | P10_03_107 679 | P10_03_115 680 | P10_03_119 681 | P10_03_122 682 | P10_03_124 683 | P10_03_126 684 | P10_03_128 685 | P10_03_132 686 | P10_03_133 687 | P10_03_136 688 | P10_03_137 689 | P10_03_142 690 | P10_03_147 691 | P10_03_151 692 | P10_03_158 693 | P10_03_159 694 | P10_03_16 695 | P10_03_160 696 | P10_03_161 697 | P10_03_162 698 | P10_03_166 699 | P10_03_170 700 | P10_03_175 701 | P10_03_183 702 | P10_03_198 703 | P10_03_2 704 | P10_03_20 705 | P10_03_200 706 | P10_03_202 707 | P10_03_213 708 | P10_03_218 709 | P10_03_219 710 | P10_03_22 711 | P10_03_221 712 | P10_03_222 713 | P10_03_226 714 | P10_03_25 715 | P10_03_29 716 | P10_03_31 717 | P10_03_34 718 | P10_03_36 719 | P10_03_37 720 | P10_03_39 721 | P10_03_40 722 | P10_03_44 723 | P10_03_45 724 | P10_03_53 725 | P10_03_60 726 | P10_03_67 727 | P10_03_77 728 | P10_03_79 729 | P10_03_88 730 | P11_17_1 731 | P11_17_106 732 | P11_17_109 733 | P11_17_117 734 | P11_17_118 735 | P11_17_119 736 | P11_17_120 737 | P11_17_121 738 | P11_17_122 739 | P11_17_124 740 | P11_17_127 741 | P11_17_129 742 | P11_17_130 743 | P11_17_131 744 | P11_17_132 745 | P11_17_133 746 | P11_17_134 747 | P11_17_135 748 | P11_17_136 749 | P11_17_137 750 | P11_17_138 751 | P11_17_139 752 | P11_17_141 753 | P11_17_15 754 | P11_17_2 755 | P11_17_21 756 | P11_17_22 757 | P11_17_23 758 | P11_17_24 759 | P11_17_25 760 | P11_17_26 761 | P11_17_27 762 | P11_17_30 763 | P11_17_31 764 | P11_17_32 765 | P11_17_33 766 | P11_17_35 767 | P11_17_41 768 | P11_17_44 769 | P11_17_45 770 | P11_17_46 771 | P11_17_47 772 | P11_17_48 773 | P11_17_49 774 | P11_17_51 775 | P11_17_52 776 | P11_17_53 777 | P11_17_54 778 | P11_17_55 779 | P11_17_56 780 | P11_17_57 781 | P11_17_63 782 | P11_17_64 783 | P11_17_71 784 | P11_17_72 785 | P11_17_8 786 | P11_17_93 787 | P11_17_94 788 | P11_19_18 789 | P11_19_19 790 | P11_19_21 791 | P11_19_24 792 | P11_19_25 793 | P11_19_26 794 | P11_19_28 795 | P11_19_9 796 | P11_20_0 797 | P11_20_107 798 | P11_20_13 799 | P11_20_165 800 | P11_20_168 801 | P11_20_169 802 | P11_20_17 803 | P11_20_170 804 | P11_20_177 805 | P11_20_178 806 | P11_20_180 807 | P11_20_189 808 | P11_20_19 809 | P11_20_192 810 | P11_20_193 811 | P11_20_20 812 | P11_20_26 813 | P11_20_27 814 | P11_20_31 815 | P11_20_32 816 | P11_20_35 817 | P11_20_38 818 | P11_20_39 819 | P11_20_45 820 | P11_20_48 821 | P11_20_49 822 | P11_20_53 823 | P11_20_58 824 | P11_20_61 825 | P11_20_63 826 | P11_20_8 827 | P11_20_81 828 | P11_20_82 829 | P11_20_99 830 | P11_21_3 831 | P11_21_4 832 | P11_21_5 833 | P11_21_8 834 | P11_22_20 835 | P11_22_36 836 | P11_22_42 837 | P11_22_43 838 | P11_22_44 839 | P11_22_45 840 | P11_22_56 841 | P11_22_58 842 | P11_22_59 843 | P11_22_64 844 | P11_22_66 845 | P11_22_67 846 | P11_22_7 847 | P11_22_72 848 | P11_23_1 849 | P11_23_14 850 | P11_24_16 851 | P12_03_0 852 | P12_03_1 853 | P12_03_105 854 | P12_03_108 855 | P12_03_109 856 | P12_03_111 857 | P12_03_112 858 | P12_03_22 859 | P12_03_3 860 | P12_03_30 861 | P12_03_33 862 | P12_03_43 863 | P12_03_44 864 | P12_03_45 865 | P12_03_5 866 | P12_03_51 867 | P12_03_52 868 | P12_03_57 869 | P12_03_6 870 | P12_03_60 871 
| P12_03_64 872 | P12_03_65 873 | P12_03_66 874 | P12_03_73 875 | P12_03_74 876 | P12_03_76 877 | P12_03_77 878 | P12_03_8 879 | P12_03_83 880 | P12_03_84 881 | P12_03_85 882 | P12_03_88 883 | P12_03_89 884 | P12_03_90 885 | P12_03_94 886 | P12_03_95 887 | P12_08_0 888 | P12_08_12 889 | P12_08_14 890 | P12_08_23 891 | P12_08_9 892 | P13_01_40 893 | P13_01_46 894 | P13_01_48 895 | P13_02_7 896 | P13_02_9 897 | P13_03_1 898 | P13_03_28 899 | P13_03_31 900 | P13_03_34 901 | P13_03_36 902 | P13_03_47 903 | P13_03_7 904 | P14_06_7 905 | P14_08_12 906 | P14_08_18 907 | P14_08_23 908 | P15_04_10 909 | P15_04_14 910 | P15_04_15 911 | P15_04_16 912 | P15_04_23 913 | P15_04_30 914 | P15_04_32 915 | P15_05_11 916 | P15_05_13 917 | P15_06_11 918 | P15_06_12 919 | P15_06_23 920 | P15_06_25 921 | P15_06_3 922 | P15_06_36 923 | P15_06_37 924 | P15_06_41 925 | P15_06_47 926 | P15_06_50 927 | P15_06_8 928 | P15_06_9 929 | P16_04_10 930 | P16_04_11 931 | P16_04_12 932 | P16_04_14 933 | P16_04_15 934 | P16_04_16 935 | P16_04_18 936 | P16_04_2 937 | P16_04_24 938 | P16_04_33 939 | P16_04_34 940 | P16_04_40 941 | P16_04_41 942 | P16_04_43 943 | P16_04_45 944 | P16_04_47 945 | P16_04_5 946 | P16_04_52 947 | P16_04_53 948 | P16_04_54 949 | P16_04_55 950 | P16_04_56 951 | P16_04_57 952 | P16_04_7 953 | P17_02_1 954 | P17_02_10 955 | P17_02_11 956 | P17_02_13 957 | P17_02_16 958 | P17_02_17 959 | P17_02_18 960 | P17_02_19 961 | P17_02_2 962 | P17_02_20 963 | P17_02_21 964 | P17_02_22 965 | P17_02_4 966 | P18_01_25 967 | P18_01_26 968 | P18_01_40 969 | P18_01_6 970 | P18_01_7 971 | P18_02_0 972 | P18_02_1 973 | P18_02_10 974 | P18_02_18 975 | P18_02_20 976 | P18_02_21 977 | P18_02_23 978 | P18_02_4 979 | P18_03_1 980 | P18_03_102 981 | P18_03_105 982 | P18_03_109 983 | P18_03_111 984 | P18_03_43 985 | P18_03_45 986 | P18_03_49 987 | P18_03_57 988 | P18_03_58 989 | P18_03_74 990 | P18_03_75 991 | P18_03_79 992 | P18_03_85 993 | P18_03_90 994 | P18_03_94 995 | P18_03_99 996 | P18_04_31 997 | P18_04_37 998 | P18_04_4 999 | P18_05_100 1000 | P18_05_104 1001 | P18_05_117 1002 | P18_05_12 1003 | P18_05_122 1004 | P18_05_124 1005 | P18_05_126 1006 | P18_05_129 1007 | P18_05_134 1008 | P18_05_18 1009 | P18_05_22 1010 | P18_05_23 1011 | P18_05_24 1012 | P18_05_26 1013 | P18_05_44 1014 | P18_05_46 1015 | P18_05_49 1016 | P18_05_50 1017 | P18_05_53 1018 | P18_05_62 1019 | P18_05_71 1020 | P18_05_76 1021 | P18_05_82 1022 | P18_05_9 1023 | P18_05_91 1024 | P18_05_97 1025 | P18_06_13 1026 | P18_06_31 1027 | P18_06_32 1028 | P18_06_38 1029 | P18_06_39 1030 | P18_06_53 1031 | P18_06_56 1032 | P18_06_58 1033 | P18_06_59 1034 | P18_06_66 1035 | P18_06_75 1036 | P18_06_84 1037 | P18_06_85 1038 | P18_06_86 1039 | P18_07_3 1040 | P18_07_33 1041 | P18_07_38 1042 | P18_07_52 1043 | P18_08_0 1044 | P18_08_12 1045 | P18_08_3 1046 | P18_08_5 1047 | P18_09_12 1048 | P18_09_15 1049 | P18_09_21 1050 | P18_09_41 1051 | P18_09_7 1052 | P18_09_8 1053 | P18_09_9 1054 | P18_10_23 1055 | P18_10_25 1056 | P18_10_26 1057 | P18_10_29 1058 | P18_10_42 1059 | P18_10_44 1060 | P18_11_28 1061 | P18_11_35 1062 | P18_11_37 1063 | P18_11_52 1064 | P18_11_54 1065 | P18_11_61 1066 | P18_11_62 1067 | P18_11_71 1068 | P18_11_80 1069 | P18_11_84 1070 | P18_12_24 1071 | P18_12_28 1072 | P18_12_32 1073 | P19_05_7 1074 | P19_06_11 1075 | P19_06_13 1076 | P19_06_14 1077 | P19_06_15 1078 | P19_06_16 1079 | P19_06_17 1080 | P19_06_20 1081 | P19_06_22 1082 | P19_06_23 1083 | P19_06_24 1084 | P19_06_25 1085 | P19_06_26 1086 | P19_06_28 1087 | P19_06_7 1088 | P20_05_25 
1089 | P20_05_46 1090 | P20_05_65 1091 | P20_06_32 1092 | P20_06_61 1093 | P21_02_1 1094 | P21_02_13 1095 | P21_02_15 1096 | P21_02_18 1097 | P21_02_2 1098 | P21_02_22 1099 | P21_02_23 1100 | P21_02_28 1101 | P21_02_29 1102 | P21_02_37 1103 | P21_02_4 1104 | P21_02_51 1105 | P21_02_52 1106 | P21_02_54 1107 | P21_02_57 1108 | P21_02_6 1109 | P22_01_103 1110 | P22_01_105 1111 | P22_01_109 1112 | P22_01_110 1113 | P22_01_111 1114 | P22_01_112 1115 | P22_01_113 1116 | P22_01_114 1117 | P22_01_121 1118 | P22_01_125 1119 | P22_01_129 1120 | P22_01_131 1121 | P22_01_132 1122 | P22_01_145 1123 | P22_01_146 1124 | P22_01_147 1125 | P22_01_148 1126 | P22_01_149 1127 | P22_01_150 1128 | P22_01_151 1129 | P22_01_152 1130 | P22_01_153 1131 | P22_01_158 1132 | P22_01_164 1133 | P22_01_165 1134 | P22_01_166 1135 | P22_01_167 1136 | P22_01_168 1137 | P22_01_169 1138 | P22_01_175 1139 | P22_01_177 1140 | P22_01_178 1141 | P22_01_183 1142 | P22_01_186 1143 | P22_01_188 1144 | P22_01_190 1145 | P22_01_197 1146 | P22_01_218 1147 | P22_01_226 1148 | P22_01_228 1149 | P22_01_255 1150 | P22_01_265 1151 | P22_01_274 1152 | P22_01_28 1153 | P22_01_298 1154 | P22_01_299 1155 | P22_01_302 1156 | P22_01_31 1157 | P22_01_316 1158 | P22_01_345 1159 | P22_01_37 1160 | P22_01_41 1161 | P22_01_53 1162 | P22_01_61 1163 | P22_01_7 1164 | P22_01_72 1165 | P22_01_73 1166 | P22_01_74 1167 | P22_01_75 1168 | P22_01_76 1169 | P22_01_8 1170 | P22_01_85 1171 | P22_01_87 1172 | P22_02_174 1173 | P22_02_182 1174 | P22_02_183 1175 | P22_02_184 1176 | P22_02_193 1177 | P22_02_214 1178 | P22_02_215 1179 | P22_02_3 1180 | P22_02_32 1181 | P22_02_40 1182 | P22_02_41 1183 | P22_02_43 1184 | P22_02_44 1185 | P22_02_60 1186 | P22_02_61 1187 | P22_02_64 1188 | P22_02_65 1189 | P22_02_66 1190 | P22_02_67 1191 | P22_02_70 1192 | P22_02_72 1193 | P22_02_76 1194 | P22_02_79 1195 | P22_02_81 1196 | P22_02_83 1197 | P22_03_11 1198 | P22_03_13 1199 | P22_03_136 1200 | P22_03_137 1201 | P22_03_145 1202 | P22_03_158 1203 | P22_03_159 1204 | P22_03_16 1205 | P22_03_18 1206 | P22_03_190 1207 | P22_03_20 1208 | P22_03_209 1209 | P22_03_210 1210 | P22_03_211 1211 | P22_03_212 1212 | P22_03_222 1213 | P22_03_224 1214 | P22_03_3 1215 | P22_03_336 1216 | P22_03_337 1217 | P22_03_340 1218 | P22_03_344 1219 | P22_03_345 1220 | P22_03_348 1221 | P22_03_349 1222 | P22_03_354 1223 | P22_03_355 1224 | P22_03_358 1225 | P22_03_361 1226 | P22_03_363 1227 | P22_03_373 1228 | P22_03_374 1229 | P22_03_380 1230 | P22_03_4 1231 | P22_03_404 1232 | P22_03_405 1233 | P22_03_406 1234 | P22_03_411 1235 | P22_03_416 1236 | P22_03_418 1237 | P22_03_419 1238 | P22_03_431 1239 | P22_03_437 1240 | P22_03_444 1241 | P22_03_446 1242 | P22_03_448 1243 | P22_03_450 1244 | P22_03_452 1245 | P22_03_467 1246 | P22_03_473 1247 | P22_03_474 1248 | P22_03_485 1249 | P22_03_72 1250 | P22_03_8 1251 | P22_04_100 1252 | P22_04_101 1253 | P22_04_102 1254 | P22_04_103 1255 | P22_04_104 1256 | P22_04_105 1257 | P22_04_106 1258 | P22_04_107 1259 | P22_04_124 1260 | P22_04_128 1261 | P22_04_129 1262 | P22_04_13 1263 | P22_04_135 1264 | P22_04_136 1265 | P22_04_14 1266 | P22_04_140 1267 | P22_04_141 1268 | P22_04_145 1269 | P22_04_146 1270 | P22_04_147 1271 | P22_04_149 1272 | P22_04_15 1273 | P22_04_152 1274 | P22_04_153 1275 | P22_04_175 1276 | P22_04_177 1277 | P22_04_68 1278 | P22_04_70 1279 | P22_04_71 1280 | P22_04_75 1281 | P22_04_77 1282 | P22_04_82 1283 | P22_04_96 1284 | P23_05_12 1285 | P23_05_13 1286 | P23_05_24 1287 | P23_05_29 1288 | P23_05_30 1289 | P23_05_31 1290 | P23_05_42 1291 | 
P23_05_46 1292 | P23_05_49 1293 | P23_05_52 1294 | P23_05_56 1295 | P23_05_57 1296 | P23_05_59 1297 | P23_05_67 1298 | P23_05_7 1299 | P23_05_70 1300 | P23_05_89 1301 | P23_05_91 1302 | P23_05_93 1303 | P24_09_103 1304 | P24_09_11 1305 | P24_09_113 1306 | P24_09_121 1307 | P24_09_122 1308 | P24_09_125 1309 | P24_09_127 1310 | P24_09_133 1311 | P24_09_138 1312 | P24_09_139 1313 | P24_09_140 1314 | P24_09_141 1315 | P24_09_144 1316 | P24_09_154 1317 | P24_09_155 1318 | P24_09_156 1319 | P24_09_160 1320 | P24_09_164 1321 | P24_09_177 1322 | P24_09_178 1323 | P24_09_18 1324 | P24_09_184 1325 | P24_09_185 1326 | P24_09_187 1327 | P24_09_196 1328 | P24_09_20 1329 | P24_09_204 1330 | P24_09_205 1331 | P24_09_206 1332 | P24_09_207 1333 | P24_09_209 1334 | P24_09_22 1335 | P24_09_249 1336 | P24_09_250 1337 | P24_09_274 1338 | P24_09_28 1339 | P24_09_280 1340 | P24_09_285 1341 | P24_09_29 1342 | P24_09_293 1343 | P24_09_297 1344 | P24_09_299 1345 | P24_09_30 1346 | P24_09_346 1347 | P24_09_38 1348 | P24_09_39 1349 | P24_09_40 1350 | P24_09_41 1351 | P24_09_67 1352 | P24_09_68 1353 | P24_09_69 1354 | P24_09_80 1355 | P24_09_87 1356 | P24_09_88 1357 | P24_09_89 1358 | P25_06_27 1359 | P25_06_33 1360 | P25_07_11 1361 | P25_07_16 1362 | P25_07_28 1363 | P25_07_30 1364 | P25_07_36 1365 | P25_07_39 1366 | P25_07_8 1367 | P25_08_11 1368 | P25_08_16 1369 | P25_08_19 1370 | P25_08_4 1371 | P26_31_9 1372 | P26_32_14 1373 | P26_32_2 1374 | P26_33_0 1375 | P26_33_4 1376 | P26_34_2 1377 | P26_34_3 1378 | P26_34_8 1379 | P26_34_9 1380 | P26_35_10 1381 | P26_35_22 1382 | P26_35_30 1383 | P26_35_4 1384 | P26_36_12 1385 | P26_36_19 1386 | P26_36_20 1387 | P26_36_21 1388 | P26_36_22 1389 | P26_36_32 1390 | P26_36_38 1391 | P26_36_7 1392 | P26_36_9 1393 | P26_37_1 1394 | P26_37_11 1395 | P26_37_4 1396 | P26_37_6 1397 | P26_37_8 1398 | P26_38_0 1399 | P26_38_4 1400 | P26_38_5 1401 | P26_38_8 1402 | P26_39_2 1403 | P26_39_4 1404 | P26_39_5 1405 | P26_40_0 1406 | P26_40_10 1407 | P26_40_2 1408 | P26_40_7 1409 | P26_40_9 1410 | P26_41_0 1411 | P26_41_14 1412 | P26_41_16 1413 | P26_41_18 1414 | P26_41_19 1415 | P26_41_5 1416 | P26_41_7 1417 | P27_05_50 1418 | P28_15_26 1419 | P28_16_12 1420 | P28_16_13 1421 | P28_16_14 1422 | P28_16_2 1423 | P28_16_20 1424 | P28_16_22 1425 | P28_16_7 1426 | P28_16_8 1427 | P28_17_10 1428 | P28_17_12 1429 | P28_18_1 1430 | P28_18_19 1431 | P28_18_21 1432 | P28_18_24 1433 | P28_18_6 1434 | P28_18_8 1435 | P28_19_11 1436 | P28_19_14 1437 | P28_19_16 1438 | P28_19_19 1439 | P28_20_10 1440 | P28_20_11 1441 | P28_20_12 1442 | P28_20_13 1443 | P28_20_14 1444 | P28_20_15 1445 | P28_20_16 1446 | P28_20_17 1447 | P28_20_18 1448 | P28_20_19 1449 | P28_20_20 1450 | P28_20_21 1451 | P28_20_22 1452 | P28_20_23 1453 | P28_20_3 1454 | P28_20_5 1455 | P28_20_6 1456 | P28_20_8 1457 | P28_20_9 1458 | P28_21_1 1459 | P28_21_10 1460 | P28_21_2 1461 | P28_21_6 1462 | P28_22_1 1463 | P28_22_10 1464 | P28_22_11 1465 | P28_22_12 1466 | P28_22_13 1467 | P28_22_14 1468 | P28_22_15 1469 | P28_22_19 1470 | P28_22_2 1471 | P28_22_20 1472 | P28_22_25 1473 | P28_22_3 1474 | P28_22_5 1475 | P28_22_7 1476 | P28_22_8 1477 | P28_22_9 1478 | P28_23_4 1479 | P28_24_4 1480 | P28_25_107 1481 | P28_25_111 1482 | P28_25_129 1483 | P28_25_132 1484 | P28_25_30 1485 | P28_25_50 1486 | P28_25_53 1487 | P28_25_72 1488 | P28_25_89 1489 | P28_26_4 1490 | P29_05_10 1491 | P29_05_100 1492 | P29_05_101 1493 | P29_05_125 1494 | P29_05_127 1495 | P29_05_131 1496 | P29_05_15 1497 | P29_05_153 1498 | P29_05_169 1499 | P29_05_171 1500 | 
P29_05_173 1501 | P29_05_175 1502 | P29_05_178 1503 | P29_05_180 1504 | P29_05_185 1505 | P29_05_186 1506 | P29_05_189 1507 | P29_05_191 1508 | P29_05_194 1509 | P29_05_196 1510 | P29_05_259 1511 | P29_05_260 1512 | P29_05_262 1513 | P29_05_263 1514 | P29_05_268 1515 | P29_05_269 1516 | P29_05_277 1517 | P29_05_280 1518 | P29_05_284 1519 | P29_05_292 1520 | P29_05_294 1521 | P29_05_299 1522 | P29_05_304 1523 | P29_05_306 1524 | P29_05_308 1525 | P29_05_314 1526 | P29_05_322 1527 | P29_05_332 1528 | P29_05_335 1529 | P29_05_337 1530 | P29_05_342 1531 | P29_05_346 1532 | P29_05_383 1533 | P29_05_391 1534 | P29_05_392 1535 | P29_05_393 1536 | P29_05_400 1537 | P29_05_425 1538 | P29_05_426 1539 | P29_05_427 1540 | P29_05_431 1541 | P29_05_433 1542 | P29_05_444 1543 | P29_05_445 1544 | P29_05_447 1545 | P29_05_453 1546 | P29_05_455 1547 | P29_05_457 1548 | P29_05_464 1549 | P29_05_470 1550 | P29_05_475 1551 | P29_05_476 1552 | P29_05_478 1553 | P29_05_479 1554 | P29_05_487 1555 | P29_05_494 1556 | P29_05_495 1557 | P29_05_496 1558 | P29_05_497 1559 | P29_05_498 1560 | P29_05_503 1561 | P29_05_504 1562 | P29_05_505 1563 | P29_05_511 1564 | P29_05_512 1565 | P29_05_513 1566 | P29_05_514 1567 | P29_05_515 1568 | P29_05_516 1569 | P29_05_517 1570 | P29_05_518 1571 | P29_05_519 1572 | P29_05_525 1573 | P29_05_526 1574 | P29_05_527 1575 | P29_05_528 1576 | P29_05_529 1577 | P29_05_532 1578 | P29_05_540 1579 | P29_05_542 1580 | P29_05_547 1581 | P29_05_548 1582 | P29_05_549 1583 | P29_05_553 1584 | P29_05_557 1585 | P29_05_561 1586 | P29_05_563 1587 | P29_05_564 1588 | P29_05_73 1589 | P29_05_74 1590 | P29_05_75 1591 | P29_05_80 1592 | P29_05_87 1593 | P29_05_91 1594 | P29_05_93 1595 | P29_05_94 1596 | P29_05_95 1597 | P29_05_96 1598 | P29_05_97 1599 | P29_05_98 1600 | P29_05_99 1601 | P29_06_10 1602 | P29_06_12 1603 | P29_06_14 1604 | P29_06_39 1605 | P29_06_40 1606 | P29_06_41 1607 | P29_06_44 1608 | P30_07_105 1609 | P30_07_2 1610 | P30_07_25 1611 | P30_07_26 1612 | P30_07_27 1613 | P30_07_28 1614 | P30_07_29 1615 | P30_07_43 1616 | P30_07_51 1617 | P30_07_52 1618 | P30_07_57 1619 | P30_07_74 1620 | P30_07_82 1621 | P30_07_83 1622 | P30_07_92 1623 | P30_07_97 1624 | P30_07_98 1625 | P30_07_99 1626 | P30_08_10 1627 | P30_08_103 1628 | P30_08_105 1629 | P30_08_136 1630 | P30_08_15 1631 | P30_08_16 1632 | P30_08_176 1633 | P30_08_177 1634 | P30_08_184 1635 | P30_08_188 1636 | P30_08_189 1637 | P30_08_190 1638 | P30_08_192 1639 | P30_08_194 1640 | P30_08_20 1641 | P30_08_202 1642 | P30_08_207 1643 | P30_08_212 1644 | P30_08_213 1645 | P30_08_219 1646 | P30_08_224 1647 | P30_08_225 1648 | P30_08_229 1649 | P30_08_245 1650 | P30_08_251 1651 | P30_08_255 1652 | P30_08_262 1653 | P30_08_266 1654 | P30_08_267 1655 | P30_08_273 1656 | P30_08_275 1657 | P30_08_276 1658 | P30_08_299 1659 | P30_08_300 1660 | P30_08_301 1661 | P30_08_302 1662 | P30_08_308 1663 | P30_08_311 1664 | P30_08_324 1665 | P30_08_328 1666 | P30_08_329 1667 | P30_08_331 1668 | P30_08_89 1669 | P30_08_9 1670 | P30_08_95 1671 | P30_09_10 1672 | P30_09_136 1673 | P30_09_149 1674 | P30_09_15 1675 | P30_09_152 1676 | P30_09_154 1677 | P30_09_162 1678 | P30_09_20 1679 | P30_09_21 1680 | P30_09_26 1681 | P30_09_39 1682 | P30_09_4 1683 | P30_09_77 1684 | P30_09_79 1685 | P30_09_83 1686 | P30_09_86 1687 | P30_09_9 1688 | P31_10_30 1689 | P31_10_33 1690 | P31_10_40 1691 | P31_10_43 1692 | P31_11_11 1693 | P31_11_28 1694 | P31_11_6 1695 | P31_12_2 1696 | P31_12_20 1697 | P32_01_22 1698 | P32_01_23 1699 | P32_01_26 1700 | P32_01_55 1701 | P32_01_58 
1702 | P32_01_59 1703 | P32_02_0 1704 | P32_02_10 1705 | P32_02_14 1706 | P32_02_15 1707 | P32_02_22 1708 | P32_02_25 1709 | P32_02_3 1710 | P32_02_9 1711 | P32_03_0 1712 | P32_03_2 1713 | P32_03_3 1714 | P32_03_6 1715 | P32_03_7 1716 | P32_03_8 1717 | P32_04_3 1718 | P32_04_6 1719 | P32_05_16 1720 | P32_05_24 1721 | P32_05_25 1722 | P32_05_28 1723 | P32_05_29 1724 | P32_05_30 1725 | P32_05_32 1726 | P32_05_33 1727 | P32_05_35 1728 | P32_05_9 1729 | P32_06_17 1730 | P32_06_22 1731 | P32_06_24 1732 | P32_06_26 1733 | P32_06_30 1734 | P32_06_31 1735 | P32_06_35 1736 | P32_06_36 1737 | P32_06_38 1738 | P32_06_4 1739 | P32_06_41 1740 | P32_06_42 1741 | P32_06_43 1742 | P32_06_47 1743 | P32_06_48 1744 | P32_08_12 1745 | P32_08_15 1746 | P32_08_9 1747 | P32_09_13 1748 | P32_09_28 1749 | P32_09_32 1750 | P32_09_34 1751 | P32_10_10 1752 | P32_10_15 1753 | P32_10_18 1754 | P32_10_22 1755 | P32_10_24 1756 | P32_10_33 1757 | P32_10_35 1758 | P32_10_39 1759 | P32_10_43 1760 | P32_10_5 1761 | -------------------------------------------------------------------------------- /RULSTM/data/ek100/validation_tail_nouns_ids.csv: -------------------------------------------------------------------------------- 1 | P01_11_2 2 | P01_11_28 3 | P01_11_29 4 | P01_11_3 5 | P01_11_30 6 | P01_11_33 7 | P01_11_34 8 | P01_11_4 9 | P01_11_5 10 | P01_11_50 11 | P01_11_52 12 | P01_11_53 13 | P01_11_54 14 | P01_11_6 15 | P01_11_7 16 | P01_11_85 17 | P01_11_86 18 | P01_11_87 19 | P01_12_34 20 | P01_12_37 21 | P01_12_46 22 | P01_12_50 23 | P01_12_51 24 | P01_12_52 25 | P01_12_53 26 | P01_12_54 27 | P01_12_56 28 | P01_12_57 29 | P01_12_6 30 | P01_12_60 31 | P01_12_61 32 | P01_12_7 33 | P01_13_12 34 | P01_14_114 35 | P01_14_120 36 | P01_14_121 37 | P01_14_127 38 | P01_14_157 39 | P01_14_159 40 | P01_14_16 41 | P01_14_161 42 | P01_14_165 43 | P01_14_168 44 | P01_14_17 45 | P01_14_170 46 | P01_14_176 47 | P01_14_18 48 | P01_14_20 49 | P01_14_214 50 | P01_14_215 51 | P01_14_231 52 | P01_14_233 53 | P01_14_273 54 | P01_14_274 55 | P01_14_287 56 | P01_14_34 57 | P01_14_345 58 | P01_14_346 59 | P01_14_35 60 | P01_14_40 61 | P01_14_41 62 | P01_14_49 63 | P01_14_50 64 | P01_14_53 65 | P01_14_54 66 | P01_14_56 67 | P01_14_62 68 | P01_14_77 69 | P01_14_78 70 | P01_14_90 71 | P01_14_91 72 | P01_14_92 73 | P01_14_93 74 | P01_14_94 75 | P01_14_95 76 | P01_14_98 77 | P01_15_0 78 | P01_15_286 79 | P01_15_287 80 | P01_15_289 81 | P02_12_114 82 | P02_12_12 83 | P02_12_122 84 | P02_12_123 85 | P02_12_13 86 | P02_12_203 87 | P02_12_205 88 | P02_12_207 89 | P02_12_209 90 | P02_12_26 91 | P02_12_28 92 | P02_12_29 93 | P02_12_3 94 | P02_12_303 95 | P02_12_304 96 | P02_12_306 97 | P02_12_307 98 | P02_12_308 99 | P02_12_309 100 | P02_12_31 101 | P02_12_310 102 | P02_12_317 103 | P02_12_318 104 | P02_12_32 105 | P02_12_320 106 | P02_12_326 107 | P02_12_331 108 | P02_12_332 109 | P02_12_333 110 | P02_12_340 111 | P02_12_341 112 | P02_12_342 113 | P02_12_343 114 | P02_12_344 115 | P02_12_345 116 | P02_12_346 117 | P02_12_350 118 | P02_12_351 119 | P02_12_352 120 | P02_12_353 121 | P02_12_354 122 | P02_12_356 123 | P02_12_358 124 | P02_12_359 125 | P02_12_360 126 | P02_12_4 127 | P02_12_5 128 | P02_12_55 129 | P02_12_57 130 | P02_12_58 131 | P02_12_6 132 | P02_12_60 133 | P02_12_7 134 | P02_12_8 135 | P02_12_87 136 | P02_12_88 137 | P02_12_90 138 | P02_12_91 139 | P02_12_98 140 | P02_13_12 141 | P02_14_0 142 | P02_14_12 143 | P02_15_1 144 | P02_15_10 145 | P02_15_11 146 | P02_15_12 147 | P02_15_13 148 | P02_15_14 149 | P02_15_15 150 | P02_15_18 151 | 
P02_15_19 152 | P02_15_2 153 | P02_15_24 154 | P02_15_25 155 | P02_15_27 156 | P02_15_28 157 | P02_15_29 158 | P02_15_3 159 | P02_15_30 160 | P02_15_31 161 | P02_15_32 162 | P02_15_33 163 | P02_15_42 164 | P02_15_5 165 | P02_15_6 166 | P02_15_7 167 | P02_15_8 168 | P02_15_9 169 | P03_21_10 170 | P03_21_4 171 | P03_21_6 172 | P03_21_7 173 | P03_21_8 174 | P03_22_10 175 | P03_22_12 176 | P03_22_16 177 | P03_22_21 178 | P03_23_10 179 | P03_23_115 180 | P03_23_116 181 | P03_23_16 182 | P03_23_18 183 | P03_23_19 184 | P03_23_20 185 | P03_23_24 186 | P03_23_25 187 | P03_23_26 188 | P03_23_27 189 | P03_23_29 190 | P03_23_30 191 | P03_23_31 192 | P03_23_32 193 | P03_23_48 194 | P03_23_49 195 | P03_23_50 196 | P03_23_51 197 | P03_23_52 198 | P03_23_54 199 | P03_23_56 200 | P03_23_58 201 | P03_23_59 202 | P03_23_60 203 | P03_23_61 204 | P03_23_62 205 | P03_23_64 206 | P03_23_8 207 | P03_23_82 208 | P03_23_83 209 | P03_23_84 210 | P03_23_85 211 | P03_23_86 212 | P03_23_88 213 | P03_24_1 214 | P03_24_104 215 | P03_24_105 216 | P03_24_107 217 | P03_24_114 218 | P03_24_121 219 | P03_24_122 220 | P03_24_125 221 | P03_24_126 222 | P03_24_128 223 | P03_24_13 224 | P03_24_130 225 | P03_24_131 226 | P03_24_17 227 | P03_24_18 228 | P03_24_2 229 | P03_24_20 230 | P03_24_23 231 | P03_24_25 232 | P03_24_4 233 | P03_24_40 234 | P03_24_47 235 | P03_24_52 236 | P03_24_62 237 | P03_24_63 238 | P03_24_64 239 | P03_24_7 240 | P03_24_74 241 | P03_24_75 242 | P03_24_96 243 | P03_25_4 244 | P03_25_6 245 | P03_25_7 246 | P03_25_8 247 | P03_25_9 248 | P03_26_2 249 | P04_24_10 250 | P04_24_16 251 | P04_24_18 252 | P04_24_20 253 | P04_24_22 254 | P04_24_25 255 | P04_24_26 256 | P04_24_27 257 | P04_24_28 258 | P04_24_29 259 | P04_24_30 260 | P04_24_33 261 | P04_24_41 262 | P04_24_42 263 | P04_24_43 264 | P04_24_44 265 | P04_24_45 266 | P04_24_46 267 | P04_24_47 268 | P04_24_48 269 | P04_24_50 270 | P04_24_51 271 | P04_24_53 272 | P04_24_55 273 | P04_24_56 274 | P04_24_57 275 | P04_24_58 276 | P04_24_61 277 | P04_24_62 278 | P04_24_63 279 | P04_25_0 280 | P04_25_11 281 | P04_25_14 282 | P04_25_17 283 | P04_25_19 284 | P04_25_3 285 | P04_25_4 286 | P04_25_6 287 | P04_25_9 288 | P04_26_0 289 | P04_26_1 290 | P04_26_2 291 | P04_27_0 292 | P04_27_1 293 | P04_27_10 294 | P04_27_2 295 | P04_27_3 296 | P04_27_4 297 | P04_27_5 298 | P04_27_6 299 | P04_27_7 300 | P04_27_8 301 | P04_27_9 302 | P04_28_1 303 | P04_28_13 304 | P04_28_14 305 | P04_28_20 306 | P04_28_22 307 | P04_28_23 308 | P04_28_24 309 | P04_29_11 310 | P04_29_14 311 | P04_29_15 312 | P04_29_2 313 | P04_29_25 314 | P04_29_26 315 | P04_29_27 316 | P04_29_28 317 | P04_29_29 318 | P04_29_31 319 | P04_29_35 320 | P04_29_37 321 | P04_29_4 322 | P04_29_52 323 | P04_29_53 324 | P04_29_54 325 | P04_29_6 326 | P04_29_60 327 | P04_29_7 328 | P04_30_10 329 | P04_30_11 330 | P04_30_14 331 | P04_30_15 332 | P04_30_16 333 | P04_30_17 334 | P04_30_18 335 | P04_30_19 336 | P04_30_2 337 | P04_30_20 338 | P04_30_24 339 | P04_30_25 340 | P04_30_27 341 | P04_30_37 342 | P04_30_38 343 | P04_30_41 344 | P04_30_42 345 | P04_30_43 346 | P04_30_45 347 | P04_30_48 348 | P04_30_49 349 | P04_31_10 350 | P04_31_100 351 | P04_31_102 352 | P04_31_105 353 | P04_31_15 354 | P04_31_20 355 | P04_31_27 356 | P04_31_28 357 | P04_31_29 358 | P04_31_32 359 | P04_31_33 360 | P04_31_34 361 | P04_31_35 362 | P04_31_4 363 | P04_31_41 364 | P04_31_43 365 | P04_31_44 366 | P04_31_49 367 | P04_31_50 368 | P04_31_51 369 | P04_31_53 370 | P04_31_55 371 | P04_31_56 372 | P04_31_58 373 | P04_31_59 374 | P04_31_60 375 | 
P04_31_61 376 | P04_31_62 377 | P04_31_77 378 | P04_31_80 379 | P04_31_84 380 | P04_31_85 381 | P04_31_86 382 | P04_31_87 383 | P04_31_89 384 | P04_31_90 385 | P04_31_91 386 | P04_31_92 387 | P04_31_97 388 | P04_31_98 389 | P04_32_1 390 | P04_33_10 391 | P04_33_11 392 | P04_33_12 393 | P04_33_14 394 | P04_33_16 395 | P04_33_17 396 | P04_33_21 397 | P04_33_6 398 | P05_07_102 399 | P05_07_104 400 | P05_07_108 401 | P05_07_15 402 | P05_07_16 403 | P05_07_24 404 | P05_07_31 405 | P05_07_37 406 | P05_07_42 407 | P05_07_5 408 | P05_07_51 409 | P05_07_59 410 | P05_07_6 411 | P05_07_60 412 | P05_07_61 413 | P05_07_65 414 | P05_07_68 415 | P05_07_7 416 | P05_07_72 417 | P05_07_77 418 | P05_07_81 419 | P05_07_85 420 | P05_07_91 421 | P05_07_97 422 | P05_09_15 423 | P05_09_16 424 | P05_09_26 425 | P05_09_28 426 | P05_09_29 427 | P05_09_31 428 | P05_09_36 429 | P05_09_55 430 | P05_09_57 431 | P05_09_60 432 | P05_09_62 433 | P05_09_64 434 | P06_10_21 435 | P06_10_30 436 | P06_10_33 437 | P06_10_6 438 | P06_11_17 439 | P06_11_19 440 | P06_11_21 441 | P06_12_1 442 | P06_12_10 443 | P06_12_13 444 | P06_12_25 445 | P06_12_27 446 | P06_12_28 447 | P06_12_29 448 | P06_12_3 449 | P06_12_6 450 | P06_13_15 451 | P06_13_16 452 | P06_13_17 453 | P06_13_31 454 | P06_13_32 455 | P06_13_33 456 | P06_13_34 457 | P06_13_35 458 | P06_13_41 459 | P06_14_0 460 | P06_14_10 461 | P06_14_11 462 | P06_14_12 463 | P06_14_13 464 | P06_14_14 465 | P06_14_17 466 | P06_14_18 467 | P06_14_19 468 | P06_14_2 469 | P06_14_20 470 | P06_14_22 471 | P06_14_23 472 | P06_14_26 473 | P06_14_27 474 | P06_14_3 475 | P06_14_30 476 | P07_13_10 477 | P07_13_11 478 | P07_13_12 479 | P07_13_14 480 | P07_13_4 481 | P07_13_5 482 | P07_13_6 483 | P07_13_7 484 | P07_14_27 485 | P07_14_29 486 | P07_14_32 487 | P07_14_37 488 | P07_14_38 489 | P07_14_39 490 | P07_14_40 491 | P07_14_41 492 | P07_14_42 493 | P07_14_7 494 | P07_14_8 495 | P07_16_11 496 | P07_16_23 497 | P07_16_25 498 | P07_16_26 499 | P07_16_27 500 | P07_18_10 501 | P07_18_2 502 | P07_18_3 503 | P07_18_4 504 | P07_18_5 505 | P07_18_6 506 | P07_18_7 507 | P07_18_9 508 | P08_09_0 509 | P08_09_10 510 | P08_09_100 511 | P08_09_102 512 | P08_09_106 513 | P08_09_11 514 | P08_09_12 515 | P08_09_121 516 | P08_09_122 517 | P08_09_123 518 | P08_09_125 519 | P08_09_127 520 | P08_09_134 521 | P08_09_136 522 | P08_09_144 523 | P08_09_148 524 | P08_09_18 525 | P08_09_20 526 | P08_09_28 527 | P08_09_34 528 | P08_09_36 529 | P08_09_38 530 | P08_09_56 531 | P08_09_6 532 | P08_09_61 533 | P08_09_62 534 | P08_09_63 535 | P08_09_7 536 | P08_09_70 537 | P08_09_71 538 | P08_09_8 539 | P08_09_81 540 | P08_09_87 541 | P08_09_88 542 | P08_09_90 543 | P08_09_91 544 | P08_09_93 545 | P08_09_95 546 | P08_10_0 547 | P08_10_23 548 | P08_10_25 549 | P08_10_3 550 | P08_10_5 551 | P08_10_58 552 | P08_10_60 553 | P08_10_62 554 | P08_10_7 555 | P08_14_0 556 | P08_14_1 557 | P08_14_15 558 | P08_14_16 559 | P08_14_17 560 | P08_14_18 561 | P08_14_20 562 | P08_14_22 563 | P08_14_24 564 | P08_14_28 565 | P08_14_29 566 | P08_14_30 567 | P08_14_32 568 | P08_15_0 569 | P08_15_1 570 | P08_15_104 571 | P08_15_107 572 | P08_15_108 573 | P08_15_110 574 | P08_15_115 575 | P08_15_120 576 | P08_15_123 577 | P08_15_128 578 | P08_15_132 579 | P08_15_2 580 | P08_15_49 581 | P08_15_67 582 | P08_15_70 583 | P08_15_78 584 | P08_15_80 585 | P08_15_87 586 | P08_15_88 587 | P08_15_92 588 | P08_15_94 589 | P08_15_96 590 | P08_15_97 591 | P08_15_99 592 | P08_16_0 593 | P08_16_10 594 | P08_16_102 595 | P08_16_11 596 | P08_16_113 597 | P08_16_114 598 
| P08_16_116 599 | P08_16_15 600 | P08_16_16 601 | P08_16_19 602 | P08_16_23 603 | P08_16_24 604 | P08_16_25 605 | P08_16_26 606 | P08_16_28 607 | P08_16_29 608 | P08_16_40 609 | P08_16_5 610 | P08_16_52 611 | P08_16_53 612 | P08_16_79 613 | P08_16_8 614 | P08_16_82 615 | P08_16_84 616 | P08_16_85 617 | P08_16_87 618 | P08_16_88 619 | P08_16_94 620 | P08_17_0 621 | P08_17_1 622 | P08_17_106 623 | P08_17_117 624 | P08_17_119 625 | P08_17_12 626 | P08_17_27 627 | P08_17_35 628 | P08_17_53 629 | P08_17_56 630 | P08_17_6 631 | P08_17_60 632 | P08_17_69 633 | P08_17_79 634 | P08_17_81 635 | P08_17_84 636 | P08_17_85 637 | P09_07_0 638 | P09_07_1 639 | P09_07_10 640 | P09_07_11 641 | P09_07_12 642 | P09_07_14 643 | P09_07_16 644 | P09_07_3 645 | P09_07_4 646 | P09_07_5 647 | P09_07_6 648 | P09_07_7 649 | P09_07_8 650 | P09_08_12 651 | P09_08_13 652 | P09_08_14 653 | P09_08_15 654 | P09_08_17 655 | P09_08_19 656 | P09_08_2 657 | P09_08_22 658 | P09_08_24 659 | P09_08_5 660 | P09_08_8 661 | P10_03_0 662 | P10_03_1 663 | P10_03_112 664 | P10_03_113 665 | P10_03_116 666 | P10_03_117 667 | P10_03_121 668 | P10_03_123 669 | P10_03_125 670 | P10_03_127 671 | P10_03_128 672 | P10_03_130 673 | P10_03_132 674 | P10_03_133 675 | P10_03_134 676 | P10_03_136 677 | P10_03_138 678 | P10_03_139 679 | P10_03_141 680 | P10_03_142 681 | P10_03_147 682 | P10_03_149 683 | P10_03_150 684 | P10_03_153 685 | P10_03_155 686 | P10_03_156 687 | P10_03_157 688 | P10_03_161 689 | P10_03_162 690 | P10_03_166 691 | P10_03_171 692 | P10_03_172 693 | P10_03_173 694 | P10_03_175 695 | P10_03_187 696 | P10_03_193 697 | P10_03_2 698 | P10_03_201 699 | P10_03_203 700 | P10_03_207 701 | P10_03_209 702 | P10_03_214 703 | P10_03_223 704 | P10_03_224 705 | P10_03_225 706 | P10_03_23 707 | P10_03_230 708 | P10_03_231 709 | P10_03_233 710 | P10_03_236 711 | P10_03_24 712 | P10_03_26 713 | P10_03_29 714 | P10_03_3 715 | P10_03_34 716 | P10_03_36 717 | P10_03_37 718 | P10_03_38 719 | P10_03_41 720 | P10_03_42 721 | P10_03_45 722 | P10_03_50 723 | P10_03_52 724 | P10_03_53 725 | P10_03_55 726 | P10_03_57 727 | P10_03_59 728 | P10_03_73 729 | P10_03_74 730 | P10_03_75 731 | P10_03_76 732 | P10_03_82 733 | P10_03_85 734 | P10_03_86 735 | P10_03_89 736 | P10_03_91 737 | P10_03_92 738 | P10_03_93 739 | P10_03_94 740 | P10_03_98 741 | P11_17_100 742 | P11_17_101 743 | P11_17_11 744 | P11_17_116 745 | P11_17_117 746 | P11_17_118 747 | P11_17_119 748 | P11_17_120 749 | P11_17_121 750 | P11_17_122 751 | P11_17_124 752 | P11_17_125 753 | P11_17_126 754 | P11_17_127 755 | P11_17_130 756 | P11_17_131 757 | P11_17_132 758 | P11_17_133 759 | P11_17_134 760 | P11_17_135 761 | P11_17_136 762 | P11_17_137 763 | P11_17_138 764 | P11_17_139 765 | P11_17_140 766 | P11_17_141 767 | P11_17_16 768 | P11_17_21 769 | P11_17_29 770 | P11_17_31 771 | P11_17_32 772 | P11_17_33 773 | P11_17_34 774 | P11_17_35 775 | P11_17_44 776 | P11_17_45 777 | P11_17_46 778 | P11_17_47 779 | P11_17_48 780 | P11_17_49 781 | P11_17_5 782 | P11_17_51 783 | P11_17_52 784 | P11_17_53 785 | P11_17_54 786 | P11_17_55 787 | P11_17_56 788 | P11_17_57 789 | P11_17_7 790 | P11_17_8 791 | P11_17_93 792 | P11_17_94 793 | P11_17_95 794 | P11_18_10 795 | P11_18_11 796 | P11_18_12 797 | P11_18_13 798 | P11_18_14 799 | P11_18_7 800 | P11_18_8 801 | P11_18_9 802 | P11_19_17 803 | P11_19_23 804 | P11_19_24 805 | P11_19_25 806 | P11_19_26 807 | P11_19_28 808 | P11_19_30 809 | P11_19_32 810 | P11_20_0 811 | P11_20_108 812 | P11_20_109 813 | P11_20_110 814 | P11_20_120 815 | P11_20_129 816 | P11_20_130 
817 | P11_20_131 818 | P11_20_132 819 | P11_20_158 820 | P11_20_159 821 | P11_20_16 822 | P11_20_160 823 | P11_20_161 824 | P11_20_162 825 | P11_20_163 826 | P11_20_164 827 | P11_20_168 828 | P11_20_169 829 | P11_20_17 830 | P11_20_177 831 | P11_20_178 832 | P11_20_180 833 | P11_20_19 834 | P11_20_20 835 | P11_20_21 836 | P11_20_22 837 | P11_20_24 838 | P11_20_26 839 | P11_20_27 840 | P11_20_29 841 | P11_20_3 842 | P11_20_30 843 | P11_20_32 844 | P11_20_34 845 | P11_20_35 846 | P11_20_37 847 | P11_20_39 848 | P11_20_4 849 | P11_20_45 850 | P11_20_48 851 | P11_20_5 852 | P11_20_51 853 | P11_20_52 854 | P11_20_53 855 | P11_20_56 856 | P11_20_58 857 | P11_20_59 858 | P11_20_6 859 | P11_20_60 860 | P11_20_61 861 | P11_20_67 862 | P11_20_68 863 | P11_20_69 864 | P11_20_7 865 | P11_20_70 866 | P11_20_71 867 | P11_20_72 868 | P11_20_73 869 | P11_20_74 870 | P11_20_75 871 | P11_20_76 872 | P11_20_77 873 | P11_20_78 874 | P11_20_79 875 | P11_20_8 876 | P11_20_80 877 | P11_20_81 878 | P11_20_82 879 | P11_20_83 880 | P11_20_84 881 | P11_20_85 882 | P11_20_86 883 | P11_20_87 884 | P11_20_88 885 | P11_20_89 886 | P11_20_9 887 | P11_20_90 888 | P11_20_91 889 | P11_20_92 890 | P11_20_93 891 | P11_20_94 892 | P11_20_95 893 | P11_20_96 894 | P11_20_97 895 | P11_20_98 896 | P11_20_99 897 | P11_21_0 898 | P11_21_2 899 | P11_21_3 900 | P11_21_6 901 | P11_21_8 902 | P11_22_20 903 | P11_22_22 904 | P11_22_23 905 | P11_22_24 906 | P11_22_25 907 | P11_22_31 908 | P11_22_32 909 | P11_22_33 910 | P11_22_34 911 | P11_22_35 912 | P11_22_51 913 | P11_23_1 914 | P11_23_11 915 | P11_23_12 916 | P11_23_13 917 | P11_23_14 918 | P11_23_15 919 | P11_23_16 920 | P11_23_17 921 | P11_23_18 922 | P11_23_8 923 | P11_23_9 924 | P11_24_19 925 | P12_03_0 926 | P12_03_111 927 | P12_03_15 928 | P12_03_26 929 | P12_03_27 930 | P12_03_28 931 | P12_03_29 932 | P12_03_3 933 | P12_03_30 934 | P12_03_52 935 | P12_03_65 936 | P12_03_67 937 | P12_03_68 938 | P12_03_77 939 | P12_03_78 940 | P12_03_79 941 | P12_03_80 942 | P12_03_81 943 | P12_03_94 944 | P12_03_95 945 | P12_08_0 946 | P12_08_10 947 | P12_08_12 948 | P12_08_13 949 | P12_08_15 950 | P12_08_16 951 | P12_08_17 952 | P12_08_18 953 | P12_08_2 954 | P12_08_24 955 | P12_08_25 956 | P12_08_7 957 | P12_08_8 958 | P13_01_23 959 | P13_01_24 960 | P13_01_25 961 | P13_01_30 962 | P13_01_32 963 | P13_01_42 964 | P13_01_46 965 | P13_03_25 966 | P14_06_10 967 | P14_06_11 968 | P14_06_2 969 | P14_06_4 970 | P14_06_5 971 | P14_06_7 972 | P14_06_9 973 | P14_08_16 974 | P14_08_22 975 | P14_08_23 976 | P14_08_28 977 | P14_08_29 978 | P15_04_2 979 | P15_04_21 980 | P15_04_22 981 | P15_04_31 982 | P15_04_7 983 | P15_06_1 984 | P15_06_17 985 | P15_06_18 986 | P15_06_19 987 | P15_06_2 988 | P15_06_20 989 | P15_06_21 990 | P15_06_22 991 | P15_06_28 992 | P15_06_3 993 | P15_06_35 994 | P15_06_36 995 | P15_06_39 996 | P15_06_4 997 | P15_06_40 998 | P15_06_5 999 | P15_06_52 1000 | P15_06_54 1001 | P15_06_55 1002 | P15_06_57 1003 | P15_06_62 1004 | P15_06_64 1005 | P16_04_11 1006 | P16_04_12 1007 | P16_04_14 1008 | P16_04_15 1009 | P16_04_16 1010 | P16_04_18 1011 | P16_04_19 1012 | P16_04_20 1013 | P16_04_22 1014 | P16_04_23 1015 | P16_04_24 1016 | P16_04_25 1017 | P16_04_26 1018 | P16_04_27 1019 | P16_04_28 1020 | P16_04_29 1021 | P16_04_30 1022 | P16_04_31 1023 | P16_04_32 1024 | P16_04_34 1025 | P16_04_40 1026 | P16_04_42 1027 | P16_04_43 1028 | P16_04_44 1029 | P16_04_45 1030 | P16_04_46 1031 | P16_04_47 1032 | P16_04_48 1033 | P16_04_49 1034 | P16_04_52 1035 | P16_04_57 1036 | P17_02_11 1037 | 
P17_02_14 1038 | P17_02_19 1039 | P17_02_20 1040 | P17_02_21 1041 | P17_02_22 1042 | P17_02_24 1043 | P17_02_25 1044 | P17_02_5 1045 | P18_01_13 1046 | P18_01_14 1047 | P18_01_15 1048 | P18_01_16 1049 | P18_01_19 1050 | P18_01_20 1051 | P18_01_25 1052 | P18_01_26 1053 | P18_01_27 1054 | P18_01_41 1055 | P18_01_6 1056 | P18_01_7 1057 | P18_02_0 1058 | P18_02_1 1059 | P18_02_2 1060 | P18_02_20 1061 | P18_02_21 1062 | P18_02_23 1063 | P18_02_25 1064 | P18_02_27 1065 | P18_02_5 1066 | P18_03_102 1067 | P18_03_103 1068 | P18_03_104 1069 | P18_03_105 1070 | P18_03_27 1071 | P18_03_28 1072 | P18_03_30 1073 | P18_03_35 1074 | P18_03_37 1075 | P18_03_39 1076 | P18_03_40 1077 | P18_03_42 1078 | P18_03_45 1079 | P18_03_46 1080 | P18_03_47 1081 | P18_03_53 1082 | P18_03_55 1083 | P18_03_56 1084 | P18_03_57 1085 | P18_03_58 1086 | P18_03_60 1087 | P18_03_71 1088 | P18_03_72 1089 | P18_03_75 1090 | P18_03_78 1091 | P18_03_79 1092 | P18_03_80 1093 | P18_03_81 1094 | P18_03_82 1095 | P18_03_83 1096 | P18_03_85 1097 | P18_03_86 1098 | P18_03_94 1099 | P18_03_96 1100 | P18_03_99 1101 | P18_04_4 1102 | P18_05_12 1103 | P18_05_126 1104 | P18_05_128 1105 | P18_05_13 1106 | P18_05_130 1107 | P18_05_15 1108 | P18_05_16 1109 | P18_05_19 1110 | P18_05_20 1111 | P18_05_23 1112 | P18_05_24 1113 | P18_05_52 1114 | P18_05_56 1115 | P18_05_58 1116 | P18_05_62 1117 | P18_05_68 1118 | P18_05_91 1119 | P18_05_97 1120 | P18_06_31 1121 | P18_06_32 1122 | P18_06_38 1123 | P18_06_39 1124 | P18_06_40 1125 | P18_06_42 1126 | P18_06_44 1127 | P18_06_46 1128 | P18_06_50 1129 | P18_06_51 1130 | P18_06_52 1131 | P18_06_58 1132 | P18_06_59 1133 | P18_06_60 1134 | P18_06_65 1135 | P18_06_66 1136 | P18_06_67 1137 | P18_06_68 1138 | P18_06_71 1139 | P18_06_74 1140 | P18_06_84 1141 | P18_06_85 1142 | P18_06_88 1143 | P18_07_3 1144 | P18_07_37 1145 | P18_09_9 1146 | P18_10_22 1147 | P18_10_23 1148 | P18_10_25 1149 | P18_10_26 1150 | P18_10_28 1151 | P18_10_29 1152 | P18_10_30 1153 | P18_10_32 1154 | P18_10_33 1155 | P18_10_35 1156 | P18_10_37 1157 | P18_10_38 1158 | P18_11_19 1159 | P18_11_20 1160 | P18_11_21 1161 | P18_11_22 1162 | P18_11_27 1163 | P18_11_29 1164 | P18_11_33 1165 | P18_11_34 1166 | P18_11_35 1167 | P18_11_36 1168 | P18_11_37 1169 | P18_11_39 1170 | P18_11_41 1171 | P18_11_50 1172 | P18_11_52 1173 | P18_11_53 1174 | P18_11_56 1175 | P18_11_57 1176 | P18_11_63 1177 | P18_11_64 1178 | P18_11_65 1179 | P18_11_66 1180 | P18_11_67 1181 | P18_11_69 1182 | P18_11_84 1183 | P18_11_87 1184 | P18_11_88 1185 | P18_11_89 1186 | P18_11_90 1187 | P18_11_94 1188 | P18_11_97 1189 | P18_12_29 1190 | P19_05_1 1191 | P19_05_12 1192 | P19_05_3 1193 | P19_05_7 1194 | P19_05_8 1195 | P19_06_0 1196 | P19_06_11 1197 | P19_06_12 1198 | P19_06_13 1199 | P19_06_14 1200 | P19_06_15 1201 | P19_06_16 1202 | P19_06_17 1203 | P19_06_22 1204 | P19_06_4 1205 | P19_06_7 1206 | P19_06_8 1207 | P19_06_9 1208 | P20_05_103 1209 | P20_05_104 1210 | P20_05_107 1211 | P20_05_11 1212 | P20_05_12 1213 | P20_05_13 1214 | P20_05_14 1215 | P20_05_15 1216 | P20_05_6 1217 | P20_05_60 1218 | P20_05_61 1219 | P20_05_63 1220 | P20_05_72 1221 | P20_05_8 1222 | P20_05_95 1223 | P20_05_96 1224 | P20_05_99 1225 | P20_06_15 1226 | P20_06_16 1227 | P20_06_17 1228 | P20_06_20 1229 | P20_06_21 1230 | P20_06_22 1231 | P20_06_23 1232 | P20_06_28 1233 | P20_06_29 1234 | P20_06_31 1235 | P20_06_34 1236 | P20_06_44 1237 | P20_06_47 1238 | P20_06_48 1239 | P20_06_52 1240 | P20_06_59 1241 | P20_06_60 1242 | P20_06_61 1243 | P20_06_64 1244 | P20_06_65 1245 | P20_06_66 1246 | P20_06_68 
1247 | P20_06_8 1248 | P20_07_20 1249 | P20_07_21 1250 | P20_07_23 1251 | P21_02_18 1252 | P21_02_23 1253 | P21_02_27 1254 | P21_02_28 1255 | P21_02_29 1256 | P21_02_31 1257 | P21_02_32 1258 | P21_02_33 1259 | P21_02_37 1260 | P21_02_4 1261 | P21_02_40 1262 | P21_02_41 1263 | P21_02_42 1264 | P21_02_43 1265 | P21_02_44 1266 | P21_02_48 1267 | P21_02_52 1268 | P21_02_53 1269 | P21_02_54 1270 | P21_02_55 1271 | P21_02_58 1272 | P21_02_6 1273 | P22_01_101 1274 | P22_01_103 1275 | P22_01_104 1276 | P22_01_108 1277 | P22_01_109 1278 | P22_01_110 1279 | P22_01_111 1280 | P22_01_112 1281 | P22_01_113 1282 | P22_01_114 1283 | P22_01_115 1284 | P22_01_116 1285 | P22_01_117 1286 | P22_01_118 1287 | P22_01_127 1288 | P22_01_128 1289 | P22_01_129 1290 | P22_01_130 1291 | P22_01_131 1292 | P22_01_132 1293 | P22_01_133 1294 | P22_01_135 1295 | P22_01_136 1296 | P22_01_141 1297 | P22_01_142 1298 | P22_01_143 1299 | P22_01_144 1300 | P22_01_145 1301 | P22_01_146 1302 | P22_01_147 1303 | P22_01_148 1304 | P22_01_149 1305 | P22_01_153 1306 | P22_01_155 1307 | P22_01_156 1308 | P22_01_163 1309 | P22_01_164 1310 | P22_01_165 1311 | P22_01_166 1312 | P22_01_167 1313 | P22_01_168 1314 | P22_01_169 1315 | P22_01_171 1316 | P22_01_172 1317 | P22_01_173 1318 | P22_01_174 1319 | P22_01_175 1320 | P22_01_176 1321 | P22_01_177 1322 | P22_01_178 1323 | P22_01_179 1324 | P22_01_181 1325 | P22_01_182 1326 | P22_01_183 1327 | P22_01_184 1328 | P22_01_185 1329 | P22_01_186 1330 | P22_01_187 1331 | P22_01_188 1332 | P22_01_189 1333 | P22_01_193 1334 | P22_01_194 1335 | P22_01_195 1336 | P22_01_196 1337 | P22_01_197 1338 | P22_01_202 1339 | P22_01_203 1340 | P22_01_204 1341 | P22_01_205 1342 | P22_01_208 1343 | P22_01_209 1344 | P22_01_21 1345 | P22_01_210 1346 | P22_01_211 1347 | P22_01_212 1348 | P22_01_213 1349 | P22_01_215 1350 | P22_01_216 1351 | P22_01_228 1352 | P22_01_230 1353 | P22_01_231 1354 | P22_01_232 1355 | P22_01_236 1356 | P22_01_237 1357 | P22_01_248 1358 | P22_01_249 1359 | P22_01_252 1360 | P22_01_257 1361 | P22_01_269 1362 | P22_01_271 1363 | P22_01_272 1364 | P22_01_273 1365 | P22_01_274 1366 | P22_01_297 1367 | P22_01_298 1368 | P22_01_299 1369 | P22_01_300 1370 | P22_01_301 1371 | P22_01_302 1372 | P22_01_315 1373 | P22_01_316 1374 | P22_01_317 1375 | P22_01_332 1376 | P22_01_333 1377 | P22_01_345 1378 | P22_01_346 1379 | P22_01_348 1380 | P22_01_60 1381 | P22_01_61 1382 | P22_01_94 1383 | P22_01_95 1384 | P22_01_96 1385 | P22_01_97 1386 | P22_01_99 1387 | P22_02_0 1388 | P22_02_1 1389 | P22_02_102 1390 | P22_02_103 1391 | P22_02_104 1392 | P22_02_12 1393 | P22_02_135 1394 | P22_02_16 1395 | P22_02_18 1396 | P22_02_19 1397 | P22_02_206 1398 | P22_02_22 1399 | P22_02_23 1400 | P22_02_35 1401 | P22_02_38 1402 | P22_02_39 1403 | P22_02_40 1404 | P22_02_41 1405 | P22_02_42 1406 | P22_02_43 1407 | P22_02_44 1408 | P22_02_47 1409 | P22_02_63 1410 | P22_02_64 1411 | P22_02_65 1412 | P22_02_66 1413 | P22_02_67 1414 | P22_02_68 1415 | P22_02_70 1416 | P22_02_72 1417 | P22_02_75 1418 | P22_02_76 1419 | P22_02_78 1420 | P22_02_79 1421 | P22_02_81 1422 | P22_02_83 1423 | P22_03_10 1424 | P22_03_11 1425 | P22_03_12 1426 | P22_03_13 1427 | P22_03_14 1428 | P22_03_15 1429 | P22_03_16 1430 | P22_03_18 1431 | P22_03_2 1432 | P22_03_208 1433 | P22_03_209 1434 | P22_03_210 1435 | P22_03_211 1436 | P22_03_212 1437 | P22_03_213 1438 | P22_03_214 1439 | P22_03_215 1440 | P22_03_268 1441 | P22_03_269 1442 | P22_03_270 1443 | P22_03_271 1444 | P22_03_280 1445 | P22_03_281 1446 | P22_03_285 1447 | P22_03_286 1448 | P22_03_3 
1449 | P22_03_347 1450 | P22_03_365 1451 | P22_03_366 1452 | P22_03_367 1453 | P22_03_368 1454 | P22_03_381 1455 | P22_03_382 1456 | P22_03_383 1457 | P22_03_4 1458 | P22_03_405 1459 | P22_03_408 1460 | P22_03_409 1461 | P22_03_410 1462 | P22_03_411 1463 | P22_03_416 1464 | P22_03_418 1465 | P22_03_419 1466 | P22_03_420 1467 | P22_03_424 1468 | P22_03_425 1469 | P22_03_449 1470 | P22_03_48 1471 | P22_03_49 1472 | P22_03_50 1473 | P22_03_6 1474 | P22_03_7 1475 | P22_03_8 1476 | P22_03_9 1477 | P22_04_100 1478 | P22_04_101 1479 | P22_04_102 1480 | P22_04_103 1481 | P22_04_105 1482 | P22_04_106 1483 | P22_04_108 1484 | P22_04_109 1485 | P22_04_113 1486 | P22_04_114 1487 | P22_04_115 1488 | P22_04_116 1489 | P22_04_118 1490 | P22_04_119 1491 | P22_04_120 1492 | P22_04_121 1493 | P22_04_132 1494 | P22_04_133 1495 | P22_04_134 1496 | P22_04_135 1497 | P22_04_136 1498 | P22_04_137 1499 | P22_04_138 1500 | P22_04_139 1501 | P22_04_149 1502 | P22_04_162 1503 | P22_04_169 1504 | P22_04_170 1505 | P22_04_33 1506 | P22_04_38 1507 | P22_04_39 1508 | P22_04_41 1509 | P22_04_90 1510 | P22_04_93 1511 | P22_04_99 1512 | P23_05_0 1513 | P23_05_100 1514 | P23_05_101 1515 | P23_05_102 1516 | P23_05_18 1517 | P23_05_19 1518 | P23_05_27 1519 | P23_05_36 1520 | P23_05_37 1521 | P23_05_43 1522 | P23_05_48 1523 | P23_05_5 1524 | P23_05_50 1525 | P23_05_52 1526 | P23_05_56 1527 | P23_05_58 1528 | P23_05_59 1529 | P23_05_65 1530 | P23_05_66 1531 | P23_05_67 1532 | P23_05_77 1533 | P23_05_80 1534 | P23_05_84 1535 | P23_05_91 1536 | P23_05_97 1537 | P24_09_136 1538 | P24_09_137 1539 | P24_09_138 1540 | P24_09_139 1541 | P24_09_140 1542 | P24_09_141 1543 | P24_09_172 1544 | P24_09_175 1545 | P24_09_190 1546 | P24_09_191 1547 | P24_09_192 1548 | P24_09_193 1549 | P24_09_194 1550 | P24_09_195 1551 | P24_09_198 1552 | P24_09_201 1553 | P24_09_213 1554 | P24_09_226 1555 | P24_09_227 1556 | P24_09_23 1557 | P24_09_24 1558 | P24_09_248 1559 | P24_09_249 1560 | P24_09_250 1561 | P24_09_269 1562 | P24_09_270 1563 | P24_09_274 1564 | P24_09_285 1565 | P24_09_297 1566 | P24_09_301 1567 | P25_06_26 1568 | P25_06_5 1569 | P25_06_9 1570 | P25_07_25 1571 | P25_07_34 1572 | P25_07_38 1573 | P25_08_11 1574 | P25_08_12 1575 | P25_08_15 1576 | P25_08_5 1577 | P26_31_11 1578 | P26_31_13 1579 | P26_32_0 1580 | P26_32_12 1581 | P26_32_14 1582 | P26_32_7 1583 | P26_33_0 1584 | P26_33_2 1585 | P26_33_3 1586 | P26_34_0 1587 | P26_34_10 1588 | P26_34_11 1589 | P26_34_5 1590 | P26_34_8 1591 | P26_34_9 1592 | P26_36_0 1593 | P26_40_4 1594 | P26_40_9 1595 | P26_41_13 1596 | P27_05_36 1597 | P27_05_37 1598 | P27_05_49 1599 | P27_05_50 1600 | P27_05_52 1601 | P27_05_53 1602 | P27_05_54 1603 | P27_05_55 1604 | P27_05_56 1605 | P28_16_12 1606 | P28_16_51 1607 | P28_16_55 1608 | P28_17_7 1609 | P28_17_9 1610 | P28_18_19 1611 | P28_18_20 1612 | P28_18_21 1613 | P28_18_22 1614 | P28_18_24 1615 | P28_19_11 1616 | P28_19_19 1617 | P28_21_10 1618 | P28_21_6 1619 | P28_21_8 1620 | P28_23_1 1621 | P28_23_3 1622 | P28_23_4 1623 | P28_23_5 1624 | P28_24_10 1625 | P28_24_8 1626 | P28_25_107 1627 | P28_25_108 1628 | P28_25_109 1629 | P28_25_11 1630 | P28_25_110 1631 | P28_25_111 1632 | P28_25_112 1633 | P28_25_113 1634 | P28_25_114 1635 | P28_25_115 1636 | P28_25_116 1637 | P28_25_117 1638 | P28_25_118 1639 | P28_25_119 1640 | P28_25_121 1641 | P28_25_122 1642 | P28_25_129 1643 | P28_25_13 1644 | P28_25_130 1645 | P28_25_131 1646 | P28_25_132 1647 | P28_25_14 1648 | P28_25_16 1649 | P28_25_17 1650 | P28_25_18 1651 | P28_25_19 1652 | P28_25_20 1653 | P28_25_21 
1654 | P28_25_30 1655 | P28_25_31 1656 | P28_25_32 1657 | P28_25_34 1658 | P28_25_35 1659 | P28_25_36 1660 | P28_25_37 1661 | P28_25_38 1662 | P28_25_39 1663 | P28_25_40 1664 | P28_25_41 1665 | P28_25_50 1666 | P28_25_51 1667 | P28_25_52 1668 | P28_25_53 1669 | P28_25_55 1670 | P28_25_58 1671 | P28_25_59 1672 | P28_25_60 1673 | P28_25_61 1674 | P28_25_62 1675 | P28_25_63 1676 | P28_25_72 1677 | P28_25_73 1678 | P28_25_75 1679 | P28_25_76 1680 | P28_25_77 1681 | P28_25_78 1682 | P28_25_79 1683 | P28_25_80 1684 | P28_25_81 1685 | P28_25_82 1686 | P28_25_89 1687 | P28_25_9 1688 | P28_25_90 1689 | P28_25_91 1690 | P28_25_92 1691 | P28_25_93 1692 | P28_25_94 1693 | P28_25_95 1694 | P28_25_96 1695 | P28_25_97 1696 | P28_25_98 1697 | P28_26_5 1698 | P28_26_6 1699 | P29_05_0 1700 | P29_05_100 1701 | P29_05_101 1702 | P29_05_102 1703 | P29_05_128 1704 | P29_05_155 1705 | P29_05_156 1706 | P29_05_157 1707 | P29_05_158 1708 | P29_05_159 1709 | P29_05_160 1710 | P29_05_168 1711 | P29_05_169 1712 | P29_05_189 1713 | P29_05_190 1714 | P29_05_191 1715 | P29_05_210 1716 | P29_05_211 1717 | P29_05_212 1718 | P29_05_299 1719 | P29_05_300 1720 | P29_05_42 1721 | P29_05_44 1722 | P29_05_47 1723 | P29_05_476 1724 | P29_05_479 1725 | P29_05_48 1726 | P29_05_502 1727 | P29_05_506 1728 | P29_05_511 1729 | P29_05_520 1730 | P29_05_55 1731 | P29_05_552 1732 | P29_05_554 1733 | P29_05_56 1734 | P29_05_563 1735 | P29_05_564 1736 | P29_05_6 1737 | P29_05_66 1738 | P29_05_67 1739 | P29_05_69 1740 | P29_05_7 1741 | P29_05_72 1742 | P29_05_84 1743 | P29_05_85 1744 | P29_05_92 1745 | P29_05_94 1746 | P29_05_96 1747 | P29_05_97 1748 | P29_05_98 1749 | P29_05_99 1750 | P29_06_14 1751 | P30_07_14 1752 | P30_07_41 1753 | P30_07_43 1754 | P30_07_44 1755 | P30_07_50 1756 | P30_07_51 1757 | P30_07_52 1758 | P30_07_53 1759 | P30_07_54 1760 | P30_07_55 1761 | P30_07_59 1762 | P30_07_60 1763 | P30_07_61 1764 | P30_07_7 1765 | P30_07_71 1766 | P30_07_73 1767 | P30_07_74 1768 | P30_07_75 1769 | P30_07_79 1770 | P30_07_80 1771 | P30_07_9 1772 | P30_07_95 1773 | P30_08_110 1774 | P30_08_111 1775 | P30_08_113 1776 | P30_08_177 1777 | P30_08_189 1778 | P30_08_2 1779 | P30_08_203 1780 | P30_08_218 1781 | P30_08_234 1782 | P30_08_235 1783 | P30_08_248 1784 | P30_08_249 1785 | P30_08_252 1786 | P30_08_256 1787 | P30_08_261 1788 | P30_08_262 1789 | P30_08_263 1790 | P30_08_264 1791 | P30_08_267 1792 | P30_08_268 1793 | P30_08_270 1794 | P30_08_271 1795 | P30_08_272 1796 | P30_08_273 1797 | P30_08_278 1798 | P30_08_280 1799 | P30_08_281 1800 | P30_08_282 1801 | P30_08_285 1802 | P30_08_286 1803 | P30_08_287 1804 | P30_08_288 1805 | P30_08_289 1806 | P30_08_291 1807 | P30_08_45 1808 | P30_08_5 1809 | P30_08_78 1810 | P30_08_80 1811 | P30_09_1 1812 | P30_09_10 1813 | P30_09_11 1814 | P30_09_157 1815 | P30_09_16 1816 | P30_09_161 1817 | P30_09_162 1818 | P30_09_166 1819 | P30_09_18 1820 | P30_09_19 1821 | P30_09_20 1822 | P30_09_21 1823 | P30_09_22 1824 | P30_09_23 1825 | P30_09_24 1826 | P30_09_30 1827 | P30_09_32 1828 | P30_09_41 1829 | P30_09_76 1830 | P30_09_77 1831 | P31_10_16 1832 | P31_10_17 1833 | P31_10_23 1834 | P31_10_25 1835 | P31_10_30 1836 | P31_10_4 1837 | P31_10_40 1838 | P31_10_45 1839 | P31_11_14 1840 | P31_11_15 1841 | P31_11_16 1842 | P31_11_17 1843 | P31_11_18 1844 | P31_11_19 1845 | P31_11_21 1846 | P31_11_25 1847 | P31_11_26 1848 | P31_11_27 1849 | P31_11_28 1850 | P31_11_5 1851 | P31_11_6 1852 | P31_11_8 1853 | P32_01_14 1854 | P32_01_21 1855 | P32_01_31 1856 | P32_01_50 1857 | P32_01_51 1858 | P32_02_21 1859 | P32_02_5 
1860 | P32_02_7 1861 | P32_05_15 1862 | P32_05_25 1863 | P32_05_26 1864 | P32_05_28 1865 | P32_05_32 1866 | P32_05_33 1867 | P32_05_35 1868 | P32_05_7 1869 | P32_06_10 1870 | P32_06_16 1871 | P32_06_17 1872 | P32_06_18 1873 | P32_06_25 1874 | P32_06_28 1875 | P32_06_30 1876 | P32_06_34 1877 | P32_06_35 1878 | P32_06_38 1879 | P32_06_39 1880 | P32_06_42 1881 | P32_06_47 1882 | P32_06_48 1883 | P32_06_8 1884 | P32_07_2 1885 | P32_08_12 1886 | P32_08_15 1887 | P32_08_2 1888 | P32_08_3 1889 | P32_08_6 1890 | P32_08_7 1891 | P32_09_24 1892 | P32_09_25 1893 | P32_09_26 1894 | P32_09_27 1895 | P32_09_29 1896 | P32_09_30 1897 | P32_09_31 1898 | P32_09_38 1899 | P32_10_12 1900 | P32_10_18 1901 |
--------------------------------------------------------------------------------
/RULSTM/main.py:
--------------------------------------------------------------------------------
"""Main training/test program for RULSTM"""
from argparse import ArgumentParser
from dataset import SequenceDataset
from os.path import join
from models import RULSTM, RULSTMFusion
import torch
from torch.utils.data import DataLoader
from torch.nn import functional as F
from utils import topk_accuracy, ValueMeter, topk_accuracy_multiple_timesteps, get_marginal_indexes, marginalize, softmax, topk_recall_multiple_timesteps, tta, predictions_to_json, MeanTopKRecallMeter
from tqdm import tqdm
import numpy as np
import pandas as pd
import json
pd.set_option('display.max_columns', None)  # to show all columns in the table below
pd.options.display.float_format = '{:05.2f}'.format

parser = ArgumentParser(description="Training program for RULSTM")
parser.add_argument('mode', type=str, choices=['train', 'validate', 'test', 'validate_json'], default='train',
                    help="Whether to perform training, validation or test.\
                        If test is selected, --json_directory must be used to provide\
                        a directory in which to save the generated jsons.")
parser.add_argument('path_to_data', type=str,
                    help="Path to the data folder, \
                        containing all LMDB datasets")
parser.add_argument('path_to_models', type=str,
                    help="Path to the directory where to save all models")
parser.add_argument('--alpha', type=float, default=0.25,
                    help="Distance between time-steps in seconds")
parser.add_argument('--S_enc', type=int, default=6,
                    help="Number of encoding steps. \
                        If early recognition is performed, \
                        this value is discarded.")
parser.add_argument('--S_ant', type=int, default=8,
                    help="Number of anticipation steps. \
                        If early recognition is performed, \
                        this is the number of frames sampled for each action.")
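# With the defaults above (alpha=0.25, S_enc=6, S_ant=8), an anticipation model
# observes 6 encoding steps (1.5s of features sampled every 0.25s) and emits
# predictions at 8 anticipation offsets of 2.0, 1.75, ..., 0.25s before the
# action starts; index -4 of the output sequence hence corresponds to an
# anticipation time of 1s (see trainval and get_scores below).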
parser.add_argument('--task', type=str, default='anticipation', choices=[
                    'anticipation', 'early_recognition'], help='Task to tackle: \
                        anticipation or early recognition')
parser.add_argument('--img_tmpl', type=str,
                    default='frame_{:010d}.jpg', help='Template to use to load the representation of a given frame')
parser.add_argument('--modality', type=str, default='rgb',
                    choices=['rgb', 'flow', 'obj', 'fusion'], help="Modality. Rgb/flow/obj represent single branches, whereas fusion indicates the whole model with modality attention.")
parser.add_argument('--sequence_completion', action='store_true',
                    help='A flag to select sequence completion pretraining rather than standard training.\
                        If not selected, a valid checkpoint for sequence completion pretraining\
                        should be available unless --ignore_checkpoints is specified')
parser.add_argument('--mt5r', action='store_true')

parser.add_argument('--num_class', type=int, default=2513,
                    help='Number of classes')
parser.add_argument('--hidden', type=int, default=1024,
                    help='Number of hidden units')
parser.add_argument('--feat_in', type=int, default=1024,
                    help='Input size. If fusion, it is discarded (see --feats_in)')
parser.add_argument('--feats_in', type=int, nargs='+', default=[1024, 1024, 352],
                    help='Input sizes when the fusion modality is selected.')
parser.add_argument('--dropout', type=float, default=0.8, help="Dropout rate")

parser.add_argument('--batch_size', type=int, default=128, help="Batch Size")
parser.add_argument('--num_workers', type=int, default=4,
                    help="Number of parallel threads to fetch the data")
parser.add_argument('--lr', type=float, default=0.01, help="Learning rate")
parser.add_argument('--momentum', type=float, default=0.9, help="Momentum")

parser.add_argument('--display_every', type=int, default=10,
                    help="Display every n iterations")
parser.add_argument('--epochs', type=int, default=100, help="Training epochs")
parser.add_argument('--visdom', action='store_true',
                    help="Whether to log using visdom")

parser.add_argument('--ignore_checkpoints', action='store_true',
                    help='If specified, avoid loading existing models (no pre-training)')
parser.add_argument('--resume', action='store_true',
                    help='Whether to resume suspended training')

parser.add_argument('--ek100', action='store_true',
                    help="Whether to use EPIC-KITCHENS-100")

parser.add_argument('--json_directory', type=str, default=None, help='Directory in which to save the generated jsons.')

args = parser.parse_args()

if args.mode == 'test' or args.mode == 'validate_json':
    assert args.json_directory is not None

device = 'cuda' if torch.cuda.is_available() else 'cpu'
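# Example invocations (data/model paths are placeholders; any directories laid
# out as produced by the download scripts should work):
#   python main.py train data/ek55 models/ek55 --modality rgb
#   python main.py validate data/ek55 models/ek55 --modality fusion
#   python main.py test data/ek55 models/ek55 --modality fusion --json_directory jsons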

if args.task == 'anticipation':
    exp_name = f"RULSTM-{args.task}_{args.alpha}_{args.S_enc}_{args.S_ant}_{args.modality}"
else:
    exp_name = f"RULSTM-{args.task}_{args.alpha}_{args.S_ant}_{args.modality}"

if args.mt5r:
    exp_name += '_mt5r'

if args.sequence_completion:
    exp_name += '_sequence_completion'


if args.visdom:
    # if visdom is required
    # load visdom loggers from torchnet
    from torchnet.logger import VisdomPlotLogger, VisdomSaver
    # define loss and accuracy loggers
    visdom_loss_logger = VisdomPlotLogger('line', env=exp_name, opts={
        'title': 'Loss', 'legend': ['training', 'validation']})
    visdom_accuracy_logger = VisdomPlotLogger('line', env=exp_name, opts={
        'title': 'Top5 Acc@1s', 'legend': ['training', 'validation']})
    # define a visdom saver to save the plots
    visdom_saver = VisdomSaver(envs=[exp_name])

def get_loader(mode, override_modality=None):
    if override_modality:
        path_to_lmdb = join(args.path_to_data, override_modality)
    else:
        path_to_lmdb = join(args.path_to_data, args.modality) if args.modality != 'fusion' else [join(args.path_to_data, m) for m in ['rgb', 'flow', 'obj']]

    kargs = {
        'path_to_lmdb': path_to_lmdb,
        'path_to_csv': join(args.path_to_data, f"{mode}.csv"),
        'time_step': args.alpha,
        'img_tmpl': args.img_tmpl,
        'action_samples': args.S_ant if args.task == 'early_recognition' else None,
        'past_features': args.task == 'anticipation',
        'sequence_length': args.S_enc + args.S_ant,
        'label_type': ['verb', 'noun', 'action'] if args.mode != 'train' else 'action',
        'challenge': 'test' in mode
    }

    _set = SequenceDataset(**kargs)

    return DataLoader(_set, batch_size=args.batch_size, num_workers=args.num_workers,
                      pin_memory=True, shuffle=mode == 'training')

def get_model():
    if args.modality != 'fusion':  # single branch
        model = RULSTM(args.num_class, args.feat_in, args.hidden,
                       args.dropout, sequence_completion=args.sequence_completion)
        # load checkpoint only if not in sequence completion mode
        # and if the flag --ignore_checkpoints has not been specified
        if args.mode == 'train' and not args.ignore_checkpoints and not args.sequence_completion:
            checkpoint = torch.load(join(
                args.path_to_models, exp_name + '_sequence_completion_best.pth.tar'), map_location='cpu')['state_dict']
            model.load_state_dict(checkpoint)
    else:
        rgb_model = RULSTM(args.num_class, args.feats_in[0], args.hidden, args.dropout, return_context=args.task == 'anticipation')
        flow_model = RULSTM(args.num_class, args.feats_in[1], args.hidden, args.dropout, return_context=args.task == 'anticipation')
        obj_model = RULSTM(args.num_class, args.feats_in[2], args.hidden, args.dropout, return_context=args.task == 'anticipation')


        if args.task == 'early_recognition' or (args.mode == 'train' and not args.ignore_checkpoints):
            checkpoint_rgb = torch.load(join(args.path_to_models,
                                             exp_name.replace('fusion', 'rgb') + '_best.pth.tar'), map_location='cpu')['state_dict']
            checkpoint_flow = torch.load(join(args.path_to_models,
                                              exp_name.replace('fusion', 'flow') + '_best.pth.tar'), map_location='cpu')['state_dict']
            checkpoint_obj = torch.load(join(args.path_to_models,
                                             exp_name.replace('fusion', 'obj') + '_best.pth.tar'), map_location='cpu')['state_dict']

            rgb_model.load_state_dict(checkpoint_rgb)
            flow_model.load_state_dict(checkpoint_flow)
            obj_model.load_state_dict(checkpoint_obj)

        if args.task == 'early_recognition':
            return [rgb_model, flow_model, obj_model]

        model = RULSTMFusion([rgb_model, flow_model, obj_model], args.hidden, args.dropout)

    return model
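# Note on the training pipeline implied by get_model(): each single branch is
# first pretrained with --sequence_completion, then fine-tuned (the fine-tuning
# run resumes from the '_sequence_completion_best' checkpoint); the 'fusion'
# model is trained last and starts from the three single-branch '_best'
# checkpoints, unless --ignore_checkpoints is specified.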


def load_checkpoint(model, best=False):
    if best:
        chk = torch.load(join(args.path_to_models, exp_name + '_best.pth.tar'), map_location='cpu')
    else:
        chk = torch.load(join(args.path_to_models, exp_name + '.pth.tar'), map_location='cpu')

    epoch = chk['epoch']
    best_perf = chk['best_perf']
    perf = chk['perf']

    model.load_state_dict(chk['state_dict'])

    return epoch, perf, best_perf


def save_model(model, epoch, perf, best_perf, is_best=False):
    torch.save({'state_dict': model.state_dict(), 'epoch': epoch,
                'perf': perf, 'best_perf': best_perf}, join(args.path_to_models, exp_name + '.pth.tar'))
    if is_best:
        torch.save({'state_dict': model.state_dict(), 'epoch': epoch, 'perf': perf, 'best_perf': best_perf}, join(
            args.path_to_models, exp_name + '_best.pth.tar'))

    if args.visdom:
        # save visdom logs for persistence
        visdom_saver.save()


def log(mode, epoch, loss_meter, accuracy_meter, best_perf=None, green=False):
    if green:
        print('\033[92m', end="")

    print(
        f"[{mode}] Epoch: {epoch:0.2f}. "
        f"Loss: {loss_meter.value():.2f}. "
        f"Accuracy: {accuracy_meter.value():.2f}% ", end="")

    if best_perf:
        print(f"[best: {best_perf:0.2f}%]", end="")

    print('\033[0m')

    if args.visdom:
        visdom_loss_logger.log(epoch, loss_meter.value(), name=mode)
        visdom_accuracy_logger.log(epoch, accuracy_meter.value(), name=mode)

def get_scores_early_recognition_fusion(models, loaders):
    verb_scores = 0
    noun_scores = 0
    action_scores = 0
    for model, loader in zip(models, loaders):
        outs = get_scores(model, loader)
        verb_scores += outs[0]
        noun_scores += outs[1]
        action_scores += outs[2]

    verb_scores /= len(models)
    noun_scores /= len(models)
    action_scores /= len(models)

    return [verb_scores, noun_scores, action_scores] + list(outs[3:])


def get_scores(model, loader, challenge=False, include_discarded=False):
    model.eval()
    predictions = []
    labels = []
    ids = []
    with torch.set_grad_enabled(False):
        for batch in tqdm(loader, 'Evaluating...', len(loader)):
            x = batch['past_features' if args.task == 'anticipation' else 'action_features']
            if type(x) == list:
                x = [xx.to(device) for xx in x]
            else:
                x = x.to(device)

            y = batch['label'].numpy()

            ids.append(batch['id'])

            preds = model(x).cpu().numpy()[:, -args.S_ant:, :]

            predictions.append(preds)
            labels.append(y)

    action_scores = np.concatenate(predictions)
    labels = np.concatenate(labels)
    ids = np.concatenate(ids)

    actions = pd.read_csv(
        join(args.path_to_data, 'actions.csv'), index_col='id')

    vi = get_marginal_indexes(actions, 'verb')
    ni = get_marginal_indexes(actions, 'noun')

    action_probs = softmax(action_scores.reshape(-1, action_scores.shape[-1]))
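    # verb/noun scores are obtained by marginalizing the softmaxed action
    # probabilities, i.e., summing them over all actions that share a given
    # verb (resp. noun), using the index sets derived from actions.csv above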
def trainval(model, loaders, optimizer, epochs, start_epoch, start_best_perf):
    """Training/Validation code"""
    best_perf = start_best_perf  # to keep track of the best performing epoch
    for epoch in range(start_epoch, epochs):
        # define training and validation meters
        loss_meter = {'training': ValueMeter(), 'validation': ValueMeter()}
        if args.mt5r:
            accuracy_meter = {'training': MeanTopKRecallMeter(args.num_class),
                              'validation': MeanTopKRecallMeter(args.num_class)}
        else:
            accuracy_meter = {'training': ValueMeter(), 'validation': ValueMeter()}
        for mode in ['training', 'validation']:
            # enable gradients only if training
            with torch.set_grad_enabled(mode == 'training'):
                if mode == 'training':
                    model.train()
                else:
                    model.eval()

                for i, batch in enumerate(loaders[mode]):
                    x = batch['past_features' if args.task == 'anticipation' else 'action_features']

                    if isinstance(x, list):
                        x = [xx.to(device) for xx in x]
                    else:
                        x = x.to(device)

                    y = batch['label'].to(device)

                    bs = y.shape[0]  # batch size

                    preds = model(x)

                    # take only the last S_ant predictions
                    preds = preds[:, -args.S_ant:, :].contiguous()

                    # linearize predictions
                    linear_preds = preds.view(-1, preds.shape[-1])
                    # replicate the labels across timesteps and linearize
                    linear_labels = y.view(-1, 1).expand(-1, preds.shape[1]).contiguous().view(-1)

                    loss = F.cross_entropy(linear_preds, linear_labels)

                    # monitor performance at anticipation time = 1s (index -4)
                    # for anticipation, or at the last time-step (100% of the
                    # action observed) for early recognition
                    idx = -4 if args.task == 'anticipation' else -1
                    # use top-5 accuracy for anticipation and top-1 for early recognition
                    k = 5 if args.task == 'anticipation' else 1
                    acc = topk_accuracy(
                        preds[:, idx, :].detach().cpu().numpy(), y.detach().cpu().numpy(), (k,))[0] * 100

                    # store the values in the meters to keep incremental averages
                    loss_meter[mode].add(loss.item(), bs)
                    if args.mt5r:
                        accuracy_meter[mode].add(preds[:, idx, :].detach().cpu().numpy(),
                                                 y.detach().cpu().numpy())
                    else:
                        accuracy_meter[mode].add(acc, bs)

                    # if in training mode
                    if mode == 'training':
                        optimizer.zero_grad()
                        loss.backward()
                        optimizer.step()

                    # compute decimal epoch for logging
                    e = epoch + i / len(loaders[mode])

                    # log during training, but skip the very first batch,
                    # whose statistics can be biased
                    if mode == 'training' and i != 0 and i % args.display_every == 0:
                        log(mode, e, loss_meter[mode], accuracy_meter[mode])

                # log at the end of each epoch
                log(mode, epoch + 1, loss_meter[mode], accuracy_meter[mode],
                    max(accuracy_meter[mode].value(), best_perf) if mode == 'validation'
                    else None, green=True)

        if best_perf < accuracy_meter['validation'].value():
            best_perf = accuracy_meter['validation'].value()
            is_best = True
        else:
            is_best = False

        # save checkpoint at the end of each train/val epoch
        save_model(model, epoch + 1, accuracy_meter['validation'].value(), best_perf,
                   is_best=is_best)

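
# ----------------------------------------------------------------------
# Editorial sketch, not part of the original script: the label
# replication used in trainval. Each sequence has a single action label,
# but the model emits one prediction per anticipation timestep, so the
# label is repeated S_ant times and a single cross-entropy is computed
# over the flattened predictions. The shapes below are toy values.
def _demo_label_replication():
    import torch
    import torch.nn.functional as F

    bs, S_ant, num_class = 2, 4, 10
    preds = torch.randn(bs, S_ant, num_class)  # one prediction per timestep
    y = torch.tensor([3, 7])                   # one label per sequence

    linear_preds = preds.view(-1, num_class)                               # (8, 10)
    linear_labels = y.view(-1, 1).expand(-1, S_ant).contiguous().view(-1)  # (8,)

    print(linear_labels.tolist())  # [3, 3, 3, 3, 7, 7, 7, 7]
    print(F.cross_entropy(linear_preds, linear_labels).item())
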
def get_validation_ids():
    unseen_participants_ids = pd.read_csv(join(args.path_to_data, 'validation_unseen_participants_ids.csv'),
                                          names=['id'], squeeze=True)
    tail_verbs_ids = pd.read_csv(join(args.path_to_data, 'validation_tail_verbs_ids.csv'),
                                 names=['id'], squeeze=True)
    tail_nouns_ids = pd.read_csv(join(args.path_to_data, 'validation_tail_nouns_ids.csv'),
                                 names=['id'], squeeze=True)
    tail_actions_ids = pd.read_csv(join(args.path_to_data, 'validation_tail_actions_ids.csv'),
                                   names=['id'], squeeze=True)

    return unseen_participants_ids, tail_verbs_ids, tail_nouns_ids, tail_actions_ids


def get_many_shot():
    """Get many shot verbs, nouns and actions for class-aware metrics (Mean Top-5 Recall)"""
    # read the list of many shot verbs
    many_shot_verbs = pd.read_csv(
        join(args.path_to_data, 'EPIC_many_shot_verbs.csv'))['verb_class'].values
    # read the list of many shot nouns
    many_shot_nouns = pd.read_csv(
        join(args.path_to_data, 'EPIC_many_shot_nouns.csv'))['noun_class'].values

    # read the list of actions
    actions = pd.read_csv(join(args.path_to_data, 'actions.csv'))
    # map actions to (verb, noun) pairs
    a_to_vn = {a[1]['id']: tuple(a[1][['verb', 'noun']].values)
               for a in actions.iterrows()}

    # create the list of many shot actions:
    # an action is "many shot" if at least one of
    # its verb and noun is many shot
    many_shot_actions = []
    for a, (v, n) in a_to_vn.items():
        if v in many_shot_verbs or n in many_shot_nouns:
            many_shot_actions.append(a)

    return many_shot_verbs, many_shot_nouns, many_shot_actions

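
# ----------------------------------------------------------------------
# Editorial sketch, not part of the original script: the many-shot
# action rule from get_many_shot on a made-up vocabulary (the real lists
# come from the EPIC_many_shot_*.csv files).
def _demo_many_shot_actions():
    a_to_vn = {0: (0, 0), 1: (0, 5), 2: (3, 1), 3: (3, 5)}  # action id -> (verb, noun)
    many_shot_verbs = {0}  # e.g. "take"
    many_shot_nouns = {1}  # e.g. "pan"

    many_shot_actions = [a for a, (v, n) in a_to_vn.items()
                         if v in many_shot_verbs or n in many_shot_nouns]
    print(many_shot_actions)  # [0, 1, 2] -- action 3 has neither a many-shot verb nor noun
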
def main():
    model = get_model()
    if isinstance(model, list):
        model = [m.to(device) for m in model]
    else:
        model.to(device)

    if args.mode == 'train':
        loaders = {m: get_loader(m) for m in ['training', 'validation']}

        if args.resume:
            start_epoch, _, start_best_perf = load_checkpoint(model)
        else:
            start_epoch = 0
            start_best_perf = 0

        optimizer = torch.optim.SGD(
            model.parameters(), lr=args.lr, momentum=args.momentum)

        trainval(model, loaders, optimizer, args.epochs,
                 start_epoch, start_best_perf)

    elif args.mode == 'validate':
        if args.task == 'early_recognition' and args.modality == 'fusion':
            loaders = [get_loader('validation', 'rgb'), get_loader('validation', 'flow'),
                       get_loader('validation', 'obj')]
            verb_scores, noun_scores, action_scores, verb_labels, noun_labels, action_labels, ids = \
                get_scores_early_recognition_fusion(model, loaders)
        else:
            epoch, perf, _ = load_checkpoint(model, best=True)
            print(f"Loaded checkpoint for model {type(model)}. Epoch: {epoch}. Perf: {perf:0.2f}.")

            loader = get_loader('validation')

            verb_scores, noun_scores, action_scores, verb_labels, noun_labels, action_labels, ids = \
                get_scores(model, loader, include_discarded=args.ek100)

        if not args.ek100:
            verb_accuracies = topk_accuracy_multiple_timesteps(verb_scores, verb_labels)
            noun_accuracies = topk_accuracy_multiple_timesteps(noun_scores, noun_labels)
            action_accuracies = topk_accuracy_multiple_timesteps(action_scores, action_labels)

            many_shot_verbs, many_shot_nouns, many_shot_actions = get_many_shot()

            verb_recalls = topk_recall_multiple_timesteps(
                verb_scores, verb_labels, k=5, classes=many_shot_verbs)
            noun_recalls = topk_recall_multiple_timesteps(
                noun_scores, noun_labels, k=5, classes=many_shot_nouns)
            action_recalls = topk_recall_multiple_timesteps(
                action_scores, action_labels, k=5, classes=many_shot_actions)

            all_accuracies = np.concatenate(
                [verb_accuracies, noun_accuracies, action_accuracies,
                 verb_recalls, noun_recalls, action_recalls])
            # reorder the rows to match the (Verb, Noun, Action) blocks below
            all_accuracies = all_accuracies[[0, 1, 6, 2, 3, 7, 4, 5, 8]]
            indices = [
                ('Verb', 'Top-1 Accuracy'),
                ('Verb', 'Top-5 Accuracy'),
                ('Verb', 'Mean Top-5 Recall'),
                ('Noun', 'Top-1 Accuracy'),
                ('Noun', 'Top-5 Accuracy'),
                ('Noun', 'Mean Top-5 Recall'),
                ('Action', 'Top-1 Accuracy'),
                ('Action', 'Top-5 Accuracy'),
                ('Action', 'Mean Top-5 Recall'),
            ]

            if args.task == 'anticipation':
                # columns are anticipation times, from alpha*S_ant down to alpha seconds
                cc = np.linspace(args.alpha * args.S_ant, args.alpha, args.S_ant, dtype=str)
            else:
                # columns are observation ratios for early recognition
                cc = [f"{c:0.1f}%" for c in np.linspace(0, 100, args.S_ant + 1)[1:]]

            scores = pd.DataFrame(all_accuracies * 100, columns=cc,
                                  index=pd.MultiIndex.from_tuples(indices))
        else:
            overall_verb_recalls = topk_recall_multiple_timesteps(verb_scores, verb_labels, k=5)
            overall_noun_recalls = topk_recall_multiple_timesteps(noun_scores, noun_labels, k=5)
            overall_action_recalls = topk_recall_multiple_timesteps(action_scores, action_labels, k=5)

            unseen, tail_verbs, tail_nouns, tail_actions = get_validation_ids()

            unseen_bool_idx = pd.Series(ids).isin(unseen).values
            tail_verbs_bool_idx = pd.Series(ids).isin(tail_verbs).values
            tail_nouns_bool_idx = pd.Series(ids).isin(tail_nouns).values
            tail_actions_bool_idx = pd.Series(ids).isin(tail_actions).values

            tail_verb_recalls = topk_recall_multiple_timesteps(
                verb_scores[tail_verbs_bool_idx], verb_labels[tail_verbs_bool_idx], k=5)
            tail_noun_recalls = topk_recall_multiple_timesteps(
                noun_scores[tail_nouns_bool_idx], noun_labels[tail_nouns_bool_idx], k=5)
            tail_action_recalls = topk_recall_multiple_timesteps(
                action_scores[tail_actions_bool_idx], action_labels[tail_actions_bool_idx], k=5)

            unseen_verb_recalls = topk_recall_multiple_timesteps(
                verb_scores[unseen_bool_idx], verb_labels[unseen_bool_idx], k=5)
            unseen_noun_recalls = topk_recall_multiple_timesteps(
                noun_scores[unseen_bool_idx], noun_labels[unseen_bool_idx], k=5)
            unseen_action_recalls = topk_recall_multiple_timesteps(
                action_scores[unseen_bool_idx], action_labels[unseen_bool_idx], k=5)

            all_accuracies = np.concatenate(
                [overall_verb_recalls, overall_noun_recalls, overall_action_recalls,
                 unseen_verb_recalls, unseen_noun_recalls, unseen_action_recalls,
                 tail_verb_recalls, tail_noun_recalls, tail_action_recalls])  # 9 metrics x S_ant timesteps

            indices = [
                ('Overall Mean Top-5 Recall', 'Verb'),
                ('Overall Mean Top-5 Recall', 'Noun'),
                ('Overall Mean Top-5 Recall', 'Action'),
                ('Unseen Mean Top-5 Recall', 'Verb'),
                ('Unseen Mean Top-5 Recall', 'Noun'),
                ('Unseen Mean Top-5 Recall', 'Action'),
                ('Tail Mean Top-5 Recall', 'Verb'),
                ('Tail Mean Top-5 Recall', 'Noun'),
                ('Tail Mean Top-5 Recall', 'Action'),
            ]

            if args.task == 'anticipation':
                cc = np.linspace(args.alpha * args.S_ant, args.alpha, args.S_ant, dtype=str)
            else:
                cc = [f"{c:0.1f}%" for c in np.linspace(0, 100, args.S_ant + 1)[1:]]

            scores = pd.DataFrame(all_accuracies * 100, columns=cc,
                                  index=pd.MultiIndex.from_tuples(indices))

        print(scores)

        if args.task == 'anticipation':
            tta_verb = tta(verb_scores, verb_labels)
            tta_noun = tta(noun_scores, noun_labels)
            tta_action = tta(action_scores, action_labels)

            print(f"\nMean TtA(5): VERB: {tta_verb:0.2f} "
                  f"NOUN: {tta_noun:0.2f} ACTION: {tta_action:0.2f}")
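        # ------------------------------------------------------------------
        # Editorial note, not part of the original script: with the defaults
        # alpha=0.25 and S_ant=8, the column headers built above evaluate to
        #   np.linspace(0.25 * 8, 0.25, 8, dtype=str)
        #       -> ['2.0' '1.75' '1.5' '1.25' '1.0' '0.75' '0.5' '0.25']
        # for anticipation (seconds before the action starts), and
        #   [f"{c:0.1f}%" for c in np.linspace(0, 100, 9)[1:]]
        #       -> ['12.5%', '25.0%', ..., '87.5%', '100.0%']
        # for early recognition (fraction of the action observed).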
    elif 'test' in args.mode:
        if args.ek100:
            mm = ['timestamps']
        else:
            mm = ['seen', 'unseen']
        for m in mm:
            if args.task == 'early_recognition' and args.modality == 'fusion':
                loaders = [get_loader(f"test_{m}", 'rgb'), get_loader(f"test_{m}", 'flow'),
                           get_loader(f"test_{m}", 'obj')]
                discarded_ids = loaders[0].dataset.discarded_ids
                verb_scores, noun_scores, action_scores, ids = get_scores_early_recognition_fusion(model, loaders)
            else:
                loader = get_loader(f"test_{m}")
                epoch, perf, _ = load_checkpoint(model, best=True)

                discarded_ids = loader.dataset.discarded_ids

                print(f"Loaded checkpoint for model {type(model)}. Epoch: {epoch}. Perf: {perf:0.2f}.")

                verb_scores, noun_scores, action_scores, ids = get_scores(model, loader)

            idx = -4 if args.task == 'anticipation' else -1
            # append zero scores for the discarded segments, then keep only
            # the predictions at the evaluated timestep
            ids = list(ids) + list(discarded_ids)
            verb_scores = np.concatenate((verb_scores, np.zeros((len(discarded_ids), *verb_scores.shape[1:]))))[:, idx, :]
            noun_scores = np.concatenate((noun_scores, np.zeros((len(discarded_ids), *noun_scores.shape[1:]))))[:, idx, :]
            action_scores = np.concatenate((action_scores, np.zeros((len(discarded_ids), *action_scores.shape[1:]))))[:, idx, :]

            actions = pd.read_csv(join(args.path_to_data, 'actions.csv'))
            # map actions to (verb, noun) pairs
            a_to_vn = {a[1]['id']: tuple(a[1][['verb', 'noun']].values)
                       for a in actions.iterrows()}

            preds = predictions_to_json(verb_scores, noun_scores, action_scores, ids, a_to_vn,
                                        version='0.2' if args.ek100 else '0.1', sls=True)

            if args.ek100:
                with open(join(args.json_directory, exp_name + "_test.json"), 'w') as f:
                    f.write(json.dumps(preds, indent=4, separators=(',', ': ')))
            else:
                with open(join(args.json_directory, exp_name + f"_{m}.json"), 'w') as f:
                    f.write(json.dumps(preds, indent=4, separators=(',', ': ')))
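        # ------------------------------------------------------------------
        # Editorial note, not part of the original script: the zero-padding
        # above gives "discarded" segments (those too short to sample) an
        # entry in the submission file. For example, with 5 scored and 2
        # discarded segments and 2513 action classes (the EK55 vocabulary):
        #   np.concatenate((scores,                   # (5, 8, 2513)
        #                   np.zeros((2, 8, 2513))))  # -> (7, 8, 2513)
        #   [..., idx, :]                             # -> (7, 2513), idx = -4 (1 s)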
    elif 'validate_json' in args.mode:
        if args.task == 'early_recognition' and args.modality == 'fusion':
            loaders = [get_loader("validation", 'rgb'), get_loader("validation", 'flow'),
                       get_loader("validation", 'obj')]
            discarded_ids = loaders[0].dataset.discarded_ids
            # get_scores also returns validation labels here; keep only the ids
            verb_scores, noun_scores, action_scores, *_, ids = get_scores_early_recognition_fusion(model, loaders)
        else:
            loader = get_loader("validation")
            epoch, perf, _ = load_checkpoint(model, best=True)

            discarded_ids = loader.dataset.discarded_ids

            print(f"Loaded checkpoint for model {type(model)}. Epoch: {epoch}. Perf: {perf:0.2f}.")

            verb_scores, noun_scores, action_scores, ids = get_scores(model, loader, challenge=True)

        idx = -4 if args.task == 'anticipation' else -1
        ids = list(ids) + list(discarded_ids)
        verb_scores = np.concatenate((verb_scores, np.zeros((len(discarded_ids), *verb_scores.shape[1:]))))[:, idx, :]
        noun_scores = np.concatenate((noun_scores, np.zeros((len(discarded_ids), *noun_scores.shape[1:]))))[:, idx, :]
        action_scores = np.concatenate((action_scores, np.zeros((len(discarded_ids), *action_scores.shape[1:]))))[:, idx, :]

        actions = pd.read_csv(join(args.path_to_data, 'actions.csv'))
        # map actions to (verb, noun) pairs
        a_to_vn = {a[1]['id']: tuple(a[1][['verb', 'noun']].values)
                   for a in actions.iterrows()}

        preds = predictions_to_json(verb_scores, noun_scores, action_scores, ids, a_to_vn,
                                    version='0.2' if args.ek100 else '0.1', sls=True)

        with open(join(args.json_directory, exp_name + "_validation.json"), 'w') as f:
            f.write(json.dumps(preds, indent=4, separators=(',', ': ')))


if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------