├── Pytorch_code
├── densenet2
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── resnet101
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── Densenet
│ ├── test.sh
│ ├── base_tester.pyc
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── .main_allreduce.py.swp
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── dense169
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── densenet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── resnet152
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── dense161
│ ├── test.sh
│ ├── train.sh
│ ├── augmentation.pyc
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── densenet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── drnD105
│ ├── test.sh
│ ├── train.sh
│ ├── augmentation.pyc
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── drn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── densenet.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── resnet50_chair
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── dpn92
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── dpn98
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── dpn107
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── dpn131
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── nasnet
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── nasnet.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── senet154
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── xception
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── xception.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── inceptionv4.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── inceptionv4
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── inceptionv4.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── seResnext50
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ ├── memcached_dataset.py
│ └── distributed_utils.py
├── seresnet152
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── seresnext101
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── inceptionResnetV2
│ ├── test.sh
│ ├── train.sh
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── resnext101_32x4d
│ ├── test.sh
│ ├── train.sh
│ ├── resnext_features
│ │ ├── __init__.py
│ │ └── __pycache__
│ │ │ ├── __init__.cpython-36.pyc
│ │ │ ├── resnext101_32x4d_features.cpython-36.pyc
│ │ │ └── resnext101_64x4d_features.cpython-36.pyc
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── resnext.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
└── resnext101_64x4d
│ ├── test.sh
│ ├── train.sh
│ ├── resnext_features
│ │ ├── __init__.py
│ │ └── __pycache__
│ │ │ ├── __init__.cpython-36.pyc
│ │ │ ├── resnext101_32x4d_features.cpython-36.pyc
│ │ │ └── resnext101_64x4d_features.cpython-36.pyc
│ ├── distributed_utils.pyc
│ ├── memcached_dataset.pyc
│ ├── __pycache__
│ │ ├── dpn.cpython-36.pyc
│ │ ├── model.cpython-36.pyc
│ │ ├── resnet.cpython-36.pyc
│ │ ├── senet.cpython-36.pyc
│ │ ├── resnext.cpython-36.pyc
│ │ ├── base_tester.cpython-36.pyc
│ │ ├── augmentation.cpython-36.pyc
│ │ ├── distributed_utils.cpython-36.pyc
│ │ ├── inceptionresnetv2.cpython-36.pyc
│ │ └── memcached_dataset.cpython-36.pyc
│ ├── augmentation.py
│ ├── base_tester.py
│ └── memcached_dataset.py
├── Rank.png
├── loss.png
├── loss1.png
├── Label_Info.docx
├── furniture - Youngkl_pytorch.pdf
├── .idea
│ ├── deployment.xml
│ ├── vcs.xml
│ ├── inspectionProfiles
│ │ └── profiles_settings.xml
│ ├── modules.xml
│ ├── furniture.iml
│ ├── webServers.xml
│ └── misc.xml
├── README.md
├── GenerateTestLabelFile.py
├── data
│ ├── txtTest.py
│ ├── validname.py
│ ├── validnum.txt
│ └── trainnum.txt
├── draw_nums.py
├── GenChairTrain.py
├── extra.txt
├── GetChairTest.py
└── ConvertNpy2Csv.py
/Pytorch_code/densenet2/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD1 -n1 --gres=gpu:1 --ntasks-per-node=1 python test.py
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python test.py
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dense169/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dense161/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Rank.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Rank.png
--------------------------------------------------------------------------------
/loss.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/loss.png
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/senet154/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/xception/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/loss1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/loss1.png
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Label_Info.docx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Label_Info.docx
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/test.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n1 --gres=gpu:1 --ntasks-per-node=1 --job-name=test python -u test.py
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dense169/train.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n2 --gres=gpu:2 --ntasks-per-node=2 --job-name=6 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log7.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=16 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log5.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=12 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log6.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=15 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log6.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=7 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log3.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=11 python -u main_allreduce.py -j 16 -b 12 2>&1 | tee log2.txt
2 |
--------------------------------------------------------------------------------
/furniture - Youngkl_pytorch.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/furniture - Youngkl_pytorch.pdf
--------------------------------------------------------------------------------
/.idea/deployment.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
--------------------------------------------------------------------------------
/Pytorch_code/dense161/train.sh:
--------------------------------------------------------------------------------
1 | srun -p AD2 -n2 --gres=gpu:2 --ntasks-per-node=2 --job-name=3 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log6.txt
2 |
3 |
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=22 python -u main_allreduce.py -j 16 -b 24 2>&1 | tee log2.txt
2 |
3 |
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=14 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log4.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=9 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log4.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/senet154/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=10 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log3.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=21 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log4.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/xception/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=17 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log2.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=8 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log3.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=19 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log5.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=20 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log3.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=17 python -u main_allreduce.py -j 16 -b 32 2>&1 | tee log1.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/base_tester.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/base_tester.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/augmentation.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/augmentation.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/augmentation.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/augmentation.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=5 python -u main_allreduce.py -j 16 -b 16 -a resnet152 2>&1 | tee log7.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/train.sh:
--------------------------------------------------------------------------------
1 | srun -p VIBackEnd2 -n2 --gres=gpu:2 --ntasks-per-node=2 --job-name=1 python -u main_allreduce.py -j 16 -b 32 -a densenet201 2>&1 | tee log6.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/train.sh:
--------------------------------------------------------------------------------
1 | srun -p VIBackEnd2 -n2 --gres=gpu:2 --ntasks-per-node=2 --job-name=4 python -u main_allreduce.py -j 16 -b 32 -a resnet101 2>&1 | tee log4.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/train.sh:
--------------------------------------------------------------------------------
1 | srun -p $1 -n$2 --gres=gpu:$2 --ntasks-per-node=$2 --job-name=13 python -u main_allreduce.py -j 16 -b 32 -a resnet50 2>&1 | tee log1.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/train.sh:
--------------------------------------------------------------------------------
1 | srun -p VIBackEnd2 -n2 --gres=gpu:2 --ntasks-per-node=2 --job-name=2 python -u main_allreduce.py -j 16 -b 32 -a densenet201 2>&1 | tee log4.txt
2 |
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/.main_allreduce.py.swp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/.main_allreduce.py.swp
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/resnext_features/__init__.py:
--------------------------------------------------------------------------------
1 | from .resnext101_32x4d_features import resnext101_32x4d_features
2 | from .resnext101_64x4d_features import resnext101_64x4d_features
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/resnext_features/__init__.py:
--------------------------------------------------------------------------------
1 | from .resnext101_32x4d_features import resnext101_32x4d_features
2 | from .resnext101_64x4d_features import resnext101_64x4d_features
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/drn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/drn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/distributed_utils.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/distributed_utils.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/memcached_dataset.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/memcached_dataset.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/nasnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/nasnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/densenet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/densenet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/densenet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/densenet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/densenet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/densenet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/xception.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/xception.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/dpn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/dpn.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/inceptionv4.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/inceptionv4.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn92/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn98/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/inceptionv4.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/inceptionv4.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/Densenet/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense161/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense161/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dense169/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dense169/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn107/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/dpn131/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/drnD105/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/nasnet/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/resnext.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/resnext.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/resnext.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/resnext.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/senet154/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/senet154/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/xception/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/xception/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/densenet2/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet101/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet152/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/base_tester.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/base_tester.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionv4/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/augmentation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/augmentation.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seResnext50/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnet152/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/seresnext101/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnet50_chair/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/inceptionResnetV2/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/distributed_utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/distributed_utils.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/inceptionresnetv2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/inceptionresnetv2.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/__pycache__/memcached_dataset.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/__pycache__/memcached_dataset.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/resnext101_32x4d_features.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/resnext101_32x4d_features.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/resnext101_64x4d_features.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_32x4d/resnext_features/__pycache__/resnext101_64x4d_features.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/resnext101_32x4d_features.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/resnext101_32x4d_features.cpython-36.pyc
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/resnext101_64x4d_features.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Youngkl0726/kaggle_iMaterialist-Challenge-Furniture-at-FGVC5/HEAD/Pytorch_code/resnext101_64x4d/resnext_features/__pycache__/resnext101_64x4d_features.cpython-36.pyc
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # kaggle_iMaterialist-Challenge-Furniture-at-FGVC5
2 | ## Competition: image classification with 128 classes.
3 | ### Link: https://www.kaggle.com/c/imaterialist-challenge-furniture-2018
4 | ## Results:
5 | ### Public Leaderboard score 0.11915
6 | ### Private Leaderboard score 0.12274
7 | ### Rank 4
8 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/dense161/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/dense169/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/senet154/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/xception/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/augmentation.py:
--------------------------------------------------------------------------------
1 |
2 | import torchvision.transforms.functional as F
3 |
4 |
class HorizontalFlip(object):
    """Deterministic (always-on) horizontal flip transform for PIL images."""

    def __call__(self, img):
        """Flip the image left-to-right.

        Args:
            img (PIL Image): Image to be flipped.
        Returns:
            PIL Image: Flipped image.
        """
        flipped = F.hflip(img)
        return flipped
--------------------------------------------------------------------------------
/GenerateTestLabelFile.py:
--------------------------------------------------------------------------------
# Convert prediction lines "<name>,<1-based label>" into
# "test/test/<name>.jpg <0-based label>" lines for test_label3.txt.
#
# Fixes vs. the original: Python 3 syntax (print()/range instead of the
# Python 2 print statement and xrange), text mode 'w' instead of 'wb'
# (we write str, not bytes), both files closed via context managers, and
# the whole input is iterated instead of a hard-coded 12704 lines.
with open('result_resnext101_32x4d_ck4_label.txt') as src, \
        open('./data/test_label3.txt', 'w') as test_label_txt:
    for line in src:
        line = line.strip()
        if not line:
            continue  # tolerate trailing blank lines
        parts = line.split(",")
        name = parts[0]
        id = int(parts[1]) - 1  # labels in the result file are 1-based
        print('test/test/{}.jpg'.format(name) + ' ' + str(id))
        test_label_txt.write('test/test/{}.jpg'.format(name) + ' ' + str(id) + '\n')
--------------------------------------------------------------------------------
/.idea/furniture.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/data/txtTest.py:
--------------------------------------------------------------------------------
1 |
2 | # coding: utf-8
3 |
4 | # In[1]:
5 |
6 |
7 | import os
8 | import sys
9 |
10 |
11 | # In[9]:
12 |
13 |
# Build test.txt: one "<forward-slash path> 0" line per file under ./test.
basepath = 'test'
entries = [os.path.join(basepath, name) for name in os.listdir(basepath)]
# NOTE(review): key=entry[0] sorts by the FIRST CHARACTER only, which is
# identical for every entry here (all start with 'test'), so this merely
# preserves os.listdir() order — confirm whether a full-path sort was intended.
dct = sorted(entries, key=lambda entry: entry[0])
print(dct)


# Write bytes explicitly (CRLF line endings, UTF-8), as the original did.
with open("test.txt", "wb") as fw:
    for entry in dct:
        normalized = entry.replace("\\", "/")
        fw.write((normalized + ' ' + str(0) + '\r\n').encode('utf-8'))
30 |
31 |
--------------------------------------------------------------------------------
/.idea/webServers.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
14 |
15 |
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
--------------------------------------------------------------------------------
/draw_nums.py:
--------------------------------------------------------------------------------
import matplotlib.pyplot as plt
import numpy as np
import csv

# Count how many test images the submission CSV assigns to each of the
# 128 classes, then visualize the per-label distribution two ways.
filename = 'result_resnext101_32x4d_ck4.csv'
nums = np.zeros(128)  # nums[i] = number of rows predicted as label i+1
with open(filename) as f:
    reader = csv.reader(f)
    head_row = next(reader)  # skip the CSV header row
    for row in reader:
        # Column 1 holds the 1-based predicted label; shift to 0-based.
        nums[int(row[1])-1] += 1
print(nums)

# Line plot: predicted count per label.
plt.figure(figsize=(20, 5))
plt.plot(range(0,128),nums)
plt.xlabel("Label")
plt.ylabel("Sum")
plt.title("Label-Sum")
plt.show()

# Histogram of the counts themselves (how many labels share a given count).
plt.figure(figsize=(20, 5))
plt.hist(nums,128)
plt.xlabel("Sum of a label")
plt.ylabel("The numbers of the same")
plt.title("data distribution")
plt.show()
28 |
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Minimal inference helper: wraps a model and collects its outputs
    over an entire data loader."""

    def __init__(self, model):
        # model: assumed to be a module already placed on the GPU by the
        # caller (extract() moves each batch to CUDA before the forward pass).
        self.model = model

    def extract(self, data_loader):
        """Run the model on every batch of `data_loader` and return all
        outputs concatenated into one numpy array.

        Labels yielded by the loader are ignored. The model is switched
        to eval mode and gradients are disabled for the whole pass.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces the deprecated Variable(data,
        # volatile=True) API, which was removed in PyTorch 0.4+.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                output = self.model(data)
                output = output.cpu()
                res_features.extend(output.numpy())
        return np.array(res_features)
23 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Minimal inference helper: wraps a model and collects its outputs
    over an entire data loader."""

    def __init__(self, model):
        # model: assumed to be a module already placed on the GPU by the
        # caller (extract() moves each batch to CUDA before the forward pass).
        self.model = model

    def extract(self, data_loader):
        """Run the model on every batch of `data_loader` and return all
        outputs concatenated into one numpy array.

        Labels yielded by the loader are ignored. The model is switched
        to eval mode and gradients are disabled for the whole pass.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces the deprecated Variable(data,
        # volatile=True) API, which was removed in PyTorch 0.4+.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                output = self.model(data)
                output = output.cpu()
                res_features.extend(output.numpy())
        return np.array(res_features)
23 |
--------------------------------------------------------------------------------
/GenChairTrain.py:
--------------------------------------------------------------------------------
"""Remap the nine chair original class ids to contiguous labels 0..8.

ori_id -> chair_id
125->0, 28->1, 3->2, 62->3, 2->4, 25->5, 14->6, 22->7, 102->8
"""
id_list = [125, 28, 3, 62, 2, 25, 14, 22, 102]
num = [0] * 9  # per-chair-class sample counts
# Context managers close both files even on error (the original leaked the
# input handle); iterating the file replaces the hard-coded 6309-line count.
# "w" (text mode) replaces "wb" so the str writes also work under Python 3.
with open("./data/valid.txt") as train_txt, \
        open("./data/chair_valid.txt", "w") as chair_train_txt:
    for line in train_txt:
        li = line.strip().split(" ")
        if len(li) < 2:
            continue  # skip blank/malformed lines
        ori_id = int(li[1])  # renamed: the original shadowed builtin id()
        for j, chair_id in enumerate(id_list):
            if ori_id == chair_id:
                num[j] += 1
                chair_train_txt.write(li[0] + " " + str(j) + "\n")
                break  # ids are unique; no need to test the rest
print(num)
# [1922, 1416, 1479, 1733, 2355, 1549, 1053, 1178, 2186]
# [48, 49, 49, 50, 49, 47, 47, 49, 49]
--------------------------------------------------------------------------------
/extra.txt:
--------------------------------------------------------------------------------
1 | 156.jpg 633.jpg 710.jpg 721.jpg 722.jpg 883.jpg 1045.jpg 1298.jpg 1419.jpg 1761.jpg 1783.jpg 1799.jpg 1844.jpg 2072.jpg 2282.jpg 2318.jpg 3042.jpg 3094.jpg 3176.jpg 3268.jpg 3319.jpg 3387.jpg 3513.jpg 3627.jpg 3645.jpg 3717.jpg 3911.jpg 3969.jpg 4075.jpg 4190.jpg 4250.jpg 4381.jpg 4532.jpg 4598.jpg 4656.jpg 4887.jpg 4959.jpg 5127.jpg 5141.jpg 5207.jpg 5313.jpg 5359.jpg 5733.jpg 5776.jpg 5815.jpg 6397.jpg 6398.jpg 6619.jpg 6646.jpg 6698.jpg 6824.jpg 6873.jpg 7080.jpg 7536.jpg 7615.jpg 7667.jpg 7829.jpg 7855.jpg 7876.jpg 8092.jpg 8161.jpg 8250.jpg 8407.jpg 8523.jpg 8535.jpg 8555.jpg 8568.jpg 8569.jpg 8598.jpg 8974.jpg 9023.jpg 9063.jpg 9193.jpg 9262.jpg 9433.jpg 9769.jpg 9796.jpg 9918.jpg 9981.jpg 10157.jpg 10167.jpg 10232.jpg 10534.jpg 10627.jpg 10687.jpg 10755.jpg 10969.jpg 11288.jpg 11624.jpg 11633.jpg 11701.jpg 11875.jpg 12023.jpg 12281.jpg 12514.jpg 12620.jpg 12786.jpg
2 |
3 |
--------------------------------------------------------------------------------
/data/validname.py:
--------------------------------------------------------------------------------
1 |
2 | # coding: utf-8
3 |
4 | # In[2]:
5 |
6 |
7 | import os
8 | import sys
9 |
10 |
11 | # In[25]:
12 |
13 |
# Collect every file under valid/<label>/ and pair it with its integer label.
basepath = 'valid'
labelled = []
for entry in os.listdir(basepath):
    folder = os.path.join(basepath, entry)
    files = [os.path.join(folder, name) for name in os.listdir(folder)]
    labelled.append((int(entry), files))
labelled.sort(key=lambda pair: pair[0])
dct2 = labelled


# One "<path> <zero-based-label>" line per image, CRLF-terminated utf-8 bytes.
with open("valid.txt", "wb") as fw:
    for label_num, files in dct2:
        tag = str(label_num - 1)
        for path in files:
            fw.write((path + ' ' + tag + '\r\n').encode('utf-8'))


# One "<zero-based-label>,<count>" line per class.
with open('validnum.txt', 'wb') as fw:
    for label_num, files in dct2:
        fw.write((str(label_num - 1) + ',' + str(len(files)) + '\r\n').encode('utf-8'))
47 |
48 |
--------------------------------------------------------------------------------
/GetChairTest.py:
--------------------------------------------------------------------------------
def get_name(filename):
    """Return the first whitespace-separated token of every line in *filename*.

    Generalizes the original fixed 12704-line read to the whole file, and
    closes the handle via a context manager (the original leaked it).
    Blank lines are skipped. 2/3-compatible (no xrange).
    """
    res_line = []
    with open(filename) as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            res_line.append(line.split(" ")[0])
    return res_line
11 |
fname = get_name('./data/test.txt')


def get_test_pre(filename):
    """Return every non-blank line of *filename* parsed as an int prediction.

    Reads the whole file (the original hard-coded 12704 lines) and closes
    the handle via a context manager.
    """
    with open(filename) as fh:
        return [int(line) for line in fh if line.strip()]


test_pre = get_test_pre("result.txt")
id_list = [125, 28, 3, 62, 2, 25, 14, 22, 102]
# Keep only samples whose predicted original id is one of the nine chairs.
# "w" (text mode) replaces "wb" so the str writes also work under Python 3.
with open('chair_test.txt', 'w') as chair_test_file:
    for i, pred in enumerate(test_pre):
        for j, chair_id in enumerate(id_list):
            if pred == chair_id:
                # NOTE(review): this writes the sample index i, whereas the
                # sibling GenChairTrain.py writes the chair label j for the
                # training split -- preserved as-is; confirm i is intended.
                chair_test_file.write(fname[i] + " " + str(i) + '\n')
                break
33 |
--------------------------------------------------------------------------------
/ConvertNpy2Csv.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import csv
3 |
# Convert a (num_samples, num_classes) score matrix into one predicted class
# index per line of result.txt.
npy_name = 'nasnet_ck2.npy'  # change the npy_name
print(npy_name)
npy = np.load(npy_name)
num_samples = npy.shape[0]  # renamed: the original shadowed builtin len()
print(num_samples)
# Context manager closes the file even on error; np.argmax replaces the
# np.where(row == row.max()) idiom (same first-occurrence index, one pass).
with open('result.txt', 'w') as txt_file:
    for j in range(num_samples):
        txt_file.write(str(int(np.argmax(npy[j]))) + '\n')
14 |
15 |
def get_name(filename):
    """Return the bare image id of every line's first token in *filename*.

    A line like "test/156.jpg 3" yields "156" (strip the single leading
    directory, then the extension). Generalizes the original fixed
    12704-line read to the whole file and closes the handle via a context
    manager (the original leaked it). Blank lines are skipped.
    """
    res_line = []
    with open(filename) as fh:
        for line in fh:
            line = line.strip()
            if not line:
                continue
            path = line.split(" ")[0]
            # assumes exactly one directory component, as the original did
            base = path.split("/")[1]
            res_line.append(base.split(".")[0])
    return res_line
30 |
fname = get_name("test.txt")

# Write "id,predicted" rows; predictions in result.txt are 0-based while the
# submission format is 1-based, hence the +1. zip pairs each image id with
# its prediction line, replacing the hard-coded 12704 count; context
# managers close both files (the original leaked the result.txt handle).
with open("result_nasnet_ck2.csv", "w") as csvfile:  # change the name of files you will save
    writer = csv.writer(csvfile)
    writer.writerow(["id", "predicted"])
    with open('result.txt') as res_file:
        for name, line in zip(fname, res_file):
            writer.writerow([name, int(line.strip().split(" ")[0]) + 1])
48 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/dense161/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/dense169/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/senet154/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/xception/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect the raw per-sample outputs."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) float ndarray of model outputs.

        The loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []
        # torch.no_grad() replaces the deprecated Variable(..., volatile=True)
        # (removed in PyTorch 0.4); it stops autograd from building a graph.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                output = self.model(data)
                res_features.extend(output.cpu().numpy())
        return np.array(res_features)


class TenCropTester():
    """Like BaseTester, but averages the model output over the crops of each sample."""

    def __init__(self, model):
        self.model = model

    def extract(self, data_loader):
        """Return an (N, feature_dim) ndarray, mean-pooled over the crop axis.

        Each batch is expected as (bs, ncrops, c, h, w); crops are folded into
        the batch dimension for the forward pass and averaged back afterwards.
        """
        self.model.eval()
        res_features = []
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                if torch.cuda.is_available():  # CPU fallback instead of crashing
                    data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                output = self.model(data.view(-1, c, h, w))
                # mean over the crop axis -> one feature vector per sample
                output_avg = output.cpu().view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect per-sample features."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of model outputs for every sample.

        data_loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                output = self.model(data).cpu()
                res_features.extend(output.numpy())
        return np.array(res_features)
23 |
24 |
class TenCropTester():
    """Extract features from ten-crop batches, averaging over the crops."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of crop-averaged features for every sample.

        data_loader yields (data, label) batches with data shaped
        (bs, ncrops, c, h, w); labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                # fold crops into the batch dim for one forward pass
                output = self.model(data.view(-1, c, h, w)).cpu()
                # un-fold and average the ncrops features per sample
                output_avg = output.view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect per-sample features."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of model outputs for every sample.

        data_loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                output = self.model(data).cpu()
                res_features.extend(output.numpy())
        return np.array(res_features)
23 |
24 |
class TenCropTester():
    """Extract features from ten-crop batches, averaging over the crops."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of crop-averaged features for every sample.

        data_loader yields (data, label) batches with data shaped
        (bs, ncrops, c, h, w); labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                # fold crops into the batch dim for one forward pass
                output = self.model(data.view(-1, c, h, w)).cpu()
                # un-fold and average the ncrops features per sample
                output_avg = output.view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/base_tester.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | import torchvision
5 | import numpy as np
6 |
class BaseTester():
    """Run a model over a data loader and collect per-sample features."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of model outputs for every sample.

        data_loader yields (data, label) batches; labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                output = self.model(data).cpu()
                res_features.extend(output.numpy())
        return np.array(res_features)
23 |
24 |
class TenCropTester():
    """Extract features from ten-crop batches, averaging over the crops."""

    def __init__(self, model):
        # model: a torch.nn.Module (or callable) already placed on the GPU.
        self.model = model

    def extract(self, data_loader):
        """Return an ndarray of crop-averaged features for every sample.

        data_loader yields (data, label) batches with data shaped
        (bs, ncrops, c, h, w); labels are ignored.
        """
        self.model.eval()
        res_features = []

        # torch.no_grad() replaces Variable(volatile=True): `volatile` is a
        # no-op since PyTorch 0.4, so the original tracked gradients during
        # pure feature extraction and leaked memory.
        with torch.no_grad():
            for batch_index, (data, label) in enumerate(data_loader):
                print("batch_index is: {}".format(batch_index))
                data = data.cuda()
                bs, ncrops, c, h, w = data.size()
                # fold crops into the batch dim for one forward pass
                output = self.model(data.view(-1, c, h, w)).cpu()
                # un-fold and average the ncrops features per sample
                output_avg = output.view(bs, ncrops, -1).mean(1).view(bs, -1)
                res_features.extend(output_avg.numpy())
        return np.array(res_features)
--------------------------------------------------------------------------------
/data/validnum.txt:
--------------------------------------------------------------------------------
1 | 0,50
2 | 1,50
3 | 2,49
4 | 3,49
5 | 4,49
6 | 5,50
7 | 6,50
8 | 7,49
9 | 8,47
10 | 9,46
11 | 10,50
12 | 11,49
13 | 12,49
14 | 13,49
15 | 14,47
16 | 15,48
17 | 16,49
18 | 17,48
19 | 18,50
20 | 19,49
21 | 20,48
22 | 21,50
23 | 22,49
24 | 23,48
25 | 24,50
26 | 25,47
27 | 26,50
28 | 27,49
29 | 28,49
30 | 29,50
31 | 30,48
32 | 31,50
33 | 32,50
34 | 33,49
35 | 34,48
36 | 35,49
37 | 36,50
38 | 37,49
39 | 38,50
40 | 39,50
41 | 40,50
42 | 41,47
43 | 42,50
44 | 43,50
45 | 44,49
46 | 45,49
47 | 46,50
48 | 47,48
49 | 48,50
50 | 49,49
51 | 50,50
52 | 51,50
53 | 52,50
54 | 53,49
55 | 54,49
56 | 55,50
57 | 56,50
58 | 57,50
59 | 58,50
60 | 59,50
61 | 60,48
62 | 61,50
63 | 62,50
64 | 63,47
65 | 64,49
66 | 65,50
67 | 66,48
68 | 67,50
69 | 68,49
70 | 69,48
71 | 70,48
72 | 71,49
73 | 72,50
74 | 73,49
75 | 74,50
76 | 75,49
77 | 76,50
78 | 77,50
79 | 78,49
80 | 79,49
81 | 80,50
82 | 81,50
83 | 82,49
84 | 83,50
85 | 84,49
86 | 85,48
87 | 86,50
88 | 87,50
89 | 88,50
90 | 89,50
91 | 90,49
92 | 91,50
93 | 92,50
94 | 93,50
95 | 94,50
96 | 95,50
97 | 96,50
98 | 97,50
99 | 98,50
100 | 99,49
101 | 100,49
102 | 101,50
103 | 102,49
104 | 103,50
105 | 104,49
106 | 105,50
107 | 106,50
108 | 107,49
109 | 108,50
110 | 109,50
111 | 110,50
112 | 111,49
113 | 112,49
114 | 113,50
115 | 114,49
116 | 115,50
117 | 116,50
118 | 117,48
119 | 118,50
120 | 119,50
121 | 120,49
122 | 121,49
123 | 122,49
124 | 123,50
125 | 124,50
126 | 125,48
127 | 126,49
128 | 127,48
129 |
--------------------------------------------------------------------------------
/data/trainnum.txt:
--------------------------------------------------------------------------------
1 | 0,1235
2 | 1,1495
3 | 2,2355
4 | 3,1479
5 | 4,1585
6 | 5,1092
7 | 6,1593
8 | 7,1353
9 | 8,473
10 | 9,1966
11 | 10,1734
12 | 11,2574
13 | 12,1599
14 | 13,1864
15 | 14,1053
16 | 15,1336
17 | 16,1462
18 | 17,1536
19 | 18,846
20 | 19,3937
21 | 20,2456
22 | 21,1392
23 | 22,1178
24 | 23,1761
25 | 24,526
26 | 25,1549
27 | 26,2170
28 | 27,1668
29 | 28,1416
30 | 29,1167
31 | 30,2070
32 | 31,1691
33 | 32,1226
34 | 33,716
35 | 34,638
36 | 35,1572
37 | 36,2242
38 | 37,2291
39 | 38,1103
40 | 39,2027
41 | 40,618
42 | 41,3913
43 | 42,1713
44 | 43,1447
45 | 44,2269
46 | 45,1483
47 | 46,733
48 | 47,1280
49 | 48,1853
50 | 49,1261
51 | 50,1008
52 | 51,1161
53 | 52,1279
54 | 53,1790
55 | 54,2137
56 | 55,1710
57 | 56,690
58 | 57,781
59 | 58,1396
60 | 59,1660
61 | 60,1911
62 | 61,675
63 | 62,1733
64 | 63,1938
65 | 64,1145
66 | 65,340
67 | 66,1058
68 | 67,2237
69 | 68,1235
70 | 69,1329
71 | 70,1227
72 | 71,1390
73 | 72,2244
74 | 73,602
75 | 74,1782
76 | 75,2116
77 | 76,542
78 | 77,1999
79 | 78,1501
80 | 79,1476
81 | 80,1578
82 | 81,1262
83 | 82,321
84 | 83,1255
85 | 84,617
86 | 85,661
87 | 86,1656
88 | 87,1219
89 | 88,2327
90 | 89,1227
91 | 90,2077
92 | 91,2646
93 | 92,2310
94 | 93,1275
95 | 94,1150
96 | 95,1294
97 | 96,1068
98 | 97,1830
99 | 98,1909
100 | 99,1367
101 | 100,1866
102 | 101,1336
103 | 102,2186
104 | 103,2014
105 | 104,830
106 | 105,865
107 | 106,1726
108 | 107,1498
109 | 108,742
110 | 109,811
111 | 110,1829
112 | 111,1752
113 | 112,1360
114 | 113,730
115 | 114,806
116 | 115,1868
117 | 116,2202
118 | 117,1756
119 | 118,1069
120 | 119,2033
121 | 120,432
122 | 121,2435
123 | 122,1964
124 | 123,411
125 | 124,2574
126 | 125,1922
127 | 126,728
128 | 127,1289
129 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/dense161/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/dense169/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/senet154/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw image byte string into an RGB PIL image."""
    stream = io.BytesIO(img_str)
    with Image.open(stream) as decoded:
        return decoded.convert('RGB')
12 |
class McDataset(Dataset):
    """Image/label dataset backed by memcached.

    Each line of ``meta_file`` is "<relative_path> <class_index>"; raw image
    bytes are fetched from memcached under root_dir + '/' + relative_path.
    """

    # Shared memcached cluster configuration paths.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        self.root_dir = root_dir
        self.transform = transform
        # The memcached client is created lazily (see _init_memcached) so
        # each DataLoader worker builds its own client once after the fork,
        # instead of re-acquiring it on every single __getitem__ call.
        self.initialized = False
        self.mclient = None
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")

    def _init_memcached(self):
        # One-time, per-process client setup; cheap to call on every item.
        if not self.initialized:
            self.mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)
            self.initialized = True

    def __len__(self):
        return self.num

    def __getitem__(self, idx):
        filename = self.root_dir + '/' + self.metas[idx][0]
        cls = self.metas[idx][1]
        ## memcached: fetch the raw image bytes
        self._init_memcached()
        value = mc.pyvector()
        self.mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)

        ## transform
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
49 |
--------------------------------------------------------------------------------
/Pytorch_code/seresnet152/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/xception/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/resnet50_chair/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_32x4d/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/resnext101_64x4d/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/seresnext101/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/inceptionResnetV2/memcached_dataset.py:
--------------------------------------------------------------------------------
1 | import mc
2 | from torch.utils.data import DataLoader, Dataset
3 | import numpy as np
4 | import io
5 | from PIL import Image
6 |
def pil_loader(img_str):
    """Decode a raw encoded image byte string into an RGB PIL image.

    Args:
        img_str: encoded image bytes (e.g. JPEG/PNG) as fetched from memcached.
    Returns:
        PIL.Image.Image in RGB mode.
    """
    buff = io.BytesIO(img_str)
    with Image.open(buff) as img:
        return img.convert('RGB')

class McDataset(Dataset):
    """ImageNet-style dataset whose image bytes are served from memcached.

    The meta file lists one sample per line as "<relative_path> <int_label>";
    root_dir + '/' + relative_path is used as the memcached key.
    """

    # Cluster-specific memcached client configuration, hoisted out of
    # __getitem__ so the paths are not rebuilt for every sample.
    SERVER_LIST_CONFIG_FILE = "/mnt/lustre/share/memcached_client/server_list.conf"
    CLIENT_CONFIG_FILE = "/mnt/lustre/share/memcached_client/client.conf"

    def __init__(self, root_dir, meta_file, transform=None):
        """Parse the meta file; the memcached client is created lazily."""
        self.root_dir = root_dir
        self.transform = transform
        with open(meta_file) as f:
            lines = f.readlines()
        print("building dataset from %s"%meta_file)
        self.num = len(lines)
        self.metas = []
        for line in lines:
            path, cls = line.rstrip().split()
            self.metas.append((path, int(cls)))
        print("read meta done")
        # Created on first __getitem__ so each DataLoader worker process
        # builds its own client (instead of a fresh lookup per sample).
        self._mclient = None

    def __len__(self):
        return self.num

    def _ensure_mclient(self):
        """Create the memcached client once per process, on first use."""
        if self._mclient is None:
            self._mclient = mc.MemcachedClient.GetInstance(
                self.SERVER_LIST_CONFIG_FILE, self.CLIENT_CONFIG_FILE)

    def __getitem__(self, idx):
        """Return (image, label) for sample *idx*."""
        path, cls = self.metas[idx]
        filename = self.root_dir + '/' + path
        # Fetch the encoded image bytes from memcached and decode them.
        self._ensure_mclient()
        value = mc.pyvector()
        self._mclient.Get(filename, value)
        value_str = mc.ConvertBuffer(value)
        img = pil_loader(value_str)
        # Optional user transform (augmentation / tensor conversion).
        if self.transform is not None:
            img = self.transform(img)
        return img, cls
--------------------------------------------------------------------------------
/Pytorch_code/dpn107/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/dpn131/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/dpn92/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/dpn98/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/nasnet/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/Densenet/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/dense161/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/dense169/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/densenet2/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper for synchronous distributed training: broadcasts the wrapped
    module's parameters from rank 0 at construction and forwards all calls."""
    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Ensure every rank starts from rank 0's weights.
        broadcast_params(self.module)
    def forward(self, *inputs, **kwargs):
        # Pure pass-through to the wrapped model.
        return self.module(*inputs, **kwargs)
    def train(self, mode=True):
        # Keep wrapper and wrapped module train/eval state in sync.
        super(DistModule, self).train(mode)
        self.module.train(mode)

def average_gradients(model):
    """ average gradients """
    # NOTE(review): all_reduce SUMS gradients across ranks; nothing divides
    # by world_size here, so "average" only holds if the caller scales the
    # loss (or learning rate) accordingly -- confirm at the call site.
    for param in model.parameters():
        if param.requires_grad:
            dist.all_reduce(param.grad.data)

def broadcast_params(model):
    """ broadcast model parameters """
    # state_dict() covers parameters AND buffers (e.g. BN running stats);
    # rank 0 is the broadcast source.
    for p in model.state_dict().values():
        dist.broadcast(p, 0)

def dist_init(port):
    """Initialize torch.distributed (NCCL backend) under a SLURM allocation.

    Rank and world size come from SLURM_PROCID / SLURM_NTASKS; the master
    address is derived from the first host in SLURM_NODELIST, and this
    process is bound to GPU (proc_id % num_gpus).

    Args:
        port: master port as a string (stored in MASTER_PORT verbatim).
    Returns:
        (rank, world_size) from the initialized process group.
    """
    # CUDA does not survive fork; force 'spawn' for any child processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id%num_gpus)

    # Extract the first hostname from a compressed SLURM node list such as
    # "prefix[11-13,15]" by truncating at the first '-' or ',' after '['.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = 1000  # sentinel: no '-' found, keep the full tail
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = 1000  # sentinel: no ',' found
        node_list = node_list[:min(pos1,pos2)].replace('[', '')
    # NOTE(review): assumes an 8-character hostname prefix and that the
    # remaining dash-separated digits form an IPv4 address
    # (e.g. "SH-IDC1-10-5-30-62" -> "10.5.30.62") -- site-specific; verify.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = port
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
--------------------------------------------------------------------------------
/Pytorch_code/drnD105/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------
/Pytorch_code/inceptionv4/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet101/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------
/Pytorch_code/resnet152/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------
/Pytorch_code/seResnext50/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------
/Pytorch_code/senet154/distributed_utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import torch
3 | import torch.distributed as dist
4 | from torch.nn import Module
5 | import multiprocessing as mp
6 |
class DistModule(Module):
    """Wrapper that synchronises a module for distributed training.

    On construction the wrapped module's parameters and buffers are
    broadcast from rank 0, so every process starts from identical
    weights.
    """

    def __init__(self, module):
        super(DistModule, self).__init__()
        self.module = module
        # Make sure all ranks begin with rank 0's weights.
        broadcast_params(self.module)

    def forward(self, *inputs, **kwargs):
        # Pure delegation to the wrapped module.
        return self.module(*inputs, **kwargs)

    def train(self, mode=True):
        """Propagate train/eval mode to the wrapped module.

        Returns ``self`` to honour the ``nn.Module.train`` chaining
        convention (the original returned ``None``, breaking
        ``model.train()`` call chaining).
        """
        super(DistModule, self).train(mode)
        self.module.train(mode)
        return self
17 |
def average_gradients(model):
    """All-reduce (sum) gradients of trainable parameters across ranks.

    NOTE(review): despite the name this *sums* gradients via all_reduce;
    presumably the training loop scales the loss by 1/world_size
    beforehand — confirm against the caller.

    Parameters whose ``.grad`` is ``None`` (frozen layers, or layers
    unused in the last backward pass) are skipped instead of raising
    ``AttributeError`` on ``None.data``.
    """
    for param in model.parameters():
        if param.requires_grad and param.grad is not None:
            dist.all_reduce(param.grad.data)
23 |
def broadcast_params(model):
    """Broadcast every tensor in ``model``'s state dict from rank 0."""
    state = model.state_dict()
    for key in state:
        dist.broadcast(state[key], 0)
28 |
def dist_init(port):
    """Initialise torch.distributed (NCCL backend) from SLURM env vars.

    Derives rank/world size from ``SLURM_PROCID``/``SLURM_NTASKS``, pins
    this process to GPU ``proc_id % num_gpus``, and uses the first node
    in ``SLURM_NODELIST`` as the master address.

    Args:
        port: master port; int or str (exported as ``MASTER_PORT``).

    Returns:
        ``(rank, world_size)`` from the initialised process group.
    """
    # CUDA + NCCL require the 'spawn' start method for worker processes.
    if mp.get_start_method(allow_none=True) != 'spawn':
        mp.set_start_method('spawn')
    proc_id = int(os.environ['SLURM_PROCID'])
    ntasks = int(os.environ['SLURM_NTASKS'])
    node_list = os.environ['SLURM_NODELIST']
    num_gpus = torch.cuda.device_count()
    torch.cuda.set_device(proc_id % num_gpus)

    # Compressed node lists look like "prefix[a-b,c,...]"; keep only the
    # first node name.
    if '[' in node_list:
        beg = node_list.find('[')
        pos1 = node_list.find('-', beg)
        if pos1 < 0:
            pos1 = len(node_list)  # was a magic 1000 sentinel
        pos2 = node_list.find(',', beg)
        if pos2 < 0:
            pos2 = len(node_list)
        node_list = node_list[:min(pos1, pos2)].replace('[', '')
    # NOTE(review): assumes an 8-char hostname prefix followed by a
    # dash-separated IP (e.g. "SH-IDC1-10-5-30-62") — confirm against
    # the cluster's naming scheme.
    addr = node_list[8:].replace('-', '.')
    print(addr)

    os.environ['MASTER_PORT'] = str(port)  # env values must be strings
    os.environ['MASTER_ADDR'] = addr
    os.environ['WORLD_SIZE'] = str(ntasks)
    os.environ['RANK'] = str(proc_id)
    dist.init_process_group(backend='nccl')

    rank = dist.get_rank()
    world_size = dist.get_world_size()
    return rank, world_size
59 |
--------------------------------------------------------------------------------