├── .idea
│   ├── Cosine-IVReID.iml
│   ├── misc.xml
│   ├── modules.xml
│   └── workspace.xml
├── README.md
├── config
│   ├── __init__.py
│   ├── __init__.pyc
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   └── defaults.cpython-36.pyc
│   ├── defaults.py
│   └── defaults.pyc
├── configs
│   ├── all.yml
│   ├── baseline.yml
│   └── softmax.yml
├── data
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── build.cpython-36.pyc
│   │   └── collate_batch.cpython-36.pyc
│   ├── build.py
│   ├── collate_batch.py
│   ├── datasets
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── __init__.cpython-36.pyc
│   │   │   ├── bases.cpython-36.pyc
│   │   │   ├── cuhk03.cpython-36.pyc
│   │   │   ├── dataset_loader.cpython-36.pyc
│   │   │   ├── dukemtmcreid.cpython-36.pyc
│   │   │   ├── eval_reid.cpython-36.pyc
│   │   │   ├── market1501.cpython-36.pyc
│   │   │   ├── msmt17.cpython-36.pyc
│   │   │   ├── regdb.cpython-36.pyc
│   │   │   └── sysu.cpython-36.pyc
│   │   ├── bases.py
│   │   ├── cuhk03.py
│   │   ├── dataset_loader.py
│   │   ├── dukemtmcreid.py
│   │   ├── eval_reid.py
│   │   ├── market1501.py
│   │   ├── msmt17.py
│   │   ├── regdb.py
│   │   └── sysu.py
│   ├── samplers
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── __init__.cpython-36.pyc
│   │   │   └── triplet_sampler.cpython-36.pyc
│   │   └── triplet_sampler.py
│   └── transforms
│       ├── __init__.py
│       ├── __pycache__
│       │   ├── __init__.cpython-36.pyc
│       │   ├── build.cpython-36.pyc
│       │   └── transforms.cpython-36.pyc
│       ├── build.py
│       └── transforms.py
├── engine
│   ├── __pycache__
│   │   ├── inference.cpython-36.pyc
│   │   ├── trainer.cpython-36.pyc
│   │   ├── triplet_loss_anti.cpython-36.pyc
│   │   └── triplet_loss_ori.cpython-36.pyc
│   ├── inference.py
│   ├── trainer.py
│   ├── triplet_loss_anti.py
│   └── triplet_loss_ori.py
├── imgs
│   └── pipeline.jpg
├── layers
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── center_loss.cpython-36.pyc
│   │   ├── cluster_loss.cpython-36.pyc
│   │   ├── range_loss.cpython-36.pyc
│   │   └── triplet_loss.cpython-36.pyc
│   ├── center_loss.py
│   ├── cluster_loss.py
│   ├── range_loss.py
│   └── triplet_loss.py
├── modeling
│   ├── AAAI.py
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── AAAI.cpython-36.pyc
│   │   ├── __init__.cpython-36.pyc
│   │   ├── attention.cpython-36.pyc
│   │   └── baseline.cpython-36.pyc
│   ├── attention.py
│   ├── backbones
│   │   ├── __init__.py
│   │   ├── __pycache__
│   │   │   ├── __init__.cpython-36.pyc
│   │   │   ├── alexnet.cpython-36.pyc
│   │   │   ├── densenet.cpython-36.pyc
│   │   │   ├── resnet.cpython-36.pyc
│   │   │   ├── senet.cpython-36.pyc
│   │   │   ├── utils.cpython-36.pyc
│   │   │   ├── vgg.cpython-36.pyc
│   │   │   └── vgg2.cpython-36.pyc
│   │   ├── alexnet.py
│   │   ├── densenet.py
│   │   ├── resnet.py
│   │   ├── senet.py
│   │   ├── utils.py
│   │   ├── vgg.py
│   │   └── vgg2.py
│   ├── baseline.py
│   └── block.py
├── scripts
│   ├── Test-reranking-tri_center-feat_after_bn-cos-duke.sh
│   ├── Test-reranking-tri_center-feat_after_bn-cos-market.sh
│   ├── Test-tri_center-feat_after_bn-cos-duke.sh
│   ├── Test-tri_center-feat_after_bn-cos-market.sh
│   ├── Test-without_center-feat_after_bn-cos-duke.sh
│   ├── Test-without_center-feat_after_bn-cos-market.sh
│   ├── pretrain_choice-tri_center-market.sh
│   ├── seresnext50-tri_center-duke.sh
│   ├── seresnext50-tri_center-market.sh
│   ├── test-sysu-without_center.sh
│   ├── test-sysu.sh
│   ├── tri_center-duke.sh
│   ├── tri_center-market.sh
│   ├── tri_center-regdb.sh
│   ├── tri_center-sysu.sh
│   ├── without_center-duke.sh
│   ├── without_center-market.bk.sh
│   ├── without_center-market.sh
│   ├── without_center-regdb.sh
│   └── without_center-sysu.sh
├── solver
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── build.cpython-36.pyc
│   │   └── lr_scheduler.cpython-36.pyc
│   ├── build.py
│   └── lr_scheduler.py
├── test.sh
├── tests
│   ├── __init__.py
│   └── lr_scheduler_test.py
├── tools
│   ├── __init__.py
│   ├── test-attention.py
│   ├── test-regdb.py
│   ├── test-sysu.py
│   ├── test.py
│   └── train.py
├── train.sh
├── utils
│   ├── __init__.py
│   ├── __pycache__
│   │   ├── __init__.cpython-36.pyc
│   │   ├── data_loader.cpython-36.pyc
│   │   ├── data_manager.cpython-36.pyc
│   │   ├── ecn.cpython-36.pyc
│   │   ├── eval_metrics.cpython-36.pyc
│   │   ├── iotools.cpython-36.pyc
│   │   ├── logger.cpython-36.pyc
│   │   ├── model.cpython-36.pyc
│   │   ├── re_ranking.cpython-36.pyc
│   │   ├── reid_metric.cpython-36.pyc
│   │   └── utils.cpython-36.pyc
│   ├── data_loader.py
│   ├── data_manager.py
│   ├── ecn.py
│   ├── eval_metrics.py
│   ├── iotools.py
│   ├── logger.py
│   ├── model.py
│   ├── pre_process_sysu.py
│   ├── re_ranking.py
│   ├── reid_metric.py
│   └── utils.py
└── venv
├── bin
├── activate
├── activate.csh
├── activate.fish
├── easy_install
├── easy_install-3.5
├── pip
├── pip3
├── pip3.5
├── python
├── python3
└── python3.5
├── lib
└── python3.5
│ └── site-packages
│ ├── easy-install.pth
│ ├── pip-10.0.1-py3.5.egg
│ ├── EGG-INFO
│ │ ├── PKG-INFO
│ │ ├── SOURCES.txt
│ │ ├── dependency_links.txt
│ │ ├── entry_points.txt
│ │ ├── not-zip-safe
│ │ ├── requires.txt
│ │ └── top_level.txt
│ └── pip
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── _internal
│ │ ├── __init__.py
│ │ ├── basecommand.py
│ │ ├── baseparser.py
│ │ ├── build_env.py
│ │ ├── cache.py
│ │ ├── cmdoptions.py
│ │ ├── commands
│ │ │ ├── __init__.py
│ │ │ ├── check.py
│ │ │ ├── completion.py
│ │ │ ├── configuration.py
│ │ │ ├── download.py
│ │ │ ├── freeze.py
│ │ │ ├── hash.py
│ │ │ ├── help.py
│ │ │ ├── install.py
│ │ │ ├── list.py
│ │ │ ├── search.py
│ │ │ ├── show.py
│ │ │ ├── uninstall.py
│ │ │ └── wheel.py
│ │ ├── compat.py
│ │ ├── configuration.py
│ │ ├── download.py
│ │ ├── exceptions.py
│ │ ├── index.py
│ │ ├── locations.py
│ │ ├── models
│ │ │ ├── __init__.py
│ │ │ └── index.py
│ │ ├── operations
│ │ │ ├── __init__.py
│ │ │ ├── check.py
│ │ │ ├── freeze.py
│ │ │ └── prepare.py
│ │ ├── pep425tags.py
│ │ ├── req
│ │ │ ├── __init__.py
│ │ │ ├── req_file.py
│ │ │ ├── req_install.py
│ │ │ ├── req_set.py
│ │ │ └── req_uninstall.py
│ │ ├── resolve.py
│ │ ├── status_codes.py
│ │ ├── utils
│ │ │ ├── __init__.py
│ │ │ ├── appdirs.py
│ │ │ ├── deprecation.py
│ │ │ ├── encoding.py
│ │ │ ├── filesystem.py
│ │ │ ├── glibc.py
│ │ │ ├── hashes.py
│ │ │ ├── logging.py
│ │ │ ├── misc.py
│ │ │ ├── outdated.py
│ │ │ ├── packaging.py
│ │ │ ├── setuptools_build.py
│ │ │ ├── temp_dir.py
│ │ │ ├── typing.py
│ │ │ └── ui.py
│ │ ├── vcs
│ │ │ ├── __init__.py
│ │ │ ├── bazaar.py
│ │ │ ├── git.py
│ │ │ ├── mercurial.py
│ │ │ └── subversion.py
│ │ └── wheel.py
│ │ └── _vendor
│ │ ├── __init__.py
│ │ ├── appdirs.py
│ │ ├── cachecontrol
│ │ ├── __init__.py
│ │ ├── _cmd.py
│ │ ├── adapter.py
│ │ ├── cache.py
│ │ ├── caches
│ │ │ ├── __init__.py
│ │ │ ├── file_cache.py
│ │ │ └── redis_cache.py
│ │ ├── compat.py
│ │ ├── controller.py
│ │ ├── filewrapper.py
│ │ ├── heuristics.py
│ │ ├── serialize.py
│ │ └── wrapper.py
│ │ ├── certifi
│ │ ├── __init__.py
│ │ ├── __main__.py
│ │ ├── cacert.pem
│ │ └── core.py
│ │ ├── chardet
│ │ ├── __init__.py
│ │ ├── big5freq.py
│ │ ├── big5prober.py
│ │ ├── chardistribution.py
│ │ ├── charsetgroupprober.py
│ │ ├── charsetprober.py
│ │ ├── cli
│ │ │ ├── __init__.py
│ │ │ └── chardetect.py
│ │ ├── codingstatemachine.py
│ │ ├── compat.py
│ │ ├── cp949prober.py
│ │ ├── enums.py
│ │ ├── escprober.py
│ │ ├── escsm.py
│ │ ├── eucjpprober.py
│ │ ├── euckrfreq.py
│ │ ├── euckrprober.py
│ │ ├── euctwfreq.py
│ │ ├── euctwprober.py
│ │ ├── gb2312freq.py
│ │ ├── gb2312prober.py
│ │ ├── hebrewprober.py
│ │ ├── jisfreq.py
│ │ ├── jpcntx.py
│ │ ├── langbulgarianmodel.py
│ │ ├── langcyrillicmodel.py
│ │ ├── langgreekmodel.py
│ │ ├── langhebrewmodel.py
│ │ ├── langhungarianmodel.py
│ │ ├── langthaimodel.py
│ │ ├── langturkishmodel.py
│ │ ├── latin1prober.py
│ │ ├── mbcharsetprober.py
│ │ ├── mbcsgroupprober.py
│ │ ├── mbcssm.py
│ │ ├── sbcharsetprober.py
│ │ ├── sbcsgroupprober.py
│ │ ├── sjisprober.py
│ │ ├── universaldetector.py
│ │ ├── utf8prober.py
│ │ └── version.py
│ │ ├── colorama
│ │ ├── __init__.py
│ │ ├── ansi.py
│ │ ├── ansitowin32.py
│ │ ├── initialise.py
│ │ ├── win32.py
│ │ └── winterm.py
│ │ ├── distlib
│ │ ├── __init__.py
│ │ ├── _backport
│ │ │ ├── __init__.py
│ │ │ ├── misc.py
│ │ │ ├── shutil.py
│ │ │ ├── sysconfig.cfg
│ │ │ ├── sysconfig.py
│ │ │ └── tarfile.py
│ │ ├── compat.py
│ │ ├── database.py
│ │ ├── index.py
│ │ ├── locators.py
│ │ ├── manifest.py
│ │ ├── markers.py
│ │ ├── metadata.py
│ │ ├── resources.py
│ │ ├── scripts.py
│ │ ├── t32.exe
│ │ ├── t64.exe
│ │ ├── util.py
│ │ ├── version.py
│ │ ├── w32.exe
│ │ ├── w64.exe
│ │ └── wheel.py
│ │ ├── distro.py
│ │ ├── html5lib
│ │ ├── __init__.py
│ │ ├── _ihatexml.py
│ │ ├── _inputstream.py
│ │ ├── _tokenizer.py
│ │ ├── _trie
│ │ │ ├── __init__.py
│ │ │ ├── _base.py
│ │ │ ├── datrie.py
│ │ │ └── py.py
│ │ ├── _utils.py
│ │ ├── constants.py
│ │ ├── filters
│ │ │ ├── __init__.py
│ │ │ ├── alphabeticalattributes.py
│ │ │ ├── base.py
│ │ │ ├── inject_meta_charset.py
│ │ │ ├── lint.py
│ │ │ ├── optionaltags.py
│ │ │ ├── sanitizer.py
│ │ │ └── whitespace.py
│ │ ├── html5parser.py
│ │ ├── serializer.py
│ │ ├── treeadapters
│ │ │ ├── __init__.py
│ │ │ ├── genshi.py
│ │ │ └── sax.py
│ │ ├── treebuilders
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── dom.py
│ │ │ ├── etree.py
│ │ │ └── etree_lxml.py
│ │ └── treewalkers
│ │ │ ├── __init__.py
│ │ │ ├── base.py
│ │ │ ├── dom.py
│ │ │ ├── etree.py
│ │ │ ├── etree_lxml.py
│ │ │ └── genshi.py
│ │ ├── idna
│ │ ├── __init__.py
│ │ ├── codec.py
│ │ ├── compat.py
│ │ ├── core.py
│ │ ├── idnadata.py
│ │ ├── intranges.py
│ │ ├── package_data.py
│ │ └── uts46data.py
│ │ ├── ipaddress.py
│ │ ├── lockfile
│ │ ├── __init__.py
│ │ ├── linklockfile.py
│ │ ├── mkdirlockfile.py
│ │ ├── pidlockfile.py
│ │ ├── sqlitelockfile.py
│ │ └── symlinklockfile.py
│ │ ├── msgpack
│ │ ├── __init__.py
│ │ ├── _version.py
│ │ ├── exceptions.py
│ │ └── fallback.py
│ │ ├── packaging
│ │ ├── __about__.py
│ │ ├── __init__.py
│ │ ├── _compat.py
│ │ ├── _structures.py
│ │ ├── markers.py
│ │ ├── requirements.py
│ │ ├── specifiers.py
│ │ ├── utils.py
│ │ └── version.py
│ │ ├── pkg_resources
│ │ ├── __init__.py
│ │ └── py31compat.py
│ │ ├── progress
│ │ ├── __init__.py
│ │ ├── bar.py
│ │ ├── counter.py
│ │ ├── helpers.py
│ │ └── spinner.py
│ │ ├── pyparsing.py
│ │ ├── pytoml
│ │ ├── __init__.py
│ │ ├── core.py
│ │ ├── parser.py
│ │ └── writer.py
│ │ ├── requests
│ │ ├── __init__.py
│ │ ├── __version__.py
│ │ ├── _internal_utils.py
│ │ ├── adapters.py
│ │ ├── api.py
│ │ ├── auth.py
│ │ ├── certs.py
│ │ ├── compat.py
│ │ ├── cookies.py
│ │ ├── exceptions.py
│ │ ├── help.py
│ │ ├── hooks.py
│ │ ├── models.py
│ │ ├── packages.py
│ │ ├── sessions.py
│ │ ├── status_codes.py
│ │ ├── structures.py
│ │ └── utils.py
│ │ ├── retrying.py
│ │ ├── six.py
│ │ ├── urllib3
│ │ ├── __init__.py
│ │ ├── _collections.py
│ │ ├── connection.py
│ │ ├── connectionpool.py
│ │ ├── contrib
│ │ │ ├── __init__.py
│ │ │ ├── _securetransport
│ │ │ │ ├── __init__.py
│ │ │ │ ├── bindings.py
│ │ │ │ └── low_level.py
│ │ │ ├── appengine.py
│ │ │ ├── ntlmpool.py
│ │ │ ├── pyopenssl.py
│ │ │ ├── securetransport.py
│ │ │ └── socks.py
│ │ ├── exceptions.py
│ │ ├── fields.py
│ │ ├── filepost.py
│ │ ├── packages
│ │ │ ├── __init__.py
│ │ │ ├── backports
│ │ │ │ ├── __init__.py
│ │ │ │ └── makefile.py
│ │ │ ├── ordered_dict.py
│ │ │ ├── six.py
│ │ │ └── ssl_match_hostname
│ │ │ │ ├── __init__.py
│ │ │ │ └── _implementation.py
│ │ ├── poolmanager.py
│ │ ├── request.py
│ │ ├── response.py
│ │ └── util
│ │ │ ├── __init__.py
│ │ │ ├── connection.py
│ │ │ ├── request.py
│ │ │ ├── response.py
│ │ │ ├── retry.py
│ │ │ ├── selectors.py
│ │ │ ├── ssl_.py
│ │ │ ├── timeout.py
│ │ │ ├── url.py
│ │ │ └── wait.py
│ │ └── webencodings
│ │ ├── __init__.py
│ │ ├── labels.py
│ │ ├── mklabels.py
│ │ ├── tests.py
│ │ └── x_user_defined.py
│ ├── setuptools-39.1.0-py3.5.egg
│ └── setuptools.pth
└── pyvenv.cfg
/README.md:
--------------------------------------------------------------------------------
1 | # XIVReID
2 | Code for the AAAI 2020 paper ["Infrared-Visible Cross-Modal Person Re-Identification with an X Modality"](https://aaai.org/Papers/AAAI/2020GB/AAAI-LiD.870.pdf).
3 | 
4 | [Overview in Chinese](https://zhuanlan.zhihu.com/p/121444988)
5 | 
6 | The code is built on [ReID-baseline](https://github.com/michuanhaohao/reid-strong-baseline), which is open-sourced by [Hao Luo](https://github.com/michuanhaohao).
7 |
--------------------------------------------------------------------------------
/config/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | from .defaults import _C as cfg
8 |
--------------------------------------------------------------------------------
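For context, `cfg` here follows the yacs-style `CfgNode` convention inherited from reid-strong-baseline; the actual option tree lives in `config/defaults.py`, whose source is not reproduced as text above. A minimal sketch of loading and overriding it, assuming that convention and using option names that appear in `configs/all.yml` and the `scripts/*.sh` files:

```python
# Sketch only: assumes cfg is a yacs CfgNode, as in the reid-strong-baseline.
from config import cfg

cfg.merge_from_file("configs/all.yml")            # load one of the YAML files in configs/
cfg.merge_from_list(["SOLVER.IMS_PER_BATCH", 48,  # command-line style overrides,
                     "TEST.RE_RANKING", "no"])    # as done in the scripts/*.sh files
cfg.freeze()                                      # make the config read-only

print(cfg.DATASETS.NAMES, cfg.SOLVER.MAX_EPOCHS, cfg.SOLVER.BASE_LR)
```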
/config/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/config/__init__.pyc
--------------------------------------------------------------------------------
/config/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/config/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/config/__pycache__/defaults.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/config/__pycache__/defaults.cpython-36.pyc
--------------------------------------------------------------------------------
/config/defaults.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/config/defaults.pyc
--------------------------------------------------------------------------------
/configs/all.yml:
--------------------------------------------------------------------------------
1 | MODEL:
2 | PRETRAIN_CHOICE: 'imagenet'
3 | PRETRAIN_PATH: '/home/lidg/.torch/models/resnet50-19c8e357.pth'
4 | METRIC_LOSS_TYPE: 'triplet'
5 | IF_WITH_CENTER: 'no'
6 | IF_LABELSMOOTH: 'on'
7 | NECK: 'bnneck'
8 | LAST_STRIDE: 1
9 |
10 |
11 | INPUT:
12 | SIZE_TRAIN: [256, 128]
13 | SIZE_TEST: [256, 128]
14 | PROB: 0.5
15 | RE_PROB: 0.5
16 | PADDING: 10
17 |
18 | DATASETS:
19 |
20 | NAMES: ('sysu')
21 | ROOT_DIR: ('/data1/lidg/reid_dataset/IV-ReID/split/')
22 | # NAMES: ('market1501')
23 | # ROOT_DIR: ('/data1/lidg/reid_dataset/')
24 |
25 | DATALOADER:
26 | SAMPLER: 'softmax_triplet'
27 | NUM_INSTANCE: 4
28 | NUM_WORKERS: 8
29 |
30 | SOLVER:
31 | OPTIMIZER_NAME: 'Adam'
32 | MAX_EPOCHS: 120
33 | BASE_LR: 0.00035
34 | MARGIN: 0.3
35 |
36 | CLUSTER_MARGIN: 0.3
37 |
38 | CENTER_LR: 0.5
39 | CENTER_LOSS_WEIGHT: 0.0005
40 |
41 | RANGE_K: 2
42 | RANGE_MARGIN: 0.3
43 | RANGE_ALPHA: 0
44 | RANGE_BETA: 1
45 | RANGE_LOSS_WEIGHT: 1
46 |
47 | BIAS_LR_FACTOR: 1
48 | WEIGHT_DECAY: 0.0005
49 | WEIGHT_DECAY_BIAS: 0.0005
50 | IMS_PER_BATCH: 48 # 64 36 48 28 44
51 |
52 | STEPS: [40,70]
53 | GAMMA: 0.1
54 |
55 | WARMUP_FACTOR: 0.01
56 | WARMUP_ITERS: 10
57 | WARMUP_METHOD: 'linear'
58 |
59 | CHECKPOINT_PERIOD: 5
60 | LOG_PERIOD: 90
61 | EVAL_PERIOD: 200
62 |
63 | TEST:
64 | IMS_PER_BATCH: 128
65 | RE_RANKING: 'no'
66 | WEIGHT: "./logs/sysu/cosine-norm-again/resnet50_model_120.pth"
67 | NECK_FEAT: 'before'
68 | FEAT_NORM: 'no'
69 |
70 |
71 | OUTPUT_DIR: "./logs/Experiment"
72 |
73 |
74 |
--------------------------------------------------------------------------------
/configs/baseline.yml:
--------------------------------------------------------------------------------
1 | MODEL:
2 | PRETRAIN_CHOICE: 'imagenet'
3 | PRETRAIN_PATH: '/home/haoluo/.torch/models/resnet50-19c8e357.pth'
4 | LAST_STRIDE: 2
5 | NECK: 'no'
6 | METRIC_LOSS_TYPE: 'triplet'
7 | IF_LABELSMOOTH: 'off'
8 | IF_WITH_CENTER: 'no'
9 |
10 |
11 | INPUT:
12 | SIZE_TRAIN: [256, 128]
13 | SIZE_TEST: [256, 128]
14 | PROB: 0.5 # random horizontal flip
15 | RE_PROB: 0.0 # random erasing
16 | PADDING: 10
17 |
18 | DATASETS:
19 | NAMES: ('market1501')
20 |
21 | DATALOADER:
22 | SAMPLER: 'softmax_triplet'
23 | NUM_INSTANCE: 4
24 | NUM_WORKERS: 8
25 |
26 | SOLVER:
27 | OPTIMIZER_NAME: 'Adam'
28 | MAX_EPOCHS: 120
29 | BASE_LR: 0.00035
30 |
31 | CLUSTER_MARGIN: 0.3
32 |
33 | CENTER_LR: 0.5
34 | CENTER_LOSS_WEIGHT: 0.0005
35 |
36 | RANGE_K: 2
37 | RANGE_MARGIN: 0.3
38 | RANGE_ALPHA: 0
39 | RANGE_BETA: 1
40 | RANGE_LOSS_WEIGHT: 1
41 |
42 | BIAS_LR_FACTOR: 1
43 | WEIGHT_DECAY: 0.0005
44 | WEIGHT_DECAY_BIAS: 0.0005
45 | IMS_PER_BATCH: 64
46 |
47 | STEPS: [40, 70]
48 | GAMMA: 0.1
49 |
50 | WARMUP_FACTOR: 0.01
51 | WARMUP_ITERS: 0
52 | WARMUP_METHOD: 'linear'
53 |
54 | CHECKPOINT_PERIOD: 40
55 | LOG_PERIOD: 20
56 | EVAL_PERIOD: 40
57 |
58 | TEST:
59 | IMS_PER_BATCH: 128
60 | RE_RANKING: 'no'
61 | WEIGHT: "path"
62 | NECK_FEAT: 'after'
63 | FEAT_NORM: 'yes'
64 |
65 | OUTPUT_DIR: "/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on"
66 |
67 |
68 |
--------------------------------------------------------------------------------
/configs/softmax.yml:
--------------------------------------------------------------------------------
1 | MODEL:
2 | PRETRAIN_PATH: '/home/haoluo/.torch/models/resnet50-19c8e357.pth'
3 |
4 |
5 | INPUT:
6 | SIZE_TRAIN: [256, 128]
7 | SIZE_TEST: [256, 128]
8 | PROB: 0.5 # random horizontal flip
9 | RE_PROB: 0.5 # random erasing
10 | PADDING: 10
11 |
12 | DATASETS:
13 | NAMES: ('market1501')
14 |
15 | DATALOADER:
16 | SAMPLER: 'softmax'
17 | NUM_WORKERS: 8
18 |
19 | SOLVER:
20 | OPTIMIZER_NAME: 'Adam'
21 | MAX_EPOCHS: 120
22 | BASE_LR: 0.00035
23 | BIAS_LR_FACTOR: 1
24 | WEIGHT_DECAY: 0.0005
25 | WEIGHT_DECAY_BIAS: 0.0005
26 | IMS_PER_BATCH: 64
27 |
28 | STEPS: [30, 55]
29 | GAMMA: 0.1
30 |
31 | WARMUP_FACTOR: 0.01
32 | WARMUP_ITERS: 5
33 | WARMUP_METHOD: 'linear'
34 |
35 | CHECKPOINT_PERIOD: 20
36 | LOG_PERIOD: 20
37 | EVAL_PERIOD: 20
38 |
39 | TEST:
40 | IMS_PER_BATCH: 128
41 |
42 | OUTPUT_DIR: "/home/haoluo/log/reid/market1501/softmax_bs64_256x128"
43 |
44 |
45 |
--------------------------------------------------------------------------------
/data/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | from .build import make_data_loader
8 |
--------------------------------------------------------------------------------
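`data/build.py` is not reproduced above, so the exact interface of `make_data_loader` is not visible here. Assuming it keeps the reid-strong-baseline signature (a 4-tuple of train loader, validation loader, query count, and class count), a typical call would look roughly like this:

```python
# Hypothetical usage; the 4-tuple return value mirrors the reid-strong-baseline
# and may differ in this repository's data/build.py.
from config import cfg
from data import make_data_loader

cfg.merge_from_file("configs/all.yml")
train_loader, val_loader, num_query, num_classes = make_data_loader(cfg)

imgs, pids = next(iter(train_loader))   # batches shaped by train_collate_fn
print(imgs.shape, pids.shape, num_query, num_classes)
```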
/data/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/data/__pycache__/build.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/__pycache__/build.cpython-36.pyc
--------------------------------------------------------------------------------
/data/__pycache__/collate_batch.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/__pycache__/collate_batch.cpython-36.pyc
--------------------------------------------------------------------------------
/data/collate_batch.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import torch
8 |
9 |
10 | def train_collate_fn(batch):
11 |     imgs, pids, _, _ = zip(*batch)
12 | pids = torch.tensor(pids, dtype=torch.int64)
13 | return torch.stack(imgs, dim=0), pids
14 |
15 |
16 | def val_collate_fn(batch):
17 | imgs, pids, camids, _ = zip(*batch)
18 | return torch.stack(imgs, dim=0), pids, camids
19 |
--------------------------------------------------------------------------------
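These collate functions only choose which fields of each `(img, pid, camid, img_path)` sample survive into a batch. A self-contained sketch of wiring them into standard PyTorch `DataLoader`s, using an in-memory stand-in dataset for illustration:

```python
import torch
from torch.utils.data import DataLoader
from data.collate_batch import train_collate_fn, val_collate_fn

# Stand-in dataset yielding (img, pid, camid, img_path) tuples, the format
# both collate functions unpack.
fake_set = [(torch.zeros(3, 256, 128), pid, 0, "img_%d.jpg" % pid) for pid in range(8)]

train_loader = DataLoader(fake_set, batch_size=4, collate_fn=train_collate_fn)
val_loader = DataLoader(fake_set, batch_size=4, collate_fn=val_collate_fn)

imgs, pids = next(iter(train_loader))
print(imgs.shape, pids)      # torch.Size([4, 3, 256, 128]) tensor([0, 1, 2, 3])
```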
/data/datasets/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 | from .cuhk03 import CUHK03
7 | from .dukemtmcreid import DukeMTMCreID
8 | from .market1501 import Market1501
9 | from .msmt17 import MSMT17
10 | from .sysu import SYSU
11 | from .regdb import RegDB
12 | from .dataset_loader import ImageDataset
13 |
14 | __factory = {
15 | 'market1501': Market1501,
16 | 'cuhk03': CUHK03,
17 | 'dukemtmc': DukeMTMCreID,
18 | 'msmt17': MSMT17,
19 | 'sysu':SYSU,
20 | 'regdb':RegDB,
21 | }
22 |
23 |
24 | def get_names():
25 | return __factory.keys()
26 |
27 |
28 | def init_dataset(name, *args, **kwargs):
29 | if name not in __factory.keys():
30 | raise KeyError("Unknown datasets: {}".format(name))
31 | return __factory[name](*args, **kwargs)
32 |
--------------------------------------------------------------------------------
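A short sketch of using the factory above. The placeholder root path and the `num_train_*` attributes follow the dataset classes in this package (see the `RegDB` class later in this listing), so treat the commented call as illustrative:

```python
from data.datasets import get_names, init_dataset

print(sorted(get_names()))
# ['cuhk03', 'dukemtmc', 'market1501', 'msmt17', 'regdb', 'sysu']

try:
    init_dataset("unknown")          # any name outside the factory
except KeyError as e:
    print(e)                         # 'Unknown datasets: unknown'

# With a dataset on disk, e.g.:
#   dataset = init_dataset("sysu", root="/path/to/IV-ReID/split")
#   print(dataset.num_train_pids, dataset.num_train_imgs, dataset.num_train_cams)
```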
/data/datasets/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/bases.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/bases.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/cuhk03.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/cuhk03.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/dataset_loader.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/dataset_loader.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/dukemtmcreid.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/dukemtmcreid.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/eval_reid.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/eval_reid.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/market1501.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/market1501.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/msmt17.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/msmt17.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/regdb.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/regdb.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/__pycache__/sysu.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/datasets/__pycache__/sysu.cpython-36.pyc
--------------------------------------------------------------------------------
/data/datasets/dataset_loader.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import os.path as osp
8 | from PIL import Image
9 | from torch.utils.data import Dataset
10 |
11 |
12 | def read_image(img_path):
13 |     """Keep trying to read the image until it succeeds.
14 |     This avoids IOErrors caused by heavy IO load."""
15 | got_img = False
16 | if not osp.exists(img_path):
17 | raise IOError("{} does not exist".format(img_path))
18 | while not got_img:
19 | try:
20 | img = Image.open(img_path).convert('RGB')
21 | got_img = True
22 | except IOError:
23 | print("IOError incurred when reading '{}'. Will redo. Don't worry. Just chill.".format(img_path))
24 | pass
25 | return img
26 |
27 |
28 | class ImageDataset(Dataset):
29 | """Image Person ReID Dataset"""
30 |
31 | def __init__(self, dataset, transform=None):
32 | self.dataset = dataset
33 | self.transform = transform
34 |
35 | def __len__(self):
36 | return len(self.dataset)
37 |
38 | def __getitem__(self, index):
39 | img_path, pid, camid = self.dataset[index]
40 | img = read_image(img_path)
41 |
42 | if self.transform is not None:
43 | img = self.transform(img)
44 |
45 | return img, pid, camid, img_path
46 |
--------------------------------------------------------------------------------
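`ImageDataset` simply maps `(img_path, pid, camid)` records to transformed images via `read_image`. A minimal runnable sketch that fabricates one dummy image on disk so no ReID dataset is required:

```python
import tempfile
import os.path as osp
import torchvision.transforms as T
from PIL import Image
from data.datasets.dataset_loader import ImageDataset

# Write one dummy "person crop" so read_image() has something to open.
tmp_dir = tempfile.mkdtemp()
img_path = osp.join(tmp_dir, "0001_c1_f0000001.jpg")
Image.new("RGB", (128, 256), color=(127, 127, 127)).save(img_path)

transform = T.Compose([T.Resize((256, 128)), T.ToTensor()])
dataset = ImageDataset([(img_path, 1, 0)], transform=transform)

img, pid, camid, path = dataset[0]
print(img.shape, pid, camid)        # torch.Size([3, 256, 128]) 1 0
```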
/data/datasets/eval_reid.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import numpy as np
8 |
9 |
10 | def eval_func(distmat, q_pids, g_pids, q_camids, g_camids, max_rank=50):
11 | """Evaluation with market1501 metric
12 | Key: for each query identity, its gallery images from the same camera view are discarded.
13 | """
14 | num_q, num_g = distmat.shape
15 | if num_g < max_rank:
16 | max_rank = num_g
17 | print("Note: number of gallery samples is quite small, got {}".format(num_g))
18 | indices = np.argsort(distmat, axis=1)
19 | matches = (g_pids[indices] == q_pids[:, np.newaxis]).astype(np.int32)
20 |
21 | # compute cmc curve for each query
22 | all_cmc = []
23 | all_AP = []
24 | num_valid_q = 0. # number of valid query
25 | for q_idx in range(num_q):
26 | # get query pid and camid
27 | q_pid = q_pids[q_idx]
28 | q_camid = q_camids[q_idx]
29 |
30 | # remove gallery samples that have the same pid and camid with query
31 | order = indices[q_idx]
32 | remove = (g_pids[order] == q_pid) & (g_camids[order] == q_camid)
33 | keep = np.invert(remove)
34 |
35 | # compute cmc curve
36 | # binary vector, positions with value 1 are correct matches
37 | orig_cmc = matches[q_idx][keep]
38 | if not np.any(orig_cmc):
39 | # this condition is true when query identity does not appear in gallery
40 | continue
41 |
42 | cmc = orig_cmc.cumsum()
43 | cmc[cmc > 1] = 1
44 |
45 | all_cmc.append(cmc[:max_rank])
46 | num_valid_q += 1.
47 |
48 | # compute average precision
49 | # reference: https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Average_precision
50 | num_rel = orig_cmc.sum()
51 | tmp_cmc = orig_cmc.cumsum()
52 | tmp_cmc = [x / (i + 1.) for i, x in enumerate(tmp_cmc)]
53 | tmp_cmc = np.asarray(tmp_cmc) * orig_cmc
54 | AP = tmp_cmc.sum() / num_rel
55 | all_AP.append(AP)
56 |
57 | assert num_valid_q > 0, "Error: all query identities do not appear in gallery"
58 |
59 | all_cmc = np.asarray(all_cmc).astype(np.float32)
60 | all_cmc = all_cmc.sum(0) / num_valid_q
61 | mAP = np.mean(all_AP)
62 |
63 | return all_cmc, mAP
64 |
--------------------------------------------------------------------------------
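A hand-made sanity check for `eval_func`, with invented numbers: two queries against three gallery images, with camera IDs chosen so the same-camera filtering removes nothing.

```python
import numpy as np
from data.datasets.eval_reid import eval_func

distmat = np.array([[0.9, 0.1, 0.8],      # query 0 wrongly ranks a pid-1 image first
                    [0.9, 0.2, 0.3]])     # query 1 retrieves both pid-1 images first
q_pids, q_camids = np.array([0, 1]), np.array([0, 0])
g_pids, g_camids = np.array([0, 1, 1]), np.array([1, 1, 1])

cmc, mAP = eval_func(distmat, q_pids, g_pids, q_camids, g_camids, max_rank=3)
print(cmc)    # [0.5 0.5 1. ]  -> Rank-1 = 50%
print(mAP)    # ~0.667
```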
/data/datasets/regdb.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import glob
8 | import re
9 | import os
10 |
11 | import os.path as osp
12 |
13 | from .bases import BaseImageDataset
14 |
15 |
16 | class RegDB(BaseImageDataset):
17 | """
18 |     RegDB
19 |     Reference:
20 |     Nguyen et al. Person Recognition System Based on a Combination of Body Images
21 |     from Visible Light and Thermal Cameras. Sensors 2017.
22 | 
23 |     Dataset statistics:
24 |     # identities: 412
25 |     # images: 4120 (visible) + 4120 (thermal)
26 | """
27 | dataset_dir = ''
28 |
29 | def __init__(self, root='/data1/lidg/reid_dataset/IV-ReID/RegDB', verbose=True, **kwargs):
30 | super(RegDB, self).__init__()
31 | self.dataset_dir = osp.join(root, self.dataset_dir)
32 |
33 | self.train_i = osp.join(self.dataset_dir, '')
34 | self.train_v = osp.join(self.dataset_dir, '')
35 |
36 | self._check_before_run()
37 |
38 | train = self._process_dir(self.train_i, self.train_v, relabel=True)
39 |
40 | if verbose:
41 | print("=> RegDB loaded")
42 |
43 | self.train = train
44 |
45 | self.num_train_pids, self.num_train_imgs, self.num_train_cams = self.get_imagedata_info(self.train)
46 |
47 | def _check_before_run(self):
48 | """Check if all files are available before going deeper"""
49 | if not osp.exists(self.dataset_dir):
50 | raise RuntimeError("'{}' is not available".format(self.dataset_dir))
51 | if not osp.exists(self.train_i):
52 | raise RuntimeError("'{}' is not available".format(self.train_i))
53 | if not osp.exists(self.train_v):
54 | raise RuntimeError("'{}' is not available".format(self.train_v))
55 |
56 | def _process_dir(self, dir_path_i, dir_path_v, relabel=False):
57 |
58 | with open('/data1/lidg/reid_dataset/IV-ReID/RegDB/idx/train_visible_0.txt','r') as f:
59 | files_v = f.readlines()
60 | with open('/data1/lidg/reid_dataset/IV-ReID/RegDB/idx/train_thermal_0.txt','r') as f:
61 | files_i = f.readlines()
62 |
63 | dataset = []
64 |
65 | for i in range(len(files_v)):
66 | img_v, id = files_v[i].strip().split(' ')
67 | img = osp.join(dir_path_v, img_v)
68 | pid = int(id)
69 | camid = 0
70 | dataset.append((img, pid, camid))
71 |
72 | for i in range(len(files_i)):
73 | img_i, id = files_i[i].strip().split(' ')
74 | img = osp.join(dir_path_i, img_i)
75 | pid = int(id)
76 | camid = 1
77 | dataset.append((img, pid, camid))
78 |
79 | return dataset
80 |
--------------------------------------------------------------------------------
/data/samplers/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | from .triplet_sampler import RandomIdentitySampler, RandomIdentitySampler_alignedreid, RandomIdentitySampler_SYSU, RandomIdentitySampler_SYSU_Thr, RandomIdentitySampler_RegDB # new add by gu
8 |
--------------------------------------------------------------------------------
/data/samplers/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/samplers/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/data/samplers/__pycache__/triplet_sampler.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/samplers/__pycache__/triplet_sampler.cpython-36.pyc
--------------------------------------------------------------------------------
/data/transforms/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | from .build import build_transforms
8 |
--------------------------------------------------------------------------------
/data/transforms/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/transforms/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/data/transforms/__pycache__/build.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/transforms/__pycache__/build.cpython-36.pyc
--------------------------------------------------------------------------------
/data/transforms/__pycache__/transforms.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/data/transforms/__pycache__/transforms.cpython-36.pyc
--------------------------------------------------------------------------------
/data/transforms/build.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: liaoxingyu2@jd.com
5 | """
6 |
7 | import torchvision.transforms as T
8 |
9 | from .transforms import RandomErasing
10 |
11 |
12 | def build_transforms(cfg, is_train=True):
13 | normalize_transform = T.Normalize(mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD)
14 | if is_train:
15 | transform = T.Compose([
16 | T.Resize(cfg.INPUT.SIZE_TRAIN),
17 | T.RandomHorizontalFlip(p=cfg.INPUT.PROB),
18 | T.Pad(cfg.INPUT.PADDING),
19 | T.RandomCrop(cfg.INPUT.SIZE_TRAIN),
20 | T.ToTensor(),
21 | normalize_transform,
22 | RandomErasing(probability=cfg.INPUT.RE_PROB, mean=cfg.INPUT.PIXEL_MEAN)
23 | ])
24 | else:
25 | transform = T.Compose([
26 | T.Resize(cfg.INPUT.SIZE_TEST),
27 | T.ToTensor(),
28 | normalize_transform
29 | ])
30 |
31 | return transform
32 |
--------------------------------------------------------------------------------
/data/transforms/transforms.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: liaoxingyu2@jd.com
5 | """
6 |
7 | import math
8 | import random
9 |
10 |
11 | class RandomErasing(object):
12 | """ Randomly selects a rectangle region in an image and erases its pixels.
13 | 'Random Erasing Data Augmentation' by Zhong et al.
14 | See https://arxiv.org/pdf/1708.04896.pdf
15 | Args:
16 | probability: The probability that the Random Erasing operation will be performed.
17 | sl: Minimum proportion of erased area against input image.
18 | sh: Maximum proportion of erased area against input image.
19 | r1: Minimum aspect ratio of erased area.
20 | mean: Erasing value.
21 | """
22 |
23 | def __init__(self, probability=0.5, sl=0.02, sh=0.4, r1=0.3, mean=(0.4914, 0.4822, 0.4465)):
24 | self.probability = probability
25 | self.mean = mean
26 | self.sl = sl
27 | self.sh = sh
28 | self.r1 = r1
29 |
30 | def __call__(self, img):
31 |
32 | if random.uniform(0, 1) >= self.probability:
33 | return img
34 |
35 | for attempt in range(100):
36 | area = img.size()[1] * img.size()[2]
37 |
38 | target_area = random.uniform(self.sl, self.sh) * area
39 | aspect_ratio = random.uniform(self.r1, 1 / self.r1)
40 |
41 | h = int(round(math.sqrt(target_area * aspect_ratio)))
42 | w = int(round(math.sqrt(target_area / aspect_ratio)))
43 |
44 | if w < img.size()[2] and h < img.size()[1]:
45 | x1 = random.randint(0, img.size()[1] - h)
46 | y1 = random.randint(0, img.size()[2] - w)
47 | if img.size()[0] == 3:
48 | img[0, x1:x1 + h, y1:y1 + w] = self.mean[0]
49 | img[1, x1:x1 + h, y1:y1 + w] = self.mean[1]
50 | img[2, x1:x1 + h, y1:y1 + w] = self.mean[2]
51 | else:
52 | img[0, x1:x1 + h, y1:y1 + w] = self.mean[0]
53 | return img
54 |
55 | return img
56 |
--------------------------------------------------------------------------------
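A quick way to see `RandomErasing` act on a CHW tensor (the kind `build_transforms` feeds it after `ToTensor()`); `probability=1.0` forces the erase so the effect is observable:

```python
import torch
from data.transforms.transforms import RandomErasing

eraser = RandomErasing(probability=1.0)   # always erase; mean values are the defaults above
img = torch.ones(3, 256, 128)             # a CHW tensor, as produced by ToTensor()
out = eraser(img)

erased = (out != 1.0).any(dim=0)          # pixels overwritten with the erasing mean
print(erased.float().mean())              # roughly the sampled area ratio (between sl=0.02 and sh=0.4)
```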
/engine/__pycache__/inference.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/engine/__pycache__/inference.cpython-36.pyc
--------------------------------------------------------------------------------
/engine/__pycache__/trainer.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/engine/__pycache__/trainer.cpython-36.pyc
--------------------------------------------------------------------------------
/engine/__pycache__/triplet_loss_anti.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/engine/__pycache__/triplet_loss_anti.cpython-36.pyc
--------------------------------------------------------------------------------
/engine/__pycache__/triplet_loss_ori.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/engine/__pycache__/triplet_loss_ori.cpython-36.pyc
--------------------------------------------------------------------------------
/engine/inference.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 | import logging
7 |
8 | import torch
9 | import torch.nn as nn
10 | from ignite.engine import Engine
11 |
12 | from utils.reid_metric import R1_mAP, R1_mAP_reranking
13 |
14 |
15 | def create_supervised_evaluator(model, metrics, device=None):
16 | """
17 | Factory function for creating an evaluator for supervised models
18 |
19 | Args:
20 | model (`torch.nn.Module`): the model to train
21 | metrics (dict of str - :class:`ignite.metrics.Metric`): a map of metric names to Metrics
22 | device (str, optional): device type specification (default: None).
23 | Applies to both model and batches.
24 | Returns:
25 | Engine: an evaluator engine with supervised inference function
26 | """
27 | if device:
28 | if torch.cuda.device_count() > 1:
29 | model = nn.DataParallel(model)
30 | model.to(device)
31 |
32 | def _inference(engine, batch):
33 | model.eval()
34 | with torch.no_grad():
35 | data, pids, camids = batch
36 | data = data.to(device) if torch.cuda.device_count() >= 1 else data
37 | feat = model(data)
38 | return feat, pids, camids
39 |
40 | engine = Engine(_inference)
41 |
42 | for name, metric in metrics.items():
43 | metric.attach(engine, name)
44 |
45 | return engine
46 |
47 |
48 | def inference(cfg,model,val_loader,num_query):
49 | device = cfg.MODEL.DEVICE
50 |
51 | logger = logging.getLogger("reid_baseline.inference")
52 | logger.info("Enter inferencing")
53 | if cfg.TEST.RE_RANKING == 'no':
54 | print("Create evaluator")
55 | evaluator = create_supervised_evaluator(model, metrics={'r1_mAP': R1_mAP(num_query, max_rank=50, feat_norm=cfg.TEST.FEAT_NORM)},
56 | device=device)
57 | elif cfg.TEST.RE_RANKING == 'yes':
58 | print("Create evaluator for reranking")
59 | evaluator = create_supervised_evaluator(model, metrics={'r1_mAP': R1_mAP_reranking(num_query, max_rank=50, feat_norm=cfg.TEST.FEAT_NORM)},
60 | device=device)
61 | else:
62 |         raise ValueError("Unsupported TEST.RE_RANKING option '{}': expected 'no' or 'yes'.".format(cfg.TEST.RE_RANKING))
63 |
64 | evaluator.run(val_loader)
65 | cmc, mAP = evaluator.state.metrics['r1_mAP']
66 | logger.info('Validation Results')
67 | logger.info("mAP: {:.1%}".format(mAP))
68 | for r in [1, 5, 10]:
69 | logger.info("CMC curve, Rank-{:<3}:{:.1%}".format(r, cmc[r - 1]))
70 |
--------------------------------------------------------------------------------
/imgs/pipeline.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/imgs/pipeline.jpg
--------------------------------------------------------------------------------
/layers/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/layers/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/layers/__pycache__/center_loss.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/layers/__pycache__/center_loss.cpython-36.pyc
--------------------------------------------------------------------------------
/layers/__pycache__/cluster_loss.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/layers/__pycache__/cluster_loss.cpython-36.pyc
--------------------------------------------------------------------------------
/layers/__pycache__/range_loss.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/layers/__pycache__/range_loss.cpython-36.pyc
--------------------------------------------------------------------------------
/layers/__pycache__/triplet_loss.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/layers/__pycache__/triplet_loss.cpython-36.pyc
--------------------------------------------------------------------------------
/layers/center_loss.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import torch
4 | from torch import nn
5 |
6 |
7 | class CenterLoss(nn.Module):
8 | """Center loss.
9 |
10 | Reference:
11 | Wen et al. A Discriminative Feature Learning Approach for Deep Face Recognition. ECCV 2016.
12 |
13 | Args:
14 | num_classes (int): number of classes.
15 | feat_dim (int): feature dimension.
16 | """
17 |
18 | def __init__(self, num_classes=751, feat_dim=2048, use_gpu=True):
19 | super(CenterLoss, self).__init__()
20 | self.num_classes = num_classes
21 | self.feat_dim = feat_dim
22 | self.use_gpu = use_gpu
23 |
24 | if self.use_gpu:
25 | self.centers = nn.Parameter(torch.randn(self.num_classes, self.feat_dim).cuda())
26 | else:
27 | self.centers = nn.Parameter(torch.randn(self.num_classes, self.feat_dim))
28 |
29 | def forward(self, x, labels):
30 | """
31 | Args:
32 | x: feature matrix with shape (batch_size, feat_dim).
33 |             labels: ground truth labels with shape (batch_size).
34 | """
35 | assert x.size(0) == labels.size(0), "features.size(0) is not equal to labels.size(0)"
36 |
37 | batch_size = x.size(0)
38 | distmat = torch.pow(x, 2).sum(dim=1, keepdim=True).expand(batch_size, self.num_classes) + \
39 | torch.pow(self.centers, 2).sum(dim=1, keepdim=True).expand(self.num_classes, batch_size).t()
40 | distmat.addmm_(1, -2, x, self.centers.t())
41 |
42 | classes = torch.arange(self.num_classes).long()
43 | if self.use_gpu: classes = classes.cuda()
44 | labels = labels.unsqueeze(1).expand(batch_size, self.num_classes)
45 | mask = labels.eq(classes.expand(batch_size, self.num_classes))
46 |
47 | dist = []
48 | for i in range(batch_size):
49 | value = distmat[i][mask[i]]
50 | value = value.clamp(min=1e-12, max=1e+12) # for numerical stability
51 |
52 | an = ((torch.sum(distmat[i])-value)/(distmat.shape[1]-1)).clamp(min=1e-12, max=1e+12)
53 |
54 | #dist.append(value/an)
55 | dist.append(torch.clamp(0.3 + value - an, min=0.0))
56 |
57 | #dist.append(value)
58 |
59 | dist = torch.cat(dist)
60 | loss = dist.mean()
61 | return loss
62 |
63 |
64 | if __name__ == '__main__':
65 | use_gpu = False
66 | center_loss = CenterLoss(use_gpu=use_gpu)
67 | features = torch.rand(16, 2048)
68 | targets = torch.Tensor([0, 1, 2, 3, 2, 3, 1, 4, 5, 3, 2, 1, 0, 0, 5, 4]).long()
69 | if use_gpu:
70 | features = torch.rand(16, 2048).cuda()
71 | targets = torch.Tensor([0, 1, 2, 3, 2, 3, 1, 4, 5, 3, 2, 1, 0, 0, 5, 4]).cuda()
72 |
73 | loss = center_loss(features, targets)
74 | print(loss)
--------------------------------------------------------------------------------
/modeling/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 |
8 | # from .baseline import Baseline
9 | # from .attention import Baseline
10 | from .AAAI import Baseline
11 |
12 |
13 | def build_model(cfg, num_classes):
14 | model = Baseline(num_classes, cfg.MODEL.LAST_STRIDE, cfg.MODEL.PRETRAIN_PATH, cfg.MODEL.NECK, cfg.TEST.NECK_FEAT, cfg.MODEL.NAME, cfg.MODEL.PRETRAIN_CHOICE)
15 | return model
16 |
--------------------------------------------------------------------------------
/modeling/__pycache__/AAAI.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/__pycache__/AAAI.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/__pycache__/attention.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/__pycache__/attention.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/__pycache__/baseline.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/__pycache__/baseline.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 |
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/alexnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/alexnet.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/densenet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/densenet.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/senet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/senet.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/vgg.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/vgg.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/__pycache__/vgg2.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/modeling/backbones/__pycache__/vgg2.cpython-36.pyc
--------------------------------------------------------------------------------
/modeling/backbones/alexnet.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch.utils.model_zoo as model_zoo
3 | import math
4 | import torch.nn.functional as F
5 |
6 | __all__ = ['AlexNet', 'alexnet']
7 |
8 |
9 | model_urls = {
10 | 'alexnet': 'https://download.pytorch.org/models/alexnet-owt-4df8aa71.pth',
11 | }
12 |
13 |
14 | class AlexNet(nn.Module):
15 |
16 | def __init__(self):
17 | super(AlexNet, self).__init__()
18 | self.features = nn.Sequential(
19 | nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
20 | nn.ReLU(inplace=True),
21 | nn.MaxPool2d(kernel_size=3, stride=2),
22 | nn.Conv2d(64, 192, kernel_size=5, padding=2),
23 | nn.ReLU(inplace=True),
24 | nn.MaxPool2d(kernel_size=3, stride=2),
25 | nn.Conv2d(192, 384, kernel_size=3, padding=1),
26 | nn.ReLU(inplace=True),
27 | nn.Conv2d(384, 256, kernel_size=3, padding=1),
28 | nn.ReLU(inplace=True),
29 | nn.Conv2d(256, 256, kernel_size=3, padding=1),
30 | nn.ReLU(inplace=True),
31 | nn.MaxPool2d(kernel_size=3, stride=2),
32 | )
33 | self.classifier = nn.Sequential(
34 | nn.Dropout(),
35 | nn.Linear(256 * 6 * 6, 4096),
36 | nn.ReLU(inplace=True),
37 | nn.Dropout(),
38 | nn.Linear(4096, 4096),
39 | nn.ReLU(inplace=True),
40 | )
41 |
42 | #for m in self.modules():
43 | # if isinstance(m, nn.Linear):
44 | # m.weight.data.normal_(0, 0.01)
45 |
46 | def forward(self, x):
47 | x = self.features(x)
48 | x = x.view(x.size(0), -1)
49 | x = self.classifier(x)
50 | return x
51 |
52 | def alexnet(pretrained=True, **kwargs):
53 | model = AlexNet(**kwargs)
54 | if pretrained:
55 | ckpt = model_zoo.load_url(model_urls['alexnet'])
56 | model.load_state_dict(remove_fc(ckpt))
57 | return model
58 |
59 | def remove_fc(state_dict):
60 |
61 | #del state_dict['classifier.1.weight']
62 | #del state_dict['classifier.1.bias']
63 | #del state_dict['classifier.4.weight']
64 | #del state_dict['classifier.4.bias']
65 | del state_dict['classifier.6.weight']
66 | del state_dict['classifier.6.bias']
67 |
68 | return state_dict
69 |
70 |
71 |
72 |
73 |
74 |
--------------------------------------------------------------------------------
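A shape check for the truncated AlexNet above: the classifier stops at the second 4096-d fully connected layer, and `remove_fc` drops the original 1000-way classifier from the pretrained checkpoint. Instantiating the class directly avoids the pretrained download:

```python
import torch
from modeling.backbones.alexnet import AlexNet

model = AlexNet()                        # no download; alexnet(pretrained=True) would fetch weights
model.eval()
with torch.no_grad():
    feat = model(torch.randn(2, 3, 224, 224))   # 224x224 input -> 6x6 feature map before the fc layers
print(feat.shape)                        # torch.Size([2, 4096])
```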
/modeling/backbones/utils.py:
--------------------------------------------------------------------------------
1 | try:
2 | from torch.hub import load_state_dict_from_url
3 | except ImportError:
4 | from torch.utils.model_zoo import load_url as load_state_dict_from_url
5 |
--------------------------------------------------------------------------------
/scripts/Test-reranking-tri_center-feat_after_bn-cos-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss and re-ranking : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | # with re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('1')" DATASETS.NAMES "('dukemtmc')" TEST.RE_RANKING "('yes')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/Test-reranking-tri_center-feat_after_bn-cos-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss and re-ranking : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | # with re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" TEST.RE_RANKING "('yes')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/Test-tri_center-feat_after_bn-cos-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss, without re-ranking: 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('1')" DATASETS.NAMES "('dukemtmc')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/Test-tri_center-feat_after_bn-cos-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/Test-without_center-feat_after_bn-cos-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss without re-ranking: 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('1')" DATASETS.NAMES "('dukemtmc')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-all-tricks-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/Test-without_center-feat_after_bn-cos-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss without re-ranking: 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on (=raw all trick, softmax_triplet.yml)
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/pretrain_choice-tri_center-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('2')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" MODEL.PRETRAIN_PATH "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-pretrain_choice_all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_2.pth')" OUTPUT_DIR "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-pretrain_choice_all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005')"
--------------------------------------------------------------------------------
/scripts/seresnext50-tri_center-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('1')" MODEL.NAME "('se_resnext50')" MODEL.PRETRAIN_PATH "('/home/haoluo/.torch/models/se_resnext50_32x4d-a260b3a4.pth')" DATASETS.NAMES "('dukemtmc')" DATASETS.ROOT_DIR "('/home/haoluo/data')" OUTPUT_DIR "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-seresnext50-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005')"
--------------------------------------------------------------------------------
/scripts/seresnext50-tri_center-market.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
3 | # Dataset 1: market1501
4 | # imagesize: 256x128
5 | # batchsize: 16x4
6 | # warmup_step 10
7 | # random erase prob 0.5
8 | # labelsmooth: on
9 | # last stride 1
10 | # bnneck on
11 | # with center loss
12 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0')" MODEL.NAME "('se_resnext50')" MODEL.PRETRAIN_PATH "('/home/haoluo/.torch/models/se_resnext50_32x4d-a260b3a4.pth')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/lidg/data')" OUTPUT_DIR "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-seresnext50-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005')"
--------------------------------------------------------------------------------
/scripts/test-sysu-without_center.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss without re-ranking: 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on (=raw all trick, softmax_triplet.yml)
2 | # Dataset: sysu
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('sysu')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/test-sysu.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | # without re-ranking
12 | python3 tools/test.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/haoluo/data')" MODEL.PRETRAIN_CHOICE "('self')" TEST.WEIGHT "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/market1501/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/scripts/tri_center-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('3')" DATASETS.NAMES "('dukemtmc')" DATASETS.ROOT_DIR "('/home/haoluo/data')" OUTPUT_DIR "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005')"
--------------------------------------------------------------------------------
/scripts/tri_center-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005 /data1/lidg/reid_dataset/IV-ReID/split /home/lidg/reid-strong-baseline/data
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('9')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/lidg/reid-strong-baseline/data')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/market1501/Experiment-all-tricks-tri_center-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005')"
--------------------------------------------------------------------------------
/scripts/tri_center-regdb.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005 /data1/lidg/reid_dataset/IV-ReID/split /home/lidg/reid-strong-baseline/data
2 | # Dataset: regdb
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('2,3')" DATASETS.NAMES "('regdb')" DATASETS.ROOT_DIR "('/data1/lidg/reid_dataset/IV-ReID/RegDB')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/regdb/rgb-infrared-gray-cmt-cc')"
--------------------------------------------------------------------------------
/scripts/tri_center-sysu.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks with center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on-triplet_centerloss0_0005 /data1/lidg/reid_dataset/IV-ReID/split /home/lidg/reid-strong-baseline/data
2 | # Dataset: sysu
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # with center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet_with_center.yml' MODEL.DEVICE_ID "('0,5')" DATASETS.NAMES "('sysu')" DATASETS.ROOT_DIR "('/data1/lidg/reid_dataset/IV-ReID/split')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/sysu/baseline-cc')"
--------------------------------------------------------------------------------
/scripts/without_center-duke.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 2: dukemtmc
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('1')" DATASETS.NAMES "('dukemtmc')" DATASETS.ROOT_DIR "('/home/haoluo/data')" OUTPUT_DIR "('/home/haoluo/log/gu/reid_baseline_review/Opensource_test/dukemtmc/Experiment-all-tricks-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on')"
--------------------------------------------------------------------------------
/scripts/without_center-market.bk.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('6')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/lidg/reid-strong-baseline/data')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/market1501/market-baseline-no')"
--------------------------------------------------------------------------------
/scripts/without_center-market.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset 1: market1501
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('2')" DATASETS.NAMES "('market1501')" DATASETS.ROOT_DIR "('/home/lidg/reid-strong-baseline/data')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/market1501/Experiment-all-tricks-256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on')"
--------------------------------------------------------------------------------
/scripts/without_center-regdb.sh:
--------------------------------------------------------------------------------
1 | # Experiment all tricks without center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
2 | # Dataset: regdb
3 | # imagesize: 256x128
4 | # batchsize: 16x4
5 | # warmup_step 10
6 | # random erase prob 0.5
7 | # labelsmooth: on
8 | # last stride 1
9 | # bnneck on
10 | # without center loss
11 | python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('6,7')" DATASETS.NAMES "('regdb')" DATASETS.ROOT_DIR "('/data1/lidg/reid_dataset/IV-ReID/RegDB')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/regdb/rgb-infrared-gray-cmt')"
--------------------------------------------------------------------------------
/scripts/without_center-sysu.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Experiment all tricks without center loss : 256x128-bs16x4-warmup10-erase0_5-labelsmooth_on-laststride1-bnneck_on
3 | # Dataset: sysu
4 | # imagesize: 256x128
5 | # batchsize: 16x4
6 | # warmup_step 10
7 | # random erase prob 0.5
8 | # labelsmooth: on
9 | # last stride 1
10 | # bnneck on
11 | # without center loss
12 | python3 tools/train.py --config_file='configs/softmax_triplet.yml' MODEL.DEVICE_ID "('6,7')" DATASETS.NAMES "('sysu')" DATASETS.ROOT_DIR "('/data1/lidg/reid_dataset/IV-ReID/split')" OUTPUT_DIR "('/home/lidg/reid-strong-baseline/logs/sysu/baseline-new')"
--------------------------------------------------------------------------------
/solver/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | from .build import make_optimizer, make_optimizer_with_center
8 | from .lr_scheduler import WarmupMultiStepLR
--------------------------------------------------------------------------------
/solver/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/solver/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/solver/__pycache__/build.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/solver/__pycache__/build.cpython-36.pyc
--------------------------------------------------------------------------------
/solver/__pycache__/lr_scheduler.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/solver/__pycache__/lr_scheduler.cpython-36.pyc
--------------------------------------------------------------------------------
/solver/build.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import torch
8 |
9 |
10 | def make_optimizer(cfg, model):
11 | params = []
12 | for key, value in model.named_parameters():
13 | if not value.requires_grad:
14 | continue
15 | lr = cfg.SOLVER.BASE_LR
16 | weight_decay = cfg.SOLVER.WEIGHT_DECAY
17 | if "bias" in key:
18 | lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR
19 | weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
20 | params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
21 | if cfg.SOLVER.OPTIMIZER_NAME == 'SGD':
22 | optimizer = getattr(torch.optim, cfg.SOLVER.OPTIMIZER_NAME)(params, momentum=cfg.SOLVER.MOMENTUM)
23 | else:
24 | optimizer = getattr(torch.optim, cfg.SOLVER.OPTIMIZER_NAME)(params)
25 | return optimizer
26 |
27 |
28 | def make_optimizer_with_center(cfg, model, center_criterion):
29 | params = []
30 | for key, value in model.named_parameters():
31 | if not value.requires_grad:
32 | continue
33 | lr = cfg.SOLVER.BASE_LR
34 | weight_decay = cfg.SOLVER.WEIGHT_DECAY
35 | if "bias" in key:
36 | lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR
37 | weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS
38 | params += [{"params": [value], "lr": lr, "weight_decay": weight_decay}]
39 | if cfg.SOLVER.OPTIMIZER_NAME == 'SGD':
40 | optimizer = getattr(torch.optim, cfg.SOLVER.OPTIMIZER_NAME)(params, momentum=cfg.SOLVER.MOMENTUM)
41 | else:
42 | optimizer = getattr(torch.optim, cfg.SOLVER.OPTIMIZER_NAME)(params)
43 | optimizer_center = torch.optim.SGD(center_criterion.parameters(), lr=cfg.SOLVER.CENTER_LR)
44 | return optimizer, optimizer_center
45 |
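A minimal usage sketch of the two factories above together with the WarmupMultiStepLR scheduler defined below. The model is a stand-in for build_model(cfg, num_classes), and the milestone/warmup numbers are illustrative placeholders (warmup_iters=10 echoes the "warmup_step 10" script comments), not values read from this repo's configs:

import torch.nn as nn

from config import cfg
from solver import make_optimizer, WarmupMultiStepLR

model = nn.Linear(2048, 751)            # stand-in for build_model(cfg, num_classes)
optimizer = make_optimizer(cfg, model)  # bias params get BIAS_LR_FACTOR / WEIGHT_DECAY_BIAS
scheduler = WarmupMultiStepLR(optimizer, milestones=[40, 70], gamma=0.1,
                              warmup_factor=1.0 / 3, warmup_iters=10,
                              warmup_method="linear")

for epoch in range(120):                # 120 epochs matches the *_model_120.pth checkpoints
    # ... run one training epoch here ...
    scheduler.step()                    # epoch-level stepping, as in tests/lr_scheduler_test.py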
--------------------------------------------------------------------------------
/solver/lr_scheduler.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: liaoxingyu
4 | @contact: sherlockliao01@gmail.com
5 | """
6 | from bisect import bisect_right
7 | import torch
8 |
9 |
10 | # FIXME ideally this would be achieved with a CombinedLRScheduler,
11 | # separating MultiStepLR with WarmupLR
12 | # but the current LRScheduler design doesn't allow it
13 |
14 | class WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler):
15 | def __init__(
16 | self,
17 | optimizer,
18 | milestones,
19 | gamma=0.1,
20 | warmup_factor=1.0 / 3,
21 | warmup_iters=500,
22 | warmup_method="linear",
23 | last_epoch=-1,
24 | ):
25 | if not list(milestones) == sorted(milestones):
26 | raise ValueError(
27 | "Milestones should be a list of increasing"
28 | " integers. Got {}".format(milestones)
29 | )
30 |
31 | if warmup_method not in ("constant", "linear"):
32 | raise ValueError(
33 | "Only 'constant' or 'linear' warmup_method accepted"
34 | "got {}".format(warmup_method)
35 | )
36 | self.milestones = milestones
37 | self.gamma = gamma
38 | self.warmup_factor = warmup_factor
39 | self.warmup_iters = warmup_iters
40 | self.warmup_method = warmup_method
41 | super(WarmupMultiStepLR, self).__init__(optimizer, last_epoch)
42 |
43 | def get_lr(self):
44 | warmup_factor = 1
45 | if self.last_epoch < self.warmup_iters:
46 | if self.warmup_method == "constant":
47 | warmup_factor = self.warmup_factor
48 | elif self.warmup_method == "linear":
49 | alpha = self.last_epoch / self.warmup_iters
50 | warmup_factor = self.warmup_factor * (1 - alpha) + alpha
51 | return [
52 | base_lr
53 | * warmup_factor
54 | * self.gamma ** bisect_right(self.milestones, self.last_epoch)
55 | for base_lr in self.base_lrs
56 | ]
57 |
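For concreteness, a worked example of the get_lr rule above with placeholder settings (base_lr=3.5e-4, linear warmup over 10 epochs starting from a factor of 1/3, milestones=[40, 70], gamma=0.1; none of these are claimed to be this repo's defaults):

from bisect import bisect_right

base_lr, milestones, gamma = 3.5e-4, [40, 70], 0.1
warmup_factor, warmup_iters = 1.0 / 3, 10

def lr_at(epoch):
    factor = 1.0
    if epoch < warmup_iters:                      # linear warmup phase
        alpha = epoch / warmup_iters
        factor = warmup_factor * (1 - alpha) + alpha
    return base_lr * factor * gamma ** bisect_right(milestones, epoch)

# lr_at(0)  ~= 1.17e-4   (warmup starts at base_lr / 3)
# lr_at(5)  ~= 2.33e-4   (halfway through warmup)
# lr_at(10) == 3.5e-4    (warmup finished)
# lr_at(40) == 3.5e-5    (first milestone, decayed by gamma)
# lr_at(70) == 3.5e-6    (second milestone)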
--------------------------------------------------------------------------------
/test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | python tools/test-sysu.py --config_file='configs/all.yml' MODEL.DEVICE_ID "('9')" TEST.WEIGHT "('./logs/sysu/cosine/resnet50_model_120.pth')"
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
--------------------------------------------------------------------------------
/tests/lr_scheduler_test.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import unittest
3 |
4 | import torch
5 | from torch import nn
6 |
7 | sys.path.append('.')
8 | from solver.lr_scheduler import WarmupMultiStepLR
9 | from solver.build import make_optimizer
10 | from config import cfg
11 |
12 |
13 | class MyTestCase(unittest.TestCase):
14 | def test_something(self):
15 | net = nn.Linear(10, 10)
16 | optimizer = make_optimizer(cfg, net)
17 | lr_scheduler = WarmupMultiStepLR(optimizer, [20, 40], warmup_iters=10)
18 | for i in range(50):
19 | lr_scheduler.step()
20 | for j in range(3):
21 | print(i, lr_scheduler.get_lr()[0])
22 | optimizer.step()
23 |
24 |
25 | if __name__ == '__main__':
26 | unittest.main()
27 |
--------------------------------------------------------------------------------
/tools/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
--------------------------------------------------------------------------------
/tools/test.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import argparse
8 | import os
9 | import sys
10 | from os import mkdir
11 |
12 | import torch
13 | from torch.backends import cudnn
14 |
15 | sys.path.append('.')
16 | from config import cfg
17 | from data import make_data_loader
18 | from engine.inference import inference
19 | from modeling import build_model
20 | from utils.logger import setup_logger
21 |
22 |
23 | def main():
24 | parser = argparse.ArgumentParser(description="ReID Baseline Inference")
25 | parser.add_argument("--config_file", default="", help="path to config file", type=str)
26 | parser.add_argument("opts", help="Modify config options using the command-line", default=None,nargs=argparse.REMAINDER)
27 |
28 | args = parser.parse_args()
29 |
30 | num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1
31 |
32 | if args.config_file != "":
33 | cfg.merge_from_file(args.config_file)
34 | cfg.merge_from_list(args.opts)
35 | cfg.freeze()
36 |
37 | output_dir = cfg.OUTPUT_DIR
38 | if output_dir and not os.path.exists(output_dir):
39 | mkdir(output_dir)
40 |
41 | logger = setup_logger("reid_baseline", output_dir, 0)
42 | logger.info("Using {} GPUS".format(num_gpus))
43 | logger.info(args)
44 |
45 | if args.config_file != "":
46 | logger.info("Loaded configuration file {}".format(args.config_file))
47 | with open(args.config_file, 'r') as cf:
48 | config_str = "\n" + cf.read()
49 | logger.info(config_str)
50 | logger.info("Running with config:\n{}".format(cfg))
51 |
52 | if cfg.MODEL.DEVICE == "cuda":
53 | os.environ['CUDA_VISIBLE_DEVICES'] = cfg.MODEL.DEVICE_ID
54 | cudnn.benchmark = True
55 |
56 | train_loader, val_loader, num_query, num_classes = make_data_loader(cfg)
57 | model = build_model(cfg, num_classes)
58 |
59 | #model.load_param(cfg.TEST.WEIGHT)
60 | model.load_state_dict(torch.load(cfg.TEST.WEIGHT))
61 |
62 | inference(cfg, model, val_loader, num_query)
63 |
64 |
65 | if __name__ == '__main__':
66 | main()
67 |
--------------------------------------------------------------------------------
/train.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | python tools/train.py --config_file='configs/all.yml' MODEL.DEVICE_ID "('9')" OUTPUT_DIR "('./logs/sysu/all')"
--------------------------------------------------------------------------------
/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 |
--------------------------------------------------------------------------------
/utils/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/data_loader.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/data_loader.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/data_manager.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/data_manager.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/ecn.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/ecn.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/eval_metrics.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/eval_metrics.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/iotools.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/iotools.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/logger.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/logger.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/model.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/model.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/re_ranking.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/re_ranking.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/reid_metric.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/reid_metric.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/utils/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/utils/iotools.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import errno
8 | import json
9 | import os
10 |
11 | import os.path as osp
12 |
13 |
14 | def mkdir_if_missing(directory):
15 | if not osp.exists(directory):
16 | try:
17 | os.makedirs(directory)
18 | except OSError as e:
19 | if e.errno != errno.EEXIST:
20 | raise
21 |
22 |
23 | def check_isfile(path):
24 | isfile = osp.isfile(path)
25 | if not isfile:
26 | print("=> Warning: no file found at '{}' (ignored)".format(path))
27 | return isfile
28 |
29 |
30 | def read_json(fpath):
31 | with open(fpath, 'r') as f:
32 | obj = json.load(f)
33 | return obj
34 |
35 |
36 | def write_json(obj, fpath):
37 | mkdir_if_missing(osp.dirname(fpath))
38 | with open(fpath, 'w') as f:
39 | json.dump(obj, f, indent=4, separators=(',', ': '))
40 |
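A quick round-trip sketch with the helpers above; the path and the numbers are purely hypothetical:

from utils.iotools import read_json, write_json

stats = {"rank1": 0.0, "mAP": 0.0}                      # placeholder values
write_json(stats, "./logs/example/stats.json")           # mkdir_if_missing creates ./logs/example
assert read_json("./logs/example/stats.json") == stats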
--------------------------------------------------------------------------------
/utils/logger.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 | """
3 | @author: sherlock
4 | @contact: sherlockliao01@gmail.com
5 | """
6 |
7 | import logging
8 | import os
9 | import sys
10 |
11 |
12 | def setup_logger(name, save_dir, distributed_rank):
13 | logger = logging.getLogger(name)
14 | logger.setLevel(logging.DEBUG)
15 | # don't log results for the non-master process
16 | if distributed_rank > 0:
17 | return logger
18 | ch = logging.StreamHandler(stream=sys.stdout)
19 | ch.setLevel(logging.DEBUG)
20 | formatter = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")
21 | ch.setFormatter(formatter)
22 | logger.addHandler(ch)
23 |
24 | if save_dir:
25 | fh = logging.FileHandler(os.path.join(save_dir, "log.txt"), mode='w')
26 | fh.setLevel(logging.DEBUG)
27 | fh.setFormatter(formatter)
28 | logger.addHandler(fh)
29 |
30 | return logger
31 |
--------------------------------------------------------------------------------
/utils/pre_process_sysu.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | from PIL import Image
3 | import pdb
4 | import os
5 |
6 | data_path = '/data4/lidg/IV-ReID/SYSU'
7 |
8 | rgb_cameras = ['cam1','cam2','cam4','cam5']
9 | ir_cameras = ['cam3','cam6']
10 |
11 | # load id info
12 | file_path_train = os.path.join(data_path,'exp/train_id.txt')
13 | file_path_val = os.path.join(data_path,'exp/val_id.txt')
14 | with open(file_path_train, 'r') as file:
15 | ids = file.read().splitlines()
16 | ids = [int(y) for y in ids[0].split(',')]
17 | id_train = ["%04d" % x for x in ids]
18 |
19 | with open(file_path_val, 'r') as file:
20 | ids = file.read().splitlines()
21 | ids = [int(y) for y in ids[0].split(',')]
22 | id_val = ["%04d" % x for x in ids]
23 |
24 | # combine train and val split
25 | id_train.extend(id_val)
26 |
27 | files_rgb = []
28 | files_ir = []
29 |
30 | for id in sorted(id_train):
31 | for cam in rgb_cameras:
32 | img_dir = os.path.join(data_path,cam,id)
33 | if os.path.isdir(img_dir):
34 | new_files = sorted([img_dir+'/'+i for i in os.listdir(img_dir)])
35 | files_rgb.extend(new_files)
36 |
37 | for cam in ir_cameras:
38 | img_dir = os.path.join(data_path,cam,id)
39 | if os.path.isdir(img_dir):
40 | new_files = sorted([img_dir+'/'+i for i in os.listdir(img_dir)])
41 | files_ir.extend(new_files)
42 |
43 | # relabel
44 | pid_container = set()
45 | for img_path in files_ir:
46 | pid = int(img_path[-13:-9])
47 | pid_container.add(pid)
48 | pid2label = {pid:label for label, pid in enumerate(pid_container)}
49 | fix_image_width = 144
50 | fix_image_height = 288
51 |
52 | def read_imgs(train_image):
53 | train_img = []
54 | train_label = []
55 | for img_path in train_image:
56 | # img
57 | img = Image.open(img_path)
58 | img = img.resize((fix_image_width, fix_image_height), Image.ANTIALIAS)
59 | pix_array = np.array(img)
60 |
61 | train_img.append(pix_array)
62 |
63 | # label
64 | pid = int(img_path[-13:-9])
65 | pid = pid2label[pid]
66 | train_label.append(pid)
67 | return np.array(train_img), np.array(train_label)
68 |
69 | # import pdb; pdb.set_trace()  # debug breakpoint disabled so the script runs unattended
70 |
71 | # rgb images
72 | train_img, train_label = read_imgs(files_rgb)
73 | np.save(data_path + 'train_rgb_resized_img.npy', train_img)
74 | np.save(data_path + 'train_rgb_resized_label.npy', train_label)
75 |
76 | # ir images
77 | train_img, train_label = read_imgs(files_ir)
78 | np.save(data_path + 'train_ir_resized_img.npy', train_img)
79 | np.save(data_path + 'train_ir_resized_label.npy', train_label)
80 |
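The slice img_path[-13:-9] used above relies on the SYSU-MM01 directory layout, where images live under <root>/<cam>/<4-digit id>/<4-digit index>.jpg, so the last 13 characters are "IIII/NNNN.jpg" and the slice recovers the identity folder. A tiny sanity check with a purely illustrative path:

img_path = "/data4/lidg/IV-ReID/SYSU/cam3/0007/0001.jpg"   # illustrative path only
assert img_path[-13:-9] == "0007"                           # the 4-digit identity folder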
--------------------------------------------------------------------------------
/venv/bin/activate:
--------------------------------------------------------------------------------
1 | # This file must be used with "source bin/activate" *from bash*
2 | # you cannot run it directly
3 |
4 | deactivate () {
5 | # reset old environment variables
6 | if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
7 | PATH="$_OLD_VIRTUAL_PATH"
8 | export PATH
9 | unset _OLD_VIRTUAL_PATH
10 | fi
11 | if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
12 | PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
13 | export PYTHONHOME
14 | unset _OLD_VIRTUAL_PYTHONHOME
15 | fi
16 |
17 | # This should detect bash and zsh, which have a hash command that must
18 | # be called to get it to forget past commands. Without forgetting
19 | # past commands the $PATH changes we made may not be respected
20 | if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
21 | hash -r
22 | fi
23 |
24 | if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
25 | PS1="$_OLD_VIRTUAL_PS1"
26 | export PS1
27 | unset _OLD_VIRTUAL_PS1
28 | fi
29 |
30 | unset VIRTUAL_ENV
31 | if [ ! "$1" = "nondestructive" ] ; then
32 | # Self destruct!
33 | unset -f deactivate
34 | fi
35 | }
36 |
37 | # unset irrelevant variables
38 | deactivate nondestructive
39 |
40 | VIRTUAL_ENV="/home/zxh/Cosine-IVReID/venv"
41 | export VIRTUAL_ENV
42 |
43 | _OLD_VIRTUAL_PATH="$PATH"
44 | PATH="$VIRTUAL_ENV/bin:$PATH"
45 | export PATH
46 |
47 | # unset PYTHONHOME if set
48 | # this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
49 | # could use `if (set -u; : $PYTHONHOME) ;` in bash
50 | if [ -n "$PYTHONHOME" ] ; then
51 | _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
52 | unset PYTHONHOME
53 | fi
54 |
55 | if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
56 | _OLD_VIRTUAL_PS1="$PS1"
57 | if [ "x(venv) " != x ] ; then
58 | PS1="(venv) $PS1"
59 | else
60 | if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
61 | # special case for Aspen magic directories
62 | # see http://www.zetadev.com/software/aspen/
63 | PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
64 | else
65 | PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
66 | fi
67 | fi
68 | export PS1
69 | fi
70 |
71 | # This should detect bash and zsh, which have a hash command that must
72 | # be called to get it to forget past commands. Without forgetting
73 | # past commands the $PATH changes we made may not be respected
74 | if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
75 | hash -r
76 | fi
77 |
--------------------------------------------------------------------------------
/venv/bin/activate.csh:
--------------------------------------------------------------------------------
1 | # This file must be used with "source bin/activate.csh" *from csh*.
2 | # You cannot run it directly.
3 | # Created by Davide Di Blasi .
4 | # Ported to Python 3.3 venv by Andrew Svetlov
5 |
6 | alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
7 |
8 | # Unset irrelevant variables.
9 | deactivate nondestructive
10 |
11 | setenv VIRTUAL_ENV "/home/zxh/Cosine-IVReID/venv"
12 |
13 | set _OLD_VIRTUAL_PATH="$PATH"
14 | setenv PATH "$VIRTUAL_ENV/bin:$PATH"
15 |
16 |
17 | set _OLD_VIRTUAL_PROMPT="$prompt"
18 |
19 | if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
20 | if ("venv" != "") then
21 | set env_name = "venv"
22 | else
23 | if (`basename "VIRTUAL_ENV"` == "__") then
24 | # special case for Aspen magic directories
25 | # see http://www.zetadev.com/software/aspen/
26 | set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
27 | else
28 | set env_name = `basename "$VIRTUAL_ENV"`
29 | endif
30 | endif
31 | set prompt = "[$env_name] $prompt"
32 | unset env_name
33 | endif
34 |
35 | alias pydoc python -m pydoc
36 |
37 | rehash
38 |
--------------------------------------------------------------------------------
/venv/bin/activate.fish:
--------------------------------------------------------------------------------
1 | # This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
2 | # you cannot run it directly
3 |
4 | function deactivate -d "Exit virtualenv and return to normal shell environment"
5 | # reset old environment variables
6 | if test -n "$_OLD_VIRTUAL_PATH"
7 | set -gx PATH $_OLD_VIRTUAL_PATH
8 | set -e _OLD_VIRTUAL_PATH
9 | end
10 | if test -n "$_OLD_VIRTUAL_PYTHONHOME"
11 | set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
12 | set -e _OLD_VIRTUAL_PYTHONHOME
13 | end
14 |
15 | if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
16 | functions -e fish_prompt
17 | set -e _OLD_FISH_PROMPT_OVERRIDE
18 | . ( begin
19 | printf "function fish_prompt\n\t#"
20 | functions _old_fish_prompt
21 | end | psub )
22 | functions -e _old_fish_prompt
23 | end
24 |
25 | set -e VIRTUAL_ENV
26 | if test "$argv[1]" != "nondestructive"
27 | # Self destruct!
28 | functions -e deactivate
29 | end
30 | end
31 |
32 | # unset irrelevant variables
33 | deactivate nondestructive
34 |
35 | set -gx VIRTUAL_ENV "/home/zxh/Cosine-IVReID/venv"
36 |
37 | set -gx _OLD_VIRTUAL_PATH $PATH
38 | set -gx PATH "$VIRTUAL_ENV/bin" $PATH
39 |
40 | # unset PYTHONHOME if set
41 | if set -q PYTHONHOME
42 | set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
43 | set -e PYTHONHOME
44 | end
45 |
46 | if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
47 | # fish uses a function instead of an env var to generate the prompt.
48 |
49 | # save the current fish_prompt function as the function _old_fish_prompt
50 | . ( begin
51 | printf "function _old_fish_prompt\n\t#"
52 | functions fish_prompt
53 | end | psub )
54 |
55 | # with the original prompt function renamed, we can override with our own.
56 | function fish_prompt
57 | # Prompt override?
58 | if test -n "$(venv) "
59 | printf "%s%s%s" "$(venv) " (set_color normal) (_old_fish_prompt)
60 | return
61 | end
62 | # ...Otherwise, prepend env
63 | set -l _checkbase (basename "$VIRTUAL_ENV")
64 | if test $_checkbase = "__"
65 | # special case for Aspen magic directories
66 | # see http://www.zetadev.com/software/aspen/
67 | printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
68 | else
69 | printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
70 | end
71 | end
72 |
73 | set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
74 | end
75 |
--------------------------------------------------------------------------------
/venv/bin/easy_install:
--------------------------------------------------------------------------------
1 | #!/home/zxh/Cosine-IVReID/venv/bin/python
2 | # EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
3 | __requires__ = 'setuptools==39.1.0'
4 | import re
5 | import sys
6 | from pkg_resources import load_entry_point
7 |
8 | if __name__ == '__main__':
9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
10 | sys.exit(
11 | load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
12 | )
13 |
--------------------------------------------------------------------------------
/venv/bin/easy_install-3.5:
--------------------------------------------------------------------------------
1 | #!/home/zxh/Cosine-IVReID/venv/bin/python
2 | # EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.5'
3 | __requires__ = 'setuptools==39.1.0'
4 | import re
5 | import sys
6 | from pkg_resources import load_entry_point
7 |
8 | if __name__ == '__main__':
9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
10 | sys.exit(
11 | load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.5')()
12 | )
13 |
--------------------------------------------------------------------------------
/venv/bin/pip:
--------------------------------------------------------------------------------
1 | #!/home/zxh/Cosine-IVReID/venv/bin/python
2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
3 | __requires__ = 'pip==10.0.1'
4 | import re
5 | import sys
6 | from pkg_resources import load_entry_point
7 |
8 | if __name__ == '__main__':
9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
10 | sys.exit(
11 | load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
12 | )
13 |
--------------------------------------------------------------------------------
/venv/bin/pip3:
--------------------------------------------------------------------------------
1 | #!/home/zxh/Cosine-IVReID/venv/bin/python
2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
3 | __requires__ = 'pip==10.0.1'
4 | import re
5 | import sys
6 | from pkg_resources import load_entry_point
7 |
8 | if __name__ == '__main__':
9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
10 | sys.exit(
11 | load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
12 | )
13 |
--------------------------------------------------------------------------------
/venv/bin/pip3.5:
--------------------------------------------------------------------------------
1 | #!/home/zxh/Cosine-IVReID/venv/bin/python
2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.5'
3 | __requires__ = 'pip==10.0.1'
4 | import re
5 | import sys
6 | from pkg_resources import load_entry_point
7 |
8 | if __name__ == '__main__':
9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
10 | sys.exit(
11 | load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.5')()
12 | )
13 |
--------------------------------------------------------------------------------
/venv/bin/python:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/bin/python
--------------------------------------------------------------------------------
/venv/bin/python3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/bin/python3
--------------------------------------------------------------------------------
/venv/bin/python3.5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/bin/python3.5
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/easy-install.pth:
--------------------------------------------------------------------------------
1 | ./setuptools-39.1.0-py3.5.egg
2 | ./pip-10.0.1-py3.5.egg
3 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/EGG-INFO/dependency_links.txt:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/EGG-INFO/entry_points.txt:
--------------------------------------------------------------------------------
1 | [console_scripts]
2 | pip = pip._internal:main
3 | pip3 = pip._internal:main
4 | pip3.5 = pip._internal:main
5 |
6 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/EGG-INFO/not-zip-safe:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/EGG-INFO/requires.txt:
--------------------------------------------------------------------------------
1 |
2 | [testing]
3 | pytest
4 | mock
5 | pretend
6 | scripttest>=1.3
7 | virtualenv>=1.10
8 | freezegun
9 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/EGG-INFO/top_level.txt:
--------------------------------------------------------------------------------
1 | pip
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = "10.0.1"
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/__main__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import os
4 | import sys
5 |
6 | # If we are running from a wheel, add the wheel to sys.path
7 | # This allows the usage python pip-*.whl/pip install pip-*.whl
8 | if __package__ == '':
9 | # __file__ is pip-*.whl/pip/__main__.py
10 | # first dirname call strips of '/__main__.py', second strips off '/pip'
11 | # Resulting path is the name of the wheel itself
12 | # Add that to sys.path so we can import pip
13 | path = os.path.dirname(os.path.dirname(__file__))
14 | sys.path.insert(0, path)
15 |
16 | from pip._internal import main as _main # noqa
17 |
18 | if __name__ == '__main__':
19 | sys.exit(_main())
20 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/commands/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Package containing all pip commands
3 | """
4 | from __future__ import absolute_import
5 |
6 | from pip._internal.commands.completion import CompletionCommand
7 | from pip._internal.commands.configuration import ConfigurationCommand
8 | from pip._internal.commands.download import DownloadCommand
9 | from pip._internal.commands.freeze import FreezeCommand
10 | from pip._internal.commands.hash import HashCommand
11 | from pip._internal.commands.help import HelpCommand
12 | from pip._internal.commands.list import ListCommand
13 | from pip._internal.commands.check import CheckCommand
14 | from pip._internal.commands.search import SearchCommand
15 | from pip._internal.commands.show import ShowCommand
16 | from pip._internal.commands.install import InstallCommand
17 | from pip._internal.commands.uninstall import UninstallCommand
18 | from pip._internal.commands.wheel import WheelCommand
19 |
20 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING
21 |
22 | if MYPY_CHECK_RUNNING:
23 | from typing import List, Type
24 | from pip._internal.basecommand import Command
25 |
26 | commands_order = [
27 | InstallCommand,
28 | DownloadCommand,
29 | UninstallCommand,
30 | FreezeCommand,
31 | ListCommand,
32 | ShowCommand,
33 | CheckCommand,
34 | ConfigurationCommand,
35 | SearchCommand,
36 | WheelCommand,
37 | HashCommand,
38 | CompletionCommand,
39 | HelpCommand,
40 | ] # type: List[Type[Command]]
41 |
42 | commands_dict = {c.name: c for c in commands_order}
43 |
44 |
45 | def get_summaries(ordered=True):
46 | """Yields sorted (command name, command summary) tuples."""
47 |
48 | if ordered:
49 | cmditems = _sort_commands(commands_dict, commands_order)
50 | else:
51 | cmditems = commands_dict.items()
52 |
53 | for name, command_class in cmditems:
54 | yield (name, command_class.summary)
55 |
56 |
57 | def get_similar_commands(name):
58 | """Command name auto-correct."""
59 | from difflib import get_close_matches
60 |
61 | name = name.lower()
62 |
63 | close_commands = get_close_matches(name, commands_dict.keys())
64 |
65 | if close_commands:
66 | return close_commands[0]
67 | else:
68 | return False
69 |
70 |
71 | def _sort_commands(cmddict, order):
72 | def keyfn(key):
73 | try:
74 | return order.index(key[1])
75 | except ValueError:
76 | # unordered items should come last
77 | return 0xff
78 |
79 | return sorted(cmddict.items(), key=keyfn)
80 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/commands/check.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from pip._internal.basecommand import Command
4 | from pip._internal.operations.check import (
5 | check_package_set, create_package_set_from_installed,
6 | )
7 | from pip._internal.utils.misc import get_installed_distributions
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | class CheckCommand(Command):
13 | """Verify installed packages have compatible dependencies."""
14 | name = 'check'
15 | usage = """
16 | %prog [options]"""
17 | summary = 'Verify installed packages have compatible dependencies.'
18 |
19 | def run(self, options, args):
20 | package_set = create_package_set_from_installed()
21 | missing, conflicting = check_package_set(package_set)
22 |
23 | for project_name in missing:
24 | version = package_set[project_name].version
25 | for dependency in missing[project_name]:
26 | logger.info(
27 | "%s %s requires %s, which is not installed.",
28 | project_name, version, dependency[0],
29 | )
30 |
31 | for project_name in conflicting:
32 | version = package_set[project_name].version
33 | for dep_name, dep_version, req in conflicting[project_name]:
34 | logger.info(
35 | "%s %s has requirement %s, but you have %s %s.",
36 | project_name, version, req, dep_name, dep_version,
37 | )
38 |
39 | if missing or conflicting:
40 | return 1
41 | else:
42 | logger.info("No broken requirements found.")
43 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/commands/hash.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import hashlib
4 | import logging
5 | import sys
6 |
7 | from pip._internal.basecommand import Command
8 | from pip._internal.status_codes import ERROR
9 | from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
10 | from pip._internal.utils.misc import read_chunks
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 |
15 | class HashCommand(Command):
16 | """
17 | Compute a hash of a local package archive.
18 |
19 | These can be used with --hash in a requirements file to do repeatable
20 | installs.
21 |
22 | """
23 | name = 'hash'
24 | usage = '%prog [options] ...'
25 | summary = 'Compute hashes of package archives.'
26 | ignore_require_venv = True
27 |
28 | def __init__(self, *args, **kw):
29 | super(HashCommand, self).__init__(*args, **kw)
30 | self.cmd_opts.add_option(
31 | '-a', '--algorithm',
32 | dest='algorithm',
33 | choices=STRONG_HASHES,
34 | action='store',
35 | default=FAVORITE_HASH,
36 | help='The hash algorithm to use: one of %s' %
37 | ', '.join(STRONG_HASHES))
38 | self.parser.insert_option_group(0, self.cmd_opts)
39 |
40 | def run(self, options, args):
41 | if not args:
42 | self.parser.print_usage(sys.stderr)
43 | return ERROR
44 |
45 | algorithm = options.algorithm
46 | for path in args:
47 | logger.info('%s:\n--hash=%s:%s',
48 | path, algorithm, _hash_of_file(path, algorithm))
49 |
50 |
51 | def _hash_of_file(path, algorithm):
52 | """Return the hash digest of a file."""
53 | with open(path, 'rb') as archive:
54 | hash = hashlib.new(algorithm)
55 | for chunk in read_chunks(archive):
56 | hash.update(chunk)
57 | return hash.hexdigest()
58 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/commands/help.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from pip._internal.basecommand import SUCCESS, Command
4 | from pip._internal.exceptions import CommandError
5 |
6 |
7 | class HelpCommand(Command):
8 | """Show help for commands"""
9 | name = 'help'
10 | usage = """
11 | %prog """
12 | summary = 'Show help for commands.'
13 | ignore_require_venv = True
14 |
15 | def run(self, options, args):
16 | from pip._internal.commands import commands_dict, get_similar_commands
17 |
18 | try:
19 | # 'pip help' with no args is handled by pip.__init__.parseopt()
20 | cmd_name = args[0] # the command we need help for
21 | except IndexError:
22 | return SUCCESS
23 |
24 | if cmd_name not in commands_dict:
25 | guess = get_similar_commands(cmd_name)
26 |
27 | msg = ['unknown command "%s"' % cmd_name]
28 | if guess:
29 | msg.append('maybe you meant "%s"' % guess)
30 |
31 | raise CommandError(' - '.join(msg))
32 |
33 | command = commands_dict[cmd_name]()
34 | command.parser.print_help()
35 |
36 | return SUCCESS
37 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/models/__init__.py:
--------------------------------------------------------------------------------
1 | from pip._internal.models.index import Index, PyPI
2 |
3 |
4 | __all__ = ["Index", "PyPI"]
5 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/models/index.py:
--------------------------------------------------------------------------------
1 | from pip._vendor.six.moves.urllib import parse as urllib_parse
2 |
3 |
4 | class Index(object):
5 | def __init__(self, url):
6 | self.url = url
7 | self.netloc = urllib_parse.urlsplit(url).netloc
8 | self.simple_url = self.url_to_path('simple')
9 | self.pypi_url = self.url_to_path('pypi')
10 |
11 | def url_to_path(self, path):
12 | return urllib_parse.urljoin(self.url, path)
13 |
14 |
15 | PyPI = Index('https://pypi.org/')
16 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/operations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/operations/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/req/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import logging
4 |
5 | from .req_install import InstallRequirement
6 | from .req_set import RequirementSet
7 | from .req_file import parse_requirements
8 | from pip._internal.utils.logging import indent_log
9 |
10 |
11 | __all__ = [
12 | "RequirementSet", "InstallRequirement",
13 | "parse_requirements", "install_given_reqs",
14 | ]
15 |
16 | logger = logging.getLogger(__name__)
17 |
18 |
19 | def install_given_reqs(to_install, install_options, global_options=(),
20 | *args, **kwargs):
21 | """
22 | Install everything in the given list.
23 |
24 | (to be called after having downloaded and unpacked the packages)
25 | """
26 |
27 | if to_install:
28 | logger.info(
29 | 'Installing collected packages: %s',
30 | ', '.join([req.name for req in to_install]),
31 | )
32 |
33 | with indent_log():
34 | for requirement in to_install:
35 | if requirement.conflicts_with:
36 | logger.info(
37 | 'Found existing installation: %s',
38 | requirement.conflicts_with,
39 | )
40 | with indent_log():
41 | uninstalled_pathset = requirement.uninstall(
42 | auto_confirm=True
43 | )
44 | try:
45 | requirement.install(
46 | install_options,
47 | global_options,
48 | *args,
49 | **kwargs
50 | )
51 | except:
52 | should_rollback = (
53 | requirement.conflicts_with and
54 | not requirement.install_succeeded
55 | )
56 | # if install did not succeed, rollback previous uninstall
57 | if should_rollback:
58 | uninstalled_pathset.rollback()
59 | raise
60 | else:
61 | should_commit = (
62 | requirement.conflicts_with and
63 | requirement.install_succeeded
64 | )
65 | if should_commit:
66 | uninstalled_pathset.commit()
67 | requirement.remove_temporary_source()
68 |
69 | return to_install
70 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/status_codes.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | SUCCESS = 0
4 | ERROR = 1
5 | UNKNOWN_ERROR = 2
6 | VIRTUALENV_NOT_FOUND = 3
7 | PREVIOUS_BUILD_DIR_ERROR = 4
8 | NO_MATCHES_FOUND = 23
9 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/deprecation.py:
--------------------------------------------------------------------------------
1 | """
2 | A module that implements tooling to enable easy warnings about deprecations.
3 | """
4 | from __future__ import absolute_import
5 |
6 | import logging
7 | import warnings
8 |
9 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING
10 |
11 | if MYPY_CHECK_RUNNING:
12 | from typing import Any
13 |
14 |
15 | class PipDeprecationWarning(Warning):
16 | pass
17 |
18 |
19 | class Pending(object):
20 | pass
21 |
22 |
23 | class RemovedInPip11Warning(PipDeprecationWarning):
24 | pass
25 |
26 |
27 | class RemovedInPip12Warning(PipDeprecationWarning, Pending):
28 | pass
29 |
30 |
31 | # Warnings <-> Logging Integration
32 |
33 |
34 | _warnings_showwarning = None # type: Any
35 |
36 |
37 | def _showwarning(message, category, filename, lineno, file=None, line=None):
38 | if file is not None:
39 | if _warnings_showwarning is not None:
40 | _warnings_showwarning(
41 | message, category, filename, lineno, file, line,
42 | )
43 | else:
44 | if issubclass(category, PipDeprecationWarning):
45 | # We use a specially named logger which will handle all of the
46 | # deprecation messages for pip.
47 | logger = logging.getLogger("pip._internal.deprecations")
48 |
49 | # This is purposely using the % formatter here instead of letting
50 | # the logging module handle the interpolation. This is because we
51 | # want it to appear as if someone typed this entire message out.
52 | log_message = "DEPRECATION: %s" % message
53 |
54 | # PipDeprecationWarnings that are Pending still have at least 2
55 | # versions to go until they are removed so they can just be
56 | # warnings. Otherwise, they will be removed in the very next
57 | # version of pip. We want these to be more obvious so we use the
58 | # ERROR logging level.
59 | if issubclass(category, Pending):
60 | logger.warning(log_message)
61 | else:
62 | logger.error(log_message)
63 | else:
64 | _warnings_showwarning(
65 | message, category, filename, lineno, file, line,
66 | )
67 |
68 |
69 | def install_warning_logger():
70 | # Enable our Deprecation Warnings
71 | warnings.simplefilter("default", PipDeprecationWarning, append=True)
72 |
73 | global _warnings_showwarning
74 |
75 | if _warnings_showwarning is None:
76 | _warnings_showwarning = warnings.showwarning
77 | warnings.showwarning = _showwarning
78 |
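A brief sketch of the warnings-to-logging bridge above once install_warning_logger() has run; the basicConfig call is illustrative setup, not part of pip.

    # Illustrative only: route a PipDeprecationWarning through the bridge above.
    import logging
    import warnings

    from pip._internal.utils.deprecation import (
        RemovedInPip11Warning, install_warning_logger,
    )

    logging.basicConfig()        # give the 'pip._internal.deprecations' logger a handler
    install_warning_logger()

    # Not Pending, so it is logged at ERROR level as 'DEPRECATION: ...'
    warnings.warn('--no-such-option is going away', RemovedInPip11Warning)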
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/encoding.py:
--------------------------------------------------------------------------------
1 | import codecs
2 | import locale
3 | import re
4 | import sys
5 |
6 | BOMS = [
7 | (codecs.BOM_UTF8, 'utf8'),
8 | (codecs.BOM_UTF16, 'utf16'),
9 | (codecs.BOM_UTF16_BE, 'utf16-be'),
10 | (codecs.BOM_UTF16_LE, 'utf16-le'),
11 | (codecs.BOM_UTF32, 'utf32'),
12 | (codecs.BOM_UTF32_BE, 'utf32-be'),
13 | (codecs.BOM_UTF32_LE, 'utf32-le'),
14 | ]
15 |
16 | ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)')
17 |
18 |
19 | def auto_decode(data):
20 | """Check a bytes string for a BOM to correctly detect the encoding
21 |
22 | Fallback to locale.getpreferredencoding(False) like open() on Python3"""
23 | for bom, encoding in BOMS:
24 | if data.startswith(bom):
25 | return data[len(bom):].decode(encoding)
26 | # Lets check the first two lines as in PEP263
27 | for line in data.split(b'\n')[:2]:
28 | if line[0:1] == b'#' and ENCODING_RE.search(line):
29 | encoding = ENCODING_RE.search(line).groups()[0].decode('ascii')
30 | return data.decode(encoding)
31 | return data.decode(
32 | locale.getpreferredencoding(False) or sys.getdefaultencoding(),
33 | )
34 |
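Two quick, illustrative checks of auto_decode() above: a BOM-prefixed buffer takes the first branch, and a PEP 263 coding comment takes the second.

    # Illustrative only: exercises auto_decode() defined above.
    import codecs

    from pip._internal.utils.encoding import auto_decode

    bom_buf = codecs.BOM_UTF8 + u'requests==2.18.4'.encode('utf8')
    assert auto_decode(bom_buf) == u'requests==2.18.4'            # BOM branch

    pep263_buf = b"# -*- coding: latin-1 -*-\npackage = 'caf\xe9'\n"
    assert auto_decode(pep263_buf).splitlines()[1] == u"package = 'caf\xe9'"  # coding-comment branch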
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/filesystem.py:
--------------------------------------------------------------------------------
1 | import os
2 | import os.path
3 |
4 | from pip._internal.compat import get_path_uid
5 |
6 |
7 | def check_path_owner(path):
8 | # If we don't have a way to check the effective uid of this process, then
9 | # we'll just assume that we own the directory.
10 | if not hasattr(os, "geteuid"):
11 | return True
12 |
13 | previous = None
14 | while path != previous:
15 | if os.path.lexists(path):
16 | # Check if path is writable by current user.
17 | if os.geteuid() == 0:
18 | # Special handling for root user in order to handle properly
19 | # cases where users use sudo without -H flag.
20 | try:
21 | path_uid = get_path_uid(path)
22 | except OSError:
23 | return False
24 | return path_uid == 0
25 | else:
26 | return os.access(path, os.W_OK)
27 | else:
28 | previous, path = path, os.path.dirname(path)
29 |
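A sketch of calling check_path_owner() above; the results depend on the invoking user and filesystem, so the comments only describe the typical outcome.

    # Illustrative only: the function walks up to the first existing ancestor
    # and reports whether the current user may write there.
    import os

    from pip._internal.utils.filesystem import check_path_owner

    print(check_path_owner(os.path.expanduser('~/.cache/pip')))      # usually True
    print(check_path_owner('/usr/lib/python3.5/site-packages'))      # usually False without sudo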
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/packaging.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import logging
4 | import sys
5 | from email.parser import FeedParser # type: ignore
6 |
7 | from pip._vendor import pkg_resources
8 | from pip._vendor.packaging import specifiers, version
9 |
10 | from pip._internal import exceptions
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 |
15 | def check_requires_python(requires_python):
16 | """
17 | Check if the python version in use matches the `requires_python` specifier.
18 |
19 | Returns `True` if the version of python in use matches the requirement.
20 | Returns `False` if the version of python in use does not match the
21 | requirement.
22 |
23 | Raises an InvalidSpecifier if `requires_python` has an invalid format.
24 | """
25 | if requires_python is None:
26 | # The package provides no information
27 | return True
28 | requires_python_specifier = specifiers.SpecifierSet(requires_python)
29 |
30 | # We only use major.minor.micro
31 | python_version = version.parse('.'.join(map(str, sys.version_info[:3])))
32 | return python_version in requires_python_specifier
33 |
34 |
35 | def get_metadata(dist):
36 | if (isinstance(dist, pkg_resources.DistInfoDistribution) and
37 | dist.has_metadata('METADATA')):
38 | return dist.get_metadata('METADATA')
39 | elif dist.has_metadata('PKG-INFO'):
40 | return dist.get_metadata('PKG-INFO')
41 |
42 |
43 | def check_dist_requires_python(dist):
44 | metadata = get_metadata(dist)
45 | feed_parser = FeedParser()
46 | feed_parser.feed(metadata)
47 | pkg_info_dict = feed_parser.close()
48 | requires_python = pkg_info_dict.get('Requires-Python')
49 | try:
50 | if not check_requires_python(requires_python):
51 | raise exceptions.UnsupportedPythonVersion(
52 | "%s requires Python '%s' but the running Python is %s" % (
53 | dist.project_name,
54 | requires_python,
55 | '.'.join(map(str, sys.version_info[:3])),)
56 | )
57 | except specifiers.InvalidSpecifier as e:
58 | logger.warning(
59 | "Package %s has an invalid Requires-Python entry %s - %s",
60 | dist.project_name, requires_python, e,
61 | )
62 | return
63 |
64 |
65 | def get_installer(dist):
66 | if dist.has_metadata('INSTALLER'):
67 | for line in dist.get_metadata_lines('INSTALLER'):
68 | if line.strip():
69 | return line.strip()
70 | return ''
71 |
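A short sketch of check_requires_python() above; the printed booleans assume the Python 3.5 interpreter this venv was built with.

    # Illustrative only: evaluate Requires-Python specifiers against the running interpreter.
    from pip._vendor.packaging import specifiers

    from pip._internal.utils.packaging import check_requires_python

    assert check_requires_python(None) is True     # no metadata -> accepted
    print(check_requires_python('>=3.4'))          # True under Python 3.5
    print(check_requires_python('>=4.0'))          # False

    try:
        check_requires_python('not a specifier')
    except specifiers.InvalidSpecifier as exc:
        print('invalid Requires-Python value:', exc)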
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/setuptools_build.py:
--------------------------------------------------------------------------------
1 | # Shim to wrap setup.py invocation with setuptools
2 | SETUPTOOLS_SHIM = (
3 | "import setuptools, tokenize;__file__=%r;"
4 | "f=getattr(tokenize, 'open', open)(__file__);"
5 | "code=f.read().replace('\\r\\n', '\\n');"
6 | "f.close();"
7 | "exec(compile(code, __file__, 'exec'))"
8 | )
9 |
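A sketch of how the shim above is typically interpolated into a subprocess command line; the setup.py path and the 'bdist_wheel' argument are hypothetical.

    # Illustrative only: build the argv that would exec setup.py through setuptools.
    import sys

    from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM

    setup_py = '/tmp/example-pkg/setup.py'   # hypothetical path
    args = [sys.executable, '-c', SETUPTOOLS_SHIM % setup_py, 'bdist_wheel']
    print(args[2])   # the -c payload: read, normalize newlines, compile and exec setup.py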
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/temp_dir.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import logging
4 | import os.path
5 | import tempfile
6 |
7 | from pip._internal.utils.misc import rmtree
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | class TempDirectory(object):
13 | """Helper class that owns and cleans up a temporary directory.
14 |
15 | This class can be used as a context manager or as an OO representation of a
16 | temporary directory.
17 |
18 | Attributes:
19 | path
20 | Location to the created temporary directory or None
21 | delete
22 | Whether the directory should be deleted when exiting
23 | (when used as a contextmanager)
24 |
25 | Methods:
26 | create()
27 | Creates a temporary directory and stores its path in the path
28 | attribute.
29 | cleanup()
30 | Deletes the temporary directory and sets path attribute to None
31 |
32 | When used as a context manager, a temporary directory is created on
33 | entering the context and, if the delete attribute is True, on exiting the
34 | context the created directory is deleted.
35 | """
36 |
37 | def __init__(self, path=None, delete=None, kind="temp"):
38 | super(TempDirectory, self).__init__()
39 |
40 | if path is None and delete is None:
41 | # If we were not given an explicit directory, and we were not given
42 | # an explicit delete option, then we'll default to deleting.
43 | delete = True
44 |
45 | self.path = path
46 | self.delete = delete
47 | self.kind = kind
48 |
49 | def __repr__(self):
50 | return "<{} {!r}>".format(self.__class__.__name__, self.path)
51 |
52 | def __enter__(self):
53 | self.create()
54 | return self
55 |
56 | def __exit__(self, exc, value, tb):
57 | if self.delete:
58 | self.cleanup()
59 |
60 | def create(self):
61 | """Create a temporary directory and store it's path in self.path
62 | """
63 | if self.path is not None:
64 | logger.debug(
65 | "Skipped creation of temporary directory: {}".format(self.path)
66 | )
67 | return
68 | # We realpath here because some systems have their default tmpdir
69 | # symlinked to another directory. This tends to confuse build
70 | # scripts, so we canonicalize the path by traversing potential
71 | # symlinks here.
72 | self.path = os.path.realpath(
73 | tempfile.mkdtemp(prefix="pip-{}-".format(self.kind))
74 | )
75 | logger.debug("Created temporary directory: {}".format(self.path))
76 |
77 | def cleanup(self):
78 | """Remove the temporary directory created and reset state
79 | """
80 | if self.path is not None and os.path.exists(self.path):
81 | rmtree(self.path)
82 | self.path = None
83 |
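A usage sketch of TempDirectory above as a context manager; the 'req-build' kind is just an illustrative prefix for the directory name.

    # Illustrative only: created on __enter__, removed on __exit__ because delete defaults to True.
    import os

    from pip._internal.utils.temp_dir import TempDirectory

    with TempDirectory(kind='req-build') as tmp:      # e.g. /tmp/pip-req-build-XXXXXX
        marker = os.path.join(tmp.path, 'marker.txt')
        with open(marker, 'w') as f:
            f.write('scratch data')

    print(tmp.path)   # None: cleanup() removed the directory and reset the attribute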
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_internal/utils/typing.py:
--------------------------------------------------------------------------------
1 | """For neatly implementing static typing in pip.
2 |
3 | `mypy` - the static type analysis tool we use - uses the `typing` module, which
4 | provides core functionality fundamental to mypy's functioning.
5 |
6 | Generally, `typing` would be imported at runtime and used in that fashion -
7 | it acts as a no-op at runtime and does not have any run-time overhead by
8 | design.
9 |
10 | As it turns out, `typing` is not vendorable - it uses separate sources for
11 | Python 2/Python 3. Thus, this codebase can not expect it to be present.
12 | To work around this, mypy allows the typing import to be behind a False-y
13 | optional to prevent it from running at runtime and type-comments can be used
14 | to remove the need for the types to be accessible directly during runtime.
15 |
16 | This module provides the False-y guard in a nicely named fashion so that a
17 | curious maintainer can reach here to read this.
18 |
19 | In pip, all static-typing related imports should be guarded as follows:
20 |
21 | from pip.utils.typing import MYPY_CHECK_RUNNING
22 |
23 | if MYPY_CHECK_RUNNING:
24 | from typing import ...
25 |
26 | Ref: https://github.com/python/mypy/issues/3216
27 | """
28 |
29 | MYPY_CHECK_RUNNING = False
30 |
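A sketch of the guarded-import pattern the docstring above prescribes, using a hypothetical helper; at runtime the typing import is skipped, while mypy still sees the type comment.

    # Illustrative only: the guard keeps `typing` out of the runtime import graph.
    from pip._internal.utils.typing import MYPY_CHECK_RUNNING

    if MYPY_CHECK_RUNNING:
        from typing import List, Optional


    def first_requirement(names):
        # type: (List[str]) -> Optional[str]
        """Return the first requirement name, or None for an empty list."""
        return names[0] if names else None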
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/__init__.py:
--------------------------------------------------------------------------------
1 | """CacheControl import Interface.
2 |
3 | Make it easy to import from cachecontrol without long namespaces.
4 | """
5 | __author__ = 'Eric Larson'
6 | __email__ = 'eric@ionrock.org'
7 | __version__ = '0.12.4'
8 |
9 | from .wrapper import CacheControl
10 | from .adapter import CacheControlAdapter
11 | from .controller import CacheController
12 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/_cmd.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from pip._vendor import requests
4 |
5 | from pip._vendor.cachecontrol.adapter import CacheControlAdapter
6 | from pip._vendor.cachecontrol.cache import DictCache
7 | from pip._vendor.cachecontrol.controller import logger
8 |
9 | from argparse import ArgumentParser
10 |
11 |
12 | def setup_logging():
13 | logger.setLevel(logging.DEBUG)
14 | handler = logging.StreamHandler()
15 | logger.addHandler(handler)
16 |
17 |
18 | def get_session():
19 | adapter = CacheControlAdapter(
20 | DictCache(),
21 | cache_etags=True,
22 | serializer=None,
23 | heuristic=None,
24 | )
25 | sess = requests.Session()
26 | sess.mount('http://', adapter)
27 | sess.mount('https://', adapter)
28 |
29 | sess.cache_controller = adapter.controller
30 | return sess
31 |
32 |
33 | def get_args():
34 | parser = ArgumentParser()
35 | parser.add_argument('url', help='The URL to try and cache')
36 | return parser.parse_args()
37 |
38 |
39 | def main(args=None):
40 | args = get_args()
41 | sess = get_session()
42 |
43 | # Make a request to get a response
44 | resp = sess.get(args.url)
45 |
46 | # Turn on logging
47 | setup_logging()
48 |
49 | # try setting the cache
50 | sess.cache_controller.cache_response(resp.request, resp.raw)
51 |
52 | # Now try to get it
53 | if sess.cache_controller.cached_request(resp.request):
54 | print('Cached!')
55 | else:
56 | print('Not cached :(')
57 |
58 |
59 | if __name__ == '__main__':
60 | main()
61 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/cache.py:
--------------------------------------------------------------------------------
1 | """
2 | The cache object API for implementing caches. The default is a thread
3 | safe in-memory dictionary.
4 | """
5 | from threading import Lock
6 |
7 |
8 | class BaseCache(object):
9 |
10 | def get(self, key):
11 | raise NotImplementedError()
12 |
13 | def set(self, key, value):
14 | raise NotImplementedError()
15 |
16 | def delete(self, key):
17 | raise NotImplementedError()
18 |
19 | def close(self):
20 | pass
21 |
22 |
23 | class DictCache(BaseCache):
24 |
25 | def __init__(self, init_dict=None):
26 | self.lock = Lock()
27 | self.data = init_dict or {}
28 |
29 | def get(self, key):
30 | return self.data.get(key, None)
31 |
32 | def set(self, key, value):
33 | with self.lock:
34 | self.data.update({key: value})
35 |
36 | def delete(self, key):
37 | with self.lock:
38 | if key in self.data:
39 | self.data.pop(key)
40 |
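A quick sketch of DictCache above; the key string is arbitrary, and the writes happen under the instance's Lock.

    # Illustrative only: DictCache is the thread-safe in-memory default cache.
    from pip._vendor.cachecontrol.cache import DictCache

    cache = DictCache()
    cache.set('https://pypi.org/simple/', b'serialized response')
    assert cache.get('https://pypi.org/simple/') == b'serialized response'
    cache.delete('https://pypi.org/simple/')
    assert cache.get('https://pypi.org/simple/') is None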
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/caches/__init__.py:
--------------------------------------------------------------------------------
1 | from .file_cache import FileCache # noqa
2 | from .redis_cache import RedisCache # noqa
3 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/caches/redis_cache.py:
--------------------------------------------------------------------------------
1 | from __future__ import division
2 |
3 | from datetime import datetime
4 | from pip._vendor.cachecontrol.cache import BaseCache
5 |
6 |
7 | def total_seconds(td):
8 | """Python 2.6 compatability"""
9 | if hasattr(td, 'total_seconds'):
10 | return int(td.total_seconds())
11 |
12 | ms = td.microseconds
13 | secs = (td.seconds + td.days * 24 * 3600)
14 | return int((ms + secs * 10**6) / 10**6)
15 |
16 |
17 | class RedisCache(BaseCache):
18 |
19 | def __init__(self, conn):
20 | self.conn = conn
21 |
22 | def get(self, key):
23 | return self.conn.get(key)
24 |
25 | def set(self, key, value, expires=None):
26 | if not expires:
27 | self.conn.set(key, value)
28 | else:
29 | expires = expires - datetime.utcnow()
30 | self.conn.setex(key, total_seconds(expires), value)
31 |
32 | def delete(self, key):
33 | self.conn.delete(key)
34 |
35 | def clear(self):
36 | """Helper for clearing all the keys in a database. Use with
37 | caution!"""
38 | for key in self.conn.keys():
39 | self.conn.delete(key)
40 |
41 | def close(self):
42 | """Redis uses connection pooling, no need to close the connection."""
43 | pass
44 |
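A sketch of the total_seconds() helper above; RedisCache itself needs a live redis client, so only the timedelta arithmetic is exercised here.

    # Illustrative only: total_seconds() mirrors timedelta.total_seconds() for old Pythons.
    from datetime import timedelta

    from pip._vendor.cachecontrol.caches.redis_cache import total_seconds

    assert total_seconds(timedelta(minutes=2, seconds=3)) == 123
    assert total_seconds(timedelta(days=1)) == 86400
    # RedisCache(conn) would wrap an existing client (e.g. redis.StrictRedis()) and,
    # for expiring entries, call conn.setex(key, total_seconds(expires - utcnow), value).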
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/compat.py:
--------------------------------------------------------------------------------
1 | try:
2 | from urllib.parse import urljoin
3 | except ImportError:
4 | from urlparse import urljoin
5 |
6 |
7 | try:
8 | import cPickle as pickle
9 | except ImportError:
10 | import pickle
11 |
12 |
13 | # Handle the case where the requests module has been patched to not have
14 | # urllib3 bundled as part of its source.
15 | try:
16 | from pip._vendor.requests.packages.urllib3.response import HTTPResponse
17 | except ImportError:
18 | from pip._vendor.urllib3.response import HTTPResponse
19 |
20 | try:
21 | from pip._vendor.requests.packages.urllib3.util import is_fp_closed
22 | except ImportError:
23 | from pip._vendor.urllib3.util import is_fp_closed
24 |
25 | # Replicate some six behaviour
26 | try:
27 | text_type = unicode
28 | except NameError:
29 | text_type = str
30 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/filewrapper.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 |
3 |
4 | class CallbackFileWrapper(object):
5 | """
6 | Small wrapper around a fp object which will tee everything read into a
7 | buffer, and when that file is closed it will execute a callback with the
8 | contents of that buffer.
9 |
10 | All attributes are proxied to the underlying file object.
11 |
12 | This class uses members with a double underscore (__) leading prefix so as
13 | not to accidentally shadow an attribute.
14 | """
15 |
16 | def __init__(self, fp, callback):
17 | self.__buf = BytesIO()
18 | self.__fp = fp
19 | self.__callback = callback
20 |
21 | def __getattr__(self, name):
22 | # The vagaries of garbage collection mean that self.__fp is
23 | # not always set. Using __getattribute__ with the mangled private
24 | # name [0] looks up the attribute value and raises an
25 | # AttributeError when it doesn't exist. This stops things from
26 | # recursing infinitely into getattr in the case where
27 | # self.__fp hasn't been set.
28 | #
29 | # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
30 | fp = self.__getattribute__('_CallbackFileWrapper__fp')
31 | return getattr(fp, name)
32 |
33 | def __is_fp_closed(self):
34 | try:
35 | return self.__fp.fp is None
36 | except AttributeError:
37 | pass
38 |
39 | try:
40 | return self.__fp.closed
41 | except AttributeError:
42 | pass
43 |
44 | # We just don't cache it then.
45 | # TODO: Add some logging here...
46 | return False
47 |
48 | def _close(self):
49 | if self.__callback:
50 | self.__callback(self.__buf.getvalue())
51 |
52 | # We assign this to None here, because otherwise we can get into
53 | # really tricky problems where the CPython interpreter deadlocks
54 | # because the callback is holding a reference to something which
55 | # has a __del__ method. Setting this to None breaks the cycle
56 | # and allows the garbage collector to do its thing normally.
57 | self.__callback = None
58 |
59 | def read(self, amt=None):
60 | data = self.__fp.read(amt)
61 | self.__buf.write(data)
62 | if self.__is_fp_closed():
63 | self._close()
64 |
65 | return data
66 |
67 | def _safe_read(self, amt):
68 | data = self.__fp._safe_read(amt)
69 | if amt == 2 and data == b'\r\n':
70 | # urllib executes this read to toss the CRLF at the end
71 | # of the chunk.
72 | return data
73 |
74 | self.__buf.write(data)
75 | if self.__is_fp_closed():
76 | self._close()
77 |
78 | return data
79 |
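A self-contained sketch of CallbackFileWrapper above. Real callers wrap a urllib3 raw response, whose underlying fp attribute becomes None at EOF; the hypothetical FakeRawResponse below imitates just that attribute so the callback can be seen firing.

    # Illustrative only: FakeRawResponse stands in for urllib3's raw response object.
    from io import BytesIO

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper


    class FakeRawResponse(object):
        def __init__(self, data):
            self._buf = BytesIO(data)
            self.fp = self        # non-None while data remains, as in http.client

        def read(self, amt=None):
            chunk = self._buf.read(amt)
            if not chunk:
                self.fp = None    # EOF signal checked by __is_fp_closed()
            return chunk


    captured = []
    wrapped = CallbackFileWrapper(FakeRawResponse(b'hello world'), captured.append)

    while wrapped.read(4):        # read in small chunks until EOF
        pass

    print(captured)               # [b'hello world'] -- the callback got the buffered body once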
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/cachecontrol/wrapper.py:
--------------------------------------------------------------------------------
1 | from .adapter import CacheControlAdapter
2 | from .cache import DictCache
3 |
4 |
5 | def CacheControl(sess,
6 | cache=None,
7 | cache_etags=True,
8 | serializer=None,
9 | heuristic=None,
10 | controller_class=None,
11 | adapter_class=None,
12 | cacheable_methods=None):
13 |
14 | cache = cache or DictCache()
15 | adapter_class = adapter_class or CacheControlAdapter
16 | adapter = adapter_class(
17 | cache,
18 | cache_etags=cache_etags,
19 | serializer=serializer,
20 | heuristic=heuristic,
21 | controller_class=controller_class,
22 | cacheable_methods=cacheable_methods
23 | )
24 | sess.mount('http://', adapter)
25 | sess.mount('https://', adapter)
26 |
27 | return sess
28 |
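A usage sketch of the CacheControl() wrapper above, kept inside the vendored namespace so the session and the adapter come from the same tree; whether the second request is answered from the DictCache depends on the server's cache headers.

    # Illustrative only: mount the caching adapter on a vendored requests session.
    from pip._vendor import requests

    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.cache import DictCache

    sess = CacheControl(requests.Session(), cache=DictCache())
    first = sess.get('https://pypi.org/simple/')
    second = sess.get('https://pypi.org/simple/')   # may be served from the in-memory cache
    print(first.status_code, second.status_code)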
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/certifi/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import where, old_where
2 |
3 | __version__ = "2018.01.18"
4 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/certifi/__main__.py:
--------------------------------------------------------------------------------
1 | from certifi import where
2 | print(where())
3 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/certifi/core.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """
5 | certifi.py
6 | ~~~~~~~~~~
7 |
8 | This module returns the installation location of cacert.pem.
9 | """
10 | import os
11 | import warnings
12 |
13 |
14 | class DeprecatedBundleWarning(DeprecationWarning):
15 | """
16 | The weak security bundle is being deprecated. Please bother your service
17 | provider to get them to stop using cross-signed roots.
18 | """
19 |
20 |
21 | def where():
22 | f = os.path.dirname(__file__)
23 |
24 | return os.path.join(f, 'cacert.pem')
25 |
26 |
27 | def old_where():
28 | warnings.warn(
29 | "The weak security bundle has been removed. certifi.old_where() is now an alias "
30 | "of certifi.where(). Please update your code to use certifi.where() instead. "
31 | "certifi.old_where() will be removed in 2018.",
32 | DeprecatedBundleWarning
33 | )
34 | return where()
35 |
36 | if __name__ == '__main__':
37 | print(where())
38 |
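A sketch of the usual consumer of where() above: pointing an SSL context at the cacert.pem bundled next to core.py.

    # Illustrative only: verify TLS connections against the bundled CA roots.
    import ssl

    from pip._vendor.certifi import where

    print(where())                                     # .../pip/_vendor/certifi/cacert.pem
    ctx = ssl.create_default_context(cafile=where())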
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/__init__.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # This library is free software; you can redistribute it and/or
3 | # modify it under the terms of the GNU Lesser General Public
4 | # License as published by the Free Software Foundation; either
5 | # version 2.1 of the License, or (at your option) any later version.
6 | #
7 | # This library is distributed in the hope that it will be useful,
8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
10 | # Lesser General Public License for more details.
11 | #
12 | # You should have received a copy of the GNU Lesser General Public
13 | # License along with this library; if not, write to the Free Software
14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
15 | # 02110-1301 USA
16 | ######################### END LICENSE BLOCK #########################
17 |
18 |
19 | from .compat import PY2, PY3
20 | from .universaldetector import UniversalDetector
21 | from .version import __version__, VERSION
22 |
23 |
24 | def detect(byte_str):
25 | """
26 | Detect the encoding of the given byte string.
27 |
28 | :param byte_str: The byte sequence to examine.
29 | :type byte_str: ``bytes`` or ``bytearray``
30 | """
31 | if not isinstance(byte_str, bytearray):
32 | if not isinstance(byte_str, bytes):
33 | raise TypeError('Expected object of type bytes or bytearray, got: '
34 | '{0}'.format(type(byte_str)))
35 | else:
36 | byte_str = bytearray(byte_str)
37 | detector = UniversalDetector()
38 | detector.feed(byte_str)
39 | return detector.close()
40 |
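A quick sketch of detect() above; the confidence value varies with the input, so only the shape of the result dict is asserted.

    # Illustrative only: detect() feeds the whole buffer to a UniversalDetector.
    from pip._vendor import chardet

    result = chardet.detect(u'Привет, мир'.encode('utf-8'))
    print(result)       # e.g. {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}
    assert set(result) >= {'encoding', 'confidence'}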
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/big5prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Communicator client code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import Big5DistributionAnalysis
31 | from .mbcssm import BIG5_SM_MODEL
32 |
33 |
34 | class Big5Prober(MultiByteCharSetProber):
35 | def __init__(self):
36 | super(Big5Prober, self).__init__()
37 | self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
38 | self.distribution_analyzer = Big5DistributionAnalysis()
39 | self.reset()
40 |
41 | @property
42 | def charset_name(self):
43 | return "Big5"
44 |
45 | @property
46 | def language(self):
47 | return "Chinese"
48 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/cli/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/compat.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # Contributor(s):
3 | # Dan Blanchard
4 | # Ian Cordasco
5 | #
6 | # This library is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Lesser General Public
8 | # License as published by the Free Software Foundation; either
9 | # version 2.1 of the License, or (at your option) any later version.
10 | #
11 | # This library is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 | # Lesser General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Lesser General Public
17 | # License along with this library; if not, write to the Free Software
18 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
19 | # 02110-1301 USA
20 | ######################### END LICENSE BLOCK #########################
21 |
22 | import sys
23 |
24 |
25 | if sys.version_info < (3, 0):
26 | PY2 = True
27 | PY3 = False
28 | base_str = (str, unicode)
29 | text_type = unicode
30 | else:
31 | PY2 = False
32 | PY3 = True
33 | base_str = (bytes, str)
34 | text_type = str
35 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/cp949prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .chardistribution import EUCKRDistributionAnalysis
29 | from .codingstatemachine import CodingStateMachine
30 | from .mbcharsetprober import MultiByteCharSetProber
31 | from .mbcssm import CP949_SM_MODEL
32 |
33 |
34 | class CP949Prober(MultiByteCharSetProber):
35 | def __init__(self):
36 | super(CP949Prober, self).__init__()
37 | self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
38 | # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
39 | # be different.
40 | self.distribution_analyzer = EUCKRDistributionAnalysis()
41 | self.reset()
42 |
43 | @property
44 | def charset_name(self):
45 | return "CP949"
46 |
47 | @property
48 | def language(self):
49 | return "Korean"
50 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/enums.py:
--------------------------------------------------------------------------------
1 | """
2 | All of the Enums that are used throughout the chardet package.
3 |
4 | :author: Dan Blanchard (dan.blanchard@gmail.com)
5 | """
6 |
7 |
8 | class InputState(object):
9 | """
10 | This enum represents the different states a universal detector can be in.
11 | """
12 | PURE_ASCII = 0
13 | ESC_ASCII = 1
14 | HIGH_BYTE = 2
15 |
16 |
17 | class LanguageFilter(object):
18 | """
19 | This enum represents the different language filters we can apply to a
20 | ``UniversalDetector``.
21 | """
22 | CHINESE_SIMPLIFIED = 0x01
23 | CHINESE_TRADITIONAL = 0x02
24 | JAPANESE = 0x04
25 | KOREAN = 0x08
26 | NON_CJK = 0x10
27 | ALL = 0x1F
28 | CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL
29 | CJK = CHINESE | JAPANESE | KOREAN
30 |
31 |
32 | class ProbingState(object):
33 | """
34 | This enum represents the different states a prober can be in.
35 | """
36 | DETECTING = 0
37 | FOUND_IT = 1
38 | NOT_ME = 2
39 |
40 |
41 | class MachineState(object):
42 | """
43 | This enum represents the different states a state machine can be in.
44 | """
45 | START = 0
46 | ERROR = 1
47 | ITS_ME = 2
48 |
49 |
50 | class SequenceLikelihood(object):
51 | """
52 | This enum represents the likelihood of a character following the previous one.
53 | """
54 | NEGATIVE = 0
55 | UNLIKELY = 1
56 | LIKELY = 2
57 | POSITIVE = 3
58 |
59 | @classmethod
60 | def get_num_categories(cls):
61 | """:returns: The number of likelihood categories in the enum."""
62 | return 4
63 |
64 |
65 | class CharacterCategory(object):
66 | """
67 | This enum represents the different categories language models for
68 | ``SingleByteCharsetProber`` put characters into.
69 |
70 | Anything less than CONTROL is considered a letter.
71 | """
72 | UNDEFINED = 255
73 | LINE_BREAK = 254
74 | SYMBOL = 253
75 | DIGIT = 252
76 | CONTROL = 251
77 |
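A sketch of combining the LanguageFilter bit flags above; passing the combined mask to UniversalDetector (which, in chardet 3.0.4, accepts a lang_filter keyword) is how callers restrict probing.

    # Illustrative only: the filters are plain bit flags, so they compose with |.
    from pip._vendor.chardet.enums import LanguageFilter
    from pip._vendor.chardet.universaldetector import UniversalDetector

    cjk = LanguageFilter.CHINESE | LanguageFilter.JAPANESE | LanguageFilter.KOREAN
    assert cjk == LanguageFilter.CJK

    detector = UniversalDetector(lang_filter=LanguageFilter.CJK)  # probe only CJK encodings
    detector.feed(b'\xa4\xa4\xa4\xe5')                            # a short multi-byte sample
    detector.close()
    print(detector.result)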
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/euckrprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import EUCKRDistributionAnalysis
31 | from .mbcssm import EUCKR_SM_MODEL
32 |
33 |
34 | class EUCKRProber(MultiByteCharSetProber):
35 | def __init__(self):
36 | super(EUCKRProber, self).__init__()
37 | self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
38 | self.distribution_analyzer = EUCKRDistributionAnalysis()
39 | self.reset()
40 |
41 | @property
42 | def charset_name(self):
43 | return "EUC-KR"
44 |
45 | @property
46 | def language(self):
47 | return "Korean"
48 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/euctwprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import EUCTWDistributionAnalysis
31 | from .mbcssm import EUCTW_SM_MODEL
32 |
33 | class EUCTWProber(MultiByteCharSetProber):
34 | def __init__(self):
35 | super(EUCTWProber, self).__init__()
36 | self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL)
37 | self.distribution_analyzer = EUCTWDistributionAnalysis()
38 | self.reset()
39 |
40 | @property
41 | def charset_name(self):
42 | return "EUC-TW"
43 |
44 | @property
45 | def language(self):
46 | return "Taiwan"
47 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/gb2312prober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is mozilla.org code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 1998
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | #
12 | # This library is free software; you can redistribute it and/or
13 | # modify it under the terms of the GNU Lesser General Public
14 | # License as published by the Free Software Foundation; either
15 | # version 2.1 of the License, or (at your option) any later version.
16 | #
17 | # This library is distributed in the hope that it will be useful,
18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 | # Lesser General Public License for more details.
21 | #
22 | # You should have received a copy of the GNU Lesser General Public
23 | # License along with this library; if not, write to the Free Software
24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
25 | # 02110-1301 USA
26 | ######################### END LICENSE BLOCK #########################
27 |
28 | from .mbcharsetprober import MultiByteCharSetProber
29 | from .codingstatemachine import CodingStateMachine
30 | from .chardistribution import GB2312DistributionAnalysis
31 | from .mbcssm import GB2312_SM_MODEL
32 |
33 | class GB2312Prober(MultiByteCharSetProber):
34 | def __init__(self):
35 | super(GB2312Prober, self).__init__()
36 | self.coding_sm = CodingStateMachine(GB2312_SM_MODEL)
37 | self.distribution_analyzer = GB2312DistributionAnalysis()
38 | self.reset()
39 |
40 | @property
41 | def charset_name(self):
42 | return "GB2312"
43 |
44 | @property
45 | def language(self):
46 | return "Chinese"
47 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/mbcsgroupprober.py:
--------------------------------------------------------------------------------
1 | ######################## BEGIN LICENSE BLOCK ########################
2 | # The Original Code is Mozilla Universal charset detector code.
3 | #
4 | # The Initial Developer of the Original Code is
5 | # Netscape Communications Corporation.
6 | # Portions created by the Initial Developer are Copyright (C) 2001
7 | # the Initial Developer. All Rights Reserved.
8 | #
9 | # Contributor(s):
10 | # Mark Pilgrim - port to Python
11 | # Shy Shalom - original C code
12 | # Proofpoint, Inc.
13 | #
14 | # This library is free software; you can redistribute it and/or
15 | # modify it under the terms of the GNU Lesser General Public
16 | # License as published by the Free Software Foundation; either
17 | # version 2.1 of the License, or (at your option) any later version.
18 | #
19 | # This library is distributed in the hope that it will be useful,
20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
22 | # Lesser General Public License for more details.
23 | #
24 | # You should have received a copy of the GNU Lesser General Public
25 | # License along with this library; if not, write to the Free Software
26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
27 | # 02110-1301 USA
28 | ######################### END LICENSE BLOCK #########################
29 |
30 | from .charsetgroupprober import CharSetGroupProber
31 | from .utf8prober import UTF8Prober
32 | from .sjisprober import SJISProber
33 | from .eucjpprober import EUCJPProber
34 | from .gb2312prober import GB2312Prober
35 | from .euckrprober import EUCKRProber
36 | from .cp949prober import CP949Prober
37 | from .big5prober import Big5Prober
38 | from .euctwprober import EUCTWProber
39 |
40 |
41 | class MBCSGroupProber(CharSetGroupProber):
42 | def __init__(self, lang_filter=None):
43 | super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
44 | self.probers = [
45 | UTF8Prober(),
46 | SJISProber(),
47 | EUCJPProber(),
48 | GB2312Prober(),
49 | EUCKRProber(),
50 | CP949Prober(),
51 | Big5Prober(),
52 | EUCTWProber()
53 | ]
54 | self.reset()
55 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/chardet/version.py:
--------------------------------------------------------------------------------
1 | """
2 | This module exists only to simplify retrieving the version number of chardet
3 | from within setup.py and from chardet subpackages.
4 |
5 | :author: Dan Blanchard (dan.blanchard@gmail.com)
6 | """
7 |
8 | __version__ = "3.0.4"
9 | VERSION = __version__.split('.')
10 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/colorama/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
2 | from .initialise import init, deinit, reinit, colorama_text
3 | from .ansi import Fore, Back, Style, Cursor
4 | from .ansitowin32 import AnsiToWin32
5 |
6 | __version__ = '0.3.9'
7 |
8 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/colorama/initialise.py:
--------------------------------------------------------------------------------
1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
2 | import atexit
3 | import contextlib
4 | import sys
5 |
6 | from .ansitowin32 import AnsiToWin32
7 |
8 |
9 | orig_stdout = None
10 | orig_stderr = None
11 |
12 | wrapped_stdout = None
13 | wrapped_stderr = None
14 |
15 | atexit_done = False
16 |
17 |
18 | def reset_all():
19 | if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
20 | AnsiToWin32(orig_stdout).reset_all()
21 |
22 |
23 | def init(autoreset=False, convert=None, strip=None, wrap=True):
24 |
25 | if not wrap and any([autoreset, convert, strip]):
26 | raise ValueError('wrap=False conflicts with any other arg=True')
27 |
28 | global wrapped_stdout, wrapped_stderr
29 | global orig_stdout, orig_stderr
30 |
31 | orig_stdout = sys.stdout
32 | orig_stderr = sys.stderr
33 |
34 | if sys.stdout is None:
35 | wrapped_stdout = None
36 | else:
37 | sys.stdout = wrapped_stdout = \
38 | wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
39 | if sys.stderr is None:
40 | wrapped_stderr = None
41 | else:
42 | sys.stderr = wrapped_stderr = \
43 | wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
44 |
45 | global atexit_done
46 | if not atexit_done:
47 | atexit.register(reset_all)
48 | atexit_done = True
49 |
50 |
51 | def deinit():
52 | if orig_stdout is not None:
53 | sys.stdout = orig_stdout
54 | if orig_stderr is not None:
55 | sys.stderr = orig_stderr
56 |
57 |
58 | @contextlib.contextmanager
59 | def colorama_text(*args, **kwargs):
60 | init(*args, **kwargs)
61 | try:
62 | yield
63 | finally:
64 | deinit()
65 |
66 |
67 | def reinit():
68 | if wrapped_stdout is not None:
69 | sys.stdout = wrapped_stdout
70 | if wrapped_stderr is not None:
71 | sys.stderr = wrapped_stderr
72 |
73 |
74 | def wrap_stream(stream, convert, strip, autoreset, wrap):
75 | if wrap:
76 | wrapper = AnsiToWin32(stream,
77 | convert=convert, strip=strip, autoreset=autoreset)
78 | if wrapper.should_wrap():
79 | stream = wrapper.stream
80 | return stream
81 |
82 |
83 |
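A usage sketch of the initialisation helpers above; Fore and Style come from the package __init__ shown earlier, and colorama_text() scopes the stream wrapping.

    # Illustrative only: init() swaps sys.stdout/stderr for ANSI-aware wrappers.
    from pip._vendor.colorama import Fore, Style, colorama_text, deinit, init

    init(autoreset=True)                  # colours reset automatically after each write
    print(Fore.GREEN + 'download ok')
    deinit()                              # restore the original streams

    with colorama_text():                 # scoped alternative to init()/deinit()
        print(Fore.RED + 'error' + Style.RESET_ALL)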
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright (C) 2012-2017 Vinay Sajip.
4 | # Licensed to the Python Software Foundation under a contributor agreement.
5 | # See LICENSE.txt and CONTRIBUTORS.txt.
6 | #
7 | import logging
8 |
9 | __version__ = '0.2.7'
10 |
11 | class DistlibException(Exception):
12 | pass
13 |
14 | try:
15 | from logging import NullHandler
16 | except ImportError: # pragma: no cover
17 | class NullHandler(logging.Handler):
18 | def handle(self, record): pass
19 | def emit(self, record): pass
20 | def createLock(self): self.lock = None
21 |
22 | logger = logging.getLogger(__name__)
23 | logger.addHandler(NullHandler())
24 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/_backport/__init__.py:
--------------------------------------------------------------------------------
1 | """Modules copied from Python 3 standard libraries, for internal use only.
2 |
3 | Individual classes and functions are found in d2._backport.misc. Intended
4 | usage is to always import things missing from 3.1 from that module: the
5 | built-in/stdlib objects will be used if found.
6 | """
7 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/_backport/misc.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright (C) 2012 The Python Software Foundation.
4 | # See LICENSE.txt and CONTRIBUTORS.txt.
5 | #
6 | """Backports for individual classes and functions."""
7 |
8 | import os
9 | import sys
10 |
11 | __all__ = ['cache_from_source', 'callable', 'fsencode']
12 |
13 |
14 | try:
15 | from imp import cache_from_source
16 | except ImportError:
17 | def cache_from_source(py_file, debug=__debug__):
18 | ext = debug and 'c' or 'o'
19 | return py_file + ext
20 |
21 |
22 | try:
23 | callable = callable
24 | except NameError:
25 | from collections import Callable
26 |
27 | def callable(obj):
28 | return isinstance(obj, Callable)
29 |
30 |
31 | try:
32 | fsencode = os.fsencode
33 | except AttributeError:
34 | def fsencode(filename):
35 | if isinstance(filename, bytes):
36 | return filename
37 | elif isinstance(filename, str):
38 | return filename.encode(sys.getfilesystemencoding())
39 | else:
40 | raise TypeError("expect bytes or str, not %s" %
41 | type(filename).__name__)
42 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/_backport/sysconfig.cfg:
--------------------------------------------------------------------------------
1 | [posix_prefix]
2 | # Configuration directories. Some of these come straight out of the
3 | # configure script. They are for implementing the other variables, not to
4 | # be used directly in [resource_locations].
5 | confdir = /etc
6 | datadir = /usr/share
7 | libdir = /usr/lib
8 | statedir = /var
9 | # User resource directory
10 | local = ~/.local/{distribution.name}
11 |
12 | stdlib = {base}/lib/python{py_version_short}
13 | platstdlib = {platbase}/lib/python{py_version_short}
14 | purelib = {base}/lib/python{py_version_short}/site-packages
15 | platlib = {platbase}/lib/python{py_version_short}/site-packages
16 | include = {base}/include/python{py_version_short}{abiflags}
17 | platinclude = {platbase}/include/python{py_version_short}{abiflags}
18 | data = {base}
19 |
20 | [posix_home]
21 | stdlib = {base}/lib/python
22 | platstdlib = {base}/lib/python
23 | purelib = {base}/lib/python
24 | platlib = {base}/lib/python
25 | include = {base}/include/python
26 | platinclude = {base}/include/python
27 | scripts = {base}/bin
28 | data = {base}
29 |
30 | [nt]
31 | stdlib = {base}/Lib
32 | platstdlib = {base}/Lib
33 | purelib = {base}/Lib/site-packages
34 | platlib = {base}/Lib/site-packages
35 | include = {base}/Include
36 | platinclude = {base}/Include
37 | scripts = {base}/Scripts
38 | data = {base}
39 |
40 | [os2]
41 | stdlib = {base}/Lib
42 | platstdlib = {base}/Lib
43 | purelib = {base}/Lib/site-packages
44 | platlib = {base}/Lib/site-packages
45 | include = {base}/Include
46 | platinclude = {base}/Include
47 | scripts = {base}/Scripts
48 | data = {base}
49 |
50 | [os2_home]
51 | stdlib = {userbase}/lib/python{py_version_short}
52 | platstdlib = {userbase}/lib/python{py_version_short}
53 | purelib = {userbase}/lib/python{py_version_short}/site-packages
54 | platlib = {userbase}/lib/python{py_version_short}/site-packages
55 | include = {userbase}/include/python{py_version_short}
56 | scripts = {userbase}/bin
57 | data = {userbase}
58 |
59 | [nt_user]
60 | stdlib = {userbase}/Python{py_version_nodot}
61 | platstdlib = {userbase}/Python{py_version_nodot}
62 | purelib = {userbase}/Python{py_version_nodot}/site-packages
63 | platlib = {userbase}/Python{py_version_nodot}/site-packages
64 | include = {userbase}/Python{py_version_nodot}/Include
65 | scripts = {userbase}/Scripts
66 | data = {userbase}
67 |
68 | [posix_user]
69 | stdlib = {userbase}/lib/python{py_version_short}
70 | platstdlib = {userbase}/lib/python{py_version_short}
71 | purelib = {userbase}/lib/python{py_version_short}/site-packages
72 | platlib = {userbase}/lib/python{py_version_short}/site-packages
73 | include = {userbase}/include/python{py_version_short}
74 | scripts = {userbase}/bin
75 | data = {userbase}
76 |
77 | [osx_framework_user]
78 | stdlib = {userbase}/lib/python
79 | platstdlib = {userbase}/lib/python
80 | purelib = {userbase}/lib/python/site-packages
81 | platlib = {userbase}/lib/python/site-packages
82 | include = {userbase}/include
83 | scripts = {userbase}/bin
84 | data = {userbase}
85 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/t32.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/t32.exe
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/t64.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/t64.exe
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/w32.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/w32.exe
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/w64.exe:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/distlib/w64.exe
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | HTML parsing library based on the `WHATWG HTML specification
3 | <https://whatwg.org/html>`_. The parser is designed to be compatible with
4 | existing HTML found in the wild and implements well-defined error recovery that
5 | is largely compatible with modern desktop web browsers.
6 |
7 | Example usage::
8 |
9 | from pip._vendor import html5lib
10 | with open("my_document.html", "rb") as f:
11 | tree = html5lib.parse(f)
12 |
13 | For convenience, this module re-exports the following names:
14 |
15 | * :func:`~.html5parser.parse`
16 | * :func:`~.html5parser.parseFragment`
17 | * :class:`~.html5parser.HTMLParser`
18 | * :func:`~.treebuilders.getTreeBuilder`
19 | * :func:`~.treewalkers.getTreeWalker`
20 | * :func:`~.serializer.serialize`
21 | """
22 |
23 | from __future__ import absolute_import, division, unicode_literals
24 |
25 | from .html5parser import HTMLParser, parse, parseFragment
26 | from .treebuilders import getTreeBuilder
27 | from .treewalkers import getTreeWalker
28 | from .serializer import serialize
29 |
30 | __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder",
31 | "getTreeWalker", "serialize"]
32 |
33 | # this has to be at the top level, see how setup.py parses this
34 | #: Distribution version number.
35 | __version__ = "1.0.1"
36 |
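A short usage sketch for the re-exported entry points, assuming the vendored copy is importable; a separately installed html5lib exposes the same names:

    from pip._vendor import html5lib  # assumption: vendored path; plain `import html5lib` also works

    # parse() builds a full document tree (an xml.etree element by default).
    doc = html5lib.parse("<p>Hello <b>world</b></p>")

    # parseFragment() parses markup as if it appeared inside the given container element.
    cells = html5lib.parseFragment("<td>cell</td>", container="tr")

    print(doc, cells)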
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/_trie/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from .py import Trie as PyTrie
4 |
5 | Trie = PyTrie
6 |
7 | # pylint:disable=wrong-import-position
8 | try:
9 | from .datrie import Trie as DATrie
10 | except ImportError:
11 | pass
12 | else:
13 | Trie = DATrie
14 | # pylint:enable=wrong-import-position
15 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/_trie/_base.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from collections import Mapping
4 |
5 |
6 | class Trie(Mapping):
7 | """Abstract base class for tries"""
8 |
9 | def keys(self, prefix=None):
10 | # pylint:disable=arguments-differ
11 | keys = super(Trie, self).keys()
12 |
13 | if prefix is None:
14 | return set(keys)
15 |
16 | return {x for x in keys if x.startswith(prefix)}
17 |
18 | def has_keys_with_prefix(self, prefix):
19 | for key in self.keys():
20 | if key.startswith(prefix):
21 | return True
22 |
23 | return False
24 |
25 | def longest_prefix(self, prefix):
26 | if prefix in self:
27 | return prefix
28 |
29 | for i in range(1, len(prefix) + 1):
30 | if prefix[:-i] in self:
31 | return prefix[:-i]
32 |
33 | raise KeyError(prefix)
34 |
35 | def longest_prefix_item(self, prefix):
36 | lprefix = self.longest_prefix(prefix)
37 | return (lprefix, self[lprefix])
38 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/_trie/datrie.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from datrie import Trie as DATrie
4 | from pip._vendor.six import text_type
5 |
6 | from ._base import Trie as ABCTrie
7 |
8 |
9 | class Trie(ABCTrie):
10 | def __init__(self, data):
11 | chars = set()
12 | for key in data.keys():
13 | if not isinstance(key, text_type):
14 | raise TypeError("All keys must be strings")
15 | for char in key:
16 | chars.add(char)
17 |
18 | self._data = DATrie("".join(chars))
19 | for key, value in data.items():
20 | self._data[key] = value
21 |
22 | def __contains__(self, key):
23 | return key in self._data
24 |
25 | def __len__(self):
26 | return len(self._data)
27 |
28 | def __iter__(self):
29 | raise NotImplementedError()
30 |
31 | def __getitem__(self, key):
32 | return self._data[key]
33 |
34 | def keys(self, prefix=None):
35 | return self._data.keys(prefix)
36 |
37 | def has_keys_with_prefix(self, prefix):
38 | return self._data.has_keys_with_prefix(prefix)
39 |
40 | def longest_prefix(self, prefix):
41 | return self._data.longest_prefix(prefix)
42 |
43 | def longest_prefix_item(self, prefix):
44 | return self._data.longest_prefix_item(prefix)
45 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/_trie/py.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 | from pip._vendor.six import text_type
3 |
4 | from bisect import bisect_left
5 |
6 | from ._base import Trie as ABCTrie
7 |
8 |
9 | class Trie(ABCTrie):
10 | def __init__(self, data):
11 | if not all(isinstance(x, text_type) for x in data.keys()):
12 | raise TypeError("All keys must be strings")
13 |
14 | self._data = data
15 | self._keys = sorted(data.keys())
16 | self._cachestr = ""
17 | self._cachepoints = (0, len(data))
18 |
19 | def __contains__(self, key):
20 | return key in self._data
21 |
22 | def __len__(self):
23 | return len(self._data)
24 |
25 | def __iter__(self):
26 | return iter(self._data)
27 |
28 | def __getitem__(self, key):
29 | return self._data[key]
30 |
31 | def keys(self, prefix=None):
32 | if prefix is None or prefix == "" or not self._keys:
33 | return set(self._keys)
34 |
35 | if prefix.startswith(self._cachestr):
36 | lo, hi = self._cachepoints
37 | start = i = bisect_left(self._keys, prefix, lo, hi)
38 | else:
39 | start = i = bisect_left(self._keys, prefix)
40 |
41 | keys = set()
42 | if start == len(self._keys):
43 | return keys
44 |
45 | while self._keys[i].startswith(prefix):
46 | keys.add(self._keys[i])
47 | i += 1
48 |
49 | self._cachestr = prefix
50 | self._cachepoints = (start, i)
51 |
52 | return keys
53 |
54 | def has_keys_with_prefix(self, prefix):
55 | if prefix in self._data:
56 | return True
57 |
58 | if prefix.startswith(self._cachestr):
59 | lo, hi = self._cachepoints
60 | i = bisect_left(self._keys, prefix, lo, hi)
61 | else:
62 | i = bisect_left(self._keys, prefix)
63 |
64 | if i == len(self._keys):
65 | return False
66 |
67 | return self._keys[i].startswith(prefix)
68 |
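The prefix queries above come down to a binary search over the sorted key list plus a forward scan; a self-contained sketch of the same idea:

    from bisect import bisect_left

    def keys_with_prefix(sorted_keys, prefix):
        # Binary-search to the first key >= prefix, then scan forward while
        # keys still start with the prefix (mirrors Trie.keys() above).
        i = bisect_left(sorted_keys, prefix)
        out = set()
        while i < len(sorted_keys) and sorted_keys[i].startswith(prefix):
            out.add(sorted_keys[i])
            i += 1
        return out

    print(keys_with_prefix(sorted(["foo", "foobar", "fox", "bar"]), "foo"))  # {'foo', 'foobar'}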
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/filters/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/filters/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from . import base
4 |
5 | from collections import OrderedDict
6 |
7 |
8 | def _attr_key(attr):
9 | """Return an appropriate key for an attribute for sorting
10 |
11 | Attributes have a namespace that can be either ``None`` or a string. We
12 | can't compare the two because they're different types, so we convert
13 | ``None`` to an empty string first.
14 |
15 | """
16 | return (attr[0][0] or ''), attr[0][1]
17 |
18 |
19 | class Filter(base.Filter):
20 | """Alphabetizes attributes for elements"""
21 | def __iter__(self):
22 | for token in base.Filter.__iter__(self):
23 | if token["type"] in ("StartTag", "EmptyTag"):
24 | attrs = OrderedDict()
25 | for name, value in sorted(token["data"].items(),
26 | key=_attr_key):
27 | attrs[name] = value
28 | token["data"] = attrs
29 | yield token
30 |
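The sort key above maps a None namespace to the empty string so namespaced and un-namespaced attributes can be compared; a tiny standalone demonstration with made-up attribute data:

    def attr_key(attr):
        # attr is ((namespace, name), value); a None namespace sorts as "".
        return (attr[0][0] or ""), attr[0][1]

    attrs = {
        (None, "id"): "main",
        ("http://www.w3.org/1999/xlink", "href"): "#a",
        (None, "class"): "box",
    }
    for (ns, name), value in sorted(attrs.items(), key=attr_key):
        print(ns, name, value)
    # The un-namespaced class and id attributes come first, xlink:href last.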
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/filters/base.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 |
4 | class Filter(object):
5 | def __init__(self, source):
6 | self.source = source
7 |
8 | def __iter__(self):
9 | return iter(self.source)
10 |
11 | def __getattr__(self, name):
12 | return getattr(self.source, name)
13 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/filters/whitespace.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | import re
4 |
5 | from . import base
6 | from ..constants import rcdataElements, spaceCharacters
7 | spaceCharacters = "".join(spaceCharacters)
8 |
9 | SPACES_REGEX = re.compile("[%s]+" % spaceCharacters)
10 |
11 |
12 | class Filter(base.Filter):
13 | """Collapses whitespace except in pre, textarea, and script elements"""
14 | spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements))
15 |
16 | def __iter__(self):
17 | preserve = 0
18 | for token in base.Filter.__iter__(self):
19 | type = token["type"]
20 | if type == "StartTag" \
21 | and (preserve or token["name"] in self.spacePreserveElements):
22 | preserve += 1
23 |
24 | elif type == "EndTag" and preserve:
25 | preserve -= 1
26 |
27 | elif not preserve and type == "SpaceCharacters" and token["data"]:
28 | # Test on token["data"] above to not introduce spaces where there were not
29 | token["data"] = " "
30 |
31 | elif not preserve and type == "Characters":
32 | token["data"] = collapse_spaces(token["data"])
33 |
34 | yield token
35 |
36 |
37 | def collapse_spaces(text):
38 | return SPACES_REGEX.sub(' ', text)
39 |
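A hedged pipeline sketch showing the filter wrapped around a tree walker before serialization; the vendored import paths are assumptions, and a standalone html5lib install works the same way:

    from pip._vendor import html5lib
    from pip._vendor.html5lib.filters.whitespace import Filter as WhitespaceFilter
    from pip._vendor.html5lib.serializer import HTMLSerializer

    tree = html5lib.parse("<p>lots     of\n\n   space</p>")
    walker = html5lib.getTreeWalker("etree")
    stream = WhitespaceFilter(walker(tree))   # collapse runs of whitespace in the token stream
    print(HTMLSerializer().render(stream))    # the text collapses to "lots of space"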
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/treeadapters/__init__.py:
--------------------------------------------------------------------------------
1 | """Tree adapters let you convert from one tree structure to another
2 |
3 | Example:
4 |
5 | .. code-block:: python
6 |
7 | from pip._vendor import html5lib
8 | from pip._vendor.html5lib.treeadapters import genshi
9 |
10 | doc = '<html><body>Hi!</body></html>'
11 | treebuilder = html5lib.getTreeBuilder('etree')
12 | parser = html5lib.HTMLParser(tree=treebuilder)
13 | tree = parser.parse(doc)
14 | TreeWalker = html5lib.getTreeWalker('etree')
15 |
16 | genshi_tree = genshi.to_genshi(TreeWalker(tree))
17 |
18 | """
19 | from __future__ import absolute_import, division, unicode_literals
20 |
21 | from . import sax
22 |
23 | __all__ = ["sax"]
24 |
25 | try:
26 | from . import genshi # noqa
27 | except ImportError:
28 | pass
29 | else:
30 | __all__.append("genshi")
31 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/treeadapters/genshi.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from genshi.core import QName, Attrs
4 | from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
5 |
6 |
7 | def to_genshi(walker):
8 | """Convert a tree to a genshi tree
9 |
10 | :arg walker: the treewalker to use to walk the tree to convert it
11 |
12 | :returns: generator of genshi nodes
13 |
14 | """
15 | text = []
16 | for token in walker:
17 | type = token["type"]
18 | if type in ("Characters", "SpaceCharacters"):
19 | text.append(token["data"])
20 | elif text:
21 | yield TEXT, "".join(text), (None, -1, -1)
22 | text = []
23 |
24 | if type in ("StartTag", "EmptyTag"):
25 | if token["namespace"]:
26 | name = "{%s}%s" % (token["namespace"], token["name"])
27 | else:
28 | name = token["name"]
29 | attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
30 | for attr, value in token["data"].items()])
31 | yield (START, (QName(name), attrs), (None, -1, -1))
32 | if type == "EmptyTag":
33 | type = "EndTag"
34 |
35 | if type == "EndTag":
36 | if token["namespace"]:
37 | name = "{%s}%s" % (token["namespace"], token["name"])
38 | else:
39 | name = token["name"]
40 |
41 | yield END, QName(name), (None, -1, -1)
42 |
43 | elif type == "Comment":
44 | yield COMMENT, token["data"], (None, -1, -1)
45 |
46 | elif type == "Doctype":
47 | yield DOCTYPE, (token["name"], token["publicId"],
48 | token["systemId"]), (None, -1, -1)
49 |
50 | else:
51 | pass # FIXME: What to do?
52 |
53 | if text:
54 | yield TEXT, "".join(text), (None, -1, -1)
55 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/treeadapters/sax.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from xml.sax.xmlreader import AttributesNSImpl
4 |
5 | from ..constants import adjustForeignAttributes, unadjustForeignAttributes
6 |
7 | prefix_mapping = {}
8 | for prefix, localName, namespace in adjustForeignAttributes.values():
9 | if prefix is not None:
10 | prefix_mapping[prefix] = namespace
11 |
12 |
13 | def to_sax(walker, handler):
14 | """Call SAX-like content handler based on treewalker walker
15 |
16 | :arg walker: the treewalker to use to walk the tree to convert it
17 |
18 | :arg handler: SAX handler to use
19 |
20 | """
21 | handler.startDocument()
22 | for prefix, namespace in prefix_mapping.items():
23 | handler.startPrefixMapping(prefix, namespace)
24 |
25 | for token in walker:
26 | type = token["type"]
27 | if type == "Doctype":
28 | continue
29 | elif type in ("StartTag", "EmptyTag"):
30 | attrs = AttributesNSImpl(token["data"],
31 | unadjustForeignAttributes)
32 | handler.startElementNS((token["namespace"], token["name"]),
33 | token["name"],
34 | attrs)
35 | if type == "EmptyTag":
36 | handler.endElementNS((token["namespace"], token["name"]),
37 | token["name"])
38 | elif type == "EndTag":
39 | handler.endElementNS((token["namespace"], token["name"]),
40 | token["name"])
41 | elif type in ("Characters", "SpaceCharacters"):
42 | handler.characters(token["data"])
43 | elif type == "Comment":
44 | pass
45 | else:
46 | assert False, "Unknown token type"
47 |
48 | for prefix, namespace in prefix_mapping.items():
49 | handler.endPrefixMapping(prefix)
50 | handler.endDocument()
51 |
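A usage sketch for to_sax() with a minimal SAX content handler that just records events; the vendored import paths and the Collector class are illustrative assumptions:

    import xml.sax.handler

    from pip._vendor import html5lib
    from pip._vendor.html5lib.treeadapters.sax import to_sax

    class Collector(xml.sax.handler.ContentHandler):
        # Record a simplified trace of the SAX events emitted by to_sax().
        def __init__(self):
            super().__init__()
            self.events = []
        def startElementNS(self, name, qname, attrs):
            self.events.append(("start", name[1]))
        def endElementNS(self, name, qname):
            self.events.append(("end", name[1]))
        def characters(self, content):
            self.events.append(("text", content))

    tree = html5lib.parse("<p>Hi</p>")
    walker = html5lib.getTreeWalker("etree")
    handler = Collector()
    to_sax(walker(tree), handler)
    print(handler.events)   # start/end events for html, head, body, p plus the "Hi" text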
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/treewalkers/dom.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from xml.dom import Node
4 |
5 | from . import base
6 |
7 |
8 | class TreeWalker(base.NonRecursiveTreeWalker):
9 | def getNodeDetails(self, node):
10 | if node.nodeType == Node.DOCUMENT_TYPE_NODE:
11 | return base.DOCTYPE, node.name, node.publicId, node.systemId
12 |
13 | elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
14 | return base.TEXT, node.nodeValue
15 |
16 | elif node.nodeType == Node.ELEMENT_NODE:
17 | attrs = {}
18 | for attr in list(node.attributes.keys()):
19 | attr = node.getAttributeNode(attr)
20 | if attr.namespaceURI:
21 | attrs[(attr.namespaceURI, attr.localName)] = attr.value
22 | else:
23 | attrs[(None, attr.name)] = attr.value
24 | return (base.ELEMENT, node.namespaceURI, node.nodeName,
25 | attrs, node.hasChildNodes())
26 |
27 | elif node.nodeType == Node.COMMENT_NODE:
28 | return base.COMMENT, node.nodeValue
29 |
30 | elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
31 | return (base.DOCUMENT,)
32 |
33 | else:
34 | return base.UNKNOWN, node.nodeType
35 |
36 | def getFirstChild(self, node):
37 | return node.firstChild
38 |
39 | def getNextSibling(self, node):
40 | return node.nextSibling
41 |
42 | def getParentNode(self, node):
43 | return node.parentNode
44 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/html5lib/treewalkers/genshi.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import, division, unicode_literals
2 |
3 | from genshi.core import QName
4 | from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
5 | from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
6 |
7 | from . import base
8 |
9 | from ..constants import voidElements, namespaces
10 |
11 |
12 | class TreeWalker(base.TreeWalker):
13 | def __iter__(self):
14 | # Buffer the events so we can pass in the following one
15 | previous = None
16 | for event in self.tree:
17 | if previous is not None:
18 | for token in self.tokens(previous, event):
19 | yield token
20 | previous = event
21 |
22 | # Don't forget the final event!
23 | if previous is not None:
24 | for token in self.tokens(previous, None):
25 | yield token
26 |
27 | def tokens(self, event, next):
28 | kind, data, _ = event
29 | if kind == START:
30 | tag, attribs = data
31 | name = tag.localname
32 | namespace = tag.namespace
33 | converted_attribs = {}
34 | for k, v in attribs:
35 | if isinstance(k, QName):
36 | converted_attribs[(k.namespace, k.localname)] = v
37 | else:
38 | converted_attribs[(None, k)] = v
39 |
40 | if namespace == namespaces["html"] and name in voidElements:
41 | for token in self.emptyTag(namespace, name, converted_attribs,
42 | not next or next[0] != END or
43 | next[1] != tag):
44 | yield token
45 | else:
46 | yield self.startTag(namespace, name, converted_attribs)
47 |
48 | elif kind == END:
49 | name = data.localname
50 | namespace = data.namespace
51 | if namespace != namespaces["html"] or name not in voidElements:
52 | yield self.endTag(namespace, name)
53 |
54 | elif kind == COMMENT:
55 | yield self.comment(data)
56 |
57 | elif kind == TEXT:
58 | for token in self.text(data):
59 | yield token
60 |
61 | elif kind == DOCTYPE:
62 | yield self.doctype(*data)
63 |
64 | elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
65 | START_CDATA, END_CDATA, PI):
66 | pass
67 |
68 | else:
69 | yield self.unknown(kind)
70 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/idna/__init__.py:
--------------------------------------------------------------------------------
1 | from .package_data import __version__
2 | from .core import *
3 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/idna/compat.py:
--------------------------------------------------------------------------------
1 | from .core import *
2 | from .codec import *
3 |
4 | def ToASCII(label):
5 | return encode(label)
6 |
7 | def ToUnicode(label):
8 | return decode(label)
9 |
10 | def nameprep(s):
11 | raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol")
12 |
13 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/idna/intranges.py:
--------------------------------------------------------------------------------
1 | """
2 | Given a list of integers, made up of (hopefully) a small number of long runs
3 | of consecutive integers, compute a representation of the form
4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present
5 | in the original list?" in time O(log(# runs)).
6 | """
7 |
8 | import bisect
9 |
10 | def intranges_from_list(list_):
11 | """Represent a list of integers as a sequence of ranges:
12 | ((start_0, end_0), (start_1, end_1), ...), such that the original
13 | integers are exactly those x such that start_i <= x < end_i for some i.
14 |
15 | Ranges are encoded as single integers (start << 32 | end), not as tuples.
16 | """
17 |
18 | sorted_list = sorted(list_)
19 | ranges = []
20 | last_write = -1
21 | for i in range(len(sorted_list)):
22 | if i+1 < len(sorted_list):
23 | if sorted_list[i] == sorted_list[i+1]-1:
24 | continue
25 | current_range = sorted_list[last_write+1:i+1]
26 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1))
27 | last_write = i
28 |
29 | return tuple(ranges)
30 |
31 | def _encode_range(start, end):
32 | return (start << 32) | end
33 |
34 | def _decode_range(r):
35 | return (r >> 32), (r & ((1 << 32) - 1))
36 |
37 |
38 | def intranges_contain(int_, ranges):
39 | """Determine if `int_` falls into one of the ranges in `ranges`."""
40 | tuple_ = _encode_range(int_, 0)
41 | pos = bisect.bisect_left(ranges, tuple_)
42 | # we could be immediately ahead of a tuple (start, end)
43 | # with start < int_ <= end
44 | if pos > 0:
45 | left, right = _decode_range(ranges[pos-1])
46 | if left <= int_ < right:
47 | return True
48 | # or we could be immediately behind a tuple (int_, end)
49 | if pos < len(ranges):
50 | left, _ = _decode_range(ranges[pos])
51 | if left == int_:
52 | return True
53 | return False
54 |
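A self-contained worked example of the scheme described in the docstring above: each run [start, end) is packed into a single integer start << 32 | end, and membership is answered with one bisect. The values below are illustrative:

    import bisect

    def encode_range(start, end):
        # Pack the half-open run [start, end) into one integer.
        return (start << 32) | end

    # Runs for the list [1, 2, 3, 10, 11, 40]
    runs = [encode_range(1, 4), encode_range(10, 12), encode_range(40, 41)]

    def contains(x, runs):
        pos = bisect.bisect_left(runs, encode_range(x, 0))
        if pos > 0:
            start, end = runs[pos - 1] >> 32, runs[pos - 1] & 0xFFFFFFFF
            if start <= x < end:
                return True
        return pos < len(runs) and (runs[pos] >> 32) == x

    print(contains(2, runs), contains(5, runs), contains(40, runs))   # True False True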
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/idna/package_data.py:
--------------------------------------------------------------------------------
1 | __version__ = '2.6'
2 |
3 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/lockfile/linklockfile.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import time
4 | import os
5 |
6 | from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
7 | AlreadyLocked)
8 |
9 |
10 | class LinkLockFile(LockBase):
11 | """Lock access to a file using atomic property of link(2).
12 |
13 | >>> lock = LinkLockFile('somefile')
14 | >>> lock = LinkLockFile('somefile', threaded=False)
15 | """
16 |
17 | def acquire(self, timeout=None):
18 | try:
19 | open(self.unique_name, "wb").close()
20 | except IOError:
21 | raise LockFailed("failed to create %s" % self.unique_name)
22 |
23 | timeout = timeout if timeout is not None else self.timeout
24 | end_time = time.time()
25 | if timeout is not None and timeout > 0:
26 | end_time += timeout
27 |
28 | while True:
29 | # Try and create a hard link to it.
30 | try:
31 | os.link(self.unique_name, self.lock_file)
32 | except OSError:
33 | # Link creation failed. Maybe we've double-locked?
34 | nlinks = os.stat(self.unique_name).st_nlink
35 | if nlinks == 2:
36 | # The original link plus the one I created == 2. We're
37 | # good to go.
38 | return
39 | else:
40 | # Otherwise the lock creation failed.
41 | if timeout is not None and time.time() > end_time:
42 | os.unlink(self.unique_name)
43 | if timeout > 0:
44 | raise LockTimeout("Timeout waiting to acquire"
45 | " lock for %s" %
46 | self.path)
47 | else:
48 | raise AlreadyLocked("%s is already locked" %
49 | self.path)
50 | time.sleep(timeout is not None and timeout / 10 or 0.1)
51 | else:
52 | # Link creation succeeded. We're good to go.
53 | return
54 |
55 | def release(self):
56 | if not self.is_locked():
57 | raise NotLocked("%s is not locked" % self.path)
58 | elif not os.path.exists(self.unique_name):
59 | raise NotMyLock("%s is locked, but not by me" % self.path)
60 | os.unlink(self.unique_name)
61 | os.unlink(self.lock_file)
62 |
63 | def is_locked(self):
64 | return os.path.exists(self.lock_file)
65 |
66 | def i_am_locking(self):
67 | return (self.is_locked() and
68 | os.path.exists(self.unique_name) and
69 | os.stat(self.unique_name).st_nlink == 2)
70 |
71 | def break_lock(self):
72 | if os.path.exists(self.lock_file):
73 | os.unlink(self.lock_file)
74 |
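A minimal acquire/release sketch, assuming the vendored module path below; only the constructor and the acquire()/release() calls shown in the class above are used:

    import os
    import tempfile

    from pip._vendor.lockfile.linklockfile import LinkLockFile  # assumption: vendored path

    path = os.path.join(tempfile.gettempdir(), "demo-resource")
    lock = LinkLockFile(path, threaded=False)

    lock.acquire(timeout=5)          # creates the .lock file via os.link()
    try:
        pass                         # exclusive work on the resource goes here
    finally:
        lock.release()               # removes both the unique name and the lock file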
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/lockfile/symlinklockfile.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | import os
4 | import time
5 |
6 | from . import (LockBase, NotLocked, NotMyLock, LockTimeout,
7 | AlreadyLocked)
8 |
9 |
10 | class SymlinkLockFile(LockBase):
11 | """Lock access to a file using symlink(2)."""
12 |
13 | def __init__(self, path, threaded=True, timeout=None):
14 | # super(SymlinkLockFile).__init(...)
15 | LockBase.__init__(self, path, threaded, timeout)
16 | # split it back!
17 | self.unique_name = os.path.split(self.unique_name)[1]
18 |
19 | def acquire(self, timeout=None):
20 | # Hopefully unnecessary for symlink.
21 | # try:
22 | # open(self.unique_name, "wb").close()
23 | # except IOError:
24 | # raise LockFailed("failed to create %s" % self.unique_name)
25 | timeout = timeout if timeout is not None else self.timeout
26 | end_time = time.time()
27 | if timeout is not None and timeout > 0:
28 | end_time += timeout
29 |
30 | while True:
31 | # Try and create a symbolic link to it.
32 | try:
33 | os.symlink(self.unique_name, self.lock_file)
34 | except OSError:
35 | # Link creation failed. Maybe we've double-locked?
36 | if self.i_am_locking():
37 | # Linked to our unique name. Proceed.
38 | return
39 | else:
40 | # Otherwise the lock creation failed.
41 | if timeout is not None and time.time() > end_time:
42 | if timeout > 0:
43 | raise LockTimeout("Timeout waiting to acquire"
44 | " lock for %s" %
45 | self.path)
46 | else:
47 | raise AlreadyLocked("%s is already locked" %
48 | self.path)
49 | time.sleep(timeout / 10 if timeout is not None else 0.1)
50 | else:
51 | # Link creation succeeded. We're good to go.
52 | return
53 |
54 | def release(self):
55 | if not self.is_locked():
56 | raise NotLocked("%s is not locked" % self.path)
57 | elif not self.i_am_locking():
58 | raise NotMyLock("%s is locked, but not by me" % self.path)
59 | os.unlink(self.lock_file)
60 |
61 | def is_locked(self):
62 | return os.path.islink(self.lock_file)
63 |
64 | def i_am_locking(self):
65 | return (os.path.islink(self.lock_file)
66 | and os.readlink(self.lock_file) == self.unique_name)
67 |
68 | def break_lock(self):
69 | if os.path.islink(self.lock_file): # exists && link
70 | os.unlink(self.lock_file)
71 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/msgpack/__init__.py:
--------------------------------------------------------------------------------
1 | # coding: utf-8
2 | from pip._vendor.msgpack._version import version
3 | from pip._vendor.msgpack.exceptions import *
4 |
5 | from collections import namedtuple
6 |
7 |
8 | class ExtType(namedtuple('ExtType', 'code data')):
9 | """ExtType represents ext type in msgpack."""
10 | def __new__(cls, code, data):
11 | if not isinstance(code, int):
12 | raise TypeError("code must be int")
13 | if not isinstance(data, bytes):
14 | raise TypeError("data must be bytes")
15 | if not 0 <= code <= 127:
16 | raise ValueError("code must be 0~127")
17 | return super(ExtType, cls).__new__(cls, code, data)
18 |
19 |
20 | import os
21 | if os.environ.get('MSGPACK_PUREPYTHON'):
22 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker
23 | else:
24 | try:
25 | from pip._vendor.msgpack._packer import Packer
26 | from pip._vendor.msgpack._unpacker import unpackb, Unpacker
27 | except ImportError:
28 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker
29 |
30 |
31 | def pack(o, stream, **kwargs):
32 | """
33 | Pack object `o` and write it to `stream`
34 |
35 | See :class:`Packer` for options.
36 | """
37 | packer = Packer(**kwargs)
38 | stream.write(packer.pack(o))
39 |
40 |
41 | def packb(o, **kwargs):
42 | """
43 | Pack object `o` and return packed bytes
44 |
45 | See :class:`Packer` for options.
46 | """
47 | return Packer(**kwargs).pack(o)
48 |
49 |
50 | def unpack(stream, **kwargs):
51 | """
52 | Unpack an object from `stream`.
53 |
54 | Raises `ExtraData` when `stream` contains extra bytes.
55 | See :class:`Unpacker` for options.
56 | """
57 | data = stream.read()
58 | return unpackb(data, **kwargs)
59 |
60 |
61 | # alias for compatibility to simplejson/marshal/pickle.
62 | load = unpack
63 | loads = unpackb
64 |
65 | dump = pack
66 | dumps = packb
67 |
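A round-trip sketch for packb()/unpackb(), assuming the vendored copy is importable; a standalone msgpack install exposes the same API:

    from pip._vendor import msgpack   # assumption: vendored path; plain `import msgpack` also works

    payload = {"id": 7, "tags": ["a", "b"]}
    blob = msgpack.packb(payload, use_bin_type=True)   # bytes
    print(msgpack.unpackb(blob, raw=False))            # {'id': 7, 'tags': ['a', 'b']}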
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/msgpack/_version.py:
--------------------------------------------------------------------------------
1 | version = (0, 5, 6)
2 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/msgpack/exceptions.py:
--------------------------------------------------------------------------------
1 | class UnpackException(Exception):
2 | """Deprecated. Use Exception instead to catch all exception during unpacking."""
3 |
4 |
5 | class BufferFull(UnpackException):
6 | pass
7 |
8 |
9 | class OutOfData(UnpackException):
10 | pass
11 |
12 |
13 | class UnpackValueError(UnpackException, ValueError):
14 | """Deprecated. Use ValueError instead."""
15 |
16 |
17 | class ExtraData(UnpackValueError):
18 | def __init__(self, unpacked, extra):
19 | self.unpacked = unpacked
20 | self.extra = extra
21 |
22 | def __str__(self):
23 | return "unpack(b) received extra data."
24 |
25 |
26 | class PackException(Exception):
27 | """Deprecated. Use Exception instead to catch all exception during packing."""
28 |
29 |
30 | class PackValueError(PackException, ValueError):
31 | """PackValueError is raised when type of input data is supported but it's value is unsupported.
32 |
33 | Deprecated. Use ValueError instead.
34 | """
35 |
36 |
37 | class PackOverflowError(PackValueError, OverflowError):
38 | """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32).
39 |
40 | Deprecated. Use ValueError instead.
41 | """
42 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/packaging/__about__.py:
--------------------------------------------------------------------------------
1 | # This file is dual licensed under the terms of the Apache License, Version
2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 | # for complete details.
4 | from __future__ import absolute_import, division, print_function
5 |
6 | __all__ = [
7 | "__title__", "__summary__", "__uri__", "__version__", "__author__",
8 | "__email__", "__license__", "__copyright__",
9 | ]
10 |
11 | __title__ = "packaging"
12 | __summary__ = "Core utilities for Python packages"
13 | __uri__ = "https://github.com/pypa/packaging"
14 |
15 | __version__ = "17.1"
16 |
17 | __author__ = "Donald Stufft and individual contributors"
18 | __email__ = "donald@stufft.io"
19 |
20 | __license__ = "BSD or Apache License, Version 2.0"
21 | __copyright__ = "Copyright 2014-2016 %s" % __author__
22 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/packaging/__init__.py:
--------------------------------------------------------------------------------
1 | # This file is dual licensed under the terms of the Apache License, Version
2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 | # for complete details.
4 | from __future__ import absolute_import, division, print_function
5 |
6 | from .__about__ import (
7 | __author__, __copyright__, __email__, __license__, __summary__, __title__,
8 | __uri__, __version__
9 | )
10 |
11 | __all__ = [
12 | "__title__", "__summary__", "__uri__", "__version__", "__author__",
13 | "__email__", "__license__", "__copyright__",
14 | ]
15 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/packaging/_compat.py:
--------------------------------------------------------------------------------
1 | # This file is dual licensed under the terms of the Apache License, Version
2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 | # for complete details.
4 | from __future__ import absolute_import, division, print_function
5 |
6 | import sys
7 |
8 |
9 | PY2 = sys.version_info[0] == 2
10 | PY3 = sys.version_info[0] == 3
11 |
12 | # flake8: noqa
13 |
14 | if PY3:
15 | string_types = str,
16 | else:
17 | string_types = basestring,
18 |
19 |
20 | def with_metaclass(meta, *bases):
21 | """
22 | Create a base class with a metaclass.
23 | """
24 | # This requires a bit of explanation: the basic idea is to make a dummy
25 | # metaclass for one level of class instantiation that replaces itself with
26 | # the actual metaclass.
27 | class metaclass(meta):
28 | def __new__(cls, name, this_bases, d):
29 | return meta(name, bases, d)
30 | return type.__new__(metaclass, 'temporary_class', (), {})
31 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/packaging/_structures.py:
--------------------------------------------------------------------------------
1 | # This file is dual licensed under the terms of the Apache License, Version
2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 | # for complete details.
4 | from __future__ import absolute_import, division, print_function
5 |
6 |
7 | class Infinity(object):
8 |
9 | def __repr__(self):
10 | return "Infinity"
11 |
12 | def __hash__(self):
13 | return hash(repr(self))
14 |
15 | def __lt__(self, other):
16 | return False
17 |
18 | def __le__(self, other):
19 | return False
20 |
21 | def __eq__(self, other):
22 | return isinstance(other, self.__class__)
23 |
24 | def __ne__(self, other):
25 | return not isinstance(other, self.__class__)
26 |
27 | def __gt__(self, other):
28 | return True
29 |
30 | def __ge__(self, other):
31 | return True
32 |
33 | def __neg__(self):
34 | return NegativeInfinity
35 |
36 |
37 | Infinity = Infinity()
38 |
39 |
40 | class NegativeInfinity(object):
41 |
42 | def __repr__(self):
43 | return "-Infinity"
44 |
45 | def __hash__(self):
46 | return hash(repr(self))
47 |
48 | def __lt__(self, other):
49 | return True
50 |
51 | def __le__(self, other):
52 | return True
53 |
54 | def __eq__(self, other):
55 | return isinstance(other, self.__class__)
56 |
57 | def __ne__(self, other):
58 | return not isinstance(other, self.__class__)
59 |
60 | def __gt__(self, other):
61 | return False
62 |
63 | def __ge__(self, other):
64 | return False
65 |
66 | def __neg__(self):
67 | return Infinity
68 |
69 |
70 | NegativeInfinity = NegativeInfinity()
71 |
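These two singletons exist so version components can be compared against sentinels that sort above or below everything else; a quick illustration, assuming the vendored import path below:

    from pip._vendor.packaging._structures import Infinity, NegativeInfinity

    print(Infinity > 10 ** 9)                 # True: compares greater than anything
    print(NegativeInfinity < "")              # True: compares less than anything
    print(-Infinity is NegativeInfinity)      # True: negation flips between the two sentinels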
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/packaging/utils.py:
--------------------------------------------------------------------------------
1 | # This file is dual licensed under the terms of the Apache License, Version
2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3 | # for complete details.
4 | from __future__ import absolute_import, division, print_function
5 |
6 | import re
7 |
8 | from .version import InvalidVersion, Version
9 |
10 |
11 | _canonicalize_regex = re.compile(r"[-_.]+")
12 |
13 |
14 | def canonicalize_name(name):
15 | # This is taken from PEP 503.
16 | return _canonicalize_regex.sub("-", name).lower()
17 |
18 |
19 | def canonicalize_version(version):
20 | """
21 | This is very similar to Version.__str__, but has one subtle difference
22 | in the way it handles the release segment.
23 | """
24 |
25 | try:
26 | version = Version(version)
27 | except InvalidVersion:
28 | # Legacy versions cannot be normalized
29 | return version
30 |
31 | parts = []
32 |
33 | # Epoch
34 | if version.epoch != 0:
35 | parts.append("{0}!".format(version.epoch))
36 |
37 | # Release segment
38 | # NB: This strips trailing '.0's to normalize
39 | parts.append(
40 | re.sub(
41 | r'(\.0)+$',
42 | '',
43 | ".".join(str(x) for x in version.release)
44 | )
45 | )
46 |
47 | # Pre-release
48 | if version.pre is not None:
49 | parts.append("".join(str(x) for x in version.pre))
50 |
51 | # Post-release
52 | if version.post is not None:
53 | parts.append(".post{0}".format(version.post))
54 |
55 | # Development release
56 | if version.dev is not None:
57 | parts.append(".dev{0}".format(version.dev))
58 |
59 | # Local version segment
60 | if version.local is not None:
61 | parts.append("+{0}".format(version.local))
62 |
63 | return "".join(parts)
64 |
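Example outputs for the two helpers above, assuming the vendored import path below:

    from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version

    print(canonicalize_name("Flask_RESTful"))    # flask-restful (PEP 503 normalization)
    print(canonicalize_version("1.4.0"))         # 1.4 (trailing .0 release segments stripped)
    print(canonicalize_version("2.0.0.post1"))   # 2.post1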
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/pkg_resources/py31compat.py:
--------------------------------------------------------------------------------
1 | import os
2 | import errno
3 | import sys
4 |
5 |
6 | def _makedirs_31(path, exist_ok=False):
7 | try:
8 | os.makedirs(path)
9 | except OSError as exc:
10 | if not exist_ok or exc.errno != errno.EEXIST:
11 | raise
12 |
13 |
14 | # rely on compatibility behavior until mode considerations
15 | # and exists_ok considerations are disentangled.
16 | # See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
17 | needs_makedirs = (
18 | sys.version_info < (3, 2, 5) or
19 | (3, 3) <= sys.version_info < (3, 3, 6) or
20 | (3, 4) <= sys.version_info < (3, 4, 1)
21 | )
22 | makedirs = _makedirs_31 if needs_makedirs else os.makedirs
23 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/progress/counter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Copyright (c) 2012 Giorgos Verigakis
4 | #
5 | # Permission to use, copy, modify, and distribute this software for any
6 | # purpose with or without fee is hereby granted, provided that the above
7 | # copyright notice and this permission notice appear in all copies.
8 | #
9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 |
17 | from __future__ import unicode_literals
18 | from . import Infinite, Progress
19 | from .helpers import WriteMixin
20 |
21 |
22 | class Counter(WriteMixin, Infinite):
23 | message = ''
24 | hide_cursor = True
25 |
26 | def update(self):
27 | self.write(str(self.index))
28 |
29 |
30 | class Countdown(WriteMixin, Progress):
31 | hide_cursor = True
32 |
33 | def update(self):
34 | self.write(str(self.remaining))
35 |
36 |
37 | class Stack(WriteMixin, Progress):
38 | phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
39 | hide_cursor = True
40 |
41 | def update(self):
42 | nphases = len(self.phases)
43 | i = min(nphases - 1, int(self.progress * nphases))
44 | self.write(self.phases[i])
45 |
46 |
47 | class Pie(Stack):
48 | phases = ('○', '◔', '◑', '◕', '●')
49 |
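A usage sketch for the Counter widget, assuming the vendored path below and the next()/finish() methods provided by the Infinite base class:

    from pip._vendor.progress.counter import Counter  # assumption: vendored path

    counter = Counter("Items processed: ")
    for _ in range(100):
        counter.next()      # rewrites the count in place when attached to a TTY
    counter.finish()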
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/progress/spinner.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Copyright (c) 2012 Giorgos Verigakis
4 | #
5 | # Permission to use, copy, modify, and distribute this software for any
6 | # purpose with or without fee is hereby granted, provided that the above
7 | # copyright notice and this permission notice appear in all copies.
8 | #
9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16 |
17 | from __future__ import unicode_literals
18 | from . import Infinite
19 | from .helpers import WriteMixin
20 |
21 |
22 | class Spinner(WriteMixin, Infinite):
23 | message = ''
24 | phases = ('-', '\\', '|', '/')
25 | hide_cursor = True
26 |
27 | def update(self):
28 | i = self.index % len(self.phases)
29 | self.write(self.phases[i])
30 |
31 |
32 | class PieSpinner(Spinner):
33 | phases = ['◷', '◶', '◵', '◴']
34 |
35 |
36 | class MoonSpinner(Spinner):
37 | phases = ['◑', '◒', '◐', '◓']
38 |
39 |
40 | class LineSpinner(Spinner):
41 | phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻']
42 |
43 | class PixelSpinner(Spinner):
44 | phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
45 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/pytoml/__init__.py:
--------------------------------------------------------------------------------
1 | from .core import TomlError
2 | from .parser import load, loads
3 | from .writer import dump, dumps
4 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/pytoml/core.py:
--------------------------------------------------------------------------------
1 | class TomlError(RuntimeError):
2 | def __init__(self, message, line, col, filename):
3 | RuntimeError.__init__(self, message, line, col, filename)
4 | self.message = message
5 | self.line = line
6 | self.col = col
7 | self.filename = filename
8 |
9 | def __str__(self):
10 | return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message)
11 |
12 | def __repr__(self):
13 | return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename)
14 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/__version__.py:
--------------------------------------------------------------------------------
1 | # .-. .-. .-. . . .-. .-. .-. .-.
2 | # |( |- |.| | | |- `-. | `-.
3 | # ' ' `-' `-`.`-' `-' `-' ' `-'
4 |
5 | __title__ = 'requests'
6 | __description__ = 'Python HTTP for Humans.'
7 | __url__ = 'http://python-requests.org'
8 | __version__ = '2.18.4'
9 | __build__ = 0x021804
10 | __author__ = 'Kenneth Reitz'
11 | __author_email__ = 'me@kennethreitz.org'
12 | __license__ = 'Apache 2.0'
13 | __copyright__ = 'Copyright 2017 Kenneth Reitz'
14 | __cake__ = u'\u2728 \U0001f370 \u2728'
15 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/_internal_utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests._internal_utils
5 | ~~~~~~~~~~~~~~
6 |
7 | Provides utility functions that are consumed internally by Requests
8 | which depend on extremely few external helpers (such as compat)
9 | """
10 |
11 | from .compat import is_py2, builtin_str, str
12 |
13 |
14 | def to_native_string(string, encoding='ascii'):
15 | """Given a string object, regardless of type, returns a representation of
16 | that string in the native string type, encoding and decoding where
17 | necessary. This assumes ASCII unless told otherwise.
18 | """
19 | if isinstance(string, builtin_str):
20 | out = string
21 | else:
22 | if is_py2:
23 | out = string.encode(encoding)
24 | else:
25 | out = string.decode(encoding)
26 |
27 | return out
28 |
29 |
30 | def unicode_is_ascii(u_string):
31 | """Determine if unicode string only contains ASCII characters.
32 |
33 | :param str u_string: unicode string to check. Must be unicode
34 | and not Python 2 `str`.
35 | :rtype: bool
36 | """
37 | assert isinstance(u_string, str)
38 | try:
39 | u_string.encode('ascii')
40 | return True
41 | except UnicodeEncodeError:
42 | return False
43 |
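Small examples of the two helpers above, assuming the vendored import path below:

    from pip._vendor.requests._internal_utils import to_native_string, unicode_is_ascii

    print(to_native_string(b"X-Header"))   # 'X-Header' on Python 3 (bytes decoded as ASCII)
    print(unicode_is_ascii("cafe"))        # True
    print(unicode_is_ascii("café"))        # False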
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | """
5 | requests.certs
6 | ~~~~~~~~~~~~~~
7 |
8 | This module returns the preferred default CA certificate bundle. There is
9 | only one — the one from the certifi package.
10 |
11 | If you are packaging Requests, e.g., for a Linux distribution or a managed
12 | environment, you can change the definition of where() to return a separately
13 | packaged CA bundle.
14 | """
15 | from pip._vendor.certifi import where
16 |
17 | if __name__ == '__main__':
18 | print(where())
19 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/compat.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.compat
5 | ~~~~~~~~~~~~~~~
6 |
7 | This module handles import compatibility issues between Python 2 and
8 | Python 3.
9 | """
10 |
11 | from pip._vendor import chardet
12 |
13 | import sys
14 |
15 | # -------
16 | # Pythons
17 | # -------
18 |
19 | # Syntax sugar.
20 | _ver = sys.version_info
21 |
22 | #: Python 2.x?
23 | is_py2 = (_ver[0] == 2)
24 |
25 | #: Python 3.x?
26 | is_py3 = (_ver[0] == 3)
27 |
28 | # Note: We've patched out simplejson support in pip because it prevents
29 | # upgrading simplejson on Windows.
30 | # try:
31 | # import simplejson as json
32 | # except (ImportError, SyntaxError):
33 | # # simplejson does not support Python 3.2, it throws a SyntaxError
34 | # # because of u'...' Unicode literals.
35 | import json
36 |
37 | # ---------
38 | # Specifics
39 | # ---------
40 |
41 | if is_py2:
42 | from urllib import (
43 | quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
44 | proxy_bypass, proxy_bypass_environment, getproxies_environment)
45 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
46 | from urllib2 import parse_http_list
47 | import cookielib
48 | from Cookie import Morsel
49 | from StringIO import StringIO
50 |
51 | from pip._vendor.urllib3.packages.ordered_dict import OrderedDict
52 |
53 | builtin_str = str
54 | bytes = str
55 | str = unicode
56 | basestring = basestring
57 | numeric_types = (int, long, float)
58 | integer_types = (int, long)
59 |
60 | elif is_py3:
61 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
62 | from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
63 | from http import cookiejar as cookielib
64 | from http.cookies import Morsel
65 | from io import StringIO
66 | from collections import OrderedDict
67 |
68 | builtin_str = str
69 | str = str
70 | bytes = bytes
71 | basestring = (str, bytes)
72 | numeric_types = (int, float)
73 | integer_types = (int,)
74 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/hooks.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | """
4 | requests.hooks
5 | ~~~~~~~~~~~~~~
6 |
7 | This module provides the capabilities for the Requests hooks system.
8 |
9 | Available hooks:
10 |
11 | ``response``:
12 | The response generated from a Request.
13 | """
14 | HOOKS = ['response']
15 |
16 |
17 | def default_hooks():
18 | return dict((event, []) for event in HOOKS)
19 |
20 | # TODO: response is the only one
21 |
22 |
23 | def dispatch_hook(key, hooks, hook_data, **kwargs):
24 | """Dispatches a hook dictionary on a given piece of data."""
25 | hooks = hooks or dict()
26 | hooks = hooks.get(key)
27 | if hooks:
28 | if hasattr(hooks, '__call__'):
29 | hooks = [hooks]
30 | for hook in hooks:
31 | _hook_data = hook(hook_data, **kwargs)
32 | if _hook_data is not None:
33 | hook_data = _hook_data
34 | return hook_data
35 |
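The hooks machinery is just a dict mapping an event name to callables; a toy run through dispatch_hook(), with the vendored import path assumed and the lambda purely illustrative:

    from pip._vendor.requests.hooks import default_hooks, dispatch_hook

    hooks = default_hooks()                                    # {'response': []}
    hooks["response"].append(lambda data, **kw: data.upper())  # toy hook that transforms the data

    print(dispatch_hook("response", hooks, "ok"))              # OK
    print(dispatch_hook("response", {}, "unchanged"))          # unchanged (no hooks registered)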
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/requests/packages.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | # This code exists for backwards compatibility reasons.
4 | # I don't like it either. Just look the other way. :)
5 |
6 | for package in ('urllib3', 'idna', 'chardet'):
7 | vendored_package = "pip._vendor." + package
8 | locals()[package] = __import__(vendored_package)
9 | # This traversal is apparently necessary such that the identities are
10 | # preserved (requests.packages.urllib3.* is urllib3.*)
11 | for mod in list(sys.modules):
12 | if mod == vendored_package or mod.startswith(vendored_package + '.'):
13 | unprefixed_mod = mod[len("pip._vendor."):]
14 | sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod]
15 |
16 | # Kinda cool, though, right?
17 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/contrib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/contrib/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/filepost.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | import codecs
3 |
4 | from uuid import uuid4
5 | from io import BytesIO
6 |
7 | from .packages import six
8 | from .packages.six import b
9 | from .fields import RequestField
10 |
11 | writer = codecs.lookup('utf-8')[3]
12 |
13 |
14 | def choose_boundary():
15 | """
16 | Our embarrassingly-simple replacement for mimetools.choose_boundary.
17 | """
18 | return uuid4().hex
19 |
20 |
21 | def iter_field_objects(fields):
22 | """
23 | Iterate over fields.
24 |
25 | Supports list of (k, v) tuples and dicts, and lists of
26 | :class:`~urllib3.fields.RequestField`.
27 |
28 | """
29 | if isinstance(fields, dict):
30 | i = six.iteritems(fields)
31 | else:
32 | i = iter(fields)
33 |
34 | for field in i:
35 | if isinstance(field, RequestField):
36 | yield field
37 | else:
38 | yield RequestField.from_tuples(*field)
39 |
40 |
41 | def iter_fields(fields):
42 | """
43 | .. deprecated:: 1.6
44 |
45 | Iterate over fields.
46 |
47 | The addition of :class:`~urllib3.fields.RequestField` makes this function
48 | obsolete. Instead, use :func:`iter_field_objects`, which returns
49 | :class:`~urllib3.fields.RequestField` objects.
50 |
51 | Supports list of (k, v) tuples and dicts.
52 | """
53 | if isinstance(fields, dict):
54 | return ((k, v) for k, v in six.iteritems(fields))
55 |
56 | return ((k, v) for k, v in fields)
57 |
58 |
59 | def encode_multipart_formdata(fields, boundary=None):
60 | """
61 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format.
62 |
63 | :param fields:
64 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).
65 |
66 | :param boundary:
67 | If not specified, then a random boundary will be generated using
68 | :func:`mimetools.choose_boundary`.
69 | """
70 | body = BytesIO()
71 | if boundary is None:
72 | boundary = choose_boundary()
73 |
74 | for field in iter_field_objects(fields):
75 | body.write(b('--%s\r\n' % (boundary)))
76 |
77 | writer(body).write(field.render_headers())
78 | data = field.data
79 |
80 | if isinstance(data, int):
81 | data = str(data) # Backwards compatibility
82 |
83 | if isinstance(data, six.text_type):
84 | writer(body).write(data)
85 | else:
86 | body.write(data)
87 |
88 | body.write(b'\r\n')
89 |
90 | body.write(b('--%s--\r\n' % (boundary)))
91 |
92 | content_type = str('multipart/form-data; boundary=%s' % boundary)
93 |
94 | return body.getvalue(), content_type
95 |
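A small example of encode_multipart_formdata() with a fixed boundary so the output is deterministic; the vendored import path is an assumption:

    from pip._vendor.urllib3.filepost import encode_multipart_formdata

    body, content_type = encode_multipart_formdata(
        {"name": "pipeline", "notes": "multipart demo"},
        boundary="boundary123",      # omit to get a random uuid4-based boundary
    )
    print(content_type)              # multipart/form-data; boundary=boundary123
    print(body.decode("utf-8"))      # two --boundary123 sections with Content-Disposition headers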
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/packages/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 |
3 | from . import ssl_match_hostname
4 |
5 | __all__ = ('ssl_match_hostname', )
6 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/packages/backports/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/packages/backports/__init__.py
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/packages/backports/makefile.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | backports.makefile
4 | ~~~~~~~~~~~~~~~~~~
5 |
6 | Backports the Python 3 ``socket.makefile`` method for use with anything that
7 | wants to create a "fake" socket object.
8 | """
9 | import io
10 |
11 | from socket import SocketIO
12 |
13 |
14 | def backport_makefile(self, mode="r", buffering=None, encoding=None,
15 | errors=None, newline=None):
16 | """
17 | Backport of ``socket.makefile`` from Python 3.5.
18 | """
19 | if not set(mode) <= set(["r", "w", "b"]):
20 | raise ValueError(
21 | "invalid mode %r (only r, w, b allowed)" % (mode,)
22 | )
23 | writing = "w" in mode
24 | reading = "r" in mode or not writing
25 | assert reading or writing
26 | binary = "b" in mode
27 | rawmode = ""
28 | if reading:
29 | rawmode += "r"
30 | if writing:
31 | rawmode += "w"
32 | raw = SocketIO(self, rawmode)
33 | self._makefile_refs += 1
34 | if buffering is None:
35 | buffering = -1
36 | if buffering < 0:
37 | buffering = io.DEFAULT_BUFFER_SIZE
38 | if buffering == 0:
39 | if not binary:
40 | raise ValueError("unbuffered streams must be binary")
41 | return raw
42 | if reading and writing:
43 | buffer = io.BufferedRWPair(raw, raw, buffering)
44 | elif reading:
45 | buffer = io.BufferedReader(raw, buffering)
46 | else:
47 | assert writing
48 | buffer = io.BufferedWriter(raw, buffering)
49 | if binary:
50 | return buffer
51 | text = io.TextIOWrapper(buffer, encoding, errors, newline)
52 | text.mode = mode
53 | return text
54 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | try:
4 | # Our match_hostname function is the same as 3.5's, so we only want to
5 | # import the match_hostname function if it's at least that good.
6 | if sys.version_info < (3, 5):
7 | raise ImportError("Fallback to vendored code")
8 |
9 | from ssl import CertificateError, match_hostname
10 | except ImportError:
11 | try:
12 | # Backport of the function from a pypi module
13 | from backports.ssl_match_hostname import CertificateError, match_hostname
14 | except ImportError:
15 | # Our vendored copy
16 | from ._implementation import CertificateError, match_hostname
17 |
18 | # Not needed, but documenting what we provide.
19 | __all__ = ('CertificateError', 'match_hostname')
20 |
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/util/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | # For backwards compatibility, provide imports that used to be here.
3 | from .connection import is_connection_dropped
4 | from .request import make_headers
5 | from .response import is_fp_closed
6 | from .ssl_ import (
7 | SSLContext,
8 | HAS_SNI,
9 | IS_PYOPENSSL,
10 | IS_SECURETRANSPORT,
11 | assert_fingerprint,
12 | resolve_cert_reqs,
13 | resolve_ssl_version,
14 | ssl_wrap_socket,
15 | )
16 | from .timeout import (
17 | current_time,
18 | Timeout,
19 | )
20 |
21 | from .retry import Retry
22 | from .url import (
23 | get_host,
24 | parse_url,
25 | split_first,
26 | Url,
27 | )
28 | from .wait import (
29 | wait_for_read,
30 | wait_for_write
31 | )
32 |
33 | __all__ = (
34 | 'HAS_SNI',
35 | 'IS_PYOPENSSL',
36 | 'IS_SECURETRANSPORT',
37 | 'SSLContext',
38 | 'Retry',
39 | 'Timeout',
40 | 'Url',
41 | 'assert_fingerprint',
42 | 'current_time',
43 | 'is_connection_dropped',
44 | 'is_fp_closed',
45 | 'get_host',
46 | 'parse_url',
47 | 'make_headers',
48 | 'resolve_cert_reqs',
49 | 'resolve_ssl_version',
50 | 'split_first',
51 | 'ssl_wrap_socket',
52 | 'wait_for_read',
53 | 'wait_for_write'
54 | )
55 |
--------------------------------------------------------------------------------
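The module above is purely a backwards-compatibility re-export surface. A quick sketch of a few of the re-exported helpers; importing them from pip's vendored copy mirrors the file above but is not a public API, so it is shown for illustration only.

from pip._vendor.urllib3.util import Timeout, make_headers, parse_url

url = parse_url('https://example.com:8443/path?q=1')
print(url.scheme, url.host, url.port)   # https example.com 8443
print(url.request_uri)                  # /path?q=1

headers = make_headers(keep_alive=True, user_agent='demo/0.1')
print(headers)     # e.g. {'connection': 'keep-alive', 'user-agent': 'demo/0.1'}

timeout = Timeout(connect=2.0, read=5.0)   # per-phase limits for pool/request calls
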
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/util/response.py:
--------------------------------------------------------------------------------
1 | from __future__ import absolute_import
2 | from ..packages.six.moves import http_client as httplib
3 |
4 | from ..exceptions import HeaderParsingError
5 |
6 |
7 | def is_fp_closed(obj):
8 | """
9 | Checks whether a given file-like object is closed.
10 |
11 | :param obj:
12 | The file-like object to check.
13 | """
14 |
15 | try:
16 | # Check `isclosed()` first, in case Python3 doesn't set `closed`.
17 | # GH Issue #928
18 | return obj.isclosed()
19 | except AttributeError:
20 | pass
21 |
22 | try:
23 | # Check via the official file-like-object way.
24 | return obj.closed
25 | except AttributeError:
26 | pass
27 |
28 | try:
29 | # Check if the object is a container for another file-like object that
30 | # gets released on exhaustion (e.g. HTTPResponse).
31 | return obj.fp is None
32 | except AttributeError:
33 | pass
34 |
35 | raise ValueError("Unable to determine whether fp is closed.")
36 |
37 |
38 | def assert_header_parsing(headers):
39 | """
40 | Asserts whether all headers have been successfully parsed.
41 | Extracts encountered errors from the result of parsing headers.
42 |
43 | Only works on Python 3.
44 |
45 | :param headers: Headers to verify.
46 | :type headers: `httplib.HTTPMessage`.
47 |
48 | :raises urllib3.exceptions.HeaderParsingError:
49 | If parsing errors are found.
50 | """
51 |
52 | # This will fail silently if we pass in the wrong kind of parameter.
53 | # To make debugging easier add an explicit check.
54 | if not isinstance(headers, httplib.HTTPMessage):
55 | raise TypeError('expected httplib.HTTPMessage, got {0}.'.format(
56 | type(headers)))
57 |
58 | defects = getattr(headers, 'defects', None)
59 | get_payload = getattr(headers, 'get_payload', None)
60 |
61 | unparsed_data = None
62 | if get_payload: # Platform-specific: Python 3.
63 | unparsed_data = get_payload()
64 |
65 | if defects or unparsed_data:
66 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
67 |
68 |
69 | def is_response_to_head(response):
70 | """
71 | Checks whether the request that produced this response was a HEAD request.
72 | Handles the quirks of AppEngine.
73 |
74 | :param response:
75 | :type response: :class:`httplib.HTTPResponse`
76 | """
77 | # FIXME: Can we do this somehow without accessing private httplib _method?
78 | method = response._method
79 | if isinstance(method, int): # Platform-specific: Appengine
80 | return method == 3
81 | return method.upper() == 'HEAD'
82 |
--------------------------------------------------------------------------------
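A short sketch of is_fp_closed with an ordinary in-memory file object, which is handled by the second probe (obj.closed); again the vendored import path is for illustration only.

import io
from pip._vendor.urllib3.util.response import is_fp_closed

buf = io.BytesIO(b'payload')
print(is_fp_closed(buf))   # False: no isclosed(), falls through to .closed
buf.close()
print(is_fp_closed(buf))   # True
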
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/urllib3/util/wait.py:
--------------------------------------------------------------------------------
1 | from .selectors import (
2 | HAS_SELECT,
3 | DefaultSelector,
4 | EVENT_READ,
5 | EVENT_WRITE
6 | )
7 |
8 |
9 | def _wait_for_io_events(socks, events, timeout=None):
10 | """ Waits for IO events to be available from a list of sockets
11 | or optionally a single socket if passed in. Returns a list of
12 | sockets that can be interacted with immediately. """
13 | if not HAS_SELECT:
14 | raise ValueError('Platform does not have a selector')
15 | if not isinstance(socks, list):
16 | # Probably just a single socket.
17 | if hasattr(socks, "fileno"):
18 | socks = [socks]
19 | # Otherwise it might be a non-list iterable.
20 | else:
21 | socks = list(socks)
22 | with DefaultSelector() as selector:
23 | for sock in socks:
24 | selector.register(sock, events)
25 | return [key[0].fileobj for key in
26 | selector.select(timeout) if key[1] & events]
27 |
28 |
29 | def wait_for_read(socks, timeout=None):
30 | """ Waits for reading to be available from a list of sockets
31 | or optionally a single socket if passed in. Returns a list of
32 | sockets that can be read from immediately. """
33 | return _wait_for_io_events(socks, EVENT_READ, timeout)
34 |
35 |
36 | def wait_for_write(socks, timeout=None):
37 | """ Waits for writing to be available from a list of sockets
38 | or optionally a single socket if passed in. Returns a list of
39 | sockets that can be written to immediately. """
40 | return _wait_for_io_events(socks, EVENT_WRITE, timeout)
41 |
--------------------------------------------------------------------------------
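This is the selectors-based implementation that ships with the urllib3 copy vendored here; later urllib3 releases reworked this API, so the sketch below is tied to the file above. It waits on a local socket pair.

import socket
from pip._vendor.urllib3.util.wait import wait_for_read

a, b = socket.socketpair()
print(wait_for_read([a, b], timeout=0))          # []: nothing readable yet
b.send(b'x')
print(wait_for_read([a, b], timeout=1) == [a])   # True: only `a` has pending data
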
/venv/lib/python3.5/site-packages/pip-10.0.1-py3.5.egg/pip/_vendor/webencodings/mklabels.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | webencodings.mklabels
4 | ~~~~~~~~~~~~~~~~~~~~~
5 |
6 | Regenerate the webencodings.labels module.
7 |
8 | :copyright: Copyright 2012 by Simon Sapin
9 | :license: BSD, see LICENSE for details.
10 |
11 | """
12 |
13 | import json
14 | try:
15 | from urllib import urlopen
16 | except ImportError:
17 | from urllib.request import urlopen
18 |
19 |
20 | def assert_lower(string):
21 | assert string == string.lower()
22 | return string
23 |
24 |
25 | def generate(url):
26 | parts = ['''\
27 | """
28 |
29 | webencodings.labels
30 | ~~~~~~~~~~~~~~~~~~~
31 |
32 | Map encoding labels to their name.
33 |
34 | :copyright: Copyright 2012 by Simon Sapin
35 | :license: BSD, see LICENSE for details.
36 |
37 | """
38 |
39 | # XXX Do not edit!
40 | # This file is automatically generated by mklabels.py
41 |
42 | LABELS = {
43 | ''']
44 | labels = [
45 | (repr(assert_lower(label)).lstrip('u'),
46 | repr(encoding['name']).lstrip('u'))
47 | for category in json.loads(urlopen(url).read().decode('ascii'))
48 | for encoding in category['encodings']
49 | for label in encoding['labels']]
50 | max_len = max(len(label) for label, name in labels)
51 | parts.extend(
52 | ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name)
53 | for label, name in labels)
54 | parts.append('}')
55 | return ''.join(parts)
56 |
57 |
58 | if __name__ == '__main__':
59 | print(generate('http://encoding.spec.whatwg.org/encodings.json'))
60 |
--------------------------------------------------------------------------------
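generate() normally fetches encoding.spec.whatwg.org/encodings.json over the network. A sketch that exercises it offline by handing urlopen a data: URL carrying a one-entry stand-in for the registry (urllib.request has handled data: URLs since Python 3.4); the sample dict below is made up for the example.

import base64
import json
from pip._vendor.webencodings.mklabels import generate

sample = [{'heading': 'The Encoding',
           'encodings': [{'name': 'utf-8',
                          'labels': ['unicode-1-1-utf-8', 'utf-8', 'utf8']}]}]
payload = base64.b64encode(json.dumps(sample).encode('ascii')).decode('ascii')

print(generate('data:application/json;base64,' + payload))
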
/venv/lib/python3.5/site-packages/setuptools-39.1.0-py3.5.egg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Alexadlu/XIVReID/1f8a14e14cfd26790337c45e0e663a3a2acf9814/venv/lib/python3.5/site-packages/setuptools-39.1.0-py3.5.egg
--------------------------------------------------------------------------------
/venv/lib/python3.5/site-packages/setuptools.pth:
--------------------------------------------------------------------------------
1 | ./setuptools-39.1.0-py3.5.egg
2 |
--------------------------------------------------------------------------------
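The .pth entry is consumed by the standard site module at startup: each non-comment line is joined with the site-packages directory and appended to sys.path, which is how the zipped setuptools egg becomes importable. A quick, hypothetical check from inside this venv:

import sys
print([p for p in sys.path if p.endswith('setuptools-39.1.0-py3.5.egg')])
# expected: one entry ending in .../site-packages/setuptools-39.1.0-py3.5.egg
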
/venv/pyvenv.cfg:
--------------------------------------------------------------------------------
1 | home = /usr/bin
2 | include-system-site-packages = false
3 | version = 3.5.2
4 |
--------------------------------------------------------------------------------
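pyvenv.cfg is the marker file the interpreter reads at startup: home points at the base installation's bin directory, include-system-site-packages controls whether the system site-packages is also visible, and version records the Python the venv was built with (3.5.2 here). A simplified sketch of reading the "key = value" format (not the stdlib's own parser):

def read_pyvenv_cfg(path='venv/pyvenv.cfg'):
    """Parse pyvenv.cfg's simple 'key = value' lines into a dict."""
    cfg = {}
    with open(path) as fh:
        for line in fh:
            if '=' in line:
                key, _, value = line.partition('=')
                cfg[key.strip()] = value.strip()
    return cfg

cfg = read_pyvenv_cfg()
print(cfg['home'], cfg['include-system-site-packages'], cfg['version'])
# /usr/bin false 3.5.2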