├── README.md ├── danet ├── README.md ├── __pycache__ │ ├── backbone.cpython-36.pyc │ ├── danet.cpython-36.pyc │ └── resnet.cpython-36.pyc ├── backbone.py ├── danet.py ├── data │ ├── Cityscapes.py │ ├── __init__.py │ ├── __pycache__ │ │ ├── Cityscapes.cpython-36.pyc │ │ ├── __init__.cpython-36.pyc │ │ ├── custom_transforms.cpython-36.pyc │ │ ├── mypath.cpython-36.pyc │ │ └── utils.cpython-36.pyc │ ├── custom_transforms.py │ ├── mypath.py │ └── utils.py ├── inference.py ├── resnet.py ├── s1.jpeg ├── testjpg.png ├── train.py ├── utils │ ├── __init__.py │ ├── __pycache__ │ │ ├── __init__.cpython-36.pyc │ │ ├── loss.cpython-36.pyc │ │ ├── metrics.cpython-36.pyc │ │ └── saver.cpython-36.pyc │ ├── helpers.py │ ├── logger.py │ ├── loss.py │ ├── metrics.py │ ├── saver.py │ └── summaries.py └── venv │ ├── Lib │ └── site-packages │ │ ├── easy-install.pth │ │ ├── pip-19.0.3-py3.7.egg │ │ ├── EGG-INFO │ │ │ ├── PKG-INFO │ │ │ ├── SOURCES.txt │ │ │ ├── dependency_links.txt │ │ │ ├── entry_points.txt │ │ │ ├── not-zip-safe │ │ │ └── top_level.txt │ │ └── pip │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ ├── _internal │ │ │ ├── __init__.py │ │ │ ├── build_env.py │ │ │ ├── cache.py │ │ │ ├── cli │ │ │ │ ├── __init__.py │ │ │ │ ├── autocompletion.py │ │ │ │ ├── base_command.py │ │ │ │ ├── cmdoptions.py │ │ │ │ ├── main_parser.py │ │ │ │ ├── parser.py │ │ │ │ └── status_codes.py │ │ │ ├── commands │ │ │ │ ├── __init__.py │ │ │ │ ├── check.py │ │ │ │ ├── completion.py │ │ │ │ ├── configuration.py │ │ │ │ ├── download.py │ │ │ │ ├── freeze.py │ │ │ │ ├── hash.py │ │ │ │ ├── help.py │ │ │ │ ├── install.py │ │ │ │ ├── list.py │ │ │ │ ├── search.py │ │ │ │ ├── show.py │ │ │ │ ├── uninstall.py │ │ │ │ └── wheel.py │ │ │ ├── configuration.py │ │ │ ├── download.py │ │ │ ├── exceptions.py │ │ │ ├── index.py │ │ │ ├── locations.py │ │ │ ├── models │ │ │ │ ├── __init__.py │ │ │ │ ├── candidate.py │ │ │ │ ├── format_control.py │ │ │ │ ├── index.py │ │ │ │ └── link.py │ │ │ ├── operations │ │ │ │ ├── __init__.py │ │ │ │ ├── check.py │ │ │ │ ├── freeze.py │ │ │ │ └── prepare.py │ │ │ ├── pep425tags.py │ │ │ ├── pyproject.py │ │ │ ├── req │ │ │ │ ├── __init__.py │ │ │ │ ├── constructors.py │ │ │ │ ├── req_file.py │ │ │ │ ├── req_install.py │ │ │ │ ├── req_set.py │ │ │ │ ├── req_tracker.py │ │ │ │ └── req_uninstall.py │ │ │ ├── resolve.py │ │ │ ├── utils │ │ │ │ ├── __init__.py │ │ │ │ ├── appdirs.py │ │ │ │ ├── compat.py │ │ │ │ ├── deprecation.py │ │ │ │ ├── encoding.py │ │ │ │ ├── filesystem.py │ │ │ │ ├── glibc.py │ │ │ │ ├── hashes.py │ │ │ │ ├── logging.py │ │ │ │ ├── misc.py │ │ │ │ ├── models.py │ │ │ │ ├── outdated.py │ │ │ │ ├── packaging.py │ │ │ │ ├── setuptools_build.py │ │ │ │ ├── temp_dir.py │ │ │ │ ├── typing.py │ │ │ │ └── ui.py │ │ │ ├── vcs │ │ │ │ ├── __init__.py │ │ │ │ ├── bazaar.py │ │ │ │ ├── git.py │ │ │ │ ├── mercurial.py │ │ │ │ └── subversion.py │ │ │ └── wheel.py │ │ │ └── _vendor │ │ │ ├── __init__.py │ │ │ ├── appdirs.py │ │ │ ├── cachecontrol │ │ │ ├── __init__.py │ │ │ ├── _cmd.py │ │ │ ├── adapter.py │ │ │ ├── cache.py │ │ │ ├── caches │ │ │ │ ├── __init__.py │ │ │ │ ├── file_cache.py │ │ │ │ └── redis_cache.py │ │ │ ├── compat.py │ │ │ ├── controller.py │ │ │ ├── filewrapper.py │ │ │ ├── heuristics.py │ │ │ ├── serialize.py │ │ │ └── wrapper.py │ │ │ ├── certifi │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ ├── cacert.pem │ │ │ └── core.py │ │ │ ├── chardet │ │ │ ├── __init__.py │ │ │ ├── big5freq.py │ │ │ ├── big5prober.py │ │ │ ├── chardistribution.py │ │ │ ├── 
charsetgroupprober.py │ │ │ ├── charsetprober.py │ │ │ ├── cli │ │ │ │ ├── __init__.py │ │ │ │ └── chardetect.py │ │ │ ├── codingstatemachine.py │ │ │ ├── compat.py │ │ │ ├── cp949prober.py │ │ │ ├── enums.py │ │ │ ├── escprober.py │ │ │ ├── escsm.py │ │ │ ├── eucjpprober.py │ │ │ ├── euckrfreq.py │ │ │ ├── euckrprober.py │ │ │ ├── euctwfreq.py │ │ │ ├── euctwprober.py │ │ │ ├── gb2312freq.py │ │ │ ├── gb2312prober.py │ │ │ ├── hebrewprober.py │ │ │ ├── jisfreq.py │ │ │ ├── jpcntx.py │ │ │ ├── langbulgarianmodel.py │ │ │ ├── langcyrillicmodel.py │ │ │ ├── langgreekmodel.py │ │ │ ├── langhebrewmodel.py │ │ │ ├── langhungarianmodel.py │ │ │ ├── langthaimodel.py │ │ │ ├── langturkishmodel.py │ │ │ ├── latin1prober.py │ │ │ ├── mbcharsetprober.py │ │ │ ├── mbcsgroupprober.py │ │ │ ├── mbcssm.py │ │ │ ├── sbcharsetprober.py │ │ │ ├── sbcsgroupprober.py │ │ │ ├── sjisprober.py │ │ │ ├── universaldetector.py │ │ │ ├── utf8prober.py │ │ │ └── version.py │ │ │ ├── colorama │ │ │ ├── __init__.py │ │ │ ├── ansi.py │ │ │ ├── ansitowin32.py │ │ │ ├── initialise.py │ │ │ ├── win32.py │ │ │ └── winterm.py │ │ │ ├── distlib │ │ │ ├── __init__.py │ │ │ ├── _backport │ │ │ │ ├── __init__.py │ │ │ │ ├── misc.py │ │ │ │ ├── shutil.py │ │ │ │ ├── sysconfig.cfg │ │ │ │ ├── sysconfig.py │ │ │ │ └── tarfile.py │ │ │ ├── compat.py │ │ │ ├── database.py │ │ │ ├── index.py │ │ │ ├── locators.py │ │ │ ├── manifest.py │ │ │ ├── markers.py │ │ │ ├── metadata.py │ │ │ ├── resources.py │ │ │ ├── scripts.py │ │ │ ├── t32.exe │ │ │ ├── t64.exe │ │ │ ├── util.py │ │ │ ├── version.py │ │ │ ├── w32.exe │ │ │ ├── w64.exe │ │ │ └── wheel.py │ │ │ ├── distro.py │ │ │ ├── html5lib │ │ │ ├── __init__.py │ │ │ ├── _ihatexml.py │ │ │ ├── _inputstream.py │ │ │ ├── _tokenizer.py │ │ │ ├── _trie │ │ │ │ ├── __init__.py │ │ │ │ ├── _base.py │ │ │ │ ├── datrie.py │ │ │ │ └── py.py │ │ │ ├── _utils.py │ │ │ ├── constants.py │ │ │ ├── filters │ │ │ │ ├── __init__.py │ │ │ │ ├── alphabeticalattributes.py │ │ │ │ ├── base.py │ │ │ │ ├── inject_meta_charset.py │ │ │ │ ├── lint.py │ │ │ │ ├── optionaltags.py │ │ │ │ ├── sanitizer.py │ │ │ │ └── whitespace.py │ │ │ ├── html5parser.py │ │ │ ├── serializer.py │ │ │ ├── treeadapters │ │ │ │ ├── __init__.py │ │ │ │ ├── genshi.py │ │ │ │ └── sax.py │ │ │ ├── treebuilders │ │ │ │ ├── __init__.py │ │ │ │ ├── base.py │ │ │ │ ├── dom.py │ │ │ │ ├── etree.py │ │ │ │ └── etree_lxml.py │ │ │ └── treewalkers │ │ │ │ ├── __init__.py │ │ │ │ ├── base.py │ │ │ │ ├── dom.py │ │ │ │ ├── etree.py │ │ │ │ ├── etree_lxml.py │ │ │ │ └── genshi.py │ │ │ ├── idna │ │ │ ├── __init__.py │ │ │ ├── codec.py │ │ │ ├── compat.py │ │ │ ├── core.py │ │ │ ├── idnadata.py │ │ │ ├── intranges.py │ │ │ ├── package_data.py │ │ │ └── uts46data.py │ │ │ ├── ipaddress.py │ │ │ ├── lockfile │ │ │ ├── __init__.py │ │ │ ├── linklockfile.py │ │ │ ├── mkdirlockfile.py │ │ │ ├── pidlockfile.py │ │ │ ├── sqlitelockfile.py │ │ │ └── symlinklockfile.py │ │ │ ├── msgpack │ │ │ ├── __init__.py │ │ │ ├── _version.py │ │ │ ├── exceptions.py │ │ │ └── fallback.py │ │ │ ├── packaging │ │ │ ├── __about__.py │ │ │ ├── __init__.py │ │ │ ├── _compat.py │ │ │ ├── _structures.py │ │ │ ├── markers.py │ │ │ ├── requirements.py │ │ │ ├── specifiers.py │ │ │ ├── utils.py │ │ │ └── version.py │ │ │ ├── pep517 │ │ │ ├── __init__.py │ │ │ ├── _in_process.py │ │ │ ├── build.py │ │ │ ├── check.py │ │ │ ├── colorlog.py │ │ │ ├── compat.py │ │ │ ├── envbuild.py │ │ │ └── wrappers.py │ │ │ ├── pkg_resources │ │ │ ├── __init__.py │ │ │ └── py31compat.py │ │ │ ├── 
progress │ │ │ ├── __init__.py │ │ │ ├── bar.py │ │ │ ├── counter.py │ │ │ ├── helpers.py │ │ │ └── spinner.py │ │ │ ├── pyparsing.py │ │ │ ├── pytoml │ │ │ ├── __init__.py │ │ │ ├── core.py │ │ │ ├── parser.py │ │ │ ├── test.py │ │ │ ├── utils.py │ │ │ └── writer.py │ │ │ ├── requests │ │ │ ├── __init__.py │ │ │ ├── __version__.py │ │ │ ├── _internal_utils.py │ │ │ ├── adapters.py │ │ │ ├── api.py │ │ │ ├── auth.py │ │ │ ├── certs.py │ │ │ ├── compat.py │ │ │ ├── cookies.py │ │ │ ├── exceptions.py │ │ │ ├── help.py │ │ │ ├── hooks.py │ │ │ ├── models.py │ │ │ ├── packages.py │ │ │ ├── sessions.py │ │ │ ├── status_codes.py │ │ │ ├── structures.py │ │ │ └── utils.py │ │ │ ├── retrying.py │ │ │ ├── six.py │ │ │ ├── urllib3 │ │ │ ├── __init__.py │ │ │ ├── _collections.py │ │ │ ├── connection.py │ │ │ ├── connectionpool.py │ │ │ ├── contrib │ │ │ │ ├── __init__.py │ │ │ │ ├── _appengine_environ.py │ │ │ │ ├── _securetransport │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── bindings.py │ │ │ │ │ └── low_level.py │ │ │ │ ├── appengine.py │ │ │ │ ├── ntlmpool.py │ │ │ │ ├── pyopenssl.py │ │ │ │ ├── securetransport.py │ │ │ │ └── socks.py │ │ │ ├── exceptions.py │ │ │ ├── fields.py │ │ │ ├── filepost.py │ │ │ ├── packages │ │ │ │ ├── __init__.py │ │ │ │ ├── backports │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── makefile.py │ │ │ │ ├── six.py │ │ │ │ └── ssl_match_hostname │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── _implementation.py │ │ │ ├── poolmanager.py │ │ │ ├── request.py │ │ │ ├── response.py │ │ │ └── util │ │ │ │ ├── __init__.py │ │ │ │ ├── connection.py │ │ │ │ ├── queue.py │ │ │ │ ├── request.py │ │ │ │ ├── response.py │ │ │ │ ├── retry.py │ │ │ │ ├── ssl_.py │ │ │ │ ├── timeout.py │ │ │ │ ├── url.py │ │ │ │ └── wait.py │ │ │ └── webencodings │ │ │ ├── __init__.py │ │ │ ├── labels.py │ │ │ ├── mklabels.py │ │ │ ├── tests.py │ │ │ └── x_user_defined.py │ │ ├── setuptools-40.8.0-py3.7.egg │ │ └── setuptools.pth │ ├── Scripts │ ├── Activate.ps1 │ ├── activate │ ├── activate.bat │ ├── deactivate.bat │ ├── easy_install-3.7-script.py │ ├── easy_install-3.7.exe │ ├── easy_install-script.py │ ├── easy_install.exe │ ├── pip-script.py │ ├── pip.exe │ ├── pip3-script.py │ ├── pip3.7-script.py │ ├── pip3.7.exe │ ├── pip3.exe │ ├── python.exe │ └── pythonw.exe │ └── pyvenv.cfg └── network.png /README.md: -------------------------------------------------------------------------------- 1 | # danet-pytorch 2 | [pytorch] DANet: Dual Attention Network for Scene Segmentation 3 | 4 | # network: 5 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/network.png) 6 | 7 | # dataset: 8 | cityspaces 9 | 10 | # how to train: 11 | write the dataset path in mypath.py and run 12 | ``` 13 | python train.py 14 | ``` 15 | after trainning,it will save a model: danet.pth 16 | ## How to Inference: 17 | #run: 18 | ``` 19 | python inference.py 20 | ``` 21 | 22 | # inference: 23 | ## input: 24 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/danet/s1.jpeg) 25 | ## output: 26 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/danet/testjpg.png) 27 | -------------------------------------------------------------------------------- /danet/README.md: -------------------------------------------------------------------------------- 1 | # danet-pytorch 2 | [pytorch] DANet: Dual Attention Network for Scene Segmentation 3 | 4 | # network: 5 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/network.png) 6 | 7 | # dataset: 8 | cityspaces 9 | 10 | # how to train: 11 | 
set the dataset path in mypath.py and run
12 | ```
13 | python train.py
14 | ```
15 | after training, the model is saved as danet.pth
16 | ## How to run inference:
17 | run:
18 | ```
19 | python inference.py
20 | ```
21 | 
22 | # inference:
23 | ## input:
24 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/danet/s1.jpeg)
25 | ## output:
26 | ![image](https://github.com/Andy-zhujunwen/danet-pytorch/blob/master/danet/testjpg.png)
27 | 
--------------------------------------------------------------------------------
/danet/__pycache__/backbone.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/__pycache__/backbone.cpython-36.pyc
--------------------------------------------------------------------------------
/danet/__pycache__/danet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/__pycache__/danet.cpython-36.pyc
--------------------------------------------------------------------------------
/danet/__pycache__/resnet.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/__pycache__/resnet.cpython-36.pyc
--------------------------------------------------------------------------------
/danet/backbone.py:
--------------------------------------------------------------------------------
1 | import torch.nn as nn
2 | import torch
3 | import resnet
4 | from torch.nn import functional as F
5 | class ResNet50(nn.Module):
6 |     def __init__(self, pretrained=True):
7 |         """Declare all needed layers."""
8 |         super(ResNet50, self).__init__()
9 |         self.model = resnet.resnet50(pretrained=pretrained)
10 |         self.relu = self.model.relu  # keep a handle on the backbone ReLU (handy for registering hooks)
11 | 
12 |         layers_cfg = [4, 5, 6, 7]  # children 4-7 of the ResNet are the four residual stages (layer1-layer4)
13 |         self.blocks = []
14 |         for i, num_this_layer in enumerate(layers_cfg):
15 |             self.blocks.append(list(self.model.children())[num_this_layer])
16 | 
17 |     def base_forward(self, x):
18 |         feature_map = []
19 |         x = self.model.conv1(x)
20 |         x = self.model.bn1(x)
21 |         x = self.model.relu(x)
22 |         x = self.model.maxpool(x)
23 | 
24 |         for i, block in enumerate(self.blocks):
25 |             x = block(x)
26 |             feature_map.append(x)
27 | 
28 |         out = nn.AvgPool2d(x.shape[2:])(x).view(x.shape[0], -1)  # global average pool of the last stage
29 | 
30 |         return feature_map, out
--------------------------------------------------------------------------------
/danet/data/__init__.py:
--------------------------------------------------------------------------------
1 | from data import Cityscapes
2 | from torch.utils.data import DataLoader
3 | 
4 | def make_data_loader(args, **kwargs):
5 |     train_set = Cityscapes.CityscapesSegmentation(args, split='train')
6 |     val_set = Cityscapes.CityscapesSegmentation(args, split='val')
7 |     test_set = Cityscapes.CityscapesSegmentation(args, split='test')
8 |     num_class = train_set.NUM_CLASSES
9 |     train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, **kwargs)
10 |     val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, **kwargs)
11 |     test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=False, **kwargs)
12 |     return train_loader, val_loader, test_loader, num_class
13 | 
14 | 
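A minimal sketch of how `make_data_loader` above might be driven from a training script. It assumes the Cityscapes root configured in data/mypath.py points at a real dataset; `batch_size` is the only `args` field this function reads itself, while `base_size`/`crop_size` are hypothetical placeholders for whatever attributes `CityscapesSegmentation` actually expects (check data/Cityscapes.py for the real names).

```
# Hedged usage sketch; run from the danet/ folder.
from argparse import Namespace

from data import make_data_loader

# batch_size is used directly by make_data_loader; base_size/crop_size are
# hypothetical extras the Cityscapes dataset class may read from args.
args = Namespace(batch_size=4, base_size=1024, crop_size=768)

# Extra keyword arguments are forwarded to torch.utils.data.DataLoader.
train_loader, val_loader, test_loader, num_class = make_data_loader(
    args, num_workers=2, pin_memory=True)

print(num_class)  # 19 classes for Cityscapes
```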
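And a similarly hedged smoke test for the `ResNet50` wrapper in backbone.py. It assumes the local `resnet.resnet50` accepts a `pretrained` flag like torchvision's and that the script is run from the danet/ folder. For a standard, non-dilated ResNet-50 the four returned feature maps have strides 4/8/16/32 and the pooled vector has 2048 channels; the repo's own resnet.py may use dilation and change the spatial sizes.

```
# Hedged smoke test for ResNet50.base_forward(); pretrained=False avoids a
# weight download (assuming the local resnet.resnet50 accepts that flag).
import torch
from backbone import ResNet50

backbone = ResNet50(pretrained=False)
dummy = torch.randn(1, 3, 512, 1024)  # N, C, H, W (inference.py resizes inputs to 512x1024)
feature_maps, pooled = backbone.base_forward(dummy)

for i, fm in enumerate(feature_maps):
    print('stage {}: {}'.format(i + 1, tuple(fm.shape)))  # one tensor per residual stage
print('pooled:', tuple(pooled.shape))  # global-average-pooled features of the last stage
```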
-------------------------------------------------------------------------------- /danet/data/__pycache__/Cityscapes.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/data/__pycache__/Cityscapes.cpython-36.pyc -------------------------------------------------------------------------------- /danet/data/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/data/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /danet/data/__pycache__/custom_transforms.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/data/__pycache__/custom_transforms.cpython-36.pyc -------------------------------------------------------------------------------- /danet/data/__pycache__/mypath.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/data/__pycache__/mypath.cpython-36.pyc -------------------------------------------------------------------------------- /danet/data/__pycache__/utils.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/data/__pycache__/utils.cpython-36.pyc -------------------------------------------------------------------------------- /danet/data/mypath.py: -------------------------------------------------------------------------------- 1 | class Path(object): 2 | @staticmethod 3 | def db_root_dir(dataset): 4 | if dataset == 'cityscapes': 5 | # foler that contains leftImg8bit/ 6 | return r'/home/home_data/zjw/SemanticSegmentationUsingFPN_PanopticFeaturePyramidNetworks-master/data/Cityscapes' 7 | else: 8 | print('Dataset {} not available.'.format(dataset)) 9 | raise NotImplementedError("undefined dataset {}.".format(dataset)) 10 | -------------------------------------------------------------------------------- /danet/data/utils.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | import torch 4 | import time 5 | 6 | def decode_seg_map_sequence(label_masks, dataset='Cityscapes', plot=False): 7 | rgb_masks = [] 8 | for label_mask in label_masks: 9 | rgb_mask = decode_segmap(label_mask, dataset, plot) 10 | rgb_masks.append(rgb_mask) 11 | rgb_masks = torch.from_numpy(np.array(rgb_masks).transpose([0, 3, 1, 2])) 12 | return rgb_masks 13 | 14 | 15 | def decode_segmap(label_mask, dataset, plot=False): 16 | """Decode segmentation class labels into a color image 17 | Args: 18 | label_mask (np.ndarray): an (M,N) array of integer values denoting 19 | the class label at each spatial location. 20 | plot (bool, optional): whether to show the resulting color image 21 | in a figure. 22 | Returns: 23 | (np.ndarray, optional): the resulting decoded color image. 
24 | """ 25 | if dataset == 'Cityscapes': 26 | n_classes = 19 27 | label_colours = get_cityscapes_labels() 28 | else: 29 | raise NotImplementedError 30 | 31 | r = label_mask.copy() 32 | g = label_mask.copy() 33 | b = label_mask.copy() 34 | for ll in range(0, n_classes): 35 | r[label_mask == ll] = label_colours[ll, 0] 36 | g[label_mask == ll] = label_colours[ll, 1] 37 | b[label_mask == ll] = label_colours[ll, 2] 38 | rgb = np.zeros((label_mask.shape[0], label_mask.shape[1], 3)) 39 | rgb[:, :, 0] = r / 255.0 40 | rgb[:, :, 1] = g / 255.0 41 | rgb[:, :, 2] = b / 255.0 42 | if plot: 43 | plt.imshow(rgb) 44 | plt.show() 45 | # time.sleep(1) 46 | return rgb 47 | else: 48 | return rgb 49 | 50 | 51 | def encode_segmap(mask): 52 | """Encode segmentation label images as pascal classes 53 | Args: 54 | mask (np.ndarray): raw segmentation label image of dimension 55 | (M, N, 3), in which the Pascal classes are encoded as colours. 56 | Returns: 57 | (np.ndarray): class map with dimensions (M,N), where the value at 58 | a given location is the integer denoting the class index. 59 | """ 60 | mask = mask.astype(int) 61 | label_mask = np.zeros((mask.shape[0], mask.shape[1]), dtype=np.int16) 62 | for ii, label in enumerate(get_pascal_labels()): 63 | label_mask[np.where(np.all(mask == label, axis=-1))[:2]] = ii 64 | label_mask = label_mask.astype(int) 65 | return label_mask 66 | 67 | 68 | def get_cityscapes_labels(): 69 | return np.array([ 70 | [128, 64, 128], 71 | [244, 35, 232], 72 | [70, 70, 70], 73 | [102, 102, 156], 74 | [190, 153, 153], 75 | [153, 153, 153], 76 | [250, 170, 30], 77 | [220, 220, 0], 78 | [107, 142, 35], 79 | [152, 251, 152], 80 | [0, 130, 180], 81 | [220, 20, 60], 82 | [255, 0, 0], 83 | [0, 0, 142], 84 | [0, 0, 70], 85 | [0, 60, 100], 86 | [0, 80, 100], 87 | [0, 0, 230], 88 | [119, 11, 32]]) 89 | 90 | 91 | -------------------------------------------------------------------------------- /danet/inference.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from PIL import Image 3 | import scipy.misc 4 | import matplotlib.pyplot as plt 5 | from torchvision import transforms 6 | from data.utils import decode_seg_map_sequence 7 | from torchvision.utils import save_image 8 | import numpy as np 9 | 10 | from torchvision import transforms 11 | 12 | 13 | def Normalize(x): 14 | x = np.array(x).astype(np.float32) 15 | x /=255.0 16 | x-=(0.485,0.456,0.406) 17 | x /=(0.229,0.224,0.225) 18 | return x 19 | 20 | pic_path = './s1.jpeg' 21 | #pic = scipy.misc.imread(pic_path,mode='RGB') 22 | pic = Image.open(pic_path).convert('RGB') 23 | 24 | pic = pic.resize((1024,512),Image.BILINEAR) 25 | print('pic shape:{}'.format(pic.size)) 26 | 27 | pic = np.array(pic) 28 | pic = Normalize(pic) 29 | 30 | pic = np.transpose(pic,(2,0,1)) 31 | pic = torch.from_numpy(pic.copy()).float() 32 | pic = pic.unsqueeze(0) 33 | 34 | 35 | device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") 36 | pic = pic.to(device) 37 | 38 | 39 | danet = torch.load('./danet.pth') 40 | danet = danet.to(device) 41 | danet = danet.eval() 42 | 43 | 44 | out = danet(pic) 45 | out_all = out[0] 46 | out_p = out[1] 47 | out_c = out[2] 48 | 49 | out = out_all.data.cpu().numpy() 50 | out = np.argmax(out,axis=1) 51 | pre = decode_seg_map_sequence(out, plot=True) 52 | save_image(pre,r'./testjpg.png') 53 | -------------------------------------------------------------------------------- /danet/s1.jpeg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/s1.jpeg -------------------------------------------------------------------------------- /danet/testjpg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/testjpg.png -------------------------------------------------------------------------------- /danet/utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /danet/utils/__pycache__/__init__.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/utils/__pycache__/__init__.cpython-36.pyc -------------------------------------------------------------------------------- /danet/utils/__pycache__/loss.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/utils/__pycache__/loss.cpython-36.pyc -------------------------------------------------------------------------------- /danet/utils/__pycache__/metrics.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/utils/__pycache__/metrics.cpython-36.pyc -------------------------------------------------------------------------------- /danet/utils/__pycache__/saver.cpython-36.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/utils/__pycache__/saver.cpython-36.pyc -------------------------------------------------------------------------------- /danet/utils/logger.py: -------------------------------------------------------------------------------- 1 | # Code referenced from https://gist.github.com/gyglim/1f8dfb1b5c82627ae3efcfbbadb9f514 2 | import tensorflow as tf 3 | import numpy as np 4 | import scipy.misc 5 | try: 6 | from StringIO import StringIO # Python 2.7 7 | except ImportError: 8 | from io import BytesIO # Python 3.x 9 | 10 | 11 | class Logger(object): 12 | 13 | def __init__(self, log_dir): 14 | """Create a summary writer logging to log_dir.""" 15 | self.writer = tf.summary.FileWriter(log_dir) 16 | 17 | def scalar_summary(self, tag, value, step): 18 | """Log a scalar variable.""" 19 | summary = tf.Summary(value=[tf.Summary.Value(tag=tag, simple_value=value)]) 20 | self.writer.add_summary(summary, step) 21 | 22 | def image_summary(self, tag, images, step): 23 | """Log a list of images.""" 24 | 25 | img_summaries = [] 26 | for i, img in enumerate(images): 27 | # Write the image to a string 28 | try: 29 | s = StringIO() 30 | except: 31 | s = BytesIO() 32 | scipy.misc.toimage(img).save(s, format="png") 33 | 34 | # Create an Image object 35 | img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(), 36 | height=img.shape[0], 37 | width=img.shape[1]) 38 | # Create a Summary value 39 | img_summaries.append(tf.Summary.Value(tag='%s/%d' % (tag, i), image=img_sum)) 40 | 41 | # Create and write Summary 42 | summary = 
tf.Summary(value=img_summaries) 43 | self.writer.add_summary(summary, step) 44 | 45 | def histo_summary(self, tag, values, step, bins=1000): 46 | """Log a histogram of the tensor of values.""" 47 | 48 | # Create a histogram using numpy 49 | counts, bin_edges = np.histogram(values, bins=bins) 50 | 51 | # Fill the fields of the histogram proto 52 | hist = tf.HistogramProto() 53 | hist.min = float(np.min(values)) 54 | hist.max = float(np.max(values)) 55 | hist.num = int(np.prod(values.shape)) 56 | hist.sum = float(np.sum(values)) 57 | hist.sum_squares = float(np.sum(values**2)) 58 | 59 | # Drop the start of the first bin 60 | bin_edges = bin_edges[1:] 61 | 62 | # Add bin edges and counts 63 | for edge in bin_edges: 64 | hist.bucket_limit.append(edge) 65 | for c in counts: 66 | hist.bucket.append(c) 67 | 68 | # Create and write Summary 69 | summary = tf.Summary(value=[tf.Summary.Value(tag=tag, histo=hist)]) 70 | self.writer.add_summary(summary, step) 71 | self.writer.flush() 72 | -------------------------------------------------------------------------------- /danet/utils/loss.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | 4 | class SegmentationLosses(object): 5 | def __init__(self, weight=None, size_average=True, batch_average=True, ignore_index=255, cuda=False): 6 | self.ignore_index = ignore_index 7 | self.weight = weight 8 | self.size_average = size_average 9 | self.batch_average = batch_average 10 | self.cuda = cuda 11 | 12 | def build_loss(self, mode='ce'): 13 | """Choices: ['ce' or 'focal']""" 14 | if mode == 'ce': 15 | return self.CrossEntropyLoss 16 | elif mode == 'focal': 17 | return self.FocalLoss 18 | else: 19 | raise NotImplementedError 20 | 21 | def CrossEntropyLoss(self, logit, target): 22 | n, c, h, w = logit.size() 23 | criterion = nn.CrossEntropyLoss(weight=self.weight, ignore_index=self.ignore_index, 24 | size_average=self.size_average) 25 | if self.cuda: 26 | criterion = criterion.cuda() 27 | 28 | loss = criterion(logit, target.long()) 29 | 30 | if self.batch_average: 31 | loss /= n 32 | 33 | return loss 34 | 35 | def FocalLoss(self, logit, target, gamma=2, alpha=0.5): 36 | n, c, h, w = logit.size() 37 | criterion = nn.CrossEntropyLoss(weight=self.weight, ignore_index=self.ignore_index, 38 | size_average=self.size_average) 39 | if self.cuda: 40 | criterion = criterion.cuda() 41 | 42 | logpt = -criterion(logit, target.long()) 43 | pt = torch.exp(logpt) 44 | if alpha is not None: 45 | logpt *= alpha 46 | loss = -((1 - pt) ** gamma) * logpt 47 | 48 | if self.batch_average: 49 | loss /= n 50 | 51 | return loss 52 | 53 | if __name__ == "__main__": 54 | loss = SegmentationLosses(cuda=True) 55 | a = torch.rand(1, 3, 7, 7).cuda() 56 | b = torch.rand(1, 7, 7).cuda() 57 | print(loss.CrossEntropyLoss(a, b).item()) 58 | print(loss.FocalLoss(a, b, gamma=0, alpha=None).item()) 59 | print(loss.FocalLoss(a, b, gamma=2, alpha=0.5).item()) 60 | 61 | -------------------------------------------------------------------------------- /danet/utils/metrics.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | class Evaluator(object): 4 | def __init__(self, num_class): 5 | self.num_class = num_class 6 | self.confusion_matrix = np.zeros((self.num_class,) * 2) 7 | 8 | 9 | def Pixel_Accuracy(self): 10 | Acc = np.diag(self.confusion_matrix).sum() / self.confusion_matrix.sum() 11 | return Acc 12 | 13 | 14 | def Pixel_Accuracy_Class(self): 15 | Acc = 
np.diag(self.confusion_matrix) / self.confusion_matrix.sum(axis=1) 16 | Acc = np.nanmean(Acc) 17 | return Acc 18 | 19 | 20 | def Mean_Intersection_over_Union(self): 21 | MIoU = np.diag(self.confusion_matrix) / ( 22 | np.sum(self.confusion_matrix, axis=1) + np.sum(self.confusion_matrix, axis=0) - 23 | np.diag(self.confusion_matrix)) 24 | MIoU = np.nanmean(MIoU) 25 | return MIoU 26 | 27 | 28 | def Frequency_Weighted_Intersection_over_Union(self): 29 | freq = np.sum(self.confusion_matrix, axis=1) / np.sum(self.confusion_matrix) 30 | iu = np.diag(self.confusion_matrix) / ( 31 | np.sum(self.confusion_matrix, axis=1) + np.sum(self.confusion_matrix, axis=0) - 32 | np.diag(self.confusion_matrix)) 33 | 34 | FWIoU = (freq[freq > 0] * iu[freq > 0]).sum() 35 | return FWIoU 36 | 37 | 38 | def _generate_matrix(self, gt_image, pre_image): 39 | mask = (gt_image >= 0) & (gt_image < self.num_class) 40 | label = self.num_class * gt_image[mask].astype('int') + pre_image[mask] 41 | count = np.bincount(label, minlength=self.num_class**2) 42 | confusion_matrix = count.reshape(self.num_class, self.num_class) 43 | return confusion_matrix 44 | 45 | 46 | def add_batch(self, gt_image, pre_image): 47 | assert gt_image.shape == pre_image.shape 48 | for lp, lt in zip(pre_image, gt_image): 49 | self.confusion_matrix += self._generate_matrix(lt.flatten(), lp.flatten()) 50 | 51 | 52 | def reset(self): 53 | self.confusion_matrix = np.zeros((self.num_class,) * 2) 54 | -------------------------------------------------------------------------------- /danet/utils/saver.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | import torch 4 | from collections import OrderedDict 5 | import glob 6 | 7 | class Saver(object): 8 | 9 | def __init__(self, args): 10 | self.args = args 11 | self.directory = os.path.join('run', args.dataset, args.checkname) 12 | self.runs = sorted(glob.glob(os.path.join(self.directory, 'experiment_*'))) 13 | run_id = int(self.runs[-1].split('_')[-1]) + 1 if self.runs else 0 14 | 15 | self.experiment_dir = os.path.join(self.directory, 'experiment_{}'.format(str(run_id))) 16 | if not os.path.exists(self.experiment_dir): 17 | os.makedirs(self.experiment_dir) 18 | 19 | def save_checkpoint(self, state, is_best, filename='checkpoint.pth.tar'): 20 | """Saves checkpoint to disk""" 21 | filename = os.path.join(self.experiment_dir, filename) 22 | torch.save(state, filename) 23 | if is_best: 24 | best_pred = state['best_pred'] 25 | with open(os.path.join(self.experiment_dir, 'best_pred.txt'), 'w') as f: 26 | f.write(str(best_pred)) 27 | if self.runs: 28 | previous_miou = [0.0] 29 | for run in self.runs: 30 | run_id = run.split('_')[-1] 31 | path = os.path.join(self.directory, 'experiment_{}'.format(str(run_id)), 'best_pred.txt') 32 | if os.path.exists(path): 33 | with open(path, 'r') as f: 34 | miou = float(f.readline()) 35 | previous_miou.append(miou) 36 | else: 37 | continue 38 | max_miou = max(previous_miou) 39 | if best_pred > max_miou: 40 | shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar')) 41 | else: 42 | shutil.copyfile(filename, os.path.join(self.directory, 'model_best.pth.tar')) 43 | 44 | def save_experiment_config(self): 45 | logfile = os.path.join(self.experiment_dir, 'parameters.txt') 46 | log_file = open(logfile, 'w') 47 | p = OrderedDict() 48 | p['datset'] = self.args.dataset 49 | p['backbone'] = self.args.net 50 | p['lr'] = self.args.lr 51 | p['epoch'] = self.args.epochs 52 | 53 | for key, val in p.items(): 54 | 
log_file.write(key + ':' + str(val) + '\n') 55 | log_file.close() 56 | -------------------------------------------------------------------------------- /danet/utils/summaries.py: -------------------------------------------------------------------------------- 1 | ''' 2 | import os 3 | import torch 4 | from torchvision.utils import make_grid 5 | from tensorboardX import SummaryWriter 6 | from data.utils import decode_seg_map_sequence 7 | 8 | class TensorboardSummary(object): 9 | def __init__(self, directory): 10 | self.directory = directory 11 | 12 | def create_summary(self): 13 | writer = SummaryWriter(log_dir=os.path.join(self.directory)) 14 | return writer 15 | 16 | def visualize_image(self, writer, dataset, image, target, output, global_step): 17 | grid_image = make_grid(image[:3].clone().cpu().data, 3, normalize=True) 18 | writer.add_image('Image', grid_image, global_step) 19 | grid_image = make_grid(decode_seg_map_sequence(torch.max(output[:3], 1)[1].detach().cpu().numpy(), 20 | dataset=dataset), 3, normalize=False, range=(0, 255)) 21 | writer.add_image('Predicted label', grid_image, global_step) 22 | grid_image = make_grid(decode_seg_map_sequence(torch.squeeze(target[:3], 1).detach().cpu().numpy(), 23 | dataset=dataset), 3, normalize=False, range=(0, 255)) 24 | writer.add_image('Groundtruth label', grid_image, global_step) 25 | ''' 26 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/easy-install.pth: -------------------------------------------------------------------------------- 1 | ./setuptools-40.8.0-py3.7.egg 2 | ./pip-19.0.3-py3.7.egg 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/dependency_links.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/entry_points.txt: -------------------------------------------------------------------------------- 1 | [console_scripts] 2 | pip = pip._internal:main 3 | pip3 = pip._internal:main 4 | pip3.7 = pip._internal:main 5 | 6 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/not-zip-safe: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/EGG-INFO/top_level.txt: -------------------------------------------------------------------------------- 1 | pip 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__init__.py: -------------------------------------------------------------------------------- 1 | __version__ = "19.0.3" 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/__main__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import sys 5 | 6 | # If we are running from a wheel, add the wheel to sys.path 7 | # This allows the usage python pip-*.whl/pip install pip-*.whl 8 | if __package__ == '': 9 | # __file__ is pip-*.whl/pip/__main__.py 10 | # first dirname call strips of 
'/__main__.py', second strips off '/pip' 11 | # Resulting path is the name of the wheel itself 12 | # Add that to sys.path so we can import pip 13 | path = os.path.dirname(os.path.dirname(__file__)) 14 | sys.path.insert(0, path) 15 | 16 | from pip._internal import main as _main # isort:skip # noqa 17 | 18 | if __name__ == '__main__': 19 | sys.exit(_main()) 20 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import absolute_import 3 | 4 | import locale 5 | import logging 6 | import os 7 | import warnings 8 | 9 | import sys 10 | 11 | # 2016-06-17 barry@debian.org: urllib3 1.14 added optional support for socks, 12 | # but if invoked (i.e. imported), it will issue a warning to stderr if socks 13 | # isn't available. requests unconditionally imports urllib3's socks contrib 14 | # module, triggering this warning. The warning breaks DEP-8 tests (because of 15 | # the stderr output) and is just plain annoying in normal usage. I don't want 16 | # to add socks as yet another dependency for pip, nor do I want to allow-stder 17 | # in the DEP-8 tests, so just suppress the warning. pdb tells me this has to 18 | # be done before the import of pip.vcs. 19 | from pip._vendor.urllib3.exceptions import DependencyWarning 20 | warnings.filterwarnings("ignore", category=DependencyWarning) # noqa 21 | 22 | # We want to inject the use of SecureTransport as early as possible so that any 23 | # references or sessions or what have you are ensured to have it, however we 24 | # only want to do this in the case that we're running on macOS and the linked 25 | # OpenSSL is too old to handle TLSv1.2 26 | try: 27 | import ssl 28 | except ImportError: 29 | pass 30 | else: 31 | # Checks for OpenSSL 1.0.1 on MacOS 32 | if sys.platform == "darwin" and ssl.OPENSSL_VERSION_NUMBER < 0x1000100f: 33 | try: 34 | from pip._vendor.urllib3.contrib import securetransport 35 | except (ImportError, OSError): 36 | pass 37 | else: 38 | securetransport.inject_into_urllib3() 39 | 40 | from pip._internal.cli.autocompletion import autocomplete 41 | from pip._internal.cli.main_parser import parse_command 42 | from pip._internal.commands import commands_dict 43 | from pip._internal.exceptions import PipError 44 | from pip._internal.utils import deprecation 45 | from pip._internal.vcs import git, mercurial, subversion, bazaar # noqa 46 | from pip._vendor.urllib3.exceptions import InsecureRequestWarning 47 | 48 | logger = logging.getLogger(__name__) 49 | 50 | # Hide the InsecureRequestWarning from urllib3 51 | warnings.filterwarnings("ignore", category=InsecureRequestWarning) 52 | 53 | 54 | def main(args=None): 55 | if args is None: 56 | args = sys.argv[1:] 57 | 58 | # Configure our deprecation warnings to be sent through loggers 59 | deprecation.install_warning_logger() 60 | 61 | autocomplete() 62 | 63 | try: 64 | cmd_name, cmd_args = parse_command(args) 65 | except PipError as exc: 66 | sys.stderr.write("ERROR: %s" % exc) 67 | sys.stderr.write(os.linesep) 68 | sys.exit(1) 69 | 70 | # Needed for locale.getpreferredencoding(False) to work 71 | # in pip._internal.utils.encoding.auto_decode 72 | try: 73 | locale.setlocale(locale.LC_ALL, '') 74 | except locale.Error as e: 75 | # setlocale can apparently crash if locale are uninitialized 76 | logger.debug("Ignoring error %s when setting locale", e) 77 | command = 
commands_dict[cmd_name](isolated=("--isolated" in cmd_args)) 78 | return command.main(cmd_args) 79 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/__init__.py: -------------------------------------------------------------------------------- 1 | """Subpackage containing all of pip's command line interface related code 2 | """ 3 | 4 | # This file intentionally does not import submodules 5 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/cli/status_codes.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | SUCCESS = 0 4 | ERROR = 1 5 | UNKNOWN_ERROR = 2 6 | VIRTUALENV_NOT_FOUND = 3 7 | PREVIOUS_BUILD_DIR_ERROR = 4 8 | NO_MATCHES_FOUND = 23 9 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Package containing all pip commands 3 | """ 4 | from __future__ import absolute_import 5 | 6 | from pip._internal.commands.completion import CompletionCommand 7 | from pip._internal.commands.configuration import ConfigurationCommand 8 | from pip._internal.commands.download import DownloadCommand 9 | from pip._internal.commands.freeze import FreezeCommand 10 | from pip._internal.commands.hash import HashCommand 11 | from pip._internal.commands.help import HelpCommand 12 | from pip._internal.commands.list import ListCommand 13 | from pip._internal.commands.check import CheckCommand 14 | from pip._internal.commands.search import SearchCommand 15 | from pip._internal.commands.show import ShowCommand 16 | from pip._internal.commands.install import InstallCommand 17 | from pip._internal.commands.uninstall import UninstallCommand 18 | from pip._internal.commands.wheel import WheelCommand 19 | 20 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 21 | 22 | if MYPY_CHECK_RUNNING: 23 | from typing import List, Type # noqa: F401 24 | from pip._internal.cli.base_command import Command # noqa: F401 25 | 26 | commands_order = [ 27 | InstallCommand, 28 | DownloadCommand, 29 | UninstallCommand, 30 | FreezeCommand, 31 | ListCommand, 32 | ShowCommand, 33 | CheckCommand, 34 | ConfigurationCommand, 35 | SearchCommand, 36 | WheelCommand, 37 | HashCommand, 38 | CompletionCommand, 39 | HelpCommand, 40 | ] # type: List[Type[Command]] 41 | 42 | commands_dict = {c.name: c for c in commands_order} 43 | 44 | 45 | def get_summaries(ordered=True): 46 | """Yields sorted (command name, command summary) tuples.""" 47 | 48 | if ordered: 49 | cmditems = _sort_commands(commands_dict, commands_order) 50 | else: 51 | cmditems = commands_dict.items() 52 | 53 | for name, command_class in cmditems: 54 | yield (name, command_class.summary) 55 | 56 | 57 | def get_similar_commands(name): 58 | """Command name auto-correct.""" 59 | from difflib import get_close_matches 60 | 61 | name = name.lower() 62 | 63 | close_commands = get_close_matches(name, commands_dict.keys()) 64 | 65 | if close_commands: 66 | return close_commands[0] 67 | else: 68 | return False 69 | 70 | 71 | def _sort_commands(cmddict, order): 72 | def keyfn(key): 73 | try: 74 | return order.index(key[1]) 75 | except ValueError: 76 | # unordered items should come last 77 | return 0xff 78 | 
79 | return sorted(cmddict.items(), key=keyfn) 80 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/check.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from pip._internal.cli.base_command import Command 4 | from pip._internal.operations.check import ( 5 | check_package_set, create_package_set_from_installed, 6 | ) 7 | 8 | logger = logging.getLogger(__name__) 9 | 10 | 11 | class CheckCommand(Command): 12 | """Verify installed packages have compatible dependencies.""" 13 | name = 'check' 14 | usage = """ 15 | %prog [options]""" 16 | summary = 'Verify installed packages have compatible dependencies.' 17 | 18 | def run(self, options, args): 19 | package_set, parsing_probs = create_package_set_from_installed() 20 | missing, conflicting = check_package_set(package_set) 21 | 22 | for project_name in missing: 23 | version = package_set[project_name].version 24 | for dependency in missing[project_name]: 25 | logger.info( 26 | "%s %s requires %s, which is not installed.", 27 | project_name, version, dependency[0], 28 | ) 29 | 30 | for project_name in conflicting: 31 | version = package_set[project_name].version 32 | for dep_name, dep_version, req in conflicting[project_name]: 33 | logger.info( 34 | "%s %s has requirement %s, but you have %s %s.", 35 | project_name, version, req, dep_name, dep_version, 36 | ) 37 | 38 | if missing or conflicting or parsing_probs: 39 | return 1 40 | else: 41 | logger.info("No broken requirements found.") 42 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/completion.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import sys 4 | import textwrap 5 | 6 | from pip._internal.cli.base_command import Command 7 | from pip._internal.utils.misc import get_prog 8 | 9 | BASE_COMPLETION = """ 10 | # pip %(shell)s completion start%(script)s# pip %(shell)s completion end 11 | """ 12 | 13 | COMPLETION_SCRIPTS = { 14 | 'bash': """ 15 | _pip_completion() 16 | { 17 | COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ 18 | COMP_CWORD=$COMP_CWORD \\ 19 | PIP_AUTO_COMPLETE=1 $1 ) ) 20 | } 21 | complete -o default -F _pip_completion %(prog)s 22 | """, 23 | 'zsh': """ 24 | function _pip_completion { 25 | local words cword 26 | read -Ac words 27 | read -cn cword 28 | reply=( $( COMP_WORDS="$words[*]" \\ 29 | COMP_CWORD=$(( cword-1 )) \\ 30 | PIP_AUTO_COMPLETE=1 $words[1] ) ) 31 | } 32 | compctl -K _pip_completion %(prog)s 33 | """, 34 | 'fish': """ 35 | function __fish_complete_pip 36 | set -lx COMP_WORDS (commandline -o) "" 37 | set -lx COMP_CWORD ( \\ 38 | math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\ 39 | ) 40 | set -lx PIP_AUTO_COMPLETE 1 41 | string split \\ -- (eval $COMP_WORDS[1]) 42 | end 43 | complete -fa "(__fish_complete_pip)" -c %(prog)s 44 | """, 45 | } 46 | 47 | 48 | class CompletionCommand(Command): 49 | """A helper command to be used for command completion.""" 50 | name = 'completion' 51 | summary = 'A helper command used for command completion.' 
52 | ignore_require_venv = True 53 | 54 | def __init__(self, *args, **kw): 55 | super(CompletionCommand, self).__init__(*args, **kw) 56 | 57 | cmd_opts = self.cmd_opts 58 | 59 | cmd_opts.add_option( 60 | '--bash', '-b', 61 | action='store_const', 62 | const='bash', 63 | dest='shell', 64 | help='Emit completion code for bash') 65 | cmd_opts.add_option( 66 | '--zsh', '-z', 67 | action='store_const', 68 | const='zsh', 69 | dest='shell', 70 | help='Emit completion code for zsh') 71 | cmd_opts.add_option( 72 | '--fish', '-f', 73 | action='store_const', 74 | const='fish', 75 | dest='shell', 76 | help='Emit completion code for fish') 77 | 78 | self.parser.insert_option_group(0, cmd_opts) 79 | 80 | def run(self, options, args): 81 | """Prints the completion code of the given shell""" 82 | shells = COMPLETION_SCRIPTS.keys() 83 | shell_options = ['--' + shell for shell in sorted(shells)] 84 | if options.shell in shells: 85 | script = textwrap.dedent( 86 | COMPLETION_SCRIPTS.get(options.shell, '') % { 87 | 'prog': get_prog(), 88 | } 89 | ) 90 | print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) 91 | else: 92 | sys.stderr.write( 93 | 'ERROR: You must pass %s\n' % ' or '.join(shell_options) 94 | ) 95 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/hash.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import hashlib 4 | import logging 5 | import sys 6 | 7 | from pip._internal.cli.base_command import Command 8 | from pip._internal.cli.status_codes import ERROR 9 | from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES 10 | from pip._internal.utils.misc import read_chunks 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | class HashCommand(Command): 16 | """ 17 | Compute a hash of a local package archive. 18 | 19 | These can be used with --hash in a requirements file to do repeatable 20 | installs. 21 | 22 | """ 23 | name = 'hash' 24 | usage = '%prog [options] ...' 25 | summary = 'Compute hashes of package archives.' 
26 | ignore_require_venv = True 27 | 28 | def __init__(self, *args, **kw): 29 | super(HashCommand, self).__init__(*args, **kw) 30 | self.cmd_opts.add_option( 31 | '-a', '--algorithm', 32 | dest='algorithm', 33 | choices=STRONG_HASHES, 34 | action='store', 35 | default=FAVORITE_HASH, 36 | help='The hash algorithm to use: one of %s' % 37 | ', '.join(STRONG_HASHES)) 38 | self.parser.insert_option_group(0, self.cmd_opts) 39 | 40 | def run(self, options, args): 41 | if not args: 42 | self.parser.print_usage(sys.stderr) 43 | return ERROR 44 | 45 | algorithm = options.algorithm 46 | for path in args: 47 | logger.info('%s:\n--hash=%s:%s', 48 | path, algorithm, _hash_of_file(path, algorithm)) 49 | 50 | 51 | def _hash_of_file(path, algorithm): 52 | """Return the hash digest of a file.""" 53 | with open(path, 'rb') as archive: 54 | hash = hashlib.new(algorithm) 55 | for chunk in read_chunks(archive): 56 | hash.update(chunk) 57 | return hash.hexdigest() 58 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/help.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from pip._internal.cli.base_command import Command 4 | from pip._internal.cli.status_codes import SUCCESS 5 | from pip._internal.exceptions import CommandError 6 | 7 | 8 | class HelpCommand(Command): 9 | """Show help for commands""" 10 | name = 'help' 11 | usage = """ 12 | %prog """ 13 | summary = 'Show help for commands.' 14 | ignore_require_venv = True 15 | 16 | def run(self, options, args): 17 | from pip._internal.commands import commands_dict, get_similar_commands 18 | 19 | try: 20 | # 'pip help' with no args is handled by pip.__init__.parseopt() 21 | cmd_name = args[0] # the command we need help for 22 | except IndexError: 23 | return SUCCESS 24 | 25 | if cmd_name not in commands_dict: 26 | guess = get_similar_commands(cmd_name) 27 | 28 | msg = ['unknown command "%s"' % cmd_name] 29 | if guess: 30 | msg.append('maybe you meant "%s"' % guess) 31 | 32 | raise CommandError(' - '.join(msg)) 33 | 34 | command = commands_dict[cmd_name]() 35 | command.parser.print_help() 36 | 37 | return SUCCESS 38 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/commands/uninstall.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from pip._vendor.packaging.utils import canonicalize_name 4 | 5 | from pip._internal.cli.base_command import Command 6 | from pip._internal.exceptions import InstallationError 7 | from pip._internal.req import parse_requirements 8 | from pip._internal.req.constructors import install_req_from_line 9 | from pip._internal.utils.misc import protect_pip_from_modification_on_windows 10 | 11 | 12 | class UninstallCommand(Command): 13 | """ 14 | Uninstall packages. 15 | 16 | pip is able to uninstall most installed packages. Known exceptions are: 17 | 18 | - Pure distutils packages installed with ``python setup.py install``, which 19 | leave behind no metadata to determine what files were installed. 20 | - Script wrappers installed by ``python setup.py develop``. 21 | """ 22 | name = 'uninstall' 23 | usage = """ 24 | %prog [options] ... 25 | %prog [options] -r ...""" 26 | summary = 'Uninstall packages.' 
27 | 28 | def __init__(self, *args, **kw): 29 | super(UninstallCommand, self).__init__(*args, **kw) 30 | self.cmd_opts.add_option( 31 | '-r', '--requirement', 32 | dest='requirements', 33 | action='append', 34 | default=[], 35 | metavar='file', 36 | help='Uninstall all the packages listed in the given requirements ' 37 | 'file. This option can be used multiple times.', 38 | ) 39 | self.cmd_opts.add_option( 40 | '-y', '--yes', 41 | dest='yes', 42 | action='store_true', 43 | help="Don't ask for confirmation of uninstall deletions.") 44 | 45 | self.parser.insert_option_group(0, self.cmd_opts) 46 | 47 | def run(self, options, args): 48 | with self._build_session(options) as session: 49 | reqs_to_uninstall = {} 50 | for name in args: 51 | req = install_req_from_line( 52 | name, isolated=options.isolated_mode, 53 | ) 54 | if req.name: 55 | reqs_to_uninstall[canonicalize_name(req.name)] = req 56 | for filename in options.requirements: 57 | for req in parse_requirements( 58 | filename, 59 | options=options, 60 | session=session): 61 | if req.name: 62 | reqs_to_uninstall[canonicalize_name(req.name)] = req 63 | if not reqs_to_uninstall: 64 | raise InstallationError( 65 | 'You must give at least one requirement to %(name)s (see ' 66 | '"pip help %(name)s")' % dict(name=self.name) 67 | ) 68 | 69 | protect_pip_from_modification_on_windows( 70 | modifying_pip="pip" in reqs_to_uninstall 71 | ) 72 | 73 | for req in reqs_to_uninstall.values(): 74 | uninstall_pathset = req.uninstall( 75 | auto_confirm=options.yes, verbose=self.verbosity > 0, 76 | ) 77 | if uninstall_pathset: 78 | uninstall_pathset.commit() 79 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/__init__.py: -------------------------------------------------------------------------------- 1 | """A package that contains models that represent entities. 2 | """ 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/candidate.py: -------------------------------------------------------------------------------- 1 | from pip._vendor.packaging.version import parse as parse_version 2 | 3 | from pip._internal.utils.models import KeyBasedCompareMixin 4 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 5 | 6 | if MYPY_CHECK_RUNNING: 7 | from pip._vendor.packaging.version import _BaseVersion # noqa: F401 8 | from pip._internal.models.link import Link # noqa: F401 9 | from typing import Any, Union # noqa: F401 10 | 11 | 12 | class InstallationCandidate(KeyBasedCompareMixin): 13 | """Represents a potential "candidate" for installation. 
14 | """ 15 | 16 | def __init__(self, project, version, location): 17 | # type: (Any, str, Link) -> None 18 | self.project = project 19 | self.version = parse_version(version) # type: _BaseVersion 20 | self.location = location 21 | 22 | super(InstallationCandidate, self).__init__( 23 | key=(self.project, self.version, self.location), 24 | defining_class=InstallationCandidate 25 | ) 26 | 27 | def __repr__(self): 28 | # type: () -> str 29 | return "".format( 30 | self.project, self.version, self.location, 31 | ) 32 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/format_control.py: -------------------------------------------------------------------------------- 1 | from pip._vendor.packaging.utils import canonicalize_name 2 | 3 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 4 | 5 | if MYPY_CHECK_RUNNING: 6 | from typing import Optional, Set, FrozenSet # noqa: F401 7 | 8 | 9 | class FormatControl(object): 10 | """Helper for managing formats from which a package can be installed. 11 | """ 12 | 13 | def __init__(self, no_binary=None, only_binary=None): 14 | # type: (Optional[Set], Optional[Set]) -> None 15 | if no_binary is None: 16 | no_binary = set() 17 | if only_binary is None: 18 | only_binary = set() 19 | 20 | self.no_binary = no_binary 21 | self.only_binary = only_binary 22 | 23 | def __eq__(self, other): 24 | return self.__dict__ == other.__dict__ 25 | 26 | def __ne__(self, other): 27 | return not self.__eq__(other) 28 | 29 | def __repr__(self): 30 | return "{}({}, {})".format( 31 | self.__class__.__name__, 32 | self.no_binary, 33 | self.only_binary 34 | ) 35 | 36 | @staticmethod 37 | def handle_mutual_excludes(value, target, other): 38 | # type: (str, Optional[Set], Optional[Set]) -> None 39 | new = value.split(',') 40 | while ':all:' in new: 41 | other.clear() 42 | target.clear() 43 | target.add(':all:') 44 | del new[:new.index(':all:') + 1] 45 | # Without a none, we want to discard everything as :all: covers it 46 | if ':none:' not in new: 47 | return 48 | for name in new: 49 | if name == ':none:': 50 | target.clear() 51 | continue 52 | name = canonicalize_name(name) 53 | other.discard(name) 54 | target.add(name) 55 | 56 | def get_allowed_formats(self, canonical_name): 57 | # type: (str) -> FrozenSet 58 | result = {"binary", "source"} 59 | if canonical_name in self.only_binary: 60 | result.discard('source') 61 | elif canonical_name in self.no_binary: 62 | result.discard('binary') 63 | elif ':all:' in self.only_binary: 64 | result.discard('source') 65 | elif ':all:' in self.no_binary: 66 | result.discard('binary') 67 | return frozenset(result) 68 | 69 | def disallow_binaries(self): 70 | # type: () -> None 71 | self.handle_mutual_excludes( 72 | ':all:', self.no_binary, self.only_binary, 73 | ) 74 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/models/index.py: -------------------------------------------------------------------------------- 1 | from pip._vendor.six.moves.urllib import parse as urllib_parse 2 | 3 | 4 | class PackageIndex(object): 5 | """Represents a Package Index and provides easier access to endpoints 6 | """ 7 | 8 | def __init__(self, url, file_storage_domain): 9 | # type: (str, str) -> None 10 | super(PackageIndex, self).__init__() 11 | self.url = url 12 | self.netloc = urllib_parse.urlsplit(url).netloc 13 | self.simple_url = self._url_for_path('simple') 
14 | self.pypi_url = self._url_for_path('pypi') 15 | 16 | # This is part of a temporary hack used to block installs of PyPI 17 | # packages which depend on external urls only necessary until PyPI can 18 | # block such packages themselves 19 | self.file_storage_domain = file_storage_domain 20 | 21 | def _url_for_path(self, path): 22 | # type: (str) -> str 23 | return urllib_parse.urljoin(self.url, path) 24 | 25 | 26 | PyPI = PackageIndex( 27 | 'https://pypi.org/', file_storage_domain='files.pythonhosted.org' 28 | ) 29 | TestPyPI = PackageIndex( 30 | 'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org' 31 | ) 32 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/operations/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import logging 4 | 5 | from .req_install import InstallRequirement 6 | from .req_set import RequirementSet 7 | from .req_file import parse_requirements 8 | from pip._internal.utils.logging import indent_log 9 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 10 | 11 | if MYPY_CHECK_RUNNING: 12 | from typing import List, Sequence # noqa: F401 13 | 14 | __all__ = [ 15 | "RequirementSet", "InstallRequirement", 16 | "parse_requirements", "install_given_reqs", 17 | ] 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | 22 | def install_given_reqs( 23 | to_install, # type: List[InstallRequirement] 24 | install_options, # type: List[str] 25 | global_options=(), # type: Sequence[str] 26 | *args, **kwargs 27 | ): 28 | # type: (...) -> List[InstallRequirement] 29 | """ 30 | Install everything in the given list. 
31 | 32 | (to be called after having downloaded and unpacked the packages) 33 | """ 34 | 35 | if to_install: 36 | logger.info( 37 | 'Installing collected packages: %s', 38 | ', '.join([req.name for req in to_install]), 39 | ) 40 | 41 | with indent_log(): 42 | for requirement in to_install: 43 | if requirement.conflicts_with: 44 | logger.info( 45 | 'Found existing installation: %s', 46 | requirement.conflicts_with, 47 | ) 48 | with indent_log(): 49 | uninstalled_pathset = requirement.uninstall( 50 | auto_confirm=True 51 | ) 52 | try: 53 | requirement.install( 54 | install_options, 55 | global_options, 56 | *args, 57 | **kwargs 58 | ) 59 | except Exception: 60 | should_rollback = ( 61 | requirement.conflicts_with and 62 | not requirement.install_succeeded 63 | ) 64 | # if install did not succeed, rollback previous uninstall 65 | if should_rollback: 66 | uninstalled_pathset.rollback() 67 | raise 68 | else: 69 | should_commit = ( 70 | requirement.conflicts_with and 71 | requirement.install_succeeded 72 | ) 73 | if should_commit: 74 | uninstalled_pathset.commit() 75 | requirement.remove_temporary_source() 76 | 77 | return to_install 78 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/req/req_tracker.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import contextlib 4 | import errno 5 | import hashlib 6 | import logging 7 | import os 8 | 9 | from pip._internal.utils.temp_dir import TempDirectory 10 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 11 | 12 | if MYPY_CHECK_RUNNING: 13 | from typing import Set, Iterator # noqa: F401 14 | from pip._internal.req.req_install import InstallRequirement # noqa: F401 15 | from pip._internal.models.link import Link # noqa: F401 16 | 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | class RequirementTracker(object): 21 | 22 | def __init__(self): 23 | # type: () -> None 24 | self._root = os.environ.get('PIP_REQ_TRACKER') 25 | if self._root is None: 26 | self._temp_dir = TempDirectory(delete=False, kind='req-tracker') 27 | self._temp_dir.create() 28 | self._root = os.environ['PIP_REQ_TRACKER'] = self._temp_dir.path 29 | logger.debug('Created requirements tracker %r', self._root) 30 | else: 31 | self._temp_dir = None 32 | logger.debug('Re-using requirements tracker %r', self._root) 33 | self._entries = set() # type: Set[InstallRequirement] 34 | 35 | def __enter__(self): 36 | return self 37 | 38 | def __exit__(self, exc_type, exc_val, exc_tb): 39 | self.cleanup() 40 | 41 | def _entry_path(self, link): 42 | # type: (Link) -> str 43 | hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest() 44 | return os.path.join(self._root, hashed) 45 | 46 | def add(self, req): 47 | # type: (InstallRequirement) -> None 48 | link = req.link 49 | info = str(req) 50 | entry_path = self._entry_path(link) 51 | try: 52 | with open(entry_path) as fp: 53 | # Error, there's already a build in progress.
54 | raise LookupError('%s is already being built: %s' 55 | % (link, fp.read())) 56 | except IOError as e: 57 | if e.errno != errno.ENOENT: 58 | raise 59 | assert req not in self._entries 60 | with open(entry_path, 'w') as fp: 61 | fp.write(info) 62 | self._entries.add(req) 63 | logger.debug('Added %s to build tracker %r', req, self._root) 64 | 65 | def remove(self, req): 66 | # type: (InstallRequirement) -> None 67 | link = req.link 68 | self._entries.remove(req) 69 | os.unlink(self._entry_path(link)) 70 | logger.debug('Removed %s from build tracker %r', req, self._root) 71 | 72 | def cleanup(self): 73 | # type: () -> None 74 | for req in set(self._entries): 75 | self.remove(req) 76 | remove = self._temp_dir is not None 77 | if remove: 78 | self._temp_dir.cleanup() 79 | logger.debug('%s build tracker %r', 80 | 'Removed' if remove else 'Cleaned', 81 | self._root) 82 | 83 | @contextlib.contextmanager 84 | def track(self, req): 85 | # type: (InstallRequirement) -> Iterator[None] 86 | self.add(req) 87 | yield 88 | self.remove(req) 89 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/deprecation.py: -------------------------------------------------------------------------------- 1 | """ 2 | A module that implements tooling to enable easy warnings about deprecations. 3 | """ 4 | from __future__ import absolute_import 5 | 6 | import logging 7 | import warnings 8 | 9 | from pip._vendor.packaging.version import parse 10 | 11 | from pip import __version__ as current_version 12 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 13 | 14 | if MYPY_CHECK_RUNNING: 15 | from typing import Any, Optional # noqa: F401 16 | 17 | 18 | class PipDeprecationWarning(Warning): 19 | pass 20 | 21 | 22 | _original_showwarning = None # type: Any 23 | 24 | 25 | # Warnings <-> Logging Integration 26 | def _showwarning(message, category, filename, lineno, file=None, line=None): 27 | if file is not None: 28 | if _original_showwarning is not None: 29 | _original_showwarning( 30 | message, category, filename, lineno, file, line, 31 | ) 32 | elif issubclass(category, PipDeprecationWarning): 33 | # We use a specially named logger which will handle all of the 34 | # deprecation messages for pip. 35 | logger = logging.getLogger("pip._internal.deprecations") 36 | logger.warning(message) 37 | else: 38 | _original_showwarning( 39 | message, category, filename, lineno, file, line, 40 | ) 41 | 42 | 43 | def install_warning_logger(): 44 | # type: () -> None 45 | # Enable our Deprecation Warnings 46 | warnings.simplefilter("default", PipDeprecationWarning, append=True) 47 | 48 | global _original_showwarning 49 | 50 | if _original_showwarning is None: 51 | _original_showwarning = warnings.showwarning 52 | warnings.showwarning = _showwarning 53 | 54 | 55 | def deprecated(reason, replacement, gone_in, issue=None): 56 | # type: (str, Optional[str], Optional[str], Optional[int]) -> None 57 | """Helper to deprecate existing functionality. 
58 | 59 | reason: 60 | Textual reason shown to the user about why this functionality has 61 | been deprecated. 62 | replacement: 63 | Textual suggestion shown to the user about what alternative 64 | functionality they can use. 65 | gone_in: 66 | The version of pip in which this functionality should be removed. 67 | Raises errors if pip's current version is greater than or equal to 68 | this. 69 | issue: 70 | Issue number on the tracker that would serve as a useful place for 71 | users to find related discussion and provide feedback. 72 | 73 | Always pass replacement, gone_in and issue as keyword arguments for clarity 74 | at the call site. 75 | """ 76 | 77 | # Construct a nice message. 78 | # This is purposely eagerly formatted as we want it to appear as if someone 79 | # typed this entire message out. 80 | message = "DEPRECATION: " + reason 81 | if replacement is not None: 82 | message += " A possible replacement is {}.".format(replacement) 83 | if issue is not None: 84 | url = "https://github.com/pypa/pip/issues/" + str(issue) 85 | message += " You can find discussion regarding this at {}.".format(url) 86 | 87 | # Raise as an error if it has to be removed. 88 | if gone_in is not None and parse(current_version) >= parse(gone_in): 89 | raise PipDeprecationWarning(message) 90 | warnings.warn(message, category=PipDeprecationWarning, stacklevel=2) 91 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/encoding.py: -------------------------------------------------------------------------------- 1 | import codecs 2 | import locale 3 | import re 4 | import sys 5 | 6 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 7 | 8 | if MYPY_CHECK_RUNNING: 9 | from typing import List, Tuple, Text # noqa: F401 10 | 11 | BOMS = [ 12 | (codecs.BOM_UTF8, 'utf8'), 13 | (codecs.BOM_UTF16, 'utf16'), 14 | (codecs.BOM_UTF16_BE, 'utf16-be'), 15 | (codecs.BOM_UTF16_LE, 'utf16-le'), 16 | (codecs.BOM_UTF32, 'utf32'), 17 | (codecs.BOM_UTF32_BE, 'utf32-be'), 18 | (codecs.BOM_UTF32_LE, 'utf32-le'), 19 | ] # type: List[Tuple[bytes, Text]] 20 | 21 | ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') 22 | 23 | 24 | def auto_decode(data): 25 | # type: (bytes) -> Text 26 | """Check a bytes string for a BOM to correctly detect the encoding 27 | 28 | Fall back to locale.getpreferredencoding(False) like open() on Python3""" 29 | for bom, encoding in BOMS: 30 | if data.startswith(bom): 31 | return data[len(bom):].decode(encoding) 32 | # Let's check the first two lines as in PEP263 33 | for line in data.split(b'\n')[:2]: 34 | if line[0:1] == b'#' and ENCODING_RE.search(line): 35 | encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') 36 | return data.decode(encoding) 37 | return data.decode( 38 | locale.getpreferredencoding(False) or sys.getdefaultencoding(), 39 | ) 40 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/filesystem.py: -------------------------------------------------------------------------------- 1 | import os 2 | import os.path 3 | 4 | from pip._internal.utils.compat import get_path_uid 5 | 6 | 7 | def check_path_owner(path): 8 | # type: (str) -> bool 9 | # If we don't have a way to check the effective uid of this process, then 10 | # we'll just assume that we own the directory.
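The deprecated() helper above either logs a DEPRECATION warning or raises PipDeprecationWarning, depending on how gone_in compares to the running pip version. A minimal sketch of the intended call style, with hypothetical message text and issue number, assuming the vendored module is importable (the filesystem.py listing resumes below):

# Illustrative sketch, not part of the dumped pip source.
from pip._internal.utils.deprecation import deprecated, install_warning_logger

# Route PipDeprecationWarning through the "pip._internal.deprecations" logger.
install_warning_logger()

# With gone_in=None this only warns; a gone_in at or below the running
# pip version would raise PipDeprecationWarning instead.
deprecated(
    "Support for the --foo flag is deprecated.",  # hypothetical reason text
    replacement="use --bar",
    gone_in=None,
    issue=1234,                                   # hypothetical issue number
)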
11 | if not hasattr(os, "geteuid"): 12 | return True 13 | 14 | previous = None 15 | while path != previous: 16 | if os.path.lexists(path): 17 | # Check if path is writable by current user. 18 | if os.geteuid() == 0: 19 | # Special handling for root user in order to handle properly 20 | # cases where users use sudo without -H flag. 21 | try: 22 | path_uid = get_path_uid(path) 23 | except OSError: 24 | return False 25 | return path_uid == 0 26 | else: 27 | return os.access(path, os.W_OK) 28 | else: 29 | previous, path = path, os.path.dirname(path) 30 | return False # assume we don't own the path 31 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/models.py: -------------------------------------------------------------------------------- 1 | """Utilities for defining models 2 | """ 3 | 4 | import operator 5 | 6 | 7 | class KeyBasedCompareMixin(object): 8 | """Provides comparison capabilities that are based on a key 9 | """ 10 | 11 | def __init__(self, key, defining_class): 12 | self._compare_key = key 13 | self._defining_class = defining_class 14 | 15 | def __hash__(self): 16 | return hash(self._compare_key) 17 | 18 | def __lt__(self, other): 19 | return self._compare(other, operator.__lt__) 20 | 21 | def __le__(self, other): 22 | return self._compare(other, operator.__le__) 23 | 24 | def __gt__(self, other): 25 | return self._compare(other, operator.__gt__) 26 | 27 | def __ge__(self, other): 28 | return self._compare(other, operator.__ge__) 29 | 30 | def __eq__(self, other): 31 | return self._compare(other, operator.__eq__) 32 | 33 | def __ne__(self, other): 34 | return self._compare(other, operator.__ne__) 35 | 36 | def _compare(self, other, method): 37 | if not isinstance(other, self._defining_class): 38 | return NotImplemented 39 | 40 | return method(self._compare_key, other._compare_key) 41 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/packaging.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import logging 4 | import sys 5 | from email.parser import FeedParser 6 | 7 | from pip._vendor import pkg_resources 8 | from pip._vendor.packaging import specifiers, version 9 | 10 | from pip._internal import exceptions 11 | from pip._internal.utils.misc import display_path 12 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 13 | 14 | if MYPY_CHECK_RUNNING: 15 | from typing import Optional # noqa: F401 16 | from email.message import Message # noqa: F401 17 | from pip._vendor.pkg_resources import Distribution # noqa: F401 18 | 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | 23 | def check_requires_python(requires_python): 24 | # type: (Optional[str]) -> bool 25 | """ 26 | Check if the python version in use matches the `requires_python` specifier. 27 | 28 | Returns `True` if the version of python in use matches the requirement. 29 | Returns `False` if the version of python in use does not match the 30 | requirement. 31 | 32 | Raises an InvalidSpecifier if `requires_python` has an invalid format.
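KeyBasedCompareMixin above derives a full set of comparison operators from a single key, refusing to compare against unrelated types; it is what gives InstallationCandidate (earlier in this dump) its ordering. A toy sketch with a made-up Release class, assuming the vendored module is importable (the packaging.py docstring continues below):

# Illustrative sketch, not part of the dumped pip source.
from pip._internal.utils.models import KeyBasedCompareMixin


class Release(KeyBasedCompareMixin):
    """Toy class whose instances compare by their (major, minor) tuple."""

    def __init__(self, major, minor):
        self.major = major
        self.minor = minor
        super(Release, self).__init__(key=(major, minor), defining_class=Release)


print(Release(1, 2) < Release(1, 10))   # True
print(Release(1, 2) == Release(1, 2))   # True
print(Release(1, 2) == "1.2")           # False: _compare returns NotImplemented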
33 | """ 34 | if requires_python is None: 35 | # The package provides no information 36 | return True 37 | requires_python_specifier = specifiers.SpecifierSet(requires_python) 38 | 39 | # We only use major.minor.micro 40 | python_version = version.parse('.'.join(map(str, sys.version_info[:3]))) 41 | return python_version in requires_python_specifier 42 | 43 | 44 | def get_metadata(dist): 45 | # type: (Distribution) -> Message 46 | if (isinstance(dist, pkg_resources.DistInfoDistribution) and 47 | dist.has_metadata('METADATA')): 48 | metadata = dist.get_metadata('METADATA') 49 | elif dist.has_metadata('PKG-INFO'): 50 | metadata = dist.get_metadata('PKG-INFO') 51 | else: 52 | logger.warning("No metadata found in %s", display_path(dist.location)) 53 | metadata = '' 54 | 55 | feed_parser = FeedParser() 56 | feed_parser.feed(metadata) 57 | return feed_parser.close() 58 | 59 | 60 | def check_dist_requires_python(dist): 61 | pkg_info_dict = get_metadata(dist) 62 | requires_python = pkg_info_dict.get('Requires-Python') 63 | try: 64 | if not check_requires_python(requires_python): 65 | raise exceptions.UnsupportedPythonVersion( 66 | "%s requires Python '%s' but the running Python is %s" % ( 67 | dist.project_name, 68 | requires_python, 69 | '.'.join(map(str, sys.version_info[:3])),) 70 | ) 71 | except specifiers.InvalidSpecifier as e: 72 | logger.warning( 73 | "Package %s has an invalid Requires-Python entry %s - %s", 74 | dist.project_name, requires_python, e, 75 | ) 76 | return 77 | 78 | 79 | def get_installer(dist): 80 | # type: (Distribution) -> str 81 | if dist.has_metadata('INSTALLER'): 82 | for line in dist.get_metadata_lines('INSTALLER'): 83 | if line.strip(): 84 | return line.strip() 85 | return '' 86 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/setuptools_build.py: -------------------------------------------------------------------------------- 1 | # Shim to wrap setup.py invocation with setuptools 2 | SETUPTOOLS_SHIM = ( 3 | "import setuptools, tokenize;__file__=%r;" 4 | "f=getattr(tokenize, 'open', open)(__file__);" 5 | "code=f.read().replace('\\r\\n', '\\n');" 6 | "f.close();" 7 | "exec(compile(code, __file__, 'exec'))" 8 | ) 9 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_internal/utils/typing.py: -------------------------------------------------------------------------------- 1 | """For neatly implementing static typing in pip. 2 | 3 | `mypy` - the static type analysis tool we use - uses the `typing` module, which 4 | provides core functionality fundamental to mypy's functioning. 5 | 6 | Generally, `typing` would be imported at runtime and used in that fashion - 7 | it acts as a no-op at runtime and does not have any run-time overhead by 8 | design. 9 | 10 | As it turns out, `typing` is not vendorable - it uses separate sources for 11 | Python 2/Python 3. Thus, this codebase can not expect it to be present. 12 | To work around this, mypy allows the typing import to be behind a False-y 13 | optional to prevent it from running at runtime and type-comments can be used 14 | to remove the need for the types to be accessible directly during runtime. 15 | 16 | This module provides the False-y guard in a nicely named fashion so that a 17 | curious maintainer can reach here to read this. 
18 | 19 | In pip, all static-typing related imports should be guarded as follows: 20 | 21 | from pip._internal.utils.typing import MYPY_CHECK_RUNNING 22 | 23 | if MYPY_CHECK_RUNNING: 24 | from typing import ... # noqa: F401 25 | 26 | Ref: https://github.com/python/mypy/issues/3216 27 | """ 28 | 29 | MYPY_CHECK_RUNNING = False 30 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/__init__.py: -------------------------------------------------------------------------------- 1 | """CacheControl import Interface. 2 | 3 | Make it easy to import from cachecontrol without long namespaces. 4 | """ 5 | __author__ = "Eric Larson" 6 | __email__ = "eric@ionrock.org" 7 | __version__ = "0.12.5" 8 | 9 | from .wrapper import CacheControl 10 | from .adapter import CacheControlAdapter 11 | from .controller import CacheController 12 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/_cmd.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from pip._vendor import requests 4 | 5 | from pip._vendor.cachecontrol.adapter import CacheControlAdapter 6 | from pip._vendor.cachecontrol.cache import DictCache 7 | from pip._vendor.cachecontrol.controller import logger 8 | 9 | from argparse import ArgumentParser 10 | 11 | 12 | def setup_logging(): 13 | logger.setLevel(logging.DEBUG) 14 | handler = logging.StreamHandler() 15 | logger.addHandler(handler) 16 | 17 | 18 | def get_session(): 19 | adapter = CacheControlAdapter( 20 | DictCache(), cache_etags=True, serializer=None, heuristic=None 21 | ) 22 | sess = requests.Session() 23 | sess.mount("http://", adapter) 24 | sess.mount("https://", adapter) 25 | 26 | sess.cache_controller = adapter.controller 27 | return sess 28 | 29 | 30 | def get_args(): 31 | parser = ArgumentParser() 32 | parser.add_argument("url", help="The URL to try and cache") 33 | return parser.parse_args() 34 | 35 | 36 | def main(args=None): 37 | args = get_args() 38 | sess = get_session() 39 | 40 | # Make a request to get a response 41 | resp = sess.get(args.url) 42 | 43 | # Turn on logging 44 | setup_logging() 45 | 46 | # try setting the cache 47 | sess.cache_controller.cache_response(resp.request, resp.raw) 48 | 49 | # Now try to get it 50 | if sess.cache_controller.cached_request(resp.request): 51 | print("Cached!") 52 | else: 53 | print("Not cached :(") 54 | 55 | 56 | if __name__ == "__main__": 57 | main() 58 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/cache.py: -------------------------------------------------------------------------------- 1 | """ 2 | The cache object API for implementing caches. The default is a thread 3 | safe in-memory dictionary. 
4 | """ 5 | from threading import Lock 6 | 7 | 8 | class BaseCache(object): 9 | 10 | def get(self, key): 11 | raise NotImplementedError() 12 | 13 | def set(self, key, value): 14 | raise NotImplementedError() 15 | 16 | def delete(self, key): 17 | raise NotImplementedError() 18 | 19 | def close(self): 20 | pass 21 | 22 | 23 | class DictCache(BaseCache): 24 | 25 | def __init__(self, init_dict=None): 26 | self.lock = Lock() 27 | self.data = init_dict or {} 28 | 29 | def get(self, key): 30 | return self.data.get(key, None) 31 | 32 | def set(self, key, value): 33 | with self.lock: 34 | self.data.update({key: value}) 35 | 36 | def delete(self, key): 37 | with self.lock: 38 | if key in self.data: 39 | self.data.pop(key) 40 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/__init__.py: -------------------------------------------------------------------------------- 1 | from .file_cache import FileCache # noqa 2 | from .redis_cache import RedisCache # noqa 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/caches/redis_cache.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | 3 | from datetime import datetime 4 | from pip._vendor.cachecontrol.cache import BaseCache 5 | 6 | 7 | class RedisCache(BaseCache): 8 | 9 | def __init__(self, conn): 10 | self.conn = conn 11 | 12 | def get(self, key): 13 | return self.conn.get(key) 14 | 15 | def set(self, key, value, expires=None): 16 | if not expires: 17 | self.conn.set(key, value) 18 | else: 19 | expires = expires - datetime.utcnow() 20 | self.conn.setex(key, int(expires.total_seconds()), value) 21 | 22 | def delete(self, key): 23 | self.conn.delete(key) 24 | 25 | def clear(self): 26 | """Helper for clearing all the keys in a database. Use with 27 | caution!""" 28 | for key in self.conn.keys(): 29 | self.conn.delete(key) 30 | 31 | def close(self): 32 | """Redis uses connection pooling, no need to close the connection.""" 33 | pass 34 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/compat.py: -------------------------------------------------------------------------------- 1 | try: 2 | from urllib.parse import urljoin 3 | except ImportError: 4 | from urlparse import urljoin 5 | 6 | 7 | try: 8 | import cPickle as pickle 9 | except ImportError: 10 | import pickle 11 | 12 | 13 | # Handle the case where the requests module has been patched to not have 14 | # urllib3 bundled as part of its source. 
15 | try: 16 | from pip._vendor.requests.packages.urllib3.response import HTTPResponse 17 | except ImportError: 18 | from pip._vendor.urllib3.response import HTTPResponse 19 | 20 | try: 21 | from pip._vendor.requests.packages.urllib3.util import is_fp_closed 22 | except ImportError: 23 | from pip._vendor.urllib3.util import is_fp_closed 24 | 25 | # Replicate some six behaviour 26 | try: 27 | text_type = unicode 28 | except NameError: 29 | text_type = str 30 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/filewrapper.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | 4 | class CallbackFileWrapper(object): 5 | """ 6 | Small wrapper around a fp object which will tee everything read into a 7 | buffer, and when that file is closed it will execute a callback with the 8 | contents of that buffer. 9 | 10 | All attributes are proxied to the underlying file object. 11 | 12 | This class uses members with a double underscore (__) leading prefix so as 13 | not to accidentally shadow an attribute. 14 | """ 15 | 16 | def __init__(self, fp, callback): 17 | self.__buf = BytesIO() 18 | self.__fp = fp 19 | self.__callback = callback 20 | 21 | def __getattr__(self, name): 22 | # The vagaries of garbage collection mean that self.__fp is 23 | # not always set. Using __getattribute__ and the private 24 | # name [0] allows looking up the attribute value and raising an 25 | # AttributeError when it doesn't exist. This stops things from 26 | # infinitely recursing calls to getattr in the case where 27 | # self.__fp hasn't been set. 28 | # 29 | # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers 30 | fp = self.__getattribute__("_CallbackFileWrapper__fp") 31 | return getattr(fp, name) 32 | 33 | def __is_fp_closed(self): 34 | try: 35 | return self.__fp.fp is None 36 | 37 | except AttributeError: 38 | pass 39 | 40 | try: 41 | return self.__fp.closed 42 | 43 | except AttributeError: 44 | pass 45 | 46 | # We just don't cache it then. 47 | # TODO: Add some logging here... 48 | return False 49 | 50 | def _close(self): 51 | if self.__callback: 52 | self.__callback(self.__buf.getvalue()) 53 | 54 | # We assign this to None here, because otherwise we can get into 55 | # really tricky problems where the CPython interpreter deadlocks 56 | # because the callback is holding a reference to something which 57 | # has a __del__ method. Setting this to None breaks the cycle 58 | # and allows the garbage collector to do its thing normally. 59 | self.__callback = None 60 | 61 | def read(self, amt=None): 62 | data = self.__fp.read(amt) 63 | self.__buf.write(data) 64 | if self.__is_fp_closed(): 65 | self._close() 66 | 67 | return data 68 | 69 | def _safe_read(self, amt): 70 | data = self.__fp._safe_read(amt) 71 | if amt == 2 and data == b"\r\n": 72 | # urllib executes this read to toss the CRLF at the end 73 | # of the chunk.
74 | return data 75 | 76 | self.__buf.write(data) 77 | if self.__is_fp_closed(): 78 | self._close() 79 | 80 | return data 81 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/cachecontrol/wrapper.py: -------------------------------------------------------------------------------- 1 | from .adapter import CacheControlAdapter 2 | from .cache import DictCache 3 | 4 | 5 | def CacheControl( 6 | sess, 7 | cache=None, 8 | cache_etags=True, 9 | serializer=None, 10 | heuristic=None, 11 | controller_class=None, 12 | adapter_class=None, 13 | cacheable_methods=None, 14 | ): 15 | 16 | cache = cache or DictCache() 17 | adapter_class = adapter_class or CacheControlAdapter 18 | adapter = adapter_class( 19 | cache, 20 | cache_etags=cache_etags, 21 | serializer=serializer, 22 | heuristic=heuristic, 23 | controller_class=controller_class, 24 | cacheable_methods=cacheable_methods, 25 | ) 26 | sess.mount("http://", adapter) 27 | sess.mount("https://", adapter) 28 | 29 | return sess 30 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import where 2 | 3 | __version__ = "2018.11.29" 4 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/__main__.py: -------------------------------------------------------------------------------- 1 | from pip._vendor.certifi import where 2 | print(where()) 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/certifi/core.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | certifi.py 6 | ~~~~~~~~~~ 7 | 8 | This module returns the installation location of cacert.pem. 9 | """ 10 | import os 11 | 12 | 13 | def where(): 14 | f = os.path.dirname(__file__) 15 | 16 | return os.path.join(f, 'cacert.pem') 17 | 18 | 19 | if __name__ == '__main__': 20 | print(where()) 21 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/__init__.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # This library is free software; you can redistribute it and/or 3 | # modify it under the terms of the GNU Lesser General Public 4 | # License as published by the Free Software Foundation; either 5 | # version 2.1 of the License, or (at your option) any later version. 6 | # 7 | # This library is distributed in the hope that it will be useful, 8 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 9 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 10 | # Lesser General Public License for more details. 
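The CacheControl() wrapper above does for you what _cmd.py earlier does by hand: it mounts a CacheControlAdapter over http:// and https:// on an existing requests session, defaulting to a DictCache. A minimal sketch, assuming the vendored packages are importable and the URL is reachable (the chardet listing resumes below):

# Illustrative sketch, not part of the dumped pip source.
from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControl

sess = CacheControl(requests.Session())   # DictCache() is used when cache=None

# Cacheable responses for repeated GETs can now be served from memory.
resp = sess.get("https://pypi.org/simple/")
print(resp.status_code)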
11 | # 12 | # You should have received a copy of the GNU Lesser General Public 13 | # License along with this library; if not, write to the Free Software 14 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 15 | # 02110-1301 USA 16 | ######################### END LICENSE BLOCK ######################### 17 | 18 | 19 | from .compat import PY2, PY3 20 | from .universaldetector import UniversalDetector 21 | from .version import __version__, VERSION 22 | 23 | 24 | def detect(byte_str): 25 | """ 26 | Detect the encoding of the given byte string. 27 | 28 | :param byte_str: The byte sequence to examine. 29 | :type byte_str: ``bytes`` or ``bytearray`` 30 | """ 31 | if not isinstance(byte_str, bytearray): 32 | if not isinstance(byte_str, bytes): 33 | raise TypeError('Expected object of type bytes or bytearray, got: ' 34 | '{0}'.format(type(byte_str))) 35 | else: 36 | byte_str = bytearray(byte_str) 37 | detector = UniversalDetector() 38 | detector.feed(byte_str) 39 | return detector.close() 40 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/big5prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Communicator client code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
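chardet's detect() above is a one-shot wrapper around UniversalDetector: coerce the input to a bytearray, feed it, close, and return the result dict. A minimal sketch, assuming the vendored chardet copy is importable (the Big5Prober listing resumes below):

# Illustrative sketch, not part of the dumped pip source.
from pip._vendor.chardet import detect

result = detect("déjà vu".encode("utf-8"))
# e.g. {'encoding': 'utf-8', 'confidence': 0.75..., 'language': ''};
# the exact values depend on the sample fed in.
print(result["encoding"], result["confidence"])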
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import Big5DistributionAnalysis 31 | from .mbcssm import BIG5_SM_MODEL 32 | 33 | 34 | class Big5Prober(MultiByteCharSetProber): 35 | def __init__(self): 36 | super(Big5Prober, self).__init__() 37 | self.coding_sm = CodingStateMachine(BIG5_SM_MODEL) 38 | self.distribution_analyzer = Big5DistributionAnalysis() 39 | self.reset() 40 | 41 | @property 42 | def charset_name(self): 43 | return "Big5" 44 | 45 | @property 46 | def language(self): 47 | return "Chinese" 48 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cli/chardetect.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """ 3 | Script which takes one or more file paths and reports on their detected 4 | encodings 5 | 6 | Example:: 7 | 8 | % chardetect somefile someotherfile 9 | somefile: windows-1252 with confidence 0.5 10 | someotherfile: ascii with confidence 1.0 11 | 12 | If no paths are provided, it takes its input from stdin. 13 | 14 | """ 15 | 16 | from __future__ import absolute_import, print_function, unicode_literals 17 | 18 | import argparse 19 | import sys 20 | 21 | from pip._vendor.chardet import __version__ 22 | from pip._vendor.chardet.compat import PY2 23 | from pip._vendor.chardet.universaldetector import UniversalDetector 24 | 25 | 26 | def description_of(lines, name='stdin'): 27 | """ 28 | Return a string describing the probable encoding of a file or 29 | list of strings. 30 | 31 | :param lines: The lines to get the encoding of. 32 | :type lines: Iterable of bytes 33 | :param name: Name of file or collection of lines 34 | :type name: str 35 | """ 36 | u = UniversalDetector() 37 | for line in lines: 38 | line = bytearray(line) 39 | u.feed(line) 40 | # shortcut out of the loop to save reading further - particularly useful if we read a BOM. 41 | if u.done: 42 | break 43 | u.close() 44 | result = u.result 45 | if PY2: 46 | name = name.decode(sys.getfilesystemencoding(), 'ignore') 47 | if result['encoding']: 48 | return '{0}: {1} with confidence {2}'.format(name, result['encoding'], 49 | result['confidence']) 50 | else: 51 | return '{0}: no result'.format(name) 52 | 53 | 54 | def main(argv=None): 55 | """ 56 | Handles command line arguments and gets things started. 57 | 58 | :param argv: List of arguments, as if specified on the command-line. 59 | If None, ``sys.argv[1:]`` is used instead. 60 | :type argv: list of str 61 | """ 62 | # Get command line arguments 63 | parser = argparse.ArgumentParser( 64 | description="Takes one or more file paths and reports their detected \ 65 | encodings") 66 | parser.add_argument('input', 67 | help='File whose encoding we would like to determine. 
\ 68 | (default: stdin)', 69 | type=argparse.FileType('rb'), nargs='*', 70 | default=[sys.stdin if PY2 else sys.stdin.buffer]) 71 | parser.add_argument('--version', action='version', 72 | version='%(prog)s {0}'.format(__version__)) 73 | args = parser.parse_args(argv) 74 | 75 | for f in args.input: 76 | if f.isatty(): 77 | print("You are running chardetect interactively. Press " + 78 | "CTRL-D twice at the start of a blank line to signal the " + 79 | "end of your input. If you want help, run chardetect " + 80 | "--help\n", file=sys.stderr) 81 | print(description_of(f, f.name)) 82 | 83 | 84 | if __name__ == '__main__': 85 | main() 86 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/compat.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # Contributor(s): 3 | # Dan Blanchard 4 | # Ian Cordasco 5 | # 6 | # This library is free software; you can redistribute it and/or 7 | # modify it under the terms of the GNU Lesser General Public 8 | # License as published by the Free Software Foundation; either 9 | # version 2.1 of the License, or (at your option) any later version. 10 | # 11 | # This library is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | # Lesser General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU Lesser General Public 17 | # License along with this library; if not, write to the Free Software 18 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 19 | # 02110-1301 USA 20 | ######################### END LICENSE BLOCK ######################### 21 | 22 | import sys 23 | 24 | 25 | if sys.version_info < (3, 0): 26 | PY2 = True 27 | PY3 = False 28 | base_str = (str, unicode) 29 | text_type = unicode 30 | else: 31 | PY2 = False 32 | PY3 = True 33 | base_str = (bytes, str) 34 | text_type = str 35 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/cp949prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .chardistribution import EUCKRDistributionAnalysis 29 | from .codingstatemachine import CodingStateMachine 30 | from .mbcharsetprober import MultiByteCharSetProber 31 | from .mbcssm import CP949_SM_MODEL 32 | 33 | 34 | class CP949Prober(MultiByteCharSetProber): 35 | def __init__(self): 36 | super(CP949Prober, self).__init__() 37 | self.coding_sm = CodingStateMachine(CP949_SM_MODEL) 38 | # NOTE: CP949 is a superset of EUC-KR, so the distribution should be 39 | # not different. 40 | self.distribution_analyzer = EUCKRDistributionAnalysis() 41 | self.reset() 42 | 43 | @property 44 | def charset_name(self): 45 | return "CP949" 46 | 47 | @property 48 | def language(self): 49 | return "Korean" 50 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/enums.py: -------------------------------------------------------------------------------- 1 | """ 2 | All of the Enums that are used throughout the chardet package. 3 | 4 | :author: Dan Blanchard (dan.blanchard@gmail.com) 5 | """ 6 | 7 | 8 | class InputState(object): 9 | """ 10 | This enum represents the different states a universal detector can be in. 11 | """ 12 | PURE_ASCII = 0 13 | ESC_ASCII = 1 14 | HIGH_BYTE = 2 15 | 16 | 17 | class LanguageFilter(object): 18 | """ 19 | This enum represents the different language filters we can apply to a 20 | ``UniversalDetector``. 21 | """ 22 | CHINESE_SIMPLIFIED = 0x01 23 | CHINESE_TRADITIONAL = 0x02 24 | JAPANESE = 0x04 25 | KOREAN = 0x08 26 | NON_CJK = 0x10 27 | ALL = 0x1F 28 | CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL 29 | CJK = CHINESE | JAPANESE | KOREAN 30 | 31 | 32 | class ProbingState(object): 33 | """ 34 | This enum represents the different states a prober can be in. 35 | """ 36 | DETECTING = 0 37 | FOUND_IT = 1 38 | NOT_ME = 2 39 | 40 | 41 | class MachineState(object): 42 | """ 43 | This enum represents the different states a state machine can be in. 44 | """ 45 | START = 0 46 | ERROR = 1 47 | ITS_ME = 2 48 | 49 | 50 | class SequenceLikelihood(object): 51 | """ 52 | This enum represents the likelihood of a character following the previous one. 53 | """ 54 | NEGATIVE = 0 55 | UNLIKELY = 1 56 | LIKELY = 2 57 | POSITIVE = 3 58 | 59 | @classmethod 60 | def get_num_categories(cls): 61 | """:returns: The number of likelihood categories in the enum.""" 62 | return 4 63 | 64 | 65 | class CharacterCategory(object): 66 | """ 67 | This enum represents the different categories language models for 68 | ``SingleByteCharsetProber`` put characters into. 69 | 70 | Anything less than CONTROL is considered a letter. 71 | """ 72 | UNDEFINED = 255 73 | LINE_BREAK = 254 74 | SYMBOL = 253 75 | DIGIT = 252 76 | CONTROL = 251 77 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euckrprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 
6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import EUCKRDistributionAnalysis 31 | from .mbcssm import EUCKR_SM_MODEL 32 | 33 | 34 | class EUCKRProber(MultiByteCharSetProber): 35 | def __init__(self): 36 | super(EUCKRProber, self).__init__() 37 | self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL) 38 | self.distribution_analyzer = EUCKRDistributionAnalysis() 39 | self.reset() 40 | 41 | @property 42 | def charset_name(self): 43 | return "EUC-KR" 44 | 45 | @property 46 | def language(self): 47 | return "Korean" 48 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/euctwprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import EUCTWDistributionAnalysis 31 | from .mbcssm import EUCTW_SM_MODEL 32 | 33 | class EUCTWProber(MultiByteCharSetProber): 34 | def __init__(self): 35 | super(EUCTWProber, self).__init__() 36 | self.coding_sm = CodingStateMachine(EUCTW_SM_MODEL) 37 | self.distribution_analyzer = EUCTWDistributionAnalysis() 38 | self.reset() 39 | 40 | @property 41 | def charset_name(self): 42 | return "EUC-TW" 43 | 44 | @property 45 | def language(self): 46 | return "Taiwan" 47 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/gb2312prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .mbcharsetprober import MultiByteCharSetProber 29 | from .codingstatemachine import CodingStateMachine 30 | from .chardistribution import GB2312DistributionAnalysis 31 | from .mbcssm import GB2312_SM_MODEL 32 | 33 | class GB2312Prober(MultiByteCharSetProber): 34 | def __init__(self): 35 | super(GB2312Prober, self).__init__() 36 | self.coding_sm = CodingStateMachine(GB2312_SM_MODEL) 37 | self.distribution_analyzer = GB2312DistributionAnalysis() 38 | self.reset() 39 | 40 | @property 41 | def charset_name(self): 42 | return "GB2312" 43 | 44 | @property 45 | def language(self): 46 | return "Chinese" 47 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/mbcsgroupprober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is Mozilla Universal charset detector code. 
3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 2001 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # Shy Shalom - original C code 12 | # Proofpoint, Inc. 13 | # 14 | # This library is free software; you can redistribute it and/or 15 | # modify it under the terms of the GNU Lesser General Public 16 | # License as published by the Free Software Foundation; either 17 | # version 2.1 of the License, or (at your option) any later version. 18 | # 19 | # This library is distributed in the hope that it will be useful, 20 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 21 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 22 | # Lesser General Public License for more details. 23 | # 24 | # You should have received a copy of the GNU Lesser General Public 25 | # License along with this library; if not, write to the Free Software 26 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 27 | # 02110-1301 USA 28 | ######################### END LICENSE BLOCK ######################### 29 | 30 | from .charsetgroupprober import CharSetGroupProber 31 | from .utf8prober import UTF8Prober 32 | from .sjisprober import SJISProber 33 | from .eucjpprober import EUCJPProber 34 | from .gb2312prober import GB2312Prober 35 | from .euckrprober import EUCKRProber 36 | from .cp949prober import CP949Prober 37 | from .big5prober import Big5Prober 38 | from .euctwprober import EUCTWProber 39 | 40 | 41 | class MBCSGroupProber(CharSetGroupProber): 42 | def __init__(self, lang_filter=None): 43 | super(MBCSGroupProber, self).__init__(lang_filter=lang_filter) 44 | self.probers = [ 45 | UTF8Prober(), 46 | SJISProber(), 47 | EUCJPProber(), 48 | GB2312Prober(), 49 | EUCKRProber(), 50 | CP949Prober(), 51 | Big5Prober(), 52 | EUCTWProber() 53 | ] 54 | self.reset() 55 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/utf8prober.py: -------------------------------------------------------------------------------- 1 | ######################## BEGIN LICENSE BLOCK ######################## 2 | # The Original Code is mozilla.org code. 3 | # 4 | # The Initial Developer of the Original Code is 5 | # Netscape Communications Corporation. 6 | # Portions created by the Initial Developer are Copyright (C) 1998 7 | # the Initial Developer. All Rights Reserved. 8 | # 9 | # Contributor(s): 10 | # Mark Pilgrim - port to Python 11 | # 12 | # This library is free software; you can redistribute it and/or 13 | # modify it under the terms of the GNU Lesser General Public 14 | # License as published by the Free Software Foundation; either 15 | # version 2.1 of the License, or (at your option) any later version. 16 | # 17 | # This library is distributed in the hope that it will be useful, 18 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 19 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 20 | # Lesser General Public License for more details. 
21 | # 22 | # You should have received a copy of the GNU Lesser General Public 23 | # License along with this library; if not, write to the Free Software 24 | # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 25 | # 02110-1301 USA 26 | ######################### END LICENSE BLOCK ######################### 27 | 28 | from .charsetprober import CharSetProber 29 | from .enums import ProbingState, MachineState 30 | from .codingstatemachine import CodingStateMachine 31 | from .mbcssm import UTF8_SM_MODEL 32 | 33 | 34 | 35 | class UTF8Prober(CharSetProber): 36 | ONE_CHAR_PROB = 0.5 37 | 38 | def __init__(self): 39 | super(UTF8Prober, self).__init__() 40 | self.coding_sm = CodingStateMachine(UTF8_SM_MODEL) 41 | self._num_mb_chars = None 42 | self.reset() 43 | 44 | def reset(self): 45 | super(UTF8Prober, self).reset() 46 | self.coding_sm.reset() 47 | self._num_mb_chars = 0 48 | 49 | @property 50 | def charset_name(self): 51 | return "utf-8" 52 | 53 | @property 54 | def language(self): 55 | return "" 56 | 57 | def feed(self, byte_str): 58 | for c in byte_str: 59 | coding_state = self.coding_sm.next_state(c) 60 | if coding_state == MachineState.ERROR: 61 | self._state = ProbingState.NOT_ME 62 | break 63 | elif coding_state == MachineState.ITS_ME: 64 | self._state = ProbingState.FOUND_IT 65 | break 66 | elif coding_state == MachineState.START: 67 | if self.coding_sm.get_current_charlen() >= 2: 68 | self._num_mb_chars += 1 69 | 70 | if self.state == ProbingState.DETECTING: 71 | if self.get_confidence() > self.SHORTCUT_THRESHOLD: 72 | self._state = ProbingState.FOUND_IT 73 | 74 | return self.state 75 | 76 | def get_confidence(self): 77 | unlike = 0.99 78 | if self._num_mb_chars < 6: 79 | unlike *= self.ONE_CHAR_PROB ** self._num_mb_chars 80 | return 1.0 - unlike 81 | else: 82 | return unlike 83 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/chardet/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module exists only to simplify retrieving the version number of chardet 3 | from within setup.py and from chardet subpackages. 4 | 5 | :author: Dan Blanchard (dan.blanchard@gmail.com) 6 | """ 7 | 8 | __version__ = "3.0.4" 9 | VERSION = __version__.split('.') 10 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 2 | from .initialise import init, deinit, reinit, colorama_text 3 | from .ansi import Fore, Back, Style, Cursor 4 | from .ansitowin32 import AnsiToWin32 5 | 6 | __version__ = '0.4.1' 7 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/ansi.py: -------------------------------------------------------------------------------- 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 2 | ''' 3 | This module generates ANSI character codes to printing colors to terminals. 
4 | See: http://en.wikipedia.org/wiki/ANSI_escape_code 5 | ''' 6 | 7 | CSI = '\033[' 8 | OSC = '\033]' 9 | BEL = '\007' 10 | 11 | 12 | def code_to_chars(code): 13 | return CSI + str(code) + 'm' 14 | 15 | def set_title(title): 16 | return OSC + '2;' + title + BEL 17 | 18 | def clear_screen(mode=2): 19 | return CSI + str(mode) + 'J' 20 | 21 | def clear_line(mode=2): 22 | return CSI + str(mode) + 'K' 23 | 24 | 25 | class AnsiCodes(object): 26 | def __init__(self): 27 | # the subclasses declare class attributes which are numbers. 28 | # Upon instantiation we define instance attributes, which are the same 29 | # as the class attributes but wrapped with the ANSI escape sequence 30 | for name in dir(self): 31 | if not name.startswith('_'): 32 | value = getattr(self, name) 33 | setattr(self, name, code_to_chars(value)) 34 | 35 | 36 | class AnsiCursor(object): 37 | def UP(self, n=1): 38 | return CSI + str(n) + 'A' 39 | def DOWN(self, n=1): 40 | return CSI + str(n) + 'B' 41 | def FORWARD(self, n=1): 42 | return CSI + str(n) + 'C' 43 | def BACK(self, n=1): 44 | return CSI + str(n) + 'D' 45 | def POS(self, x=1, y=1): 46 | return CSI + str(y) + ';' + str(x) + 'H' 47 | 48 | 49 | class AnsiFore(AnsiCodes): 50 | BLACK = 30 51 | RED = 31 52 | GREEN = 32 53 | YELLOW = 33 54 | BLUE = 34 55 | MAGENTA = 35 56 | CYAN = 36 57 | WHITE = 37 58 | RESET = 39 59 | 60 | # These are fairly well supported, but not part of the standard. 61 | LIGHTBLACK_EX = 90 62 | LIGHTRED_EX = 91 63 | LIGHTGREEN_EX = 92 64 | LIGHTYELLOW_EX = 93 65 | LIGHTBLUE_EX = 94 66 | LIGHTMAGENTA_EX = 95 67 | LIGHTCYAN_EX = 96 68 | LIGHTWHITE_EX = 97 69 | 70 | 71 | class AnsiBack(AnsiCodes): 72 | BLACK = 40 73 | RED = 41 74 | GREEN = 42 75 | YELLOW = 43 76 | BLUE = 44 77 | MAGENTA = 45 78 | CYAN = 46 79 | WHITE = 47 80 | RESET = 49 81 | 82 | # These are fairly well supported, but not part of the standard. 83 | LIGHTBLACK_EX = 100 84 | LIGHTRED_EX = 101 85 | LIGHTGREEN_EX = 102 86 | LIGHTYELLOW_EX = 103 87 | LIGHTBLUE_EX = 104 88 | LIGHTMAGENTA_EX = 105 89 | LIGHTCYAN_EX = 106 90 | LIGHTWHITE_EX = 107 91 | 92 | 93 | class AnsiStyle(AnsiCodes): 94 | BRIGHT = 1 95 | DIM = 2 96 | NORMAL = 22 97 | RESET_ALL = 0 98 | 99 | Fore = AnsiFore() 100 | Back = AnsiBack() 101 | Style = AnsiStyle() 102 | Cursor = AnsiCursor() 103 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/colorama/initialise.py: -------------------------------------------------------------------------------- 1 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
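The AnsiFore/AnsiBack/AnsiStyle tables above are exposed as the Fore, Back and Style singletons, and init() from the initialise module that follows wraps stdout/stderr so the escape codes also render on Windows consoles. A minimal usage sketch, assuming the vendored colorama copy is importable:

# Illustrative sketch, not part of the dumped colorama source.
from pip._vendor.colorama import init, deinit, Fore, Back, Style

init()   # wrap sys.stdout / sys.stderr (converts ANSI codes on Windows)
print(Fore.GREEN + "ok" + Style.RESET_ALL)
print(Back.RED + Fore.WHITE + "error" + Style.RESET_ALL)
deinit() # restore the original streams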
2 | import atexit 3 | import contextlib 4 | import sys 5 | 6 | from .ansitowin32 import AnsiToWin32 7 | 8 | 9 | orig_stdout = None 10 | orig_stderr = None 11 | 12 | wrapped_stdout = None 13 | wrapped_stderr = None 14 | 15 | atexit_done = False 16 | 17 | 18 | def reset_all(): 19 | if AnsiToWin32 is not None: # Issue #74: objects might become None at exit 20 | AnsiToWin32(orig_stdout).reset_all() 21 | 22 | 23 | def init(autoreset=False, convert=None, strip=None, wrap=True): 24 | 25 | if not wrap and any([autoreset, convert, strip]): 26 | raise ValueError('wrap=False conflicts with any other arg=True') 27 | 28 | global wrapped_stdout, wrapped_stderr 29 | global orig_stdout, orig_stderr 30 | 31 | orig_stdout = sys.stdout 32 | orig_stderr = sys.stderr 33 | 34 | if sys.stdout is None: 35 | wrapped_stdout = None 36 | else: 37 | sys.stdout = wrapped_stdout = \ 38 | wrap_stream(orig_stdout, convert, strip, autoreset, wrap) 39 | if sys.stderr is None: 40 | wrapped_stderr = None 41 | else: 42 | sys.stderr = wrapped_stderr = \ 43 | wrap_stream(orig_stderr, convert, strip, autoreset, wrap) 44 | 45 | global atexit_done 46 | if not atexit_done: 47 | atexit.register(reset_all) 48 | atexit_done = True 49 | 50 | 51 | def deinit(): 52 | if orig_stdout is not None: 53 | sys.stdout = orig_stdout 54 | if orig_stderr is not None: 55 | sys.stderr = orig_stderr 56 | 57 | 58 | @contextlib.contextmanager 59 | def colorama_text(*args, **kwargs): 60 | init(*args, **kwargs) 61 | try: 62 | yield 63 | finally: 64 | deinit() 65 | 66 | 67 | def reinit(): 68 | if wrapped_stdout is not None: 69 | sys.stdout = wrapped_stdout 70 | if wrapped_stderr is not None: 71 | sys.stderr = wrapped_stderr 72 | 73 | 74 | def wrap_stream(stream, convert, strip, autoreset, wrap): 75 | if wrap: 76 | wrapper = AnsiToWin32(stream, 77 | convert=convert, strip=strip, autoreset=autoreset) 78 | if wrapper.should_wrap(): 79 | stream = wrapper.stream 80 | return stream 81 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2012-2017 Vinay Sajip. 4 | # Licensed to the Python Software Foundation under a contributor agreement. 5 | # See LICENSE.txt and CONTRIBUTORS.txt. 6 | # 7 | import logging 8 | 9 | __version__ = '0.2.8' 10 | 11 | class DistlibException(Exception): 12 | pass 13 | 14 | try: 15 | from logging import NullHandler 16 | except ImportError: # pragma: no cover 17 | class NullHandler(logging.Handler): 18 | def handle(self, record): pass 19 | def emit(self, record): pass 20 | def createLock(self): self.lock = None 21 | 22 | logger = logging.getLogger(__name__) 23 | logger.addHandler(NullHandler()) 24 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/__init__.py: -------------------------------------------------------------------------------- 1 | """Modules copied from Python 3 standard libraries, for internal use only. 2 | 3 | Individual classes and functions are found in d2._backport.misc. Intended 4 | usage is to always import things missing from 3.1 from that module: the 5 | built-in/stdlib objects will be used if found. 
6 | """ 7 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/misc.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Copyright (C) 2012 The Python Software Foundation. 4 | # See LICENSE.txt and CONTRIBUTORS.txt. 5 | # 6 | """Backports for individual classes and functions.""" 7 | 8 | import os 9 | import sys 10 | 11 | __all__ = ['cache_from_source', 'callable', 'fsencode'] 12 | 13 | 14 | try: 15 | from imp import cache_from_source 16 | except ImportError: 17 | def cache_from_source(py_file, debug=__debug__): 18 | ext = debug and 'c' or 'o' 19 | return py_file + ext 20 | 21 | 22 | try: 23 | callable = callable 24 | except NameError: 25 | from collections import Callable 26 | 27 | def callable(obj): 28 | return isinstance(obj, Callable) 29 | 30 | 31 | try: 32 | fsencode = os.fsencode 33 | except AttributeError: 34 | def fsencode(filename): 35 | if isinstance(filename, bytes): 36 | return filename 37 | elif isinstance(filename, str): 38 | return filename.encode(sys.getfilesystemencoding()) 39 | else: 40 | raise TypeError("expect bytes or str, not %s" % 41 | type(filename).__name__) 42 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/_backport/sysconfig.cfg: -------------------------------------------------------------------------------- 1 | [posix_prefix] 2 | # Configuration directories. Some of these come straight out of the 3 | # configure script. They are for implementing the other variables, not to 4 | # be used directly in [resource_locations]. 5 | confdir = /etc 6 | datadir = /usr/share 7 | libdir = /usr/lib 8 | statedir = /var 9 | # User resource directory 10 | local = ~/.local/{distribution.name} 11 | 12 | stdlib = {base}/lib/python{py_version_short} 13 | platstdlib = {platbase}/lib/python{py_version_short} 14 | purelib = {base}/lib/python{py_version_short}/site-packages 15 | platlib = {platbase}/lib/python{py_version_short}/site-packages 16 | include = {base}/include/python{py_version_short}{abiflags} 17 | platinclude = {platbase}/include/python{py_version_short}{abiflags} 18 | data = {base} 19 | 20 | [posix_home] 21 | stdlib = {base}/lib/python 22 | platstdlib = {base}/lib/python 23 | purelib = {base}/lib/python 24 | platlib = {base}/lib/python 25 | include = {base}/include/python 26 | platinclude = {base}/include/python 27 | scripts = {base}/bin 28 | data = {base} 29 | 30 | [nt] 31 | stdlib = {base}/Lib 32 | platstdlib = {base}/Lib 33 | purelib = {base}/Lib/site-packages 34 | platlib = {base}/Lib/site-packages 35 | include = {base}/Include 36 | platinclude = {base}/Include 37 | scripts = {base}/Scripts 38 | data = {base} 39 | 40 | [os2] 41 | stdlib = {base}/Lib 42 | platstdlib = {base}/Lib 43 | purelib = {base}/Lib/site-packages 44 | platlib = {base}/Lib/site-packages 45 | include = {base}/Include 46 | platinclude = {base}/Include 47 | scripts = {base}/Scripts 48 | data = {base} 49 | 50 | [os2_home] 51 | stdlib = {userbase}/lib/python{py_version_short} 52 | platstdlib = {userbase}/lib/python{py_version_short} 53 | purelib = {userbase}/lib/python{py_version_short}/site-packages 54 | platlib = {userbase}/lib/python{py_version_short}/site-packages 55 | include = {userbase}/include/python{py_version_short} 56 | scripts = {userbase}/bin 57 | data = {userbase} 58 | 59 | [nt_user] 60 | stdlib = 
{userbase}/Python{py_version_nodot} 61 | platstdlib = {userbase}/Python{py_version_nodot} 62 | purelib = {userbase}/Python{py_version_nodot}/site-packages 63 | platlib = {userbase}/Python{py_version_nodot}/site-packages 64 | include = {userbase}/Python{py_version_nodot}/Include 65 | scripts = {userbase}/Scripts 66 | data = {userbase} 67 | 68 | [posix_user] 69 | stdlib = {userbase}/lib/python{py_version_short} 70 | platstdlib = {userbase}/lib/python{py_version_short} 71 | purelib = {userbase}/lib/python{py_version_short}/site-packages 72 | platlib = {userbase}/lib/python{py_version_short}/site-packages 73 | include = {userbase}/include/python{py_version_short} 74 | scripts = {userbase}/bin 75 | data = {userbase} 76 | 77 | [osx_framework_user] 78 | stdlib = {userbase}/lib/python 79 | platstdlib = {userbase}/lib/python 80 | purelib = {userbase}/lib/python/site-packages 81 | platlib = {userbase}/lib/python/site-packages 82 | include = {userbase}/include 83 | scripts = {userbase}/bin 84 | data = {userbase} 85 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t32.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t32.exe -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/t64.exe -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w32.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w32.exe -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/distlib/w64.exe -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | HTML parsing library based on the `WHATWG HTML specification 3 | `_. The parser is designed to be compatible with 4 | existing HTML found in the wild and implements well-defined error recovery that 5 | is largely compatible with modern desktop web browsers. 
6 | 7 | Example usage:: 8 | 9 | from pip._vendor import html5lib 10 | with open("my_document.html", "rb") as f: 11 | tree = html5lib.parse(f) 12 | 13 | For convenience, this module re-exports the following names: 14 | 15 | * :func:`~.html5parser.parse` 16 | * :func:`~.html5parser.parseFragment` 17 | * :class:`~.html5parser.HTMLParser` 18 | * :func:`~.treebuilders.getTreeBuilder` 19 | * :func:`~.treewalkers.getTreeWalker` 20 | * :func:`~.serializer.serialize` 21 | """ 22 | 23 | from __future__ import absolute_import, division, unicode_literals 24 | 25 | from .html5parser import HTMLParser, parse, parseFragment 26 | from .treebuilders import getTreeBuilder 27 | from .treewalkers import getTreeWalker 28 | from .serializer import serialize 29 | 30 | __all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", 31 | "getTreeWalker", "serialize"] 32 | 33 | # this has to be at the top level, see how setup.py parses this 34 | #: Distribution version number. 35 | __version__ = "1.0.1" 36 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from .py import Trie as PyTrie 4 | 5 | Trie = PyTrie 6 | 7 | # pylint:disable=wrong-import-position 8 | try: 9 | from .datrie import Trie as DATrie 10 | except ImportError: 11 | pass 12 | else: 13 | Trie = DATrie 14 | # pylint:enable=wrong-import-position 15 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/_base.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from collections import Mapping 4 | 5 | 6 | class Trie(Mapping): 7 | """Abstract base class for tries""" 8 | 9 | def keys(self, prefix=None): 10 | # pylint:disable=arguments-differ 11 | keys = super(Trie, self).keys() 12 | 13 | if prefix is None: 14 | return set(keys) 15 | 16 | return {x for x in keys if x.startswith(prefix)} 17 | 18 | def has_keys_with_prefix(self, prefix): 19 | for key in self.keys(): 20 | if key.startswith(prefix): 21 | return True 22 | 23 | return False 24 | 25 | def longest_prefix(self, prefix): 26 | if prefix in self: 27 | return prefix 28 | 29 | for i in range(1, len(prefix) + 1): 30 | if prefix[:-i] in self: 31 | return prefix[:-i] 32 | 33 | raise KeyError(prefix) 34 | 35 | def longest_prefix_item(self, prefix): 36 | lprefix = self.longest_prefix(prefix) 37 | return (lprefix, self[lprefix]) 38 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/datrie.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from datrie import Trie as DATrie 4 | from pip._vendor.six import text_type 5 | 6 | from ._base import Trie as ABCTrie 7 | 8 | 9 | class Trie(ABCTrie): 10 | def __init__(self, data): 11 | chars = set() 12 | for key in data.keys(): 13 | if not isinstance(key, text_type): 14 | raise TypeError("All keys must be strings") 15 | for char in key: 16 | chars.add(char) 17 | 18 | self._data = DATrie("".join(chars)) 19 | for key, value in data.items(): 20 | self._data[key] 
= value 21 | 22 | def __contains__(self, key): 23 | return key in self._data 24 | 25 | def __len__(self): 26 | return len(self._data) 27 | 28 | def __iter__(self): 29 | raise NotImplementedError() 30 | 31 | def __getitem__(self, key): 32 | return self._data[key] 33 | 34 | def keys(self, prefix=None): 35 | return self._data.keys(prefix) 36 | 37 | def has_keys_with_prefix(self, prefix): 38 | return self._data.has_keys_with_prefix(prefix) 39 | 40 | def longest_prefix(self, prefix): 41 | return self._data.longest_prefix(prefix) 42 | 43 | def longest_prefix_item(self, prefix): 44 | return self._data.longest_prefix_item(prefix) 45 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/_trie/py.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | from pip._vendor.six import text_type 3 | 4 | from bisect import bisect_left 5 | 6 | from ._base import Trie as ABCTrie 7 | 8 | 9 | class Trie(ABCTrie): 10 | def __init__(self, data): 11 | if not all(isinstance(x, text_type) for x in data.keys()): 12 | raise TypeError("All keys must be strings") 13 | 14 | self._data = data 15 | self._keys = sorted(data.keys()) 16 | self._cachestr = "" 17 | self._cachepoints = (0, len(data)) 18 | 19 | def __contains__(self, key): 20 | return key in self._data 21 | 22 | def __len__(self): 23 | return len(self._data) 24 | 25 | def __iter__(self): 26 | return iter(self._data) 27 | 28 | def __getitem__(self, key): 29 | return self._data[key] 30 | 31 | def keys(self, prefix=None): 32 | if prefix is None or prefix == "" or not self._keys: 33 | return set(self._keys) 34 | 35 | if prefix.startswith(self._cachestr): 36 | lo, hi = self._cachepoints 37 | start = i = bisect_left(self._keys, prefix, lo, hi) 38 | else: 39 | start = i = bisect_left(self._keys, prefix) 40 | 41 | keys = set() 42 | if start == len(self._keys): 43 | return keys 44 | 45 | while self._keys[i].startswith(prefix): 46 | keys.add(self._keys[i]) 47 | i += 1 48 | 49 | self._cachestr = prefix 50 | self._cachepoints = (start, i) 51 | 52 | return keys 53 | 54 | def has_keys_with_prefix(self, prefix): 55 | if prefix in self._data: 56 | return True 57 | 58 | if prefix.startswith(self._cachestr): 59 | lo, hi = self._cachepoints 60 | i = bisect_left(self._keys, prefix, lo, hi) 61 | else: 62 | i = bisect_left(self._keys, prefix) 63 | 64 | if i == len(self._keys): 65 | return False 66 | 67 | return self._keys[i].startswith(prefix) 68 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/alphabeticalattributes.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from . 
import base 4 | 5 | from collections import OrderedDict 6 | 7 | 8 | def _attr_key(attr): 9 | """Return an appropriate key for an attribute for sorting 10 | 11 | Attributes have a namespace that can be either ``None`` or a string. We 12 | can't compare the two because they're different types, so we convert 13 | ``None`` to an empty string first. 14 | 15 | """ 16 | return (attr[0][0] or ''), attr[0][1] 17 | 18 | 19 | class Filter(base.Filter): 20 | """Alphabetizes attributes for elements""" 21 | def __iter__(self): 22 | for token in base.Filter.__iter__(self): 23 | if token["type"] in ("StartTag", "EmptyTag"): 24 | attrs = OrderedDict() 25 | for name, value in sorted(token["data"].items(), 26 | key=_attr_key): 27 | attrs[name] = value 28 | token["data"] = attrs 29 | yield token 30 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/base.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | 4 | class Filter(object): 5 | def __init__(self, source): 6 | self.source = source 7 | 8 | def __iter__(self): 9 | return iter(self.source) 10 | 11 | def __getattr__(self, name): 12 | return getattr(self.source, name) 13 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/inject_meta_charset.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from . import base 4 | 5 | 6 | class Filter(base.Filter): 7 | """Injects ```` tag into head of document""" 8 | def __init__(self, source, encoding): 9 | """Creates a Filter 10 | 11 | :arg source: the source token stream 12 | 13 | :arg encoding: the encoding to set 14 | 15 | """ 16 | base.Filter.__init__(self, source) 17 | self.encoding = encoding 18 | 19 | def __iter__(self): 20 | state = "pre_head" 21 | meta_found = (self.encoding is None) 22 | pending = [] 23 | 24 | for token in base.Filter.__iter__(self): 25 | type = token["type"] 26 | if type == "StartTag": 27 | if token["name"].lower() == "head": 28 | state = "in_head" 29 | 30 | elif type == "EmptyTag": 31 | if token["name"].lower() == "meta": 32 | # replace charset with actual encoding 33 | has_http_equiv_content_type = False 34 | for (namespace, name), value in token["data"].items(): 35 | if namespace is not None: 36 | continue 37 | elif name.lower() == 'charset': 38 | token["data"][(namespace, name)] = self.encoding 39 | meta_found = True 40 | break 41 | elif name == 'http-equiv' and value.lower() == 'content-type': 42 | has_http_equiv_content_type = True 43 | else: 44 | if has_http_equiv_content_type and (None, "content") in token["data"]: 45 | token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding 46 | meta_found = True 47 | 48 | elif token["name"].lower() == "head" and not meta_found: 49 | # insert meta into empty head 50 | yield {"type": "StartTag", "name": "head", 51 | "data": token["data"]} 52 | yield {"type": "EmptyTag", "name": "meta", 53 | "data": {(None, "charset"): self.encoding}} 54 | yield {"type": "EndTag", "name": "head"} 55 | meta_found = True 56 | continue 57 | 58 | elif type == "EndTag": 59 | if token["name"].lower() == "head" and pending: 60 | # insert meta into head (if necessary) and flush pending queue 61 | yield pending.pop(0) 62 | 
if not meta_found: 63 | yield {"type": "EmptyTag", "name": "meta", 64 | "data": {(None, "charset"): self.encoding}} 65 | while pending: 66 | yield pending.pop(0) 67 | meta_found = True 68 | state = "post_head" 69 | 70 | if state == "in_head": 71 | pending.append(token) 72 | else: 73 | yield token 74 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/filters/whitespace.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | import re 4 | 5 | from . import base 6 | from ..constants import rcdataElements, spaceCharacters 7 | spaceCharacters = "".join(spaceCharacters) 8 | 9 | SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) 10 | 11 | 12 | class Filter(base.Filter): 13 | """Collapses whitespace except in pre, textarea, and script elements""" 14 | spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) 15 | 16 | def __iter__(self): 17 | preserve = 0 18 | for token in base.Filter.__iter__(self): 19 | type = token["type"] 20 | if type == "StartTag" \ 21 | and (preserve or token["name"] in self.spacePreserveElements): 22 | preserve += 1 23 | 24 | elif type == "EndTag" and preserve: 25 | preserve -= 1 26 | 27 | elif not preserve and type == "SpaceCharacters" and token["data"]: 28 | # Test on token["data"] above to not introduce spaces where there were not 29 | token["data"] = " " 30 | 31 | elif not preserve and type == "Characters": 32 | token["data"] = collapse_spaces(token["data"]) 33 | 34 | yield token 35 | 36 | 37 | def collapse_spaces(text): 38 | return SPACES_REGEX.sub(' ', text) 39 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/__init__.py: -------------------------------------------------------------------------------- 1 | """Tree adapters let you convert from one tree structure to another 2 | 3 | Example: 4 | 5 | .. code-block:: python 6 | 7 | from pip._vendor import html5lib 8 | from pip._vendor.html5lib.treeadapters import genshi 9 | 10 | doc = 'Hi!' 11 | treebuilder = html5lib.getTreeBuilder('etree') 12 | parser = html5lib.HTMLParser(tree=treebuilder) 13 | tree = parser.parse(doc) 14 | TreeWalker = html5lib.getTreeWalker('etree') 15 | 16 | genshi_tree = genshi.to_genshi(TreeWalker(tree)) 17 | 18 | """ 19 | from __future__ import absolute_import, division, unicode_literals 20 | 21 | from . import sax 22 | 23 | __all__ = ["sax"] 24 | 25 | try: 26 | from . 
import genshi # noqa 27 | except ImportError: 28 | pass 29 | else: 30 | __all__.append("genshi") 31 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/genshi.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from genshi.core import QName, Attrs 4 | from genshi.core import START, END, TEXT, COMMENT, DOCTYPE 5 | 6 | 7 | def to_genshi(walker): 8 | """Convert a tree to a genshi tree 9 | 10 | :arg walker: the treewalker to use to walk the tree to convert it 11 | 12 | :returns: generator of genshi nodes 13 | 14 | """ 15 | text = [] 16 | for token in walker: 17 | type = token["type"] 18 | if type in ("Characters", "SpaceCharacters"): 19 | text.append(token["data"]) 20 | elif text: 21 | yield TEXT, "".join(text), (None, -1, -1) 22 | text = [] 23 | 24 | if type in ("StartTag", "EmptyTag"): 25 | if token["namespace"]: 26 | name = "{%s}%s" % (token["namespace"], token["name"]) 27 | else: 28 | name = token["name"] 29 | attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value) 30 | for attr, value in token["data"].items()]) 31 | yield (START, (QName(name), attrs), (None, -1, -1)) 32 | if type == "EmptyTag": 33 | type = "EndTag" 34 | 35 | if type == "EndTag": 36 | if token["namespace"]: 37 | name = "{%s}%s" % (token["namespace"], token["name"]) 38 | else: 39 | name = token["name"] 40 | 41 | yield END, QName(name), (None, -1, -1) 42 | 43 | elif type == "Comment": 44 | yield COMMENT, token["data"], (None, -1, -1) 45 | 46 | elif type == "Doctype": 47 | yield DOCTYPE, (token["name"], token["publicId"], 48 | token["systemId"]), (None, -1, -1) 49 | 50 | else: 51 | pass # FIXME: What to do? 
52 | 53 | if text: 54 | yield TEXT, "".join(text), (None, -1, -1) 55 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treeadapters/sax.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from xml.sax.xmlreader import AttributesNSImpl 4 | 5 | from ..constants import adjustForeignAttributes, unadjustForeignAttributes 6 | 7 | prefix_mapping = {} 8 | for prefix, localName, namespace in adjustForeignAttributes.values(): 9 | if prefix is not None: 10 | prefix_mapping[prefix] = namespace 11 | 12 | 13 | def to_sax(walker, handler): 14 | """Call SAX-like content handler based on treewalker walker 15 | 16 | :arg walker: the treewalker to use to walk the tree to convert it 17 | 18 | :arg handler: SAX handler to use 19 | 20 | """ 21 | handler.startDocument() 22 | for prefix, namespace in prefix_mapping.items(): 23 | handler.startPrefixMapping(prefix, namespace) 24 | 25 | for token in walker: 26 | type = token["type"] 27 | if type == "Doctype": 28 | continue 29 | elif type in ("StartTag", "EmptyTag"): 30 | attrs = AttributesNSImpl(token["data"], 31 | unadjustForeignAttributes) 32 | handler.startElementNS((token["namespace"], token["name"]), 33 | token["name"], 34 | attrs) 35 | if type == "EmptyTag": 36 | handler.endElementNS((token["namespace"], token["name"]), 37 | token["name"]) 38 | elif type == "EndTag": 39 | handler.endElementNS((token["namespace"], token["name"]), 40 | token["name"]) 41 | elif type in ("Characters", "SpaceCharacters"): 42 | handler.characters(token["data"]) 43 | elif type == "Comment": 44 | pass 45 | else: 46 | assert False, "Unknown token type" 47 | 48 | for prefix, namespace in prefix_mapping.items(): 49 | handler.endPrefixMapping(prefix) 50 | handler.endDocument() 51 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/dom.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from xml.dom import Node 4 | 5 | from . 
import base 6 | 7 | 8 | class TreeWalker(base.NonRecursiveTreeWalker): 9 | def getNodeDetails(self, node): 10 | if node.nodeType == Node.DOCUMENT_TYPE_NODE: 11 | return base.DOCTYPE, node.name, node.publicId, node.systemId 12 | 13 | elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): 14 | return base.TEXT, node.nodeValue 15 | 16 | elif node.nodeType == Node.ELEMENT_NODE: 17 | attrs = {} 18 | for attr in list(node.attributes.keys()): 19 | attr = node.getAttributeNode(attr) 20 | if attr.namespaceURI: 21 | attrs[(attr.namespaceURI, attr.localName)] = attr.value 22 | else: 23 | attrs[(None, attr.name)] = attr.value 24 | return (base.ELEMENT, node.namespaceURI, node.nodeName, 25 | attrs, node.hasChildNodes()) 26 | 27 | elif node.nodeType == Node.COMMENT_NODE: 28 | return base.COMMENT, node.nodeValue 29 | 30 | elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE): 31 | return (base.DOCUMENT,) 32 | 33 | else: 34 | return base.UNKNOWN, node.nodeType 35 | 36 | def getFirstChild(self, node): 37 | return node.firstChild 38 | 39 | def getNextSibling(self, node): 40 | return node.nextSibling 41 | 42 | def getParentNode(self, node): 43 | return node.parentNode 44 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/html5lib/treewalkers/genshi.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division, unicode_literals 2 | 3 | from genshi.core import QName 4 | from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT 5 | from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT 6 | 7 | from . import base 8 | 9 | from ..constants import voidElements, namespaces 10 | 11 | 12 | class TreeWalker(base.TreeWalker): 13 | def __iter__(self): 14 | # Buffer the events so we can pass in the following one 15 | previous = None 16 | for event in self.tree: 17 | if previous is not None: 18 | for token in self.tokens(previous, event): 19 | yield token 20 | previous = event 21 | 22 | # Don't forget the final event! 
23 | if previous is not None: 24 | for token in self.tokens(previous, None): 25 | yield token 26 | 27 | def tokens(self, event, next): 28 | kind, data, _ = event 29 | if kind == START: 30 | tag, attribs = data 31 | name = tag.localname 32 | namespace = tag.namespace 33 | converted_attribs = {} 34 | for k, v in attribs: 35 | if isinstance(k, QName): 36 | converted_attribs[(k.namespace, k.localname)] = v 37 | else: 38 | converted_attribs[(None, k)] = v 39 | 40 | if namespace == namespaces["html"] and name in voidElements: 41 | for token in self.emptyTag(namespace, name, converted_attribs, 42 | not next or next[0] != END or 43 | next[1] != tag): 44 | yield token 45 | else: 46 | yield self.startTag(namespace, name, converted_attribs) 47 | 48 | elif kind == END: 49 | name = data.localname 50 | namespace = data.namespace 51 | if namespace != namespaces["html"] or name not in voidElements: 52 | yield self.endTag(namespace, name) 53 | 54 | elif kind == COMMENT: 55 | yield self.comment(data) 56 | 57 | elif kind == TEXT: 58 | for token in self.text(data): 59 | yield token 60 | 61 | elif kind == DOCTYPE: 62 | yield self.doctype(*data) 63 | 64 | elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS, 65 | START_CDATA, END_CDATA, PI): 66 | pass 67 | 68 | else: 69 | yield self.unknown(kind) 70 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/__init__.py: -------------------------------------------------------------------------------- 1 | from .package_data import __version__ 2 | from .core import * 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/compat.py: -------------------------------------------------------------------------------- 1 | from .core import * 2 | from .codec import * 3 | 4 | def ToASCII(label): 5 | return encode(label) 6 | 7 | def ToUnicode(label): 8 | return decode(label) 9 | 10 | def nameprep(s): 11 | raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") 12 | 13 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/intranges.py: -------------------------------------------------------------------------------- 1 | """ 2 | Given a list of integers, made up of (hopefully) a small number of long runs 3 | of consecutive integers, compute a representation of the form 4 | ((start1, end1), (start2, end2) ...). Then answer the question "was x present 5 | in the original list?" in time O(log(# runs)). 6 | """ 7 | 8 | import bisect 9 | 10 | def intranges_from_list(list_): 11 | """Represent a list of integers as a sequence of ranges: 12 | ((start_0, end_0), (start_1, end_1), ...), such that the original 13 | integers are exactly those x such that start_i <= x < end_i for some i. 14 | 15 | Ranges are encoded as single integers (start << 32 | end), not as tuples. 
16 | """ 17 | 18 | sorted_list = sorted(list_) 19 | ranges = [] 20 | last_write = -1 21 | for i in range(len(sorted_list)): 22 | if i+1 < len(sorted_list): 23 | if sorted_list[i] == sorted_list[i+1]-1: 24 | continue 25 | current_range = sorted_list[last_write+1:i+1] 26 | ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) 27 | last_write = i 28 | 29 | return tuple(ranges) 30 | 31 | def _encode_range(start, end): 32 | return (start << 32) | end 33 | 34 | def _decode_range(r): 35 | return (r >> 32), (r & ((1 << 32) - 1)) 36 | 37 | 38 | def intranges_contain(int_, ranges): 39 | """Determine if `int_` falls into one of the ranges in `ranges`.""" 40 | tuple_ = _encode_range(int_, 0) 41 | pos = bisect.bisect_left(ranges, tuple_) 42 | # we could be immediately ahead of a tuple (start, end) 43 | # with start < int_ <= end 44 | if pos > 0: 45 | left, right = _decode_range(ranges[pos-1]) 46 | if left <= int_ < right: 47 | return True 48 | # or we could be immediately behind a tuple (int_, end) 49 | if pos < len(ranges): 50 | left, _ = _decode_range(ranges[pos]) 51 | if left == int_: 52 | return True 53 | return False 54 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/idna/package_data.py: -------------------------------------------------------------------------------- 1 | __version__ = '2.8' 2 | 3 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/linklockfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import time 4 | import os 5 | 6 | from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, 7 | AlreadyLocked) 8 | 9 | 10 | class LinkLockFile(LockBase): 11 | """Lock access to a file using atomic property of link(2). 12 | 13 | >>> lock = LinkLockFile('somefile') 14 | >>> lock = LinkLockFile('somefile', threaded=False) 15 | """ 16 | 17 | def acquire(self, timeout=None): 18 | try: 19 | open(self.unique_name, "wb").close() 20 | except IOError: 21 | raise LockFailed("failed to create %s" % self.unique_name) 22 | 23 | timeout = timeout if timeout is not None else self.timeout 24 | end_time = time.time() 25 | if timeout is not None and timeout > 0: 26 | end_time += timeout 27 | 28 | while True: 29 | # Try and create a hard link to it. 30 | try: 31 | os.link(self.unique_name, self.lock_file) 32 | except OSError: 33 | # Link creation failed. Maybe we've double-locked? 34 | nlinks = os.stat(self.unique_name).st_nlink 35 | if nlinks == 2: 36 | # The original link plus the one I created == 2. We're 37 | # good to go. 38 | return 39 | else: 40 | # Otherwise the lock creation failed. 41 | if timeout is not None and time.time() > end_time: 42 | os.unlink(self.unique_name) 43 | if timeout > 0: 44 | raise LockTimeout("Timeout waiting to acquire" 45 | " lock for %s" % 46 | self.path) 47 | else: 48 | raise AlreadyLocked("%s is already locked" % 49 | self.path) 50 | time.sleep(timeout is not None and timeout / 10 or 0.1) 51 | else: 52 | # Link creation succeeded. We're good to go. 
53 | return 54 | 55 | def release(self): 56 | if not self.is_locked(): 57 | raise NotLocked("%s is not locked" % self.path) 58 | elif not os.path.exists(self.unique_name): 59 | raise NotMyLock("%s is locked, but not by me" % self.path) 60 | os.unlink(self.unique_name) 61 | os.unlink(self.lock_file) 62 | 63 | def is_locked(self): 64 | return os.path.exists(self.lock_file) 65 | 66 | def i_am_locking(self): 67 | return (self.is_locked() and 68 | os.path.exists(self.unique_name) and 69 | os.stat(self.unique_name).st_nlink == 2) 70 | 71 | def break_lock(self): 72 | if os.path.exists(self.lock_file): 73 | os.unlink(self.lock_file) 74 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/mkdirlockfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import, division 2 | 3 | import time 4 | import os 5 | import sys 6 | import errno 7 | 8 | from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout, 9 | AlreadyLocked) 10 | 11 | 12 | class MkdirLockFile(LockBase): 13 | """Lock file by creating a directory.""" 14 | def __init__(self, path, threaded=True, timeout=None): 15 | """ 16 | >>> lock = MkdirLockFile('somefile') 17 | >>> lock = MkdirLockFile('somefile', threaded=False) 18 | """ 19 | LockBase.__init__(self, path, threaded, timeout) 20 | # Lock file itself is a directory. Place the unique file name into 21 | # it. 22 | self.unique_name = os.path.join(self.lock_file, 23 | "%s.%s%s" % (self.hostname, 24 | self.tname, 25 | self.pid)) 26 | 27 | def acquire(self, timeout=None): 28 | timeout = timeout if timeout is not None else self.timeout 29 | end_time = time.time() 30 | if timeout is not None and timeout > 0: 31 | end_time += timeout 32 | 33 | if timeout is None: 34 | wait = 0.1 35 | else: 36 | wait = max(0, timeout / 10) 37 | 38 | while True: 39 | try: 40 | os.mkdir(self.lock_file) 41 | except OSError: 42 | err = sys.exc_info()[1] 43 | if err.errno == errno.EEXIST: 44 | # Already locked. 45 | if os.path.exists(self.unique_name): 46 | # Already locked by me. 47 | return 48 | if timeout is not None and time.time() > end_time: 49 | if timeout > 0: 50 | raise LockTimeout("Timeout waiting to acquire" 51 | " lock for %s" % 52 | self.path) 53 | else: 54 | # Someone else has the lock. 
55 | raise AlreadyLocked("%s is already locked" % 56 | self.path) 57 | time.sleep(wait) 58 | else: 59 | # Couldn't create the lock for some other reason 60 | raise LockFailed("failed to create %s" % self.lock_file) 61 | else: 62 | open(self.unique_name, "wb").close() 63 | return 64 | 65 | def release(self): 66 | if not self.is_locked(): 67 | raise NotLocked("%s is not locked" % self.path) 68 | elif not os.path.exists(self.unique_name): 69 | raise NotMyLock("%s is locked, but not by me" % self.path) 70 | os.unlink(self.unique_name) 71 | os.rmdir(self.lock_file) 72 | 73 | def is_locked(self): 74 | return os.path.exists(self.lock_file) 75 | 76 | def i_am_locking(self): 77 | return (self.is_locked() and 78 | os.path.exists(self.unique_name)) 79 | 80 | def break_lock(self): 81 | if os.path.exists(self.lock_file): 82 | for name in os.listdir(self.lock_file): 83 | os.unlink(os.path.join(self.lock_file, name)) 84 | os.rmdir(self.lock_file) 85 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/lockfile/symlinklockfile.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | import os 4 | import time 5 | 6 | from . import (LockBase, NotLocked, NotMyLock, LockTimeout, 7 | AlreadyLocked) 8 | 9 | 10 | class SymlinkLockFile(LockBase): 11 | """Lock access to a file using symlink(2).""" 12 | 13 | def __init__(self, path, threaded=True, timeout=None): 14 | # super(SymlinkLockFile).__init(...) 15 | LockBase.__init__(self, path, threaded, timeout) 16 | # split it back! 17 | self.unique_name = os.path.split(self.unique_name)[1] 18 | 19 | def acquire(self, timeout=None): 20 | # Hopefully unnecessary for symlink. 21 | # try: 22 | # open(self.unique_name, "wb").close() 23 | # except IOError: 24 | # raise LockFailed("failed to create %s" % self.unique_name) 25 | timeout = timeout if timeout is not None else self.timeout 26 | end_time = time.time() 27 | if timeout is not None and timeout > 0: 28 | end_time += timeout 29 | 30 | while True: 31 | # Try and create a symbolic link to it. 32 | try: 33 | os.symlink(self.unique_name, self.lock_file) 34 | except OSError: 35 | # Link creation failed. Maybe we've double-locked? 36 | if self.i_am_locking(): 37 | # Linked to out unique name. Proceed. 38 | return 39 | else: 40 | # Otherwise the lock creation failed. 41 | if timeout is not None and time.time() > end_time: 42 | if timeout > 0: 43 | raise LockTimeout("Timeout waiting to acquire" 44 | " lock for %s" % 45 | self.path) 46 | else: 47 | raise AlreadyLocked("%s is already locked" % 48 | self.path) 49 | time.sleep(timeout / 10 if timeout is not None else 0.1) 50 | else: 51 | # Link creation succeeded. We're good to go. 
52 | return 53 | 54 | def release(self): 55 | if not self.is_locked(): 56 | raise NotLocked("%s is not locked" % self.path) 57 | elif not self.i_am_locking(): 58 | raise NotMyLock("%s is locked, but not by me" % self.path) 59 | os.unlink(self.lock_file) 60 | 61 | def is_locked(self): 62 | return os.path.islink(self.lock_file) 63 | 64 | def i_am_locking(self): 65 | return (os.path.islink(self.lock_file) 66 | and os.readlink(self.lock_file) == self.unique_name) 67 | 68 | def break_lock(self): 69 | if os.path.islink(self.lock_file): # exists && link 70 | os.unlink(self.lock_file) 71 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/__init__.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from pip._vendor.msgpack._version import version 3 | from pip._vendor.msgpack.exceptions import * 4 | 5 | from collections import namedtuple 6 | 7 | 8 | class ExtType(namedtuple('ExtType', 'code data')): 9 | """ExtType represents ext type in msgpack.""" 10 | def __new__(cls, code, data): 11 | if not isinstance(code, int): 12 | raise TypeError("code must be int") 13 | if not isinstance(data, bytes): 14 | raise TypeError("data must be bytes") 15 | if not 0 <= code <= 127: 16 | raise ValueError("code must be 0~127") 17 | return super(ExtType, cls).__new__(cls, code, data) 18 | 19 | 20 | import os 21 | if os.environ.get('MSGPACK_PUREPYTHON'): 22 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker 23 | else: 24 | try: 25 | from pip._vendor.msgpack._packer import Packer 26 | from pip._vendor.msgpack._unpacker import unpackb, Unpacker 27 | except ImportError: 28 | from pip._vendor.msgpack.fallback import Packer, unpackb, Unpacker 29 | 30 | 31 | def pack(o, stream, **kwargs): 32 | """ 33 | Pack object `o` and write it to `stream` 34 | 35 | See :class:`Packer` for options. 36 | """ 37 | packer = Packer(**kwargs) 38 | stream.write(packer.pack(o)) 39 | 40 | 41 | def packb(o, **kwargs): 42 | """ 43 | Pack object `o` and return packed bytes 44 | 45 | See :class:`Packer` for options. 46 | """ 47 | return Packer(**kwargs).pack(o) 48 | 49 | 50 | def unpack(stream, **kwargs): 51 | """ 52 | Unpack an object from `stream`. 53 | 54 | Raises `ExtraData` when `stream` contains extra bytes. 55 | See :class:`Unpacker` for options. 56 | """ 57 | data = stream.read() 58 | return unpackb(data, **kwargs) 59 | 60 | 61 | # alias for compatibility to simplejson/marshal/pickle. 62 | load = unpack 63 | loads = unpackb 64 | 65 | dump = pack 66 | dumps = packb 67 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/_version.py: -------------------------------------------------------------------------------- 1 | version = (0, 5, 6) 2 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/msgpack/exceptions.py: -------------------------------------------------------------------------------- 1 | class UnpackException(Exception): 2 | """Deprecated. Use Exception instead to catch all exception during unpacking.""" 3 | 4 | 5 | class BufferFull(UnpackException): 6 | pass 7 | 8 | 9 | class OutOfData(UnpackException): 10 | pass 11 | 12 | 13 | class UnpackValueError(UnpackException, ValueError): 14 | """Deprecated. 
Use ValueError instead.""" 15 | 16 | 17 | class ExtraData(UnpackValueError): 18 | def __init__(self, unpacked, extra): 19 | self.unpacked = unpacked 20 | self.extra = extra 21 | 22 | def __str__(self): 23 | return "unpack(b) received extra data." 24 | 25 | 26 | class PackException(Exception): 27 | """Deprecated. Use Exception instead to catch all exception during packing.""" 28 | 29 | 30 | class PackValueError(PackException, ValueError): 31 | """PackValueError is raised when type of input data is supported but it's value is unsupported. 32 | 33 | Deprecated. Use ValueError instead. 34 | """ 35 | 36 | 37 | class PackOverflowError(PackValueError, OverflowError): 38 | """PackOverflowError is raised when integer value is out of range of msgpack support [-2**31, 2**32). 39 | 40 | Deprecated. Use ValueError instead. 41 | """ 42 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__about__.py: -------------------------------------------------------------------------------- 1 | # This file is dual licensed under the terms of the Apache License, Version 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 | # for complete details. 4 | from __future__ import absolute_import, division, print_function 5 | 6 | __all__ = [ 7 | "__title__", 8 | "__summary__", 9 | "__uri__", 10 | "__version__", 11 | "__author__", 12 | "__email__", 13 | "__license__", 14 | "__copyright__", 15 | ] 16 | 17 | __title__ = "packaging" 18 | __summary__ = "Core utilities for Python packages" 19 | __uri__ = "https://github.com/pypa/packaging" 20 | 21 | __version__ = "19.0" 22 | 23 | __author__ = "Donald Stufft and individual contributors" 24 | __email__ = "donald@stufft.io" 25 | 26 | __license__ = "BSD or Apache License, Version 2.0" 27 | __copyright__ = "Copyright 2014-2019 %s" % __author__ 28 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/__init__.py: -------------------------------------------------------------------------------- 1 | # This file is dual licensed under the terms of the Apache License, Version 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 | # for complete details. 4 | from __future__ import absolute_import, division, print_function 5 | 6 | from .__about__ import ( 7 | __author__, 8 | __copyright__, 9 | __email__, 10 | __license__, 11 | __summary__, 12 | __title__, 13 | __uri__, 14 | __version__, 15 | ) 16 | 17 | __all__ = [ 18 | "__title__", 19 | "__summary__", 20 | "__uri__", 21 | "__version__", 22 | "__author__", 23 | "__email__", 24 | "__license__", 25 | "__copyright__", 26 | ] 27 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_compat.py: -------------------------------------------------------------------------------- 1 | # This file is dual licensed under the terms of the Apache License, Version 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 | # for complete details. 
4 | from __future__ import absolute_import, division, print_function 5 | 6 | import sys 7 | 8 | 9 | PY2 = sys.version_info[0] == 2 10 | PY3 = sys.version_info[0] == 3 11 | 12 | # flake8: noqa 13 | 14 | if PY3: 15 | string_types = (str,) 16 | else: 17 | string_types = (basestring,) 18 | 19 | 20 | def with_metaclass(meta, *bases): 21 | """ 22 | Create a base class with a metaclass. 23 | """ 24 | # This requires a bit of explanation: the basic idea is to make a dummy 25 | # metaclass for one level of class instantiation that replaces itself with 26 | # the actual metaclass. 27 | class metaclass(meta): 28 | def __new__(cls, name, this_bases, d): 29 | return meta(name, bases, d) 30 | 31 | return type.__new__(metaclass, "temporary_class", (), {}) 32 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/_structures.py: -------------------------------------------------------------------------------- 1 | # This file is dual licensed under the terms of the Apache License, Version 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 | # for complete details. 4 | from __future__ import absolute_import, division, print_function 5 | 6 | 7 | class Infinity(object): 8 | def __repr__(self): 9 | return "Infinity" 10 | 11 | def __hash__(self): 12 | return hash(repr(self)) 13 | 14 | def __lt__(self, other): 15 | return False 16 | 17 | def __le__(self, other): 18 | return False 19 | 20 | def __eq__(self, other): 21 | return isinstance(other, self.__class__) 22 | 23 | def __ne__(self, other): 24 | return not isinstance(other, self.__class__) 25 | 26 | def __gt__(self, other): 27 | return True 28 | 29 | def __ge__(self, other): 30 | return True 31 | 32 | def __neg__(self): 33 | return NegativeInfinity 34 | 35 | 36 | Infinity = Infinity() 37 | 38 | 39 | class NegativeInfinity(object): 40 | def __repr__(self): 41 | return "-Infinity" 42 | 43 | def __hash__(self): 44 | return hash(repr(self)) 45 | 46 | def __lt__(self, other): 47 | return True 48 | 49 | def __le__(self, other): 50 | return True 51 | 52 | def __eq__(self, other): 53 | return isinstance(other, self.__class__) 54 | 55 | def __ne__(self, other): 56 | return not isinstance(other, self.__class__) 57 | 58 | def __gt__(self, other): 59 | return False 60 | 61 | def __ge__(self, other): 62 | return False 63 | 64 | def __neg__(self): 65 | return Infinity 66 | 67 | 68 | NegativeInfinity = NegativeInfinity() 69 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/packaging/utils.py: -------------------------------------------------------------------------------- 1 | # This file is dual licensed under the terms of the Apache License, Version 2 | # 2.0, and the BSD License. See the LICENSE file in the root of this repository 3 | # for complete details. 4 | from __future__ import absolute_import, division, print_function 5 | 6 | import re 7 | 8 | from .version import InvalidVersion, Version 9 | 10 | 11 | _canonicalize_regex = re.compile(r"[-_.]+") 12 | 13 | 14 | def canonicalize_name(name): 15 | # This is taken from PEP 503. 16 | return _canonicalize_regex.sub("-", name).lower() 17 | 18 | 19 | def canonicalize_version(version): 20 | """ 21 | This is very similar to Version.__str__, but has one subtle differences 22 | with the way it handles the release segment. 
23 | """ 24 | 25 | try: 26 | version = Version(version) 27 | except InvalidVersion: 28 | # Legacy versions cannot be normalized 29 | return version 30 | 31 | parts = [] 32 | 33 | # Epoch 34 | if version.epoch != 0: 35 | parts.append("{0}!".format(version.epoch)) 36 | 37 | # Release segment 38 | # NB: This strips trailing '.0's to normalize 39 | parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in version.release))) 40 | 41 | # Pre-release 42 | if version.pre is not None: 43 | parts.append("".join(str(x) for x in version.pre)) 44 | 45 | # Post-release 46 | if version.post is not None: 47 | parts.append(".post{0}".format(version.post)) 48 | 49 | # Development release 50 | if version.dev is not None: 51 | parts.append(".dev{0}".format(version.dev)) 52 | 53 | # Local version segment 54 | if version.local is not None: 55 | parts.append("+{0}".format(version.local)) 56 | 57 | return "".join(parts) 58 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/__init__.py: -------------------------------------------------------------------------------- 1 | """Wrappers to build Python packages using PEP 517 hooks 2 | """ 3 | 4 | __version__ = '0.5.0' 5 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/build.py: -------------------------------------------------------------------------------- 1 | """Build a project using PEP 517 hooks. 2 | """ 3 | import argparse 4 | import logging 5 | import os 6 | import contextlib 7 | from pip._vendor import pytoml 8 | import shutil 9 | import errno 10 | import tempfile 11 | 12 | from .envbuild import BuildEnvironment 13 | from .wrappers import Pep517HookCaller 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | @contextlib.contextmanager 19 | def tempdir(): 20 | td = tempfile.mkdtemp() 21 | try: 22 | yield td 23 | finally: 24 | shutil.rmtree(td) 25 | 26 | 27 | def _do_build(hooks, env, dist, dest): 28 | get_requires_name = 'get_requires_for_build_{dist}'.format(**locals()) 29 | get_requires = getattr(hooks, get_requires_name) 30 | reqs = get_requires({}) 31 | log.info('Got build requires: %s', reqs) 32 | 33 | env.pip_install(reqs) 34 | log.info('Installed dynamic build dependencies') 35 | 36 | with tempdir() as td: 37 | log.info('Trying to build %s in %s', dist, td) 38 | build_name = 'build_{dist}'.format(**locals()) 39 | build = getattr(hooks, build_name) 40 | filename = build(td, {}) 41 | source = os.path.join(td, filename) 42 | shutil.move(source, os.path.join(dest, os.path.basename(filename))) 43 | 44 | 45 | def mkdir_p(*args, **kwargs): 46 | """Like `mkdir`, but does not raise an exception if the 47 | directory already exists. 
48 | """ 49 | try: 50 | return os.mkdir(*args, **kwargs) 51 | except OSError as exc: 52 | if exc.errno != errno.EEXIST: 53 | raise 54 | 55 | 56 | def build(source_dir, dist, dest=None): 57 | pyproject = os.path.join(source_dir, 'pyproject.toml') 58 | dest = os.path.join(source_dir, dest or 'dist') 59 | mkdir_p(dest) 60 | 61 | with open(pyproject) as f: 62 | pyproject_data = pytoml.load(f) 63 | # Ensure the mandatory data can be loaded 64 | buildsys = pyproject_data['build-system'] 65 | requires = buildsys['requires'] 66 | backend = buildsys['build-backend'] 67 | 68 | hooks = Pep517HookCaller(source_dir, backend) 69 | 70 | with BuildEnvironment() as env: 71 | env.pip_install(requires) 72 | _do_build(hooks, env, dist, dest) 73 | 74 | 75 | parser = argparse.ArgumentParser() 76 | parser.add_argument( 77 | 'source_dir', 78 | help="A directory containing pyproject.toml", 79 | ) 80 | parser.add_argument( 81 | '--binary', '-b', 82 | action='store_true', 83 | default=False, 84 | ) 85 | parser.add_argument( 86 | '--source', '-s', 87 | action='store_true', 88 | default=False, 89 | ) 90 | parser.add_argument( 91 | '--out-dir', '-o', 92 | help="Destination in which to save the builds relative to source dir", 93 | ) 94 | 95 | 96 | def main(args): 97 | # determine which dists to build 98 | dists = list(filter(None, ( 99 | 'sdist' if args.source or not args.binary else None, 100 | 'wheel' if args.binary or not args.source else None, 101 | ))) 102 | 103 | for dist in dists: 104 | build(args.source_dir, dist, args.out_dir) 105 | 106 | 107 | if __name__ == '__main__': 108 | main(parser.parse_args()) 109 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pep517/compat.py: -------------------------------------------------------------------------------- 1 | """Handle reading and writing JSON in UTF-8, on Python 3 and 2.""" 2 | import json 3 | import sys 4 | 5 | if sys.version_info[0] >= 3: 6 | # Python 3 7 | def write_json(obj, path, **kwargs): 8 | with open(path, 'w', encoding='utf-8') as f: 9 | json.dump(obj, f, **kwargs) 10 | 11 | def read_json(path): 12 | with open(path, 'r', encoding='utf-8') as f: 13 | return json.load(f) 14 | 15 | else: 16 | # Python 2 17 | def write_json(obj, path, **kwargs): 18 | with open(path, 'wb') as f: 19 | json.dump(obj, f, encoding='utf-8', **kwargs) 20 | 21 | def read_json(path): 22 | with open(path, 'rb') as f: 23 | return json.load(f) 24 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pkg_resources/py31compat.py: -------------------------------------------------------------------------------- 1 | import os 2 | import errno 3 | import sys 4 | 5 | from pip._vendor import six 6 | 7 | 8 | def _makedirs_31(path, exist_ok=False): 9 | try: 10 | os.makedirs(path) 11 | except OSError as exc: 12 | if not exist_ok or exc.errno != errno.EEXIST: 13 | raise 14 | 15 | 16 | # rely on compatibility behavior until mode considerations 17 | # and exists_ok considerations are disentangled. 
18 | # See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663 19 | needs_makedirs = ( 20 | six.PY2 or 21 | (3, 4) <= sys.version_info < (3, 4, 1) 22 | ) 23 | makedirs = _makedirs_31 if needs_makedirs else os.makedirs 24 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/bar.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Giorgos Verigakis 4 | # 5 | # Permission to use, copy, modify, and distribute this software for any 6 | # purpose with or without fee is hereby granted, provided that the above 7 | # copyright notice and this permission notice appear in all copies. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | from __future__ import unicode_literals 18 | 19 | import sys 20 | 21 | from . import Progress 22 | from .helpers import WritelnMixin 23 | 24 | 25 | class Bar(WritelnMixin, Progress): 26 | width = 32 27 | message = '' 28 | suffix = '%(index)d/%(max)d' 29 | bar_prefix = ' |' 30 | bar_suffix = '| ' 31 | empty_fill = ' ' 32 | fill = '#' 33 | hide_cursor = True 34 | 35 | def update(self): 36 | filled_length = int(self.width * self.progress) 37 | empty_length = self.width - filled_length 38 | 39 | message = self.message % self 40 | bar = self.fill * filled_length 41 | empty = self.empty_fill * empty_length 42 | suffix = self.suffix % self 43 | line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix, 44 | suffix]) 45 | self.writeln(line) 46 | 47 | 48 | class ChargingBar(Bar): 49 | suffix = '%(percent)d%%' 50 | bar_prefix = ' ' 51 | bar_suffix = ' ' 52 | empty_fill = '∙' 53 | fill = '█' 54 | 55 | 56 | class FillingSquaresBar(ChargingBar): 57 | empty_fill = '▢' 58 | fill = '▣' 59 | 60 | 61 | class FillingCirclesBar(ChargingBar): 62 | empty_fill = '◯' 63 | fill = '◉' 64 | 65 | 66 | class IncrementalBar(Bar): 67 | if sys.platform.startswith('win'): 68 | phases = (u' ', u'▌', u'█') 69 | else: 70 | phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█') 71 | 72 | def update(self): 73 | nphases = len(self.phases) 74 | filled_len = self.width * self.progress 75 | nfull = int(filled_len) # Number of full chars 76 | phase = int((filled_len - nfull) * nphases) # Phase of last char 77 | nempty = self.width - nfull # Number of empty chars 78 | 79 | message = self.message % self 80 | bar = self.phases[-1] * nfull 81 | current = self.phases[phase] if phase > 0 else '' 82 | empty = self.empty_fill * max(0, nempty - len(current)) 83 | suffix = self.suffix % self 84 | line = ''.join([message, self.bar_prefix, bar, current, empty, 85 | self.bar_suffix, suffix]) 86 | self.writeln(line) 87 | 88 | 89 | class PixelBar(IncrementalBar): 90 | phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿') 91 | 92 | 93 | class ShadyBar(IncrementalBar): 94 | phases = (' ', '░', '▒', '▓', '█') 95 | -------------------------------------------------------------------------------- 
/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/counter.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Giorgos Verigakis 4 | # 5 | # Permission to use, copy, modify, and distribute this software for any 6 | # purpose with or without fee is hereby granted, provided that the above 7 | # copyright notice and this permission notice appear in all copies. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | from __future__ import unicode_literals 18 | from . import Infinite, Progress 19 | from .helpers import WriteMixin 20 | 21 | 22 | class Counter(WriteMixin, Infinite): 23 | message = '' 24 | hide_cursor = True 25 | 26 | def update(self): 27 | self.write(str(self.index)) 28 | 29 | 30 | class Countdown(WriteMixin, Progress): 31 | hide_cursor = True 32 | 33 | def update(self): 34 | self.write(str(self.remaining)) 35 | 36 | 37 | class Stack(WriteMixin, Progress): 38 | phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█') 39 | hide_cursor = True 40 | 41 | def update(self): 42 | nphases = len(self.phases) 43 | i = min(nphases - 1, int(self.progress * nphases)) 44 | self.write(self.phases[i]) 45 | 46 | 47 | class Pie(Stack): 48 | phases = ('○', '◔', '◑', '◕', '●') 49 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/helpers.py: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2012 Giorgos Verigakis 2 | # 3 | # Permission to use, copy, modify, and distribute this software for any 4 | # purpose with or without fee is hereby granted, provided that the above 5 | # copyright notice and this permission notice appear in all copies. 6 | # 7 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 8 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 9 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 10 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 11 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 12 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 13 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
14 | 15 | from __future__ import print_function 16 | 17 | 18 | HIDE_CURSOR = '\x1b[?25l' 19 | SHOW_CURSOR = '\x1b[?25h' 20 | 21 | 22 | class WriteMixin(object): 23 | hide_cursor = False 24 | 25 | def __init__(self, message=None, **kwargs): 26 | super(WriteMixin, self).__init__(**kwargs) 27 | self._width = 0 28 | if message: 29 | self.message = message 30 | 31 | if self.file and self.file.isatty(): 32 | if self.hide_cursor: 33 | print(HIDE_CURSOR, end='', file=self.file) 34 | print(self.message, end='', file=self.file) 35 | self.file.flush() 36 | 37 | def write(self, s): 38 | if self.file and self.file.isatty(): 39 | b = '\b' * self._width 40 | c = s.ljust(self._width) 41 | print(b + c, end='', file=self.file) 42 | self._width = max(self._width, len(s)) 43 | self.file.flush() 44 | 45 | def finish(self): 46 | if self.file and self.file.isatty() and self.hide_cursor: 47 | print(SHOW_CURSOR, end='', file=self.file) 48 | 49 | 50 | class WritelnMixin(object): 51 | hide_cursor = False 52 | 53 | def __init__(self, message=None, **kwargs): 54 | super(WritelnMixin, self).__init__(**kwargs) 55 | if message: 56 | self.message = message 57 | 58 | if self.file and self.file.isatty() and self.hide_cursor: 59 | print(HIDE_CURSOR, end='', file=self.file) 60 | 61 | def clearln(self): 62 | if self.file and self.file.isatty(): 63 | print('\r\x1b[K', end='', file=self.file) 64 | 65 | def writeln(self, line): 66 | if self.file and self.file.isatty(): 67 | self.clearln() 68 | print(line, end='', file=self.file) 69 | self.file.flush() 70 | 71 | def finish(self): 72 | if self.file and self.file.isatty(): 73 | print(file=self.file) 74 | if self.hide_cursor: 75 | print(SHOW_CURSOR, end='', file=self.file) 76 | 77 | 78 | from signal import signal, SIGINT 79 | from sys import exit 80 | 81 | 82 | class SigIntMixin(object): 83 | """Registers a signal handler that calls finish on SIGINT""" 84 | 85 | def __init__(self, *args, **kwargs): 86 | super(SigIntMixin, self).__init__(*args, **kwargs) 87 | signal(SIGINT, self._sigint_handler) 88 | 89 | def _sigint_handler(self, signum, frame): 90 | self.finish() 91 | exit(0) 92 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/progress/spinner.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | # Copyright (c) 2012 Giorgos Verigakis 4 | # 5 | # Permission to use, copy, modify, and distribute this software for any 6 | # purpose with or without fee is hereby granted, provided that the above 7 | # copyright notice and this permission notice appear in all copies. 8 | # 9 | # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES 10 | # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF 11 | # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR 12 | # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES 13 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN 14 | # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF 15 | # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | from __future__ import unicode_literals 18 | from . 
import Infinite 19 | from .helpers import WriteMixin 20 | 21 | 22 | class Spinner(WriteMixin, Infinite): 23 | message = '' 24 | phases = ('-', '\\', '|', '/') 25 | hide_cursor = True 26 | 27 | def update(self): 28 | i = self.index % len(self.phases) 29 | self.write(self.phases[i]) 30 | 31 | 32 | class PieSpinner(Spinner): 33 | phases = ['◷', '◶', '◵', '◴'] 34 | 35 | 36 | class MoonSpinner(Spinner): 37 | phases = ['◑', '◒', '◐', '◓'] 38 | 39 | 40 | class LineSpinner(Spinner): 41 | phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻'] 42 | 43 | class PixelSpinner(Spinner): 44 | phases = ['⣾','⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽'] 45 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import TomlError 2 | from .parser import load, loads 3 | from .test import translate_to_test 4 | from .writer import dump, dumps -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/core.py: -------------------------------------------------------------------------------- 1 | class TomlError(RuntimeError): 2 | def __init__(self, message, line, col, filename): 3 | RuntimeError.__init__(self, message, line, col, filename) 4 | self.message = message 5 | self.line = line 6 | self.col = col 7 | self.filename = filename 8 | 9 | def __str__(self): 10 | return '{}({}, {}): {}'.format(self.filename, self.line, self.col, self.message) 11 | 12 | def __repr__(self): 13 | return 'TomlError({!r}, {!r}, {!r}, {!r})'.format(self.message, self.line, self.col, self.filename) 14 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/test.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | from .utils import format_rfc3339 3 | 4 | try: 5 | _string_types = (str, unicode) 6 | _int_types = (int, long) 7 | except NameError: 8 | _string_types = str 9 | _int_types = int 10 | 11 | def translate_to_test(v): 12 | if isinstance(v, dict): 13 | return { k: translate_to_test(v) for k, v in v.items() } 14 | if isinstance(v, list): 15 | a = [translate_to_test(x) for x in v] 16 | if v and isinstance(v[0], dict): 17 | return a 18 | else: 19 | return {'type': 'array', 'value': a} 20 | if isinstance(v, datetime.datetime): 21 | return {'type': 'datetime', 'value': format_rfc3339(v)} 22 | if isinstance(v, bool): 23 | return {'type': 'bool', 'value': 'true' if v else 'false'} 24 | if isinstance(v, _int_types): 25 | return {'type': 'integer', 'value': str(v)} 26 | if isinstance(v, float): 27 | return {'type': 'float', 'value': '{:.17}'.format(v)} 28 | if isinstance(v, _string_types): 29 | return {'type': 'string', 'value': v} 30 | raise RuntimeError('unexpected value: {!r}'.format(v)) 31 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/pytoml/utils.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import re 3 | 4 | rfc3339_re = re.compile(r'(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(\.\d+)?(?:Z|([+-]\d{2}):(\d{2}))') 5 | 6 | def parse_rfc3339(v): 7 | m = rfc3339_re.match(v) 8 | if not m or m.group(0) != v: 9 | return None 10 | return parse_rfc3339_re(m) 11 | 12 
| def parse_rfc3339_re(m): 13 | r = map(int, m.groups()[:6]) 14 | if m.group(7): 15 | micro = float(m.group(7)) 16 | else: 17 | micro = 0 18 | 19 | if m.group(8): 20 | g = int(m.group(8), 10) * 60 + int(m.group(9), 10) 21 | tz = _TimeZone(datetime.timedelta(0, g * 60)) 22 | else: 23 | tz = _TimeZone(datetime.timedelta(0, 0)) 24 | 25 | y, m, d, H, M, S = r 26 | return datetime.datetime(y, m, d, H, M, S, int(micro * 1000000), tz) 27 | 28 | 29 | def format_rfc3339(v): 30 | offs = v.utcoffset() 31 | offs = int(offs.total_seconds()) // 60 if offs is not None else 0 32 | 33 | if offs == 0: 34 | suffix = 'Z' 35 | else: 36 | if offs > 0: 37 | suffix = '+' 38 | else: 39 | suffix = '-' 40 | offs = -offs 41 | suffix = '{0}{1:02}:{2:02}'.format(suffix, offs // 60, offs % 60) 42 | 43 | if v.microsecond: 44 | return v.strftime('%Y-%m-%dT%H:%M:%S.%f') + suffix 45 | else: 46 | return v.strftime('%Y-%m-%dT%H:%M:%S') + suffix 47 | 48 | class _TimeZone(datetime.tzinfo): 49 | def __init__(self, offset): 50 | self._offset = offset 51 | 52 | def utcoffset(self, dt): 53 | return self._offset 54 | 55 | def dst(self, dt): 56 | return None 57 | 58 | def tzname(self, dt): 59 | m = self._offset.total_seconds() // 60 60 | if m < 0: 61 | res = '-' 62 | m = -m 63 | else: 64 | res = '+' 65 | h = m // 60 66 | m = m - h * 60 67 | return '{}{:.02}{:.02}'.format(res, h, m) 68 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/__version__.py: -------------------------------------------------------------------------------- 1 | # .-. .-. .-. . . .-. .-. .-. .-. 2 | # |( |- |.| | | |- `-. | `-. 3 | # ' ' `-' `-`.`-' `-' `-' ' `-' 4 | 5 | __title__ = 'requests' 6 | __description__ = 'Python HTTP for Humans.' 7 | __url__ = 'http://python-requests.org' 8 | __version__ = '2.21.0' 9 | __build__ = 0x022100 10 | __author__ = 'Kenneth Reitz' 11 | __author_email__ = 'me@kennethreitz.org' 12 | __license__ = 'Apache 2.0' 13 | __copyright__ = 'Copyright 2018 Kenneth Reitz' 14 | __cake__ = u'\u2728 \U0001f370 \u2728' 15 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/_internal_utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests._internal_utils 5 | ~~~~~~~~~~~~~~ 6 | 7 | Provides utility functions that are consumed internally by Requests 8 | which depend on extremely few external helpers (such as compat) 9 | """ 10 | 11 | from .compat import is_py2, builtin_str, str 12 | 13 | 14 | def to_native_string(string, encoding='ascii'): 15 | """Given a string object, regardless of type, returns a representation of 16 | that string in the native string type, encoding and decoding where 17 | necessary. This assumes ASCII unless told otherwise. 18 | """ 19 | if isinstance(string, builtin_str): 20 | out = string 21 | else: 22 | if is_py2: 23 | out = string.encode(encoding) 24 | else: 25 | out = string.decode(encoding) 26 | 27 | return out 28 | 29 | 30 | def unicode_is_ascii(u_string): 31 | """Determine if unicode string only contains ASCII characters. 32 | 33 | :param str u_string: unicode string to check. Must be unicode 34 | and not Python 2 `str`. 
35 | :rtype: bool 36 | """ 37 | assert isinstance(u_string, str) 38 | try: 39 | u_string.encode('ascii') 40 | return True 41 | except UnicodeEncodeError: 42 | return False 43 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/certs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | requests.certs 6 | ~~~~~~~~~~~~~~ 7 | 8 | This module returns the preferred default CA certificate bundle. There is 9 | only one — the one from the certifi package. 10 | 11 | If you are packaging Requests, e.g., for a Linux distribution or a managed 12 | environment, you can change the definition of where() to return a separately 13 | packaged CA bundle. 14 | """ 15 | from pip._vendor.certifi import where 16 | 17 | if __name__ == '__main__': 18 | print(where()) 19 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/compat.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.compat 5 | ~~~~~~~~~~~~~~~ 6 | 7 | This module handles import compatibility issues between Python 2 and 8 | Python 3. 9 | """ 10 | 11 | from pip._vendor import chardet 12 | 13 | import sys 14 | 15 | # ------- 16 | # Pythons 17 | # ------- 18 | 19 | # Syntax sugar. 20 | _ver = sys.version_info 21 | 22 | #: Python 2.x? 23 | is_py2 = (_ver[0] == 2) 24 | 25 | #: Python 3.x? 26 | is_py3 = (_ver[0] == 3) 27 | 28 | # Note: We've patched out simplejson support in pip because it prevents 29 | # upgrading simplejson on Windows. 30 | # try: 31 | # import simplejson as json 32 | # except (ImportError, SyntaxError): 33 | # # simplejson does not support Python 3.2, it throws a SyntaxError 34 | # # because of u'...' Unicode literals. 
35 | import json 36 | 37 | # --------- 38 | # Specifics 39 | # --------- 40 | 41 | if is_py2: 42 | from urllib import ( 43 | quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, 44 | proxy_bypass, proxy_bypass_environment, getproxies_environment) 45 | from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag 46 | from urllib2 import parse_http_list 47 | import cookielib 48 | from Cookie import Morsel 49 | from StringIO import StringIO 50 | from collections import Callable, Mapping, MutableMapping, OrderedDict 51 | 52 | 53 | builtin_str = str 54 | bytes = str 55 | str = unicode 56 | basestring = basestring 57 | numeric_types = (int, long, float) 58 | integer_types = (int, long) 59 | 60 | elif is_py3: 61 | from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag 62 | from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment 63 | from http import cookiejar as cookielib 64 | from http.cookies import Morsel 65 | from io import StringIO 66 | from collections import OrderedDict 67 | from collections.abc import Callable, Mapping, MutableMapping 68 | 69 | builtin_str = str 70 | str = str 71 | bytes = bytes 72 | basestring = (str, bytes) 73 | numeric_types = (int, float) 74 | integer_types = (int,) 75 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/hooks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.hooks 5 | ~~~~~~~~~~~~~~ 6 | 7 | This module provides the capabilities for the Requests hooks system. 8 | 9 | Available hooks: 10 | 11 | ``response``: 12 | The response generated from a Request. 13 | """ 14 | HOOKS = ['response'] 15 | 16 | 17 | def default_hooks(): 18 | return {event: [] for event in HOOKS} 19 | 20 | # TODO: response is the only one 21 | 22 | 23 | def dispatch_hook(key, hooks, hook_data, **kwargs): 24 | """Dispatches a hook dictionary on a given piece of data.""" 25 | hooks = hooks or {} 26 | hooks = hooks.get(key) 27 | if hooks: 28 | if hasattr(hooks, '__call__'): 29 | hooks = [hooks] 30 | for hook in hooks: 31 | _hook_data = hook(hook_data, **kwargs) 32 | if _hook_data is not None: 33 | hook_data = _hook_data 34 | return hook_data 35 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/packages.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | # This code exists for backwards compatibility reasons. 4 | # I don't like it either. Just look the other way. :) 5 | 6 | for package in ('urllib3', 'idna', 'chardet'): 7 | vendored_package = "pip._vendor." + package 8 | locals()[package] = __import__(vendored_package) 9 | # This traversal is apparently necessary such that the identities are 10 | # preserved (requests.packages.urllib3.* is urllib3.*) 11 | for mod in list(sys.modules): 12 | if mod == vendored_package or mod.startswith(vendored_package + '.'): 13 | unprefixed_mod = mod[len("pip._vendor."):] 14 | sys.modules['pip._vendor.requests.packages.' + unprefixed_mod] = sys.modules[mod] 15 | 16 | # Kinda cool, though, right? 
17 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/requests/structures.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | requests.structures 5 | ~~~~~~~~~~~~~~~~~~~ 6 | 7 | Data structures that power Requests. 8 | """ 9 | 10 | from .compat import OrderedDict, Mapping, MutableMapping 11 | 12 | 13 | class CaseInsensitiveDict(MutableMapping): 14 | """A case-insensitive ``dict``-like object. 15 | 16 | Implements all methods and operations of 17 | ``MutableMapping`` as well as dict's ``copy``. Also 18 | provides ``lower_items``. 19 | 20 | All keys are expected to be strings. The structure remembers the 21 | case of the last key to be set, and ``iter(instance)``, 22 | ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` 23 | will contain case-sensitive keys. However, querying and contains 24 | testing is case insensitive:: 25 | 26 | cid = CaseInsensitiveDict() 27 | cid['Accept'] = 'application/json' 28 | cid['aCCEPT'] == 'application/json' # True 29 | list(cid) == ['Accept'] # True 30 | 31 | For example, ``headers['content-encoding']`` will return the 32 | value of a ``'Content-Encoding'`` response header, regardless 33 | of how the header name was originally stored. 34 | 35 | If the constructor, ``.update``, or equality comparison 36 | operations are given keys that have equal ``.lower()``s, the 37 | behavior is undefined. 38 | """ 39 | 40 | def __init__(self, data=None, **kwargs): 41 | self._store = OrderedDict() 42 | if data is None: 43 | data = {} 44 | self.update(data, **kwargs) 45 | 46 | def __setitem__(self, key, value): 47 | # Use the lowercased key for lookups, but store the actual 48 | # key alongside the value. 
49 | self._store[key.lower()] = (key, value) 50 | 51 | def __getitem__(self, key): 52 | return self._store[key.lower()][1] 53 | 54 | def __delitem__(self, key): 55 | del self._store[key.lower()] 56 | 57 | def __iter__(self): 58 | return (casedkey for casedkey, mappedvalue in self._store.values()) 59 | 60 | def __len__(self): 61 | return len(self._store) 62 | 63 | def lower_items(self): 64 | """Like iteritems(), but with all lowercase keys.""" 65 | return ( 66 | (lowerkey, keyval[1]) 67 | for (lowerkey, keyval) 68 | in self._store.items() 69 | ) 70 | 71 | def __eq__(self, other): 72 | if isinstance(other, Mapping): 73 | other = CaseInsensitiveDict(other) 74 | else: 75 | return NotImplemented 76 | # Compare insensitively 77 | return dict(self.lower_items()) == dict(other.lower_items()) 78 | 79 | # Copy is required 80 | def copy(self): 81 | return CaseInsensitiveDict(self._store.values()) 82 | 83 | def __repr__(self): 84 | return str(dict(self.items())) 85 | 86 | 87 | class LookupDict(dict): 88 | """Dictionary lookup object.""" 89 | 90 | def __init__(self, name=None): 91 | self.name = name 92 | super(LookupDict, self).__init__() 93 | 94 | def __repr__(self): 95 | return '<lookup \'%s\'>' % (self.name) 96 | 97 | def __getitem__(self, key): 98 | # We allow fall-through here, so values default to None 99 | 100 | return self.__dict__.get(key, None) 101 | 102 | def get(self, key, default=None): 103 | return self.__dict__.get(key, default) 104 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | urllib3 - Thread-safe connection pooling and re-using. 3 | """ 4 | 5 | from __future__ import absolute_import 6 | import warnings 7 | 8 | from .connectionpool import ( 9 | HTTPConnectionPool, 10 | HTTPSConnectionPool, 11 | connection_from_url 12 | ) 13 | 14 | from . import exceptions 15 | from .filepost import encode_multipart_formdata 16 | from .poolmanager import PoolManager, ProxyManager, proxy_from_url 17 | from .response import HTTPResponse 18 | from .util.request import make_headers 19 | from .util.url import get_host 20 | from .util.timeout import Timeout 21 | from .util.retry import Retry 22 | 23 | 24 | # Set default logging handler to avoid "No handler found" warnings. 25 | import logging 26 | from logging import NullHandler 27 | 28 | __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' 29 | __license__ = 'MIT' 30 | __version__ = '1.24.1' 31 | 32 | __all__ = ( 33 | 'HTTPConnectionPool', 34 | 'HTTPSConnectionPool', 35 | 'PoolManager', 36 | 'ProxyManager', 37 | 'HTTPResponse', 38 | 'Retry', 39 | 'Timeout', 40 | 'add_stderr_logger', 41 | 'connection_from_url', 42 | 'disable_warnings', 43 | 'encode_multipart_formdata', 44 | 'get_host', 45 | 'make_headers', 46 | 'proxy_from_url', 47 | ) 48 | 49 | logging.getLogger(__name__).addHandler(NullHandler()) 50 | 51 | 52 | def add_stderr_logger(level=logging.DEBUG): 53 | """ 54 | Helper for quickly adding a StreamHandler to the logger. Useful for 55 | debugging. 56 | 57 | Returns the handler after adding it. 58 | """ 59 | # This method needs to be in this __init__.py to get the __name__ correct 60 | # even if urllib3 is vendored within another package.
61 | logger = logging.getLogger(__name__) 62 | handler = logging.StreamHandler() 63 | handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s')) 64 | logger.addHandler(handler) 65 | logger.setLevel(level) 66 | logger.debug('Added a stderr logging handler to logger: %s', __name__) 67 | return handler 68 | 69 | 70 | # ... Clean up. 71 | del NullHandler 72 | 73 | 74 | # All warning filters *must* be appended unless you're really certain that they 75 | # shouldn't be: otherwise, it's very hard for users to use most Python 76 | # mechanisms to silence them. 77 | # SecurityWarning's always go off by default. 78 | warnings.simplefilter('always', exceptions.SecurityWarning, append=True) 79 | # SubjectAltNameWarning's should go off once per host 80 | warnings.simplefilter('default', exceptions.SubjectAltNameWarning, append=True) 81 | # InsecurePlatformWarning's don't vary between requests, so we keep it default. 82 | warnings.simplefilter('default', exceptions.InsecurePlatformWarning, 83 | append=True) 84 | # SNIMissingWarnings should go off only once. 85 | warnings.simplefilter('default', exceptions.SNIMissingWarning, append=True) 86 | 87 | 88 | def disable_warnings(category=exceptions.HTTPWarning): 89 | """ 90 | Helper for quickly disabling all urllib3 warnings. 91 | """ 92 | warnings.simplefilter('ignore', category) 93 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_appengine_environ.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides means to detect the App Engine environment. 
3 | """ 4 | 5 | import os 6 | 7 | 8 | def is_appengine(): 9 | return (is_local_appengine() or 10 | is_prod_appengine() or 11 | is_prod_appengine_mvms()) 12 | 13 | 14 | def is_appengine_sandbox(): 15 | return is_appengine() and not is_prod_appengine_mvms() 16 | 17 | 18 | def is_local_appengine(): 19 | return ('APPENGINE_RUNTIME' in os.environ and 20 | 'Development/' in os.environ['SERVER_SOFTWARE']) 21 | 22 | 23 | def is_prod_appengine(): 24 | return ('APPENGINE_RUNTIME' in os.environ and 25 | 'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and 26 | not is_prod_appengine_mvms()) 27 | 28 | 29 | def is_prod_appengine_mvms(): 30 | return os.environ.get('GAE_VM', False) == 'true' 31 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/contrib/_securetransport/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/filepost.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | import binascii 3 | import codecs 4 | import os 5 | 6 | from io import BytesIO 7 | 8 | from .packages import six 9 | from .packages.six import b 10 | from .fields import RequestField 11 | 12 | writer = codecs.lookup('utf-8')[3] 13 | 14 | 15 | def choose_boundary(): 16 | """ 17 | Our embarrassingly-simple replacement for mimetools.choose_boundary. 18 | """ 19 | boundary = binascii.hexlify(os.urandom(16)) 20 | if six.PY3: 21 | boundary = boundary.decode('ascii') 22 | return boundary 23 | 24 | 25 | def iter_field_objects(fields): 26 | """ 27 | Iterate over fields. 28 | 29 | Supports list of (k, v) tuples and dicts, and lists of 30 | :class:`~urllib3.fields.RequestField`. 31 | 32 | """ 33 | if isinstance(fields, dict): 34 | i = six.iteritems(fields) 35 | else: 36 | i = iter(fields) 37 | 38 | for field in i: 39 | if isinstance(field, RequestField): 40 | yield field 41 | else: 42 | yield RequestField.from_tuples(*field) 43 | 44 | 45 | def iter_fields(fields): 46 | """ 47 | .. deprecated:: 1.6 48 | 49 | Iterate over fields. 50 | 51 | The addition of :class:`~urllib3.fields.RequestField` makes this function 52 | obsolete. Instead, use :func:`iter_field_objects`, which returns 53 | :class:`~urllib3.fields.RequestField` objects. 54 | 55 | Supports list of (k, v) tuples and dicts. 56 | """ 57 | if isinstance(fields, dict): 58 | return ((k, v) for k, v in six.iteritems(fields)) 59 | 60 | return ((k, v) for k, v in fields) 61 | 62 | 63 | def encode_multipart_formdata(fields, boundary=None): 64 | """ 65 | Encode a dictionary of ``fields`` using the multipart/form-data MIME format. 66 | 67 | :param fields: 68 | Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). 69 | 70 | :param boundary: 71 | If not specified, then a random boundary will be generated using 72 | :func:`urllib3.filepost.choose_boundary`. 
73 | """ 74 | body = BytesIO() 75 | if boundary is None: 76 | boundary = choose_boundary() 77 | 78 | for field in iter_field_objects(fields): 79 | body.write(b('--%s\r\n' % (boundary))) 80 | 81 | writer(body).write(field.render_headers()) 82 | data = field.data 83 | 84 | if isinstance(data, int): 85 | data = str(data) # Backwards compatibility 86 | 87 | if isinstance(data, six.text_type): 88 | writer(body).write(data) 89 | else: 90 | body.write(data) 91 | 92 | body.write(b'\r\n') 93 | 94 | body.write(b('--%s--\r\n' % (boundary))) 95 | 96 | content_type = str('multipart/form-data; boundary=%s' % boundary) 97 | 98 | return body.getvalue(), content_type 99 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | 3 | from . import ssl_match_hostname 4 | 5 | __all__ = ('ssl_match_hostname', ) 6 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/__init__.py -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/backports/makefile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | backports.makefile 4 | ~~~~~~~~~~~~~~~~~~ 5 | 6 | Backports the Python 3 ``socket.makefile`` method for use with anything that 7 | wants to create a "fake" socket object. 8 | """ 9 | import io 10 | 11 | from socket import SocketIO 12 | 13 | 14 | def backport_makefile(self, mode="r", buffering=None, encoding=None, 15 | errors=None, newline=None): 16 | """ 17 | Backport of ``socket.makefile`` from Python 3.5. 
18 | """ 19 | if not set(mode) <= {"r", "w", "b"}: 20 | raise ValueError( 21 | "invalid mode %r (only r, w, b allowed)" % (mode,) 22 | ) 23 | writing = "w" in mode 24 | reading = "r" in mode or not writing 25 | assert reading or writing 26 | binary = "b" in mode 27 | rawmode = "" 28 | if reading: 29 | rawmode += "r" 30 | if writing: 31 | rawmode += "w" 32 | raw = SocketIO(self, rawmode) 33 | self._makefile_refs += 1 34 | if buffering is None: 35 | buffering = -1 36 | if buffering < 0: 37 | buffering = io.DEFAULT_BUFFER_SIZE 38 | if buffering == 0: 39 | if not binary: 40 | raise ValueError("unbuffered streams must be binary") 41 | return raw 42 | if reading and writing: 43 | buffer = io.BufferedRWPair(raw, raw, buffering) 44 | elif reading: 45 | buffer = io.BufferedReader(raw, buffering) 46 | else: 47 | assert writing 48 | buffer = io.BufferedWriter(raw, buffering) 49 | if binary: 50 | return buffer 51 | text = io.TextIOWrapper(buffer, encoding, errors, newline) 52 | text.mode = mode 53 | return text 54 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | try: 4 | # Our match_hostname function is the same as 3.5's, so we only want to 5 | # import the match_hostname function if it's at least that good. 6 | if sys.version_info < (3, 5): 7 | raise ImportError("Fallback to vendored code") 8 | 9 | from ssl import CertificateError, match_hostname 10 | except ImportError: 11 | try: 12 | # Backport of the function from a pypi module 13 | from backports.ssl_match_hostname import CertificateError, match_hostname 14 | except ImportError: 15 | # Our vendored copy 16 | from ._implementation import CertificateError, match_hostname 17 | 18 | # Not needed, but documenting what we provide. 19 | __all__ = ('CertificateError', 'match_hostname') 20 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | # For backwards compatibility, provide imports that used to be here. 
3 | from .connection import is_connection_dropped 4 | from .request import make_headers 5 | from .response import is_fp_closed 6 | from .ssl_ import ( 7 | SSLContext, 8 | HAS_SNI, 9 | IS_PYOPENSSL, 10 | IS_SECURETRANSPORT, 11 | assert_fingerprint, 12 | resolve_cert_reqs, 13 | resolve_ssl_version, 14 | ssl_wrap_socket, 15 | ) 16 | from .timeout import ( 17 | current_time, 18 | Timeout, 19 | ) 20 | 21 | from .retry import Retry 22 | from .url import ( 23 | get_host, 24 | parse_url, 25 | split_first, 26 | Url, 27 | ) 28 | from .wait import ( 29 | wait_for_read, 30 | wait_for_write 31 | ) 32 | 33 | __all__ = ( 34 | 'HAS_SNI', 35 | 'IS_PYOPENSSL', 36 | 'IS_SECURETRANSPORT', 37 | 'SSLContext', 38 | 'Retry', 39 | 'Timeout', 40 | 'Url', 41 | 'assert_fingerprint', 42 | 'current_time', 43 | 'is_connection_dropped', 44 | 'is_fp_closed', 45 | 'get_host', 46 | 'parse_url', 47 | 'make_headers', 48 | 'resolve_cert_reqs', 49 | 'resolve_ssl_version', 50 | 'split_first', 51 | 'ssl_wrap_socket', 52 | 'wait_for_read', 53 | 'wait_for_write' 54 | ) 55 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/queue.py: -------------------------------------------------------------------------------- 1 | import collections 2 | from ..packages import six 3 | from ..packages.six.moves import queue 4 | 5 | if six.PY2: 6 | # Queue is imported for side effects on MS Windows. See issue #229. 7 | import Queue as _unused_module_Queue # noqa: F401 8 | 9 | 10 | class LifoQueue(queue.Queue): 11 | def _init(self, _): 12 | self.queue = collections.deque() 13 | 14 | def _qsize(self, len=len): 15 | return len(self.queue) 16 | 17 | def _put(self, item): 18 | self.queue.append(item) 19 | 20 | def _get(self): 21 | return self.queue.pop() 22 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/urllib3/util/response.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from ..packages.six.moves import http_client as httplib 3 | 4 | from ..exceptions import HeaderParsingError 5 | 6 | 7 | def is_fp_closed(obj): 8 | """ 9 | Checks whether a given file-like object is closed. 10 | 11 | :param obj: 12 | The file-like object to check. 13 | """ 14 | 15 | try: 16 | # Check `isclosed()` first, in case Python3 doesn't set `closed`. 17 | # GH Issue #928 18 | return obj.isclosed() 19 | except AttributeError: 20 | pass 21 | 22 | try: 23 | # Check via the official file-like-object way. 24 | return obj.closed 25 | except AttributeError: 26 | pass 27 | 28 | try: 29 | # Check if the object is a container for another file-like object that 30 | # gets released on exhaustion (e.g. HTTPResponse). 31 | return obj.fp is None 32 | except AttributeError: 33 | pass 34 | 35 | raise ValueError("Unable to determine whether fp is closed.") 36 | 37 | 38 | def assert_header_parsing(headers): 39 | """ 40 | Asserts whether all headers have been successfully parsed. 41 | Extracts encountered errors from the result of parsing headers. 42 | 43 | Only works on Python 3. 44 | 45 | :param headers: Headers to verify. 46 | :type headers: `httplib.HTTPMessage`. 47 | 48 | :raises urllib3.exceptions.HeaderParsingError: 49 | If parsing errors are found. 50 | """ 51 | 52 | # This will fail silently if we pass in the wrong kind of parameter. 53 | # To make debugging easier add an explicit check. 
54 | if not isinstance(headers, httplib.HTTPMessage): 55 | raise TypeError('expected httplib.Message, got {0}.'.format( 56 | type(headers))) 57 | 58 | defects = getattr(headers, 'defects', None) 59 | get_payload = getattr(headers, 'get_payload', None) 60 | 61 | unparsed_data = None 62 | if get_payload: 63 | # get_payload is actually email.message.Message.get_payload; 64 | # we're only interested in the result if it's not a multipart message 65 | if not headers.is_multipart(): 66 | payload = get_payload() 67 | 68 | if isinstance(payload, (bytes, str)): 69 | unparsed_data = payload 70 | 71 | if defects or unparsed_data: 72 | raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) 73 | 74 | 75 | def is_response_to_head(response): 76 | """ 77 | Checks whether the request of a response has been a HEAD-request. 78 | Handles the quirks of AppEngine. 79 | 80 | :param conn: 81 | :type conn: :class:`httplib.HTTPResponse` 82 | """ 83 | # FIXME: Can we do this somehow without accessing private httplib _method? 84 | method = response._method 85 | if isinstance(method, int): # Platform-specific: Appengine 86 | return method == 3 87 | return method.upper() == 'HEAD' 88 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/pip-19.0.3-py3.7.egg/pip/_vendor/webencodings/mklabels.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | webencodings.mklabels 4 | ~~~~~~~~~~~~~~~~~~~~~ 5 | 6 | Regenarate the webencodings.labels module. 7 | 8 | :copyright: Copyright 2012 by Simon Sapin 9 | :license: BSD, see LICENSE for details. 10 | 11 | """ 12 | 13 | import json 14 | try: 15 | from urllib import urlopen 16 | except ImportError: 17 | from urllib.request import urlopen 18 | 19 | 20 | def assert_lower(string): 21 | assert string == string.lower() 22 | return string 23 | 24 | 25 | def generate(url): 26 | parts = ['''\ 27 | """ 28 | 29 | webencodings.labels 30 | ~~~~~~~~~~~~~~~~~~~ 31 | 32 | Map encoding labels to their name. 33 | 34 | :copyright: Copyright 2012 by Simon Sapin 35 | :license: BSD, see LICENSE for details. 36 | 37 | """ 38 | 39 | # XXX Do not edit! 
40 | # This file is automatically generated by mklabels.py 41 | 42 | LABELS = { 43 | '''] 44 | labels = [ 45 | (repr(assert_lower(label)).lstrip('u'), 46 | repr(encoding['name']).lstrip('u')) 47 | for category in json.loads(urlopen(url).read().decode('ascii')) 48 | for encoding in category['encodings'] 49 | for label in encoding['labels']] 50 | max_len = max(len(label) for label, name in labels) 51 | parts.extend( 52 | ' %s:%s %s,\n' % (label, ' ' * (max_len - len(label)), name) 53 | for label, name in labels) 54 | parts.append('}') 55 | return ''.join(parts) 56 | 57 | 58 | if __name__ == '__main__': 59 | print(generate('http://encoding.spec.whatwg.org/encodings.json')) 60 | -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/setuptools-40.8.0-py3.7.egg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Lib/site-packages/setuptools-40.8.0-py3.7.egg -------------------------------------------------------------------------------- /danet/venv/Lib/site-packages/setuptools.pth: -------------------------------------------------------------------------------- 1 | ./setuptools-40.8.0-py3.7.egg 2 | -------------------------------------------------------------------------------- /danet/venv/Scripts/Activate.ps1: -------------------------------------------------------------------------------- 1 | function global:deactivate ([switch]$NonDestructive) { 2 | # Revert to original values 3 | if (Test-Path function:_OLD_VIRTUAL_PROMPT) { 4 | copy-item function:_OLD_VIRTUAL_PROMPT function:prompt 5 | remove-item function:_OLD_VIRTUAL_PROMPT 6 | } 7 | 8 | if (Test-Path env:_OLD_VIRTUAL_PYTHONHOME) { 9 | copy-item env:_OLD_VIRTUAL_PYTHONHOME env:PYTHONHOME 10 | remove-item env:_OLD_VIRTUAL_PYTHONHOME 11 | } 12 | 13 | if (Test-Path env:_OLD_VIRTUAL_PATH) { 14 | copy-item env:_OLD_VIRTUAL_PATH env:PATH 15 | remove-item env:_OLD_VIRTUAL_PATH 16 | } 17 | 18 | if (Test-Path env:VIRTUAL_ENV) { 19 | remove-item env:VIRTUAL_ENV 20 | } 21 | 22 | if (!$NonDestructive) { 23 | # Self destruct! 24 | remove-item function:deactivate 25 | } 26 | } 27 | 28 | deactivate -nondestructive 29 | 30 | $env:VIRTUAL_ENV="E:\PyCharm 2019.1.2\danet\venv" 31 | 32 | if (! 
$env:VIRTUAL_ENV_DISABLE_PROMPT) { 33 | # Set the prompt to include the env name 34 | # Make sure _OLD_VIRTUAL_PROMPT is global 35 | function global:_OLD_VIRTUAL_PROMPT {""} 36 | copy-item function:prompt function:_OLD_VIRTUAL_PROMPT 37 | function global:prompt { 38 | Write-Host -NoNewline -ForegroundColor Green '(venv) ' 39 | _OLD_VIRTUAL_PROMPT 40 | } 41 | } 42 | 43 | # Clear PYTHONHOME 44 | if (Test-Path env:PYTHONHOME) { 45 | copy-item env:PYTHONHOME env:_OLD_VIRTUAL_PYTHONHOME 46 | remove-item env:PYTHONHOME 47 | } 48 | 49 | # Add the venv to the PATH 50 | copy-item env:PATH env:_OLD_VIRTUAL_PATH 51 | $env:PATH = "$env:VIRTUAL_ENV\Scripts;$env:PATH" 52 | -------------------------------------------------------------------------------- /danet/venv/Scripts/activate: -------------------------------------------------------------------------------- 1 | # This file must be used with "source bin/activate" *from bash* 2 | # you cannot run it directly 3 | 4 | deactivate () { 5 | # reset old environment variables 6 | if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then 7 | PATH="${_OLD_VIRTUAL_PATH:-}" 8 | export PATH 9 | unset _OLD_VIRTUAL_PATH 10 | fi 11 | if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then 12 | PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}" 13 | export PYTHONHOME 14 | unset _OLD_VIRTUAL_PYTHONHOME 15 | fi 16 | 17 | # This should detect bash and zsh, which have a hash command that must 18 | # be called to get it to forget past commands. Without forgetting 19 | # past commands the $PATH changes we made may not be respected 20 | if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then 21 | hash -r 22 | fi 23 | 24 | if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then 25 | PS1="${_OLD_VIRTUAL_PS1:-}" 26 | export PS1 27 | unset _OLD_VIRTUAL_PS1 28 | fi 29 | 30 | unset VIRTUAL_ENV 31 | if [ ! "$1" = "nondestructive" ] ; then 32 | # Self destruct! 33 | unset -f deactivate 34 | fi 35 | } 36 | 37 | # unset irrelevant variables 38 | deactivate nondestructive 39 | 40 | VIRTUAL_ENV="E:\PyCharm 2019.1.2\danet\venv" 41 | export VIRTUAL_ENV 42 | 43 | _OLD_VIRTUAL_PATH="$PATH" 44 | PATH="$VIRTUAL_ENV/Scripts:$PATH" 45 | export PATH 46 | 47 | # unset PYTHONHOME if set 48 | # this will fail if PYTHONHOME is set to the empty string (which is bad anyway) 49 | # could use `if (set -u; : $PYTHONHOME) ;` in bash 50 | if [ -n "${PYTHONHOME:-}" ] ; then 51 | _OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}" 52 | unset PYTHONHOME 53 | fi 54 | 55 | if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then 56 | _OLD_VIRTUAL_PS1="${PS1:-}" 57 | if [ "x(venv) " != x ] ; then 58 | PS1="(venv) ${PS1:-}" 59 | else 60 | if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then 61 | # special case for Aspen magic directories 62 | # see http://www.zetadev.com/software/aspen/ 63 | PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1" 64 | else 65 | PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1" 66 | fi 67 | fi 68 | export PS1 69 | fi 70 | 71 | # This should detect bash and zsh, which have a hash command that must 72 | # be called to get it to forget past commands. 
Without forgetting 73 | # past commands the $PATH changes we made may not be respected 74 | if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then 75 | hash -r 76 | fi 77 | -------------------------------------------------------------------------------- /danet/venv/Scripts/activate.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | rem This file is UTF-8 encoded, so we need to update the current code page while executing it 4 | for /f "tokens=2 delims=:" %%a in ('"%SystemRoot%\System32\chcp.com"') do ( 5 | set "_OLD_CODEPAGE=%%a" 6 | ) 7 | if defined _OLD_CODEPAGE ( 8 | "%SystemRoot%\System32\chcp.com" 65001 > nul 9 | ) 10 | 11 | set "VIRTUAL_ENV=E:\PyCharm 2019.1.2\danet\venv" 12 | 13 | if not defined PROMPT ( 14 | set "PROMPT=$P$G" 15 | ) 16 | 17 | if defined _OLD_VIRTUAL_PROMPT ( 18 | set "PROMPT=%_OLD_VIRTUAL_PROMPT%" 19 | ) 20 | 21 | if defined _OLD_VIRTUAL_PYTHONHOME ( 22 | set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%" 23 | ) 24 | 25 | set "_OLD_VIRTUAL_PROMPT=%PROMPT%" 26 | set "PROMPT=(venv) %PROMPT%" 27 | 28 | if defined PYTHONHOME ( 29 | set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%" 30 | set PYTHONHOME= 31 | ) 32 | 33 | if defined _OLD_VIRTUAL_PATH ( 34 | set "PATH=%_OLD_VIRTUAL_PATH%" 35 | ) else ( 36 | set "_OLD_VIRTUAL_PATH=%PATH%" 37 | ) 38 | 39 | set "PATH=%VIRTUAL_ENV%\Scripts;%PATH%" 40 | 41 | :END 42 | if defined _OLD_CODEPAGE ( 43 | "%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul 44 | set "_OLD_CODEPAGE=" 45 | ) 46 | -------------------------------------------------------------------------------- /danet/venv/Scripts/deactivate.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | if defined _OLD_VIRTUAL_PROMPT ( 4 | set "PROMPT=%_OLD_VIRTUAL_PROMPT%" 5 | ) 6 | set _OLD_VIRTUAL_PROMPT= 7 | 8 | if defined _OLD_VIRTUAL_PYTHONHOME ( 9 | set "PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%" 10 | set _OLD_VIRTUAL_PYTHONHOME= 11 | ) 12 | 13 | if defined _OLD_VIRTUAL_PATH ( 14 | set "PATH=%_OLD_VIRTUAL_PATH%" 15 | ) 16 | 17 | set _OLD_VIRTUAL_PATH= 18 | 19 | set VIRTUAL_ENV= 20 | 21 | :END 22 | -------------------------------------------------------------------------------- /danet/venv/Scripts/easy_install-3.7-script.py: -------------------------------------------------------------------------------- 1 | #!"E:\PyCharm 2019.1.2\danet\venv\Scripts\python.exe" 2 | # EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7' 3 | __requires__ = 'setuptools==40.8.0' 4 | import re 5 | import sys 6 | from pkg_resources import load_entry_point 7 | 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) 10 | sys.exit( 11 | load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')() 12 | ) 13 | -------------------------------------------------------------------------------- /danet/venv/Scripts/easy_install-3.7.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/easy_install-3.7.exe -------------------------------------------------------------------------------- /danet/venv/Scripts/easy_install-script.py: -------------------------------------------------------------------------------- 1 | #!"E:\PyCharm 2019.1.2\danet\venv\Scripts\python.exe" 2 | # EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install' 3 | __requires__ = 
'setuptools==40.8.0' 4 | import re 5 | import sys 6 | from pkg_resources import load_entry_point 7 | 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) 10 | sys.exit( 11 | load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')() 12 | ) 13 | -------------------------------------------------------------------------------- /danet/venv/Scripts/easy_install.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/easy_install.exe -------------------------------------------------------------------------------- /danet/venv/Scripts/pip-script.py: -------------------------------------------------------------------------------- 1 | #!"E:\PyCharm 2019.1.2\danet\venv\Scripts\python.exe" 2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip' 3 | __requires__ = 'pip==19.0.3' 4 | import re 5 | import sys 6 | from pkg_resources import load_entry_point 7 | 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) 10 | sys.exit( 11 | load_entry_point('pip==19.0.3', 'console_scripts', 'pip')() 12 | ) 13 | -------------------------------------------------------------------------------- /danet/venv/Scripts/pip.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/pip.exe -------------------------------------------------------------------------------- /danet/venv/Scripts/pip3-script.py: -------------------------------------------------------------------------------- 1 | #!"E:\PyCharm 2019.1.2\danet\venv\Scripts\python.exe" 2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3' 3 | __requires__ = 'pip==19.0.3' 4 | import re 5 | import sys 6 | from pkg_resources import load_entry_point 7 | 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) 10 | sys.exit( 11 | load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')() 12 | ) 13 | -------------------------------------------------------------------------------- /danet/venv/Scripts/pip3.7-script.py: -------------------------------------------------------------------------------- 1 | #!"E:\PyCharm 2019.1.2\danet\venv\Scripts\python.exe" 2 | # EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7' 3 | __requires__ = 'pip==19.0.3' 4 | import re 5 | import sys 6 | from pkg_resources import load_entry_point 7 | 8 | if __name__ == '__main__': 9 | sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) 10 | sys.exit( 11 | load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')() 12 | ) 13 | -------------------------------------------------------------------------------- /danet/venv/Scripts/pip3.7.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/pip3.7.exe -------------------------------------------------------------------------------- /danet/venv/Scripts/pip3.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/pip3.exe 
-------------------------------------------------------------------------------- /danet/venv/Scripts/python.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/python.exe -------------------------------------------------------------------------------- /danet/venv/Scripts/pythonw.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/danet/venv/Scripts/pythonw.exe -------------------------------------------------------------------------------- /danet/venv/pyvenv.cfg: -------------------------------------------------------------------------------- 1 | home = D:\python3.7.3 2 | include-system-site-packages = false 3 | version = 3.7.3 4 | -------------------------------------------------------------------------------- /network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Andy-zhujunwen/danet-pytorch/82ba577edcc5535b169f35590a6f48a4c2944ae1/network.png --------------------------------------------------------------------------------