├── .dockerignore
├── .flake8
├── .github
├── ISSUE_TEMPLATE
│ ├── issue_template.md
│ └── proposal_template.md
├── pull_request_template.md
└── workflows
│ ├── dailymed_tests.yml
│ └── digitalocean_push.yml
├── .gitignore
├── Dockerfile
├── LICENSE.md
├── README.md
├── api
├── api
│ ├── __init__.py
│ ├── asgi.py
│ ├── settings.py
│ ├── urls.py
│ └── wsgi.py
├── dailymed
│ ├── __init__.py
│ ├── admin.py
│ ├── apps.py
│ ├── filters.py
│ ├── migrations
│ │ ├── 0001_initial.py
│ │ └── __init__.py
│ ├── models.py
│ ├── serializers.py
│ ├── tests
│ │ ├── __init__.py
│ │ ├── test.json
│ │ ├── test_api.py
│ │ └── test_db.py
│ ├── urls.py
│ └── views.py
└── manage.py
├── docker-compose.prod.yml
├── docker-compose.yml
├── extract_zips.py
├── get_zips.py
├── poetry.lock
├── pyproject.toml
└── scraper
├── scraper
├── __init__.py
├── items.py
├── middlewares.py
├── pipelines.py
├── settings.py
├── spiders
│ ├── __init__.py
│ ├── basic_extract.py
│ └── json_extract.py
└── utils.py
└── scrapy.cfg
/.dockerignore:
--------------------------------------------------------------------------------
1 | data/
2 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude =
3 | api/dailymed/migrations,
4 | __pycache__,
5 | api/api/settings.py,
6 | api/api/manage.py
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/issue_template.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Issue
3 | about: Create a new issue
4 | ---
5 | # Problem Statement
6 | [What needs to be done and why]
7 |
8 | # Criteria for Success
9 | [Measurable outcome if possible]
10 |
11 | # Additional Information
12 | [ways one might accomplish this task, links, documentation, alternatives, etc.]
13 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/proposal_template.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Proposal
3 | about: Propose a new feature or some other changes not related to a direct issue
4 | ---
5 |
6 | # Proposal
7 | [What is the idea]
8 |
9 | # Rationale
10 | [Why should this be implemented]
11 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | Fixes coderxio/dailymed-api#ISSUE NUMBER
2 |
3 | ## Explanation
4 | [What did you change?]
5 |
6 | ## Rationale
7 | [Why did you make the changes mentioned above? What alternatives did you consider?]
8 |
9 | ## Tests
10 | 1. What testing did you do?
11 | 1. Attach testing logs inside a summary block:
12 |
13 |
14 | testing logs
15 |
16 | ```
17 |
18 | ```
19 |
20 |
21 |
--------------------------------------------------------------------------------
/.github/workflows/dailymed_tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on: push
4 |
5 | jobs:
6 | build:
7 |
8 | runs-on: ubuntu-latest
9 | strategy:
10 | max-parallel: 4
11 | matrix:
12 | python-version: [ 3.8 ]
13 |
14 | env:
15 | VIRTUAL_ENV: /opt/venv
16 |
17 | steps:
18 | - name: Checkout Code
19 | uses: actions/checkout@v2
20 |
21 | - name: Set up Python ${{ matrix.python-version }}
22 | uses: actions/setup-python@v2
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 |
26 | - name: Install poetry
27 | run: pip install poetry
28 |
29 | - name: List current host python installed dependencies
30 | run: pip list
31 |
32 | - name: Create venv
33 | run: python -m venv $VIRTUAL_ENV
34 |
35 | - name: Install app dependencies with poetry
36 | run: poetry install
37 |
38 | - name: Activate venv
39 | run: echo ${{ env.VIRTUAL_ENV }}/bin >> $GITHUB_PATH
40 |
41 | - name: List app dependencies
42 | run: pip list
43 |
44 | - name: Test
45 | run: ./api/manage.py test api
46 |
47 | - name: Styling
48 | run: flake8
49 |
--------------------------------------------------------------------------------
/.github/workflows/digitalocean_push.yml:
--------------------------------------------------------------------------------
1 | name: Deployment
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | deploy:
10 |
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - name: Checkout Code
15 | uses: actions/checkout@v2
16 |
17 | - name: Update
18 | if: contains(github.event.pull_request.labels.*.name, 'App Only')
19 | uses: appleboy/scp-action@master
20 | env:
21 | HOST: ${{ secrets.HOST }}
22 | USERNAME: ${{ secrets.USERNAME }}
23 | KEY: ${{ secrets.SSH_KEY }}
24 | with:
25 | source: "."
26 | target: /opt/dailymed/dailymed-api/
27 |
28 | - name: Deploy
29 | if: contains( github.event.pull_request.labels.*.name, 'App Only')
30 | uses: appleboy/ssh-action@master
31 | with:
32 | HOST: ${{ secrets.HOST }}
33 | USERNAME: ${{ secrets.USERNAME }}
34 | KEY: ${{ secrets.SSH_KEY }}
35 | script: |
36 | chown -R www-data:www-data /opt/dailymed
37 | cd /opt/dailymed/dailymed-api
38 | docker-compose -f docker-compose.prod.yml up --build -d
39 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | data/
2 | venv/
3 | __pycache__/
4 | db.sqlite3
5 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.8.2
2 |
3 | RUN pip install poetry
4 |
5 | ENV VIRTUAL_ENV=/opt/venv
6 | RUN python -m venv $VIRTUAL_ENV
7 | ENV PATH="$VIRTUAL_ENV/bin:$PATH"
8 |
9 | WORKDIR /dailymed-api
10 |
11 | COPY poetry.lock pyproject.toml ./
12 |
13 | RUN poetry install --no-root
14 |
15 | COPY . ./
16 |
17 | ENV PYTHONPATH=/dailymed-api/api
18 |
19 | RUN poetry install
20 |
21 | ARG user1=www-data
22 |
23 | RUN chown -R ${user1}:${user1} /dailymed-api
24 | RUN chmod -R 750 /dailymed-api
25 |
26 | USER ${user1}
27 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 CodeRx
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [Join the CodeRx Slack](https://coderx.slack.com)
2 |
3 | # DailyMed API
4 | ## Available at [api.coderx.io](http://api.coderx.io)
5 | ### Endpoint(s)
6 | - [/SPL](http://api.coderx.io/spl)
7 | Filter by set_id, labeler, package_ndc, product_ndc, product_name, inactive_ingredient_name, inactive_ingredient_unii or schedule
8 | Example filter by schedule: http://api.coderx.io/spl/?schedule=CIII
9 |
10 | ### Docker Containers
11 | #### Docker Development Usage
12 | **This method is intended for internal testing only. It has not been secured for external access.**
13 | ##### Prep:
14 | - Download SPL zip files `python3 get_zips.py`
15 | Example arguments to download SPL zip 4 and unpack 100 SPL files `python3 get_zips.py --select 4 --unzip 100`
16 | For further assistance `python3 get_zips.py -h`
17 | ##### Steps:
18 | 1. Create docker container `docker-compose up -d` to bring up the Django API
19 | 2. Optional: load the database `docker-compose exec -d api sh -c "cd /dailymed-api/scraper/ && scrapy crawl json_extract"`
20 | An alternative command is `docker exec -d -it -w /dailymed-api/scraper dailymed-api scrapy crawl json_extract`
21 |
22 | #### Docker Production Usage
23 | **This method is for using docker-compose.prod.yml**
24 | ##### Prep:
25 | - Update secret in Django settings.py
26 | - Disable debug mode in Django settings.py
27 | - Install & configure Nginx to serve static folder and proxy Gunicorn
28 | - Download SPL zip files `sudo -u www-data python3 get_zips.py`
29 | ##### Steps:
30 | 1. Create directory `mkdir /opt/dailymed`
31 | 2. Change owner `chown www-data:www-data /opt/dailymed`
32 | 3. Change directory `cd /opt/dailymed`
33 | 4. Clone repo `sudo -u www-data git clone https://github.com/coderxio/dailymed-api`
34 | An alternative command is `git clone https://github.com/coderxio/dailymed-api && chown -R www-data:www-data /opt/dailymed`
35 | 5. Change directory `cd dailymed-api`
36 | 6. Create docker container `docker-compose -f docker-compose.prod.yml up --build -d`
37 | 7. Optional: load the database `docker-compose exec -d api sh -c "cd /dailymed-api/scraper/ && scrapy crawl json_extract"`
38 | An alternative command is `docker exec -d -it -w /dailymed-api/scraper dailymed-api scrapy crawl json_extract`
--------------------------------------------------------------------------------
/api/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coderxio/dailymed-api/90fe5f8b40a854ff7543ec9b8c4fc0232fdd0dfc/api/api/__init__.py
--------------------------------------------------------------------------------
/api/api/asgi.py:
--------------------------------------------------------------------------------
1 | """
2 | ASGI config for api project.
3 |
4 | It exposes the ASGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.asgi import get_asgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.settings')
15 |
16 | application = get_asgi_application()
17 |
--------------------------------------------------------------------------------
/api/api/settings.py:
--------------------------------------------------------------------------------
1 | """
2 | Django settings for api project.
3 |
4 | Generated by 'django-admin startproject' using Django 3.1.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.1/topics/settings/
8 |
9 | For the full list of settings and their values, see
10 | https://docs.djangoproject.com/en/3.1/ref/settings/
11 | """
12 |
13 | from pathlib import Path
14 |
15 | # Build paths inside the project like this: BASE_DIR / 'subdir'.
16 | BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
17 |
18 | BASE_URL = 'localhost:8000'
19 |
20 | # Quick-start development settings - unsuitable for production
21 | # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
22 |
23 | # SECURITY WARNING: keep the secret key used in production secret!
24 | SECRET_KEY = 'k^78@33zizzadq^av@2)y1jp()u@pzyu&s&*qk$%4xt=w^y94='
25 |
26 | # SECURITY WARNING: don't run with debug turned on in production!
27 | DEBUG = True
28 |
29 | ALLOWED_HOSTS = ['*']
30 |
31 |
32 | # Application definition
33 |
34 | INSTALLED_APPS = [
35 | 'django.contrib.admin',
36 | 'django.contrib.auth',
37 | 'django.contrib.contenttypes',
38 | 'django.contrib.sessions',
39 | 'django.contrib.messages',
40 | 'django.contrib.staticfiles',
41 | # rest framework
42 | 'rest_framework',
43 | # local
44 | 'dailymed',
45 | # filters
46 | 'django_filters',
47 | ]
48 |
49 | MIDDLEWARE = [
50 | 'django.middleware.security.SecurityMiddleware',
51 | 'django.contrib.sessions.middleware.SessionMiddleware',
52 | 'django.middleware.common.CommonMiddleware',
53 | 'django.middleware.csrf.CsrfViewMiddleware',
54 | 'django.contrib.auth.middleware.AuthenticationMiddleware',
55 | 'django.contrib.messages.middleware.MessageMiddleware',
56 | 'django.middleware.clickjacking.XFrameOptionsMiddleware',
57 | ]
58 |
59 | ROOT_URLCONF = 'api.urls'
60 |
61 | TEMPLATES = [
62 | {
63 | 'BACKEND': 'django.template.backends.django.DjangoTemplates',
64 | 'DIRS': [],
65 | 'APP_DIRS': True,
66 | 'OPTIONS': {
67 | 'context_processors': [
68 | 'django.template.context_processors.debug',
69 | 'django.template.context_processors.request',
70 | 'django.contrib.auth.context_processors.auth',
71 | 'django.contrib.messages.context_processors.messages',
72 | ],
73 | },
74 | },
75 | ]
76 |
77 | WSGI_APPLICATION = 'api.wsgi.application'
78 |
79 |
80 | # Database
81 | # https://docs.djangoproject.com/en/3.1/ref/settings/#databases
82 |
83 | DATABASES = {
84 | 'default': {
85 | 'ENGINE': 'django.db.backends.sqlite3',
86 | 'NAME': BASE_DIR / 'db.sqlite3',
87 | }
88 | }
89 |
90 |
91 | # Password validation
92 | # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
93 |
94 | AUTH_PASSWORD_VALIDATORS = [
95 | {
96 | 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
97 | },
98 | {
99 | 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
100 | },
101 | {
102 | 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
103 | },
104 | {
105 | 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
106 | },
107 | ]
108 |
109 |
110 | # Internationalization
111 | # https://docs.djangoproject.com/en/3.1/topics/i18n/
112 |
113 | LANGUAGE_CODE = 'en-us'
114 |
115 | TIME_ZONE = 'UTC'
116 |
117 | USE_I18N = True
118 |
119 | USE_L10N = True
120 |
121 | USE_TZ = True
122 |
123 |
124 | # Static files (CSS, JavaScript, Images)
125 | # https://docs.djangoproject.com/en/3.1/howto/static-files/
126 |
127 | STATIC_URL = '/static/'
128 |
129 | STATIC_ROOT = '/var/www/static/'
130 |
131 | REST_FRAMEWORK = {
132 | 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
133 | 'PAGE_SIZE': 100,
134 | 'DEFAULT_FILTER_BACKENDS': (
135 | 'django_filters.rest_framework.DjangoFilterBackend',
136 | ),
137 | }
138 |
--------------------------------------------------------------------------------
/api/api/urls.py:
--------------------------------------------------------------------------------
1 | """api URL Configuration
2 |
3 | The `urlpatterns` list routes URLs to views. For more information please see:
4 | https://docs.djangoproject.com/en/3.1/topics/http/urls/
5 | Examples:
6 | Function views
7 | 1. Add an import: from my_app import views
8 | 2. Add a URL to urlpatterns: path('', views.home, name='home')
9 | Class-based views
10 | 1. Add an import: from other_app.views import Home
11 | 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
12 | Including another URLconf
13 | 1. Import the include() function: from django.urls import include, path
14 | 2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
15 | """
16 | from django.contrib import admin
17 | from django.urls import include, path
18 |
19 | urlpatterns = [
20 | path('', include('dailymed.urls')),
21 | path('admin/', admin.site.urls),
22 | ]
23 |
--------------------------------------------------------------------------------
/api/api/wsgi.py:
--------------------------------------------------------------------------------
1 | """
2 | WSGI config for api project.
3 |
4 | It exposes the WSGI callable as a module-level variable named ``application``.
5 |
6 | For more information on this file, see
7 | https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
8 | """
9 |
10 | import os
11 |
12 | from django.core.wsgi import get_wsgi_application
13 |
14 | os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.settings')
15 |
16 | application = get_wsgi_application()
17 |
--------------------------------------------------------------------------------
/api/dailymed/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coderxio/dailymed-api/90fe5f8b40a854ff7543ec9b8c4fc0232fdd0dfc/api/dailymed/__init__.py
--------------------------------------------------------------------------------
/api/dailymed/admin.py:
--------------------------------------------------------------------------------
1 | # from django.contrib import admin
2 |
3 | # Register your models here.
4 |
--------------------------------------------------------------------------------
/api/dailymed/apps.py:
--------------------------------------------------------------------------------
1 | from django.apps import AppConfig
2 |
3 |
4 | class DailymedConfig(AppConfig):
5 | name = 'dailymed'
6 |
--------------------------------------------------------------------------------
/api/dailymed/filters.py:
--------------------------------------------------------------------------------
1 | from django_filters import rest_framework as filters
2 | from dailymed.models import Spl, Set
3 |
4 |
5 | SCHEDULE_CHOICES = (
6 | ('', ''),
7 | ('CII', 'CII'),
8 | ('CIII', 'CIII'),
9 | ('CIV', 'CIV'),
10 | ('CV', 'CV')
11 | )
12 |
# RxNorm term type (TTY) choices used by SetFilter.rxtty.
# Each pair is (stored value, human-readable label); labels mirror the
# codes exactly. Fixed two label typos from the original: ('BN', 'BIN')
# and ('SBDF', 'SDBF') now read ('BN', 'BN') and ('SBDF', 'SBDF').
TTY_CHOICES = (
    ('', ''),
    ('IN', 'IN'),
    ('PIN', 'PIN'),
    ('MIN', 'MIN'),
    ('SCDC', 'SCDC'),
    ('SCDF', 'SCDF'),
    ('SCDG', 'SCDG'),
    ('SCD', 'SCD'),
    ('GPCK', 'GPCK'),
    ('BN', 'BN'),
    ('SBDC', 'SBDC'),
    ('SBDF', 'SBDF'),
    ('SBDG', 'SBDG'),
    ('SBD', 'SBD'),
    ('BPCK', 'BPCK'),
    ('PSN', 'PSN'),
    ('SY', 'SY'),
    ('TMSY', 'TMSY'),
    ('DF', 'DF'),
    ('ET', 'ET'),
    ('DFG', 'DFG')
)
36 |
37 |
38 | class SplFilter(filters.FilterSet):
39 | set_id = filters.CharFilter(
40 | field_name='set__id',
41 | lookup_expr='iexact',
42 | distinct=True)
43 | labeler = filters.CharFilter(
44 | field_name='labeler',
45 | lookup_expr='icontains',
46 | distinct=True)
47 | package_ndc = filters.CharFilter(
48 | field_name='products__packages__code',
49 | lookup_expr='icontains',
50 | distinct=True)
51 | product_ndc = filters.CharFilter(
52 | field_name='products__code',
53 | lookup_expr='icontains',
54 | distinct=True)
55 | product_name = filters.CharFilter(
56 | field_name='products__name',
57 | lookup_expr='icontains',
58 | distinct=True)
59 | inactive_ingredient_name = filters.CharFilter(
60 | field_name='products__inactive_ingredients__name',
61 | lookup_expr='icontains',
62 | distinct=True)
63 | inactive_ingredient_unii = filters.CharFilter(
64 | field_name='products__inactive_ingredients__unii',
65 | lookup_expr='icontains',
66 | distinct=True)
67 | schedule = filters.MultipleChoiceFilter(
68 | field_name='products__schedule',
69 | choices=SCHEDULE_CHOICES,
70 | distinct=True)
71 |
72 | class Meta:
73 | model = Spl
74 | fields = []
75 |
76 |
77 | class SetFilter(filters.FilterSet):
78 | set_id = filters.CharFilter(
79 | field_name='id',
80 | lookup_expr='iexact',
81 | distinct=True)
82 | labeler = filters.CharFilter(
83 | field_name='spls__labeler',
84 | lookup_expr='icontains',
85 | distinct=True)
86 | package_ndc = filters.CharFilter(
87 | field_name='spls__products__packages__code',
88 | lookup_expr='icontains',
89 | distinct=True)
90 | product_ndc = filters.CharFilter(
91 | field_name='spls__products__code',
92 | lookup_expr='icontains',
93 | distinct=True)
94 | product_name = filters.CharFilter(
95 | field_name='spls__products__name',
96 | lookup_expr='icontains',
97 | distinct=True)
98 | inactive_ingredient_name = filters.CharFilter(
99 | field_name='spls__products__inactive_ingredients__name',
100 | lookup_expr='icontains',
101 | distinct=True)
102 | inactive_ingredient_unii = filters.CharFilter(
103 | field_name='spls__products__inactive_ingredients__unii',
104 | lookup_expr='icontains',
105 | distinct=True)
106 | schedule = filters.MultipleChoiceFilter(
107 | field_name='spls__products__schedule',
108 | choices=SCHEDULE_CHOICES,
109 | distinct=True)
110 | rxcui = filters.CharFilter(
111 | field_name='rxnorms__rxcui',
112 | lookup_expr='iexact',
113 | distinct=True)
114 | rxstring = filters.CharFilter(
115 | field_name='rxnorms__rxstring',
116 | lookup_expr='icontains',
117 | distinct=True)
118 | rxtty = filters.MultipleChoiceFilter(
119 | field_name='rxnorms__rxtty',
120 | choices=TTY_CHOICES,
121 | distinct=True)
122 |
123 | class Meta:
124 | model = Set
125 | fields = []
126 |
--------------------------------------------------------------------------------
/api/dailymed/migrations/0001_initial.py:
--------------------------------------------------------------------------------
1 | # Generated by Django 3.1 on 2020-12-23 12:42
2 |
3 | from django.db import migrations, models
4 | import django.db.models.deletion
5 |
6 |
7 | class Migration(migrations.Migration):
8 |
9 | initial = True
10 |
11 | dependencies = [
12 | ]
13 |
14 | operations = [
15 | migrations.CreateModel(
16 | name='InactiveIngredient',
17 | fields=[
18 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
19 | ('name', models.CharField(max_length=255)),
20 | ('unii', models.CharField(max_length=50)),
21 | ],
22 | ),
23 | migrations.CreateModel(
24 | name='Set',
25 | fields=[
26 | ('id', models.CharField(max_length=100, primary_key=True, serialize=False)),
27 | ],
28 | ),
29 | migrations.CreateModel(
30 | name='Spl',
31 | fields=[
32 | ('id', models.CharField(max_length=100, primary_key=True, serialize=False)),
33 | ('labeler', models.CharField(max_length=200)),
34 | ('set', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='spls', to='dailymed.set')),
35 | ],
36 | ),
37 | migrations.CreateModel(
38 | name='RxNorm',
39 | fields=[
40 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
41 | ('rxcui', models.CharField(max_length=20)),
42 | ('rxstring', models.CharField(max_length=255)),
43 | ('rxtty', models.CharField(max_length=5)),
44 | ('set', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='rxnorms', to='dailymed.set')),
45 | ],
46 | ),
47 | migrations.CreateModel(
48 | name='Product',
49 | fields=[
50 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
51 | ('code', models.CharField(max_length=20)),
52 | ('name', models.CharField(max_length=255)),
53 | ('schedule', models.CharField(max_length=10)),
54 | ('inactive_ingredients', models.ManyToManyField(related_name='products', to='dailymed.InactiveIngredient')),
55 | ('spl', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='products', to='dailymed.spl')),
56 | ],
57 | ),
58 | migrations.CreateModel(
59 | name='Package',
60 | fields=[
61 | ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
62 | ('code', models.CharField(max_length=20)),
63 | ('product', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='packages', to='dailymed.product')),
64 | ],
65 | ),
66 | ]
67 |
--------------------------------------------------------------------------------
/api/dailymed/migrations/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coderxio/dailymed-api/90fe5f8b40a854ff7543ec9b8c4fc0232fdd0dfc/api/dailymed/migrations/__init__.py
--------------------------------------------------------------------------------
/api/dailymed/models.py:
--------------------------------------------------------------------------------
1 | from django.db import models
2 |
3 |
4 | class Set(models.Model):
5 | """DailyMed set model which can contain multiple spl files"""
6 | id = models.CharField(max_length=100, primary_key=True)
7 |
8 | # Str method used for testing database creation in test.py
9 | def __str__(self):
10 | return self.id
11 |
12 |
13 | class Spl(models.Model):
14 | """DailyMed model for individual spl xml files"""
15 | id = models.CharField(max_length=100, primary_key=True)
16 | set = models.ForeignKey(
17 | Set,
18 | on_delete=models.PROTECT,
19 | related_name='spls',
20 | )
21 | labeler = models.CharField(max_length=200)
22 |
23 | def __str__(self):
24 | return f"{self.id} -- {self.set} -- {self.labeler}"
25 |
26 |
27 | class Product(models.Model):
28 | """Product level model"""
29 | code = models.CharField(max_length=20)
30 | name = models.CharField(max_length=255)
31 | schedule = models.CharField(max_length=10)
32 | spl = models.ForeignKey(
33 | Spl,
34 | on_delete=models.PROTECT,
35 | related_name='products',
36 | )
37 | inactive_ingredients = models.ManyToManyField(
38 | 'InactiveIngredient',
39 | related_name='products',
40 | )
41 |
42 | def __str__(self):
43 | return f"{self.code} -- {self.name} -- {self.schedule} -- {self.spl}"
44 |
45 |
46 | class InactiveIngredient(models.Model):
47 | """Inactive ingredient for each product"""
48 | name = models.CharField(max_length=255)
49 | unii = models.CharField(max_length=50)
50 |
51 | def __str__(self):
52 | return f"{self.name} -- {self.unii}"
53 |
54 |
55 | class Package(models.Model):
56 | """Package level model"""
57 | code = models.CharField(max_length=20)
58 | product = models.ForeignKey(
59 | Product,
60 | on_delete=models.PROTECT,
61 | related_name='packages'
62 | )
63 |
64 | def __str__(self):
65 | return f"{self.code} -- {self.product}"
66 |
67 |
class RxNorm(models.Model):
    """RxNorm concept mapped to a DailyMed Set.

    One Set may map to many RxNorm rows (related_name='rxnorms'); the
    SetFilter/serializers expose rxcui, rxstring and rxtty to API clients.
    """
    rxcui = models.CharField(max_length=20)
    rxstring = models.CharField(max_length=255)
    rxtty = models.CharField(max_length=5)
    set = models.ForeignKey(
        Set,
        on_delete=models.PROTECT,
        related_name='rxnorms',
    )

    def __str__(self):
        # Bug fix: original read `self.rxtring`, which raised
        # AttributeError whenever an RxNorm instance was rendered.
        return f"{self.rxcui} -- {self.rxstring} -- {self.rxtty}"
81 |
--------------------------------------------------------------------------------
/api/dailymed/serializers.py:
--------------------------------------------------------------------------------
1 | from rest_framework import serializers
2 | from django.conf import settings
3 | from dailymed.models import Set, Spl, Product, InactiveIngredient
4 | from dailymed.models import Package, RxNorm
5 |
6 |
7 | class PackageSerializer(serializers.ModelSerializer):
8 | class Meta:
9 | model = Package
10 | exclude = ('product', )
11 |
12 |
13 | class RxNormSerializer(serializers.ModelSerializer):
14 |
15 | class Meta:
16 | model = RxNorm
17 | exclude = ('set', )
18 |
19 |
20 | class InactiveIngredientSerializer(serializers.ModelSerializer):
21 | class Meta:
22 | model = InactiveIngredient
23 | fields = '__all__'
24 |
25 |
26 | class ProductSerializer(serializers.ModelSerializer):
27 | packages = PackageSerializer(many=True)
28 | inactive_ingredients = InactiveIngredientSerializer(many=True)
29 |
30 | class Meta:
31 | model = Product
32 | exclude = ('spl', )
33 |
34 |
35 | class SplSerializer(serializers.ModelSerializer):
36 | products = ProductSerializer(many=True)
37 |
38 | class Meta:
39 | model = Spl
40 | fields = '__all__'
41 |
42 |
43 | class SetSerializer(serializers.ModelSerializer):
44 | set_url = serializers.SerializerMethodField()
45 | rxnorms = RxNormSerializer(many=True)
46 | spls = serializers.HyperlinkedRelatedField(
47 | many=True, read_only=True, view_name='spl-detail',
48 | )
49 |
50 | class Meta:
51 | model = Set
52 | fields = '__all__'
53 |
54 | def get_set_url(self, obj):
55 | return f"http://{settings.BASE_URL}/api/v1/set/{obj.id}/"
56 |
57 |
58 | class DetailSerializer(serializers.ModelSerializer):
59 | spls = SplSerializer(many=True)
60 | rxnorms = RxNormSerializer(many=True)
61 |
62 | class Meta:
63 | model = Set
64 | fields = '__all__'
65 |
--------------------------------------------------------------------------------
/api/dailymed/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coderxio/dailymed-api/90fe5f8b40a854ff7543ec9b8c4fc0232fdd0dfc/api/dailymed/tests/__init__.py
--------------------------------------------------------------------------------
/api/dailymed/tests/test_api.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 | from django.urls import reverse
3 |
4 | from rest_framework import status
5 | from rest_framework.test import APIClient
6 |
7 | from dailymed.models import Set, Spl, InactiveIngredient
8 |
9 | from dailymed.serializers import SplSerializer
10 |
11 | import json
12 | from pathlib import Path
13 |
14 |
15 | SPL_URL = reverse('spl-list')
16 | PRODUCT_URL = reverse('product-list')
17 | PACKAGE_URL = reverse('package-list')
18 |
19 |
20 | class PublicApiTest(TestCase):
21 | """Test public daily med API"""
22 |
23 | def setUp(self):
24 | self.client = APIClient()
25 |
26 | """Creates sample data for database"""
27 | cwd = Path(__file__).parent.absolute()
28 | with open(f'{cwd}/test.json', 'r') as f:
29 | default = json.load(f)
30 |
31 | for data in default['results']:
32 | set_id = data.pop('set_id')
33 | products_data = data.pop('products')
34 |
35 | set_obj = Set.objects.create(id=set_id)
36 |
37 | spl_obj = set_obj.spls.create(**data)
38 |
39 | for product_data in products_data:
40 | product_data.pop('name')
41 | packages_data = product_data.pop('packages')
42 | if 'inactive_ingredients' in product_data:
43 | inactive_ingredients_data = product_data\
44 | .pop('inactive_ingredients')
45 |
46 | inactive_ingredients_list = []
47 | for inactive_ingredient_data in inactive_ingredients_data:
48 | try:
49 | ingredient = InactiveIngredient.objects.get(
50 | **inactive_ingredient_data
51 | )
52 | inactive_ingredients_list.append(ingredient)
53 | except Exception:
54 | ingredient = InactiveIngredient.objects.create(
55 | **inactive_ingredient_data
56 | )
57 | inactive_ingredients_list.append(ingredient)
58 |
59 | product_obj = spl_obj.products.create(**product_data)
60 | product_obj.inactive_ingredients\
61 | .add(*inactive_ingredients_list)
62 |
63 | for package_data in packages_data:
64 | product_obj.packages.create(**package_data)
65 |
66 | def test_retrieve_spls(self):
67 | """Test retrieving spls"""
68 | res = self.client.get(
69 | SPL_URL,
70 | format='json'
71 | )
72 |
73 | serializer = SplSerializer(Spl.objects.filter(), many=True)
74 |
75 | self.assertEqual(res.status_code, status.HTTP_200_OK)
76 | self.assertEqual(serializer.data, res.data['results'])
77 |
78 | def test_retrieve_spls_filter_by_set(self):
79 | """Test retrieving a spl by set filter"""
80 | set_id = Set.objects.first()
81 | res = self.client.get(
82 | SPL_URL,
83 | {'set_id': set_id.id},
84 | format='json')
85 |
86 | serializer = SplSerializer(
87 | Spl.objects.filter(set__id=set_id.id), many=True
88 | )
89 |
90 | self.assertEqual(res.status_code, status.HTTP_200_OK)
91 | self.assertEqual(serializer.data, res.data['results'])
92 |
93 | def test_retrieve_spls_filter_by_inactive_ing(self):
94 | """Test retrieving a spl by inactive ingredient filter"""
95 | inactive_ing = 'alcohol'
96 | res = self.client.get(
97 | SPL_URL,
98 | {'inactive_ingredient_name': inactive_ing},
99 | format='json')
100 |
101 | serializer = SplSerializer(
102 | Spl.objects.filter(
103 | products__inactive_ingredients__name__icontains=inactive_ing)
104 | .distinct(),
105 | many=True
106 | )
107 |
108 | self.assertEqual(res.status_code, status.HTTP_200_OK)
109 | self.assertEqual(serializer.data, res.data['results'])
110 |
111 | def test_retrieve_spls_filter_by_schedule(self):
112 | """Test retrieving spls by schedule filter"""
113 | schedule = 'CIV'
114 | res = self.client.get(
115 | SPL_URL,
116 | {'schedule': schedule},
117 | format='json')
118 |
119 | serializer = SplSerializer(Spl.objects.filter(
120 | products__schedule=schedule).distinct(),
121 | many=True
122 | )
123 |
124 | self.assertEqual(res.status_code, status.HTTP_200_OK)
125 | self.assertEqual(serializer.data, res.data['results'])
126 |
127 | def test_retrieve_spls_filter_by_drug_name(self):
128 | """Test retrieving spls by drug name filter"""
129 | name = 'Ciprofloxacin'
130 | res = self.client.get(
131 | SPL_URL,
132 | {'product_name': name},
133 | format='json')
134 |
135 | serializer = SplSerializer(Spl.objects.filter(
136 | products__name=name).distinct(),
137 | many=True
138 | )
139 |
140 | self.assertEqual(res.status_code, status.HTTP_200_OK)
141 | self.assertEqual(serializer.data, res.data['results'])
142 |
143 | def test_retrieve_spls_filter_by_complex(self):
144 | """Test retrieving spls filtered by set & inactive ingredient"""
145 | set_id = 'b88efb93-f1d1-4606-a669-6896f432a27f'
146 | inactive_ing = 'alcohol'
147 | res = self.client.get(
148 | SPL_URL,
149 | {'set_id': set_id,
150 | 'inactive_ingredient_name': inactive_ing},
151 | format='json'
152 | )
153 |
154 | serializer = SplSerializer(
155 | Spl.objects.filter(
156 | products__inactive_ingredients__name__icontains=inactive_ing,
157 | set__id=set_id)
158 | .distinct(),
159 | many=True
160 | )
161 |
162 | self.assertEqual(res.status_code, status.HTTP_200_OK)
163 | self.assertEqual(len(res.data['results']), 1)
164 | self.assertEqual(serializer.data, res.data['results'])
165 |
--------------------------------------------------------------------------------
/api/dailymed/tests/test_db.py:
--------------------------------------------------------------------------------
1 | from django.test import TestCase
2 |
3 | from dailymed.models import Set, Spl, Product, InactiveIngredient, Package
4 |
5 | import json
6 | from pathlib import Path
7 |
8 |
class DatabaseTest(TestCase):
    """Test database creation and structure."""

    def setUp(self):
        """Load the sample fixture (test.json) into the database.

        Builds the full Set -> Spl -> Product -> Package hierarchy and
        attaches de-duplicated InactiveIngredient rows to each product.
        """
        cwd = Path(__file__).parent.absolute()
        with open(f'{cwd}/test.json', 'r') as f:
            default = json.load(f)

        for data in default['results']:
            set_id = data.pop('set_id')
            products_data = data.pop('products')

            set_obj = Set.objects.create(id=set_id)

            spl_obj = set_obj.spls.create(**data)

            for product_data in products_data:
                # NOTE(review): the fixture's product name is deliberately
                # discarded here, so Products are created without a name —
                # confirm this matches the Product model's expectations.
                product_data.pop('name')
                packages_data = product_data.pop('packages')
                # Default to an empty list so products without any inactive
                # ingredients are still created. (Previously such products
                # were skipped entirely, and the ingredient list could leak
                # from one loop iteration into the next.)
                inactive_ingredients_data = product_data.pop(
                    'inactive_ingredients', []
                )

                # get_or_create de-duplicates ingredient rows without the
                # broad try/except-around-.get() the original used.
                inactive_ingredients_list = [
                    InactiveIngredient.objects.get_or_create(
                        **inactive_ingredient_data
                    )[0]
                    for inactive_ingredient_data in inactive_ingredients_data
                ]

                product_obj = spl_obj.products.create(**product_data)
                product_obj.inactive_ingredients\
                    .add(*inactive_ingredients_list)

                for package_data in packages_data:
                    product_obj.packages.create(**package_data)

    def test_create_set(self):
        """Test creation of set in db: __str__ returns the set id."""
        obj1 = Set.objects.first()
        obj2 = Set.objects.last()

        self.assertEqual(str(obj1), obj1.id)
        self.assertEqual(str(obj2), obj2.id)

    def test_create_spl(self):
        """Test creation of spl in db: __str__ is 'id -- set -- labeler'."""
        obj1 = Spl.objects.first()
        obj2 = Spl.objects.last()

        expected_str1 = f"{obj1.id} -- {obj1.set} -- {obj1.labeler}"

        self.assertEqual(str(obj1), expected_str1)
        # first() and last() must be distinct rows with distinct reprs.
        self.assertNotEqual(str(obj2), expected_str1)

    def test_create_product(self):
        """Test creation of product in db and its __str__ format."""
        obj1 = Product.objects.first()
        obj2 = Product.objects.last()

        expected_str1 = f"{obj1.code} -- {obj1.name} -- " \
                        f"{obj1.schedule} -- {obj1.spl}"
        self.assertEqual(str(obj1), expected_str1)
        self.assertNotEqual(str(obj2), expected_str1)

    def test_create_inactive_ingredient(self):
        """Test creation of inactive ingredient in db: 'name -- unii'."""
        obj1 = InactiveIngredient.objects.first()
        obj2 = InactiveIngredient.objects.last()

        expected_str1 = f"{obj1.name} -- {obj1.unii}"
        self.assertEqual(str(obj1), expected_str1)
        self.assertNotEqual(str(obj2), expected_str1)

    def test_create_package(self):
        """Test creation of package in db: __str__ is 'code -- product'."""
        obj1 = Package.objects.first()
        obj2 = Package.objects.last()

        expected_str1 = f"{obj1.code} -- {obj1.product}"
        self.assertEqual(str(obj1), expected_str1)
        self.assertNotEqual(str(obj2), expected_str1)
98 |
--------------------------------------------------------------------------------
/api/dailymed/urls.py:
--------------------------------------------------------------------------------
1 | from django.urls import path, include
2 | from rest_framework.routers import DefaultRouter
3 | from dailymed import views
4 |
5 | # Create a router and register our viewsets with it.
router = DefaultRouter()

# Route prefix -> viewset, all mounted under the api/v1 namespace.
_ROUTES = (
    ('set', views.SetViewSet),
    ('spl', views.SplViewSet),
    ('product', views.ProductViewSet),
    ('ndc', views.PackageViewSet),
    ('rxnorm', views.RxNormViewSet),
)
for _prefix, _viewset in _ROUTES:
    router.register(f'api/v1/{_prefix}', _viewset)

# The API URLs are determined automatically by the router.
urlpatterns = [
    path('', include(router.urls)),
]
17 |
--------------------------------------------------------------------------------
/api/dailymed/views.py:
--------------------------------------------------------------------------------
1 | from rest_framework import viewsets, mixins
2 | from dailymed.models import Set, Spl, Product, Package, RxNorm
3 | from dailymed.serializers import (
4 | SetSerializer,
5 | SplSerializer,
6 | ProductSerializer,
7 | PackageSerializer,
8 | RxNormSerializer,
9 | DetailSerializer
10 | )
11 | from dailymed.filters import SplFilter, SetFilter
12 |
13 |
class DualSetSerializerViewSet(viewsets.ModelViewSet):
    """
    ViewSet providing different serializers for list and detail views.

    Subclasses may override ``list_serializer`` and ``retrieve_serializer``
    to supply their own. (Previously these attributes were documented but
    ignored: the serializers were hard-coded in list()/retrieve(), so the
    attributes set on SetViewSet had no effect. The defaults below preserve
    the original behavior.)
    """
    list_serializer = SetSerializer
    retrieve_serializer = DetailSerializer

    def list(self, *args, **kwargs):
        # Swap in the list serializer and the Set filter before delegating.
        self.serializer_class = self.list_serializer
        self.filterset_class = SetFilter
        return super().list(*args, **kwargs)

    def retrieve(self, *args, **kwargs):
        self.serializer_class = self.retrieve_serializer
        return super().retrieve(*args, **kwargs)
28 |
29 |
class SetViewSet(DualSetSerializerViewSet):
    """Full CRUD endpoint for Set records with per-action serializers."""

    queryset = Set.objects.all()
    # NOTE(review): these appear intended as the list/detail serializers
    # consumed by DualSetSerializerViewSet — confirm the parent class
    # actually reads them.
    list_serializer = SetSerializer
    retrieve_serializer = DetailSerializer
35 |
36 |
class SplViewSet(
    mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet
):
    """Read-only (list/retrieve) endpoint for SPL records.

    List results can be narrowed with the query parameters declared on
    SplFilter.
    """
    queryset = Spl.objects.all()
    serializer_class = SplSerializer
    filterset_class = SplFilter
43 |
44 |
class ProductViewSet(
    mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet
):
    """Read-only (list/retrieve) endpoint for Product records."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
50 |
51 |
class PackageViewSet(
    mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet
):
    """Read-only (list/retrieve) endpoint for Package records."""
    queryset = Package.objects.all()
    serializer_class = PackageSerializer
57 |
58 |
class RxNormViewSet(
    mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet
):
    """Read-only (list/retrieve) endpoint for RxNorm records."""
    queryset = RxNorm.objects.all()
    serializer_class = RxNormSerializer
64 |
--------------------------------------------------------------------------------
/api/manage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """Django's command-line utility for administrative tasks."""
3 | import os
4 | import sys
5 |
6 |
def main():
    """Run Django's command-line utility for administrative tasks."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
23 |
--------------------------------------------------------------------------------
/docker-compose.prod.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 |
5 | api:
6 | build:
7 | context: .
8 | container_name: dailymed-api
9 | working_dir: /dailymed-api/api
10 | command: >
11 | sh -c 'python ./manage.py migrate &&\
12 | python ./manage.py collectstatic --noinput &&\
13 | gunicorn api.wsgi --bind 0.0.0.0:8000'
14 | ports:
15 | - 8000:8000
16 | volumes:
17 | - ./:/dailymed-api
18 | - ./data:/dailymed-api/data
19 | - /var/www/:/var/www/
20 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 |
3 | services:
4 |
5 | api:
6 | build:
7 | context: .
8 | args:
9 | user1: root
10 | container_name: dailymed-api
11 | command: >
12 | sh -c 'python ./api/manage.py migrate &&\
13 | python ./api/manage.py runserver 0.0.0.0:8000'
14 | ports:
15 | - 8000:8000
16 | volumes:
17 | - ./:/dailymed-api
18 | - ./data:/dailymed-api/data
19 |
--------------------------------------------------------------------------------
/extract_zips.py:
--------------------------------------------------------------------------------
1 | from io import BytesIO
2 | import zipfile
3 | from pathlib import Path
4 |
5 |
def extract(depth=-1):
    """Unpack SPL XML files from the downloaded DailyMed zip archive.

    Looks for a ``*.zip`` inside ``./data`` (relative to this file); that
    archive contains one nested zip per SPL. Every member of each nested
    zip whose name ends in ``xml`` is extracted into ``./data/spls``.

    Args:
        depth: Maximum number of nested zips to process. The default -1
            means "no limit" (the countdown never reaches zero).

    Raises:
        Exception: If no zip file is present in the data directory.
    """
    cwd = Path(__file__).parent.absolute()
    data_dir = cwd / 'data'
    partial_dir = data_dir / 'spls'

    # parents=True also creates data_dir when it is missing; the original
    # mkdir raised FileNotFoundError in that case.
    partial_dir.mkdir(parents=True, exist_ok=True)

    try:
        zipped_dm_data = next(data_dir.glob('*.zip'))
    except StopIteration as err:
        raise Exception("Is there a zip file in the data dir?") from err

    with zipfile.ZipFile(zipped_dm_data) as zip_ref:
        unzip_count = depth

        for spl_zip in zip_ref.namelist():
            # A positive depth counts down to 0 and stops; -1 never hits 0.
            if not unzip_count:
                break
            unzip_count -= 1
            nested_zip_data = BytesIO(zip_ref.read(spl_zip))
            with zipfile.ZipFile(nested_zip_data) as nested_zip:
                for unzip_file in nested_zip.namelist():
                    # Matches any name ending in "xml" (no dot required),
                    # as in the original.
                    if unzip_file.endswith('xml'):
                        nested_zip.extract(unzip_file, partial_dir)


if __name__ == '__main__':
    extract()
35 |
--------------------------------------------------------------------------------
/get_zips.py:
--------------------------------------------------------------------------------
1 | import urllib.request as request
2 | from pathlib import Path
3 | import shutil
4 | import os
5 | import argparse
6 | import zipfile
7 |
8 | from extract_zips import extract
9 |
10 |
# CLI: --unzip caps how many nested SPL zips get extracted; --download and
# --select are mutually exclusive ways to choose which SPL archives to fetch.
parser = argparse.ArgumentParser(description="Download and unzip SPL data.")
parser.add_argument(
    '--unzip',
    metavar='int',
    default='-1',
    type=int,
    help='Optional number of files to extract from SPL zip.'
)
download_or_select = parser.add_mutually_exclusive_group()
download_or_select.add_argument(
    '--download',
    metavar='int',
    default='4',
    type=int,
    help='Optional number of SPL zip files to download, max 4.'
)
download_or_select.add_argument(
    '--select',
    metavar='int',
    type=int,
    help="Optional SPL zip file to download, i.e. 1, 2, 3 or 4"
)

# argparse applies type=int to the string defaults, so these are ints.
args = parser.parse_args()
depth = args.unzip
number = args.download
spl_zip = args.select


cwd = Path(__file__).parent.absolute()
data_dir = cwd / 'data'

if not data_dir.exists():
    data_dir.mkdir(exist_ok=True)

# Fetch and unpack the RxNorm mappings archive into data/rxnorm, then
# delete the downloaded zip.
try:
    output_dir = data_dir / 'rxnorm'
    if not output_dir.exists():
        output_dir.mkdir(exist_ok=True)
    with request.urlopen('ftp://public.nlm.nih.gov/nlmdata/.dailymed/rxnorm_mappings.zip') as r, open( # noqa: E501
            f'{data_dir}/rxnorm.zip', 'wb') as f:
        shutil.copyfileobj(r, f)
    with zipfile.ZipFile(f'{data_dir}/rxnorm.zip') as zip_ref:
        zip_ref.extractall(output_dir)
    os.remove(f'{data_dir}/rxnorm.zip')
except Exception as err:
    raise Exception(f"Unable to perform request: {err}")

# Download either one selected SPL part or parts 1..number; each part is
# downloaded, extracted via extract_zips.extract, then removed to save disk.
try:
    if spl_zip:
        with request.urlopen(
                f'ftp://public.nlm.nih.gov/nlmdata/.dailymed/dm_spl_release_human_rx_part{spl_zip}.zip') as r, open( # noqa: E501
                f'{data_dir}/spl_part{spl_zip}.zip', 'wb') as f:
            shutil.copyfileobj(r, f)
        extract(depth)
        os.remove(f'{data_dir}/spl_part{spl_zip}.zip')
    else:
        for i in range(1, number+1):
            with request.urlopen(
                    f'ftp://public.nlm.nih.gov/nlmdata/.dailymed/dm_spl_release_human_rx_part{i}.zip') as r, open( # noqa: E501
                    f'{data_dir}/spl_part{i}.zip', 'wb') as f:
                shutil.copyfileobj(r, f)
            extract(depth)
            os.remove(f'{data_dir}/spl_part{i}.zip')
except Exception as err:
    raise Exception(f"Unable to perform request: {err}")
finally:
    # NOTE(review): finally runs even on failure, so this prints after an
    # error as well — confirm that is intended.
    print("Downloads complete")
79 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | category = "main"
3 | description = "Disable App Nap on OS X 10.9"
4 | marker = "sys_platform == \"darwin\""
5 | name = "appnope"
6 | optional = false
7 | python-versions = "*"
8 | version = "0.1.0"
9 |
10 | [[package]]
11 | category = "main"
12 | description = "ASGI specs, helper code, and adapters"
13 | name = "asgiref"
14 | optional = false
15 | python-versions = ">=3.5"
16 | version = "3.2.10"
17 |
18 | [package.extras]
19 | tests = ["pytest", "pytest-asyncio"]
20 |
21 | [[package]]
22 | category = "main"
23 | description = "Classes Without Boilerplate"
24 | name = "attrs"
25 | optional = false
26 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
27 | version = "19.3.0"
28 |
29 | [package.extras]
30 | azure-pipelines = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "pytest-azurepipelines"]
31 | dev = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "pre-commit"]
32 | docs = ["sphinx", "zope.interface"]
33 | tests = ["coverage", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
34 |
35 | [[package]]
36 | category = "main"
37 | description = "Self-service finite-state machines for the programmer on the go."
38 | name = "automat"
39 | optional = false
40 | python-versions = "*"
41 | version = "20.2.0"
42 |
43 | [package.dependencies]
44 | attrs = ">=19.2.0"
45 | six = "*"
46 |
47 | [package.extras]
48 | visualize = ["graphviz (>0.5.1)", "Twisted (>=16.1.1)"]
49 |
50 | [[package]]
51 | category = "main"
52 | description = "Specifications for callback functions passed in to an API"
53 | name = "backcall"
54 | optional = false
55 | python-versions = "*"
56 | version = "0.2.0"
57 |
58 | [[package]]
59 | category = "main"
60 | description = "Foreign Function Interface for Python calling C code."
61 | name = "cffi"
62 | optional = false
63 | python-versions = "*"
64 | version = "1.14.1"
65 |
66 | [package.dependencies]
67 | pycparser = "*"
68 |
69 | [[package]]
70 | category = "main"
71 | description = "Cross-platform colored terminal text."
72 | marker = "sys_platform == \"win32\""
73 | name = "colorama"
74 | optional = false
75 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
76 | version = "0.4.3"
77 |
78 | [[package]]
79 | category = "main"
80 | description = "Symbolic constants in Python"
81 | name = "constantly"
82 | optional = false
83 | python-versions = "*"
84 | version = "15.1.0"
85 |
86 | [[package]]
87 | category = "main"
88 | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
89 | name = "cryptography"
90 | optional = false
91 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
92 | version = "3.0"
93 |
94 | [package.dependencies]
95 | cffi = ">=1.8,<1.11.3 || >1.11.3"
96 | six = ">=1.4.1"
97 |
98 | [package.extras]
99 | docs = ["sphinx (>=1.6.5,<1.8.0 || >1.8.0,<3.1.0 || >3.1.0,<3.1.1 || >3.1.1)", "sphinx-rtd-theme"]
100 | docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"]
101 | idna = ["idna (>=2.1)"]
102 | pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"]
103 | ssh = ["bcrypt (>=3.1.5)"]
104 | test = ["pytest (>=3.6.0,<3.9.0 || >3.9.0,<3.9.1 || >3.9.1,<3.9.2 || >3.9.2)", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,<3.79.2 || >3.79.2)"]
105 |
106 | [[package]]
107 | category = "main"
108 | description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0"
109 | name = "cssselect"
110 | optional = false
111 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
112 | version = "1.1.0"
113 |
114 | [[package]]
115 | category = "main"
116 | description = "Decorators for Humans"
117 | name = "decorator"
118 | optional = false
119 | python-versions = ">=2.6, !=3.0.*, !=3.1.*"
120 | version = "4.4.2"
121 |
122 | [[package]]
123 | category = "main"
124 | description = "A high-level Python Web framework that encourages rapid development and clean, pragmatic design."
125 | name = "django"
126 | optional = false
127 | python-versions = ">=3.6"
128 | version = "3.1"
129 |
130 | [package.dependencies]
131 | asgiref = ">=3.2.10,<3.3.0"
132 | pytz = "*"
133 | sqlparse = ">=0.2.2"
134 |
135 | [package.extras]
136 | argon2 = ["argon2-cffi (>=16.1.0)"]
137 | bcrypt = ["bcrypt"]
138 |
139 | [[package]]
140 | category = "main"
141 | description = "Django-filter is a reusable Django application for allowing users to filter querysets dynamically."
142 | name = "django-filter"
143 | optional = false
144 | python-versions = ">=3.5"
145 | version = "2.3.0"
146 |
147 | [package.dependencies]
148 | Django = ">=2.2"
149 |
150 | [[package]]
151 | category = "main"
152 | description = "Web APIs for Django, made easy."
153 | name = "djangorestframework"
154 | optional = false
155 | python-versions = ">=3.5"
156 | version = "3.11.1"
157 |
158 | [package.dependencies]
159 | django = ">=1.11"
160 |
161 | [[package]]
162 | category = "main"
163 | description = "the modular source code checker: pep8 pyflakes and co"
164 | name = "flake8"
165 | optional = false
166 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
167 | version = "3.8.3"
168 |
169 | [package.dependencies]
170 | mccabe = ">=0.6.0,<0.7.0"
171 | pycodestyle = ">=2.6.0a1,<2.7.0"
172 | pyflakes = ">=2.2.0,<2.3.0"
173 |
174 | [[package]]
175 | category = "main"
176 | description = "WSGI HTTP Server for UNIX"
177 | name = "gunicorn"
178 | optional = false
179 | python-versions = ">=3.4"
180 | version = "20.0.4"
181 |
182 | [package.dependencies]
183 | setuptools = ">=3.0"
184 |
185 | [package.extras]
186 | eventlet = ["eventlet (>=0.9.7)"]
187 | gevent = ["gevent (>=0.13)"]
188 | setproctitle = ["setproctitle"]
189 | tornado = ["tornado (>=0.2)"]
190 |
191 | [[package]]
192 | category = "main"
193 | description = "A featureful, immutable, and correct URL for Python."
194 | name = "hyperlink"
195 | optional = false
196 | python-versions = "*"
197 | version = "19.0.0"
198 |
199 | [package.dependencies]
200 | idna = ">=2.5"
201 |
202 | [[package]]
203 | category = "main"
204 | description = "Internationalized Domain Names in Applications (IDNA)"
205 | name = "idna"
206 | optional = false
207 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
208 | version = "2.10"
209 |
210 | [[package]]
211 | category = "main"
212 | description = ""
213 | name = "incremental"
214 | optional = false
215 | python-versions = "*"
216 | version = "17.5.0"
217 |
218 | [package.extras]
219 | scripts = ["click (>=6.0)", "twisted (>=16.4.0)"]
220 |
221 | [[package]]
222 | category = "main"
223 | description = "IPython: Productive Interactive Computing"
224 | name = "ipython"
225 | optional = false
226 | python-versions = ">=3.6"
227 | version = "7.16.1"
228 |
229 | [package.dependencies]
230 | appnope = "*"
231 | backcall = "*"
232 | colorama = "*"
233 | decorator = "*"
234 | jedi = ">=0.10"
235 | pexpect = "*"
236 | pickleshare = "*"
237 | prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0"
238 | pygments = "*"
239 | setuptools = ">=18.5"
240 | traitlets = ">=4.2"
241 |
242 | [package.extras]
243 | all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.14)", "pygments", "qtconsole", "requests", "testpath"]
244 | doc = ["Sphinx (>=1.3)"]
245 | kernel = ["ipykernel"]
246 | nbconvert = ["nbconvert"]
247 | nbformat = ["nbformat"]
248 | notebook = ["notebook", "ipywidgets"]
249 | parallel = ["ipyparallel"]
250 | qtconsole = ["qtconsole"]
251 | test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"]
252 |
253 | [[package]]
254 | category = "main"
255 | description = "Vestigial utilities from IPython"
256 | name = "ipython-genutils"
257 | optional = false
258 | python-versions = "*"
259 | version = "0.2.0"
260 |
261 | [[package]]
262 | category = "main"
263 | description = "Common interface for data container classes"
264 | name = "itemadapter"
265 | optional = false
266 | python-versions = "*"
267 | version = "0.1.0"
268 |
269 | [[package]]
270 | category = "main"
271 | description = "An autocompletion tool for Python that can be used for text editors."
272 | name = "jedi"
273 | optional = false
274 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
275 | version = "0.17.2"
276 |
277 | [package.dependencies]
278 | parso = ">=0.7.0,<0.8.0"
279 |
280 | [package.extras]
281 | qa = ["flake8 (3.7.9)"]
282 | testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"]
283 |
284 | [[package]]
285 | category = "main"
286 | description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
287 | name = "lxml"
288 | optional = false
289 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*"
290 | version = "4.5.2"
291 |
292 | [package.extras]
293 | cssselect = ["cssselect (>=0.7)"]
294 | html5 = ["html5lib"]
295 | htmlsoup = ["beautifulsoup4"]
296 | source = ["Cython (>=0.29.7)"]
297 |
298 | [[package]]
299 | category = "main"
300 | description = "McCabe checker, plugin for flake8"
301 | name = "mccabe"
302 | optional = false
303 | python-versions = "*"
304 | version = "0.6.1"
305 |
306 | [[package]]
307 | category = "main"
308 | description = "Parsel is a library to extract data from HTML and XML using XPath and CSS selectors"
309 | name = "parsel"
310 | optional = false
311 | python-versions = "*"
312 | version = "1.6.0"
313 |
314 | [package.dependencies]
315 | cssselect = ">=0.9"
316 | lxml = "*"
317 | six = ">=1.6.0"
318 | w3lib = ">=1.19.0"
319 |
320 | [[package]]
321 | category = "main"
322 | description = "A Python Parser"
323 | name = "parso"
324 | optional = false
325 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
326 | version = "0.7.1"
327 |
328 | [package.extras]
329 | testing = ["docopt", "pytest (>=3.0.7)"]
330 |
331 | [[package]]
332 | category = "main"
333 | description = "Pexpect allows easy control of interactive console applications."
334 | marker = "sys_platform != \"win32\""
335 | name = "pexpect"
336 | optional = false
337 | python-versions = "*"
338 | version = "4.8.0"
339 |
340 | [package.dependencies]
341 | ptyprocess = ">=0.5"
342 |
343 | [[package]]
344 | category = "main"
345 | description = "Tiny 'shelve'-like database with concurrency support"
346 | name = "pickleshare"
347 | optional = false
348 | python-versions = "*"
349 | version = "0.7.5"
350 |
351 | [[package]]
352 | category = "main"
353 | description = "Library for building powerful interactive command lines in Python"
354 | name = "prompt-toolkit"
355 | optional = false
356 | python-versions = ">=3.6.1"
357 | version = "3.0.5"
358 |
359 | [package.dependencies]
360 | wcwidth = "*"
361 |
362 | [[package]]
363 | category = "main"
364 | description = "Pure-Python robots.txt parser with support for modern conventions"
365 | name = "protego"
366 | optional = false
367 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
368 | version = "0.1.16"
369 |
370 | [package.dependencies]
371 | six = "*"
372 |
373 | [[package]]
374 | category = "main"
375 | description = "Run a subprocess in a pseudo terminal"
376 | marker = "sys_platform != \"win32\""
377 | name = "ptyprocess"
378 | optional = false
379 | python-versions = "*"
380 | version = "0.6.0"
381 |
382 | [[package]]
383 | category = "main"
384 | description = "ASN.1 types and codecs"
385 | name = "pyasn1"
386 | optional = false
387 | python-versions = "*"
388 | version = "0.4.8"
389 |
390 | [[package]]
391 | category = "main"
392 | description = "A collection of ASN.1-based protocols modules."
393 | name = "pyasn1-modules"
394 | optional = false
395 | python-versions = "*"
396 | version = "0.2.8"
397 |
398 | [package.dependencies]
399 | pyasn1 = ">=0.4.6,<0.5.0"
400 |
401 | [[package]]
402 | category = "main"
403 | description = "Python style guide checker"
404 | name = "pycodestyle"
405 | optional = false
406 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
407 | version = "2.6.0"
408 |
409 | [[package]]
410 | category = "main"
411 | description = "C parser in Python"
412 | name = "pycparser"
413 | optional = false
414 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
415 | version = "2.20"
416 |
417 | [[package]]
418 | category = "main"
419 | description = "Multi-producer-multi-consumer signal dispatching mechanism"
420 | name = "pydispatcher"
421 | optional = false
422 | python-versions = "*"
423 | version = "2.0.5"
424 |
425 | [[package]]
426 | category = "main"
427 | description = "passive checker of Python programs"
428 | name = "pyflakes"
429 | optional = false
430 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
431 | version = "2.2.0"
432 |
433 | [[package]]
434 | category = "main"
435 | description = "Pygments is a syntax highlighting package written in Python."
436 | name = "pygments"
437 | optional = false
438 | python-versions = ">=3.5"
439 | version = "2.6.1"
440 |
441 | [[package]]
442 | category = "main"
443 | description = "Hamcrest framework for matcher objects"
444 | name = "pyhamcrest"
445 | optional = false
446 | python-versions = ">=3.5"
447 | version = "2.0.2"
448 |
449 | [[package]]
450 | category = "main"
451 | description = "Python wrapper module around the OpenSSL library"
452 | name = "pyopenssl"
453 | optional = false
454 | python-versions = "*"
455 | version = "19.1.0"
456 |
457 | [package.dependencies]
458 | cryptography = ">=2.8"
459 | six = ">=1.5.2"
460 |
461 | [package.extras]
462 | docs = ["sphinx", "sphinx-rtd-theme"]
463 | test = ["flaky", "pretend", "pytest (>=3.0.1)"]
464 |
465 | [[package]]
466 | category = "main"
467 | description = "Multi-producer-multi-consumer signal dispatching mechanism"
468 | marker = "platform_python_implementation == \"PyPy\""
469 | name = "pypydispatcher"
470 | optional = false
471 | python-versions = "*"
472 | version = "2.1.2"
473 |
474 | [[package]]
475 | category = "main"
476 | description = "World timezone definitions, modern and historical"
477 | name = "pytz"
478 | optional = false
479 | python-versions = "*"
480 | version = "2020.1"
481 |
482 | [[package]]
483 | category = "main"
484 | description = "Collection of persistent (disk-based) queues"
485 | name = "queuelib"
486 | optional = false
487 | python-versions = "*"
488 | version = "1.5.0"
489 |
490 | [[package]]
491 | category = "main"
492 | description = "A high-level Web Crawling and Web Scraping framework"
493 | name = "scrapy"
494 | optional = false
495 | python-versions = ">=3.5.2"
496 | version = "2.2.1"
497 |
498 | [package.dependencies]
499 | PyDispatcher = ">=2.0.5"
500 | PyPyDispatcher = ">=2.1.0"
501 | Twisted = ">=17.9.0"
502 | cryptography = ">=2.0"
503 | cssselect = ">=0.9.1"
504 | itemadapter = ">=0.1.0"
505 | lxml = ">=3.5.0"
506 | parsel = ">=1.5.0"
507 | protego = ">=0.1.15"
508 | pyOpenSSL = ">=16.2.0"
509 | queuelib = ">=1.4.2"
510 | service-identity = ">=16.0.0"
511 | w3lib = ">=1.17.0"
512 | "zope.interface" = ">=4.1.3"
513 |
514 | [[package]]
515 | category = "main"
516 | description = "Service identity verification for pyOpenSSL & cryptography."
517 | name = "service-identity"
518 | optional = false
519 | python-versions = "*"
520 | version = "18.1.0"
521 |
522 | [package.dependencies]
523 | attrs = ">=16.0.0"
524 | cryptography = "*"
525 | pyasn1 = "*"
526 | pyasn1-modules = "*"
527 |
528 | [package.extras]
529 | dev = ["coverage (>=4.2.0)", "pytest", "sphinx", "idna", "pyopenssl"]
530 | docs = ["sphinx"]
531 | idna = ["idna"]
532 | tests = ["coverage (>=4.2.0)", "pytest"]
533 |
534 | [[package]]
535 | category = "main"
536 | description = "Python 2 and 3 compatibility utilities"
537 | name = "six"
538 | optional = false
539 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
540 | version = "1.15.0"
541 |
542 | [[package]]
543 | category = "main"
544 | description = "Non-validating SQL parser"
545 | name = "sqlparse"
546 | optional = false
547 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
548 | version = "0.3.1"
549 |
550 | [[package]]
551 | category = "main"
552 | description = "Traitlets Python config system"
553 | name = "traitlets"
554 | optional = false
555 | python-versions = "*"
556 | version = "4.3.3"
557 |
558 | [package.dependencies]
559 | decorator = "*"
560 | ipython-genutils = "*"
561 | six = "*"
562 |
563 | [package.extras]
564 | test = ["pytest", "mock"]
565 |
566 | [[package]]
567 | category = "main"
568 | description = "An asynchronous networking framework written in Python"
569 | name = "twisted"
570 | optional = false
571 | python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
572 | version = "20.3.0"
573 |
574 | [package.dependencies]
575 | Automat = ">=0.3.0"
576 | PyHamcrest = ">=1.9.0,<1.10.0 || >1.10.0"
577 | attrs = ">=19.2.0"
578 | constantly = ">=15.1"
579 | hyperlink = ">=17.1.1"
580 | incremental = ">=16.10.1"
581 | "zope.interface" = ">=4.4.2"
582 |
583 | [package.extras]
584 | all_non_platform = ["pyopenssl (>=16.0.0)", "service_identity (>=18.1.0)", "idna (>=0.6,<2.3 || >2.3)", "pyasn1", "cryptography (>=2.5)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "soappy", "pyserial (>=3.0)", "h2 (>=3.0,<4.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)"]
585 | conch = ["pyasn1", "cryptography (>=2.5)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)"]
586 | dev = ["pyflakes (>=1.0.0)", "twisted-dev-tools (>=0.0.2)", "python-subunit", "sphinx (>=1.3.1)", "towncrier (>=17.4.0)"]
587 | http2 = ["h2 (>=3.0,<4.0)", "priority (>=1.1.0,<2.0)"]
588 | macos_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyopenssl (>=16.0.0)", "service_identity (>=18.1.0)", "idna (>=0.6,<2.3 || >2.3)", "pyasn1", "cryptography (>=2.5)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "soappy", "pyserial (>=3.0)", "h2 (>=3.0,<4.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)"]
589 | osx_platform = ["pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyopenssl (>=16.0.0)", "service_identity (>=18.1.0)", "idna (>=0.6,<2.3 || >2.3)", "pyasn1", "cryptography (>=2.5)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "soappy", "pyserial (>=3.0)", "h2 (>=3.0,<4.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)"]
590 | serial = ["pyserial (>=3.0)", "pywin32 (!=226)"]
591 | soap = ["soappy"]
592 | tls = ["pyopenssl (>=16.0.0)", "service_identity (>=18.1.0)", "idna (>=0.6,<2.3 || >2.3)"]
593 | windows_platform = ["pywin32 (!=226)", "pyopenssl (>=16.0.0)", "service_identity (>=18.1.0)", "idna (>=0.6,<2.3 || >2.3)", "pyasn1", "cryptography (>=2.5)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "soappy", "pyserial (>=3.0)", "h2 (>=3.0,<4.0)", "priority (>=1.1.0,<2.0)", "pywin32 (!=226)"]
594 |
595 | [[package]]
596 | category = "main"
597 | description = "Library of web-related functions"
598 | name = "w3lib"
599 | optional = false
600 | python-versions = "*"
601 | version = "1.22.0"
602 |
603 | [package.dependencies]
604 | six = ">=1.4.1"
605 |
606 | [[package]]
607 | category = "main"
608 | description = "Measures the displayed width of unicode strings in a terminal"
609 | name = "wcwidth"
610 | optional = false
611 | python-versions = "*"
612 | version = "0.2.5"
613 |
614 | [[package]]
615 | category = "main"
616 | description = "Interfaces for Python"
617 | name = "zope.interface"
618 | optional = false
619 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
620 | version = "5.1.0"
621 |
622 | [package.dependencies]
623 | setuptools = "*"
624 |
625 | [package.extras]
626 | docs = ["sphinx", "repoze.sphinx.autointerface"]
627 | test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
628 | testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
629 |
630 | [metadata]
631 | content-hash = "7abdd75096c0ab65a7e9d8bdb0af26bd8330025d87dca1d3b163125b795891a1"
632 | lock-version = "1.0"
633 | python-versions = "^3.8"
634 |
635 | [metadata.files]
636 | appnope = [
637 | {file = "appnope-0.1.0-py2.py3-none-any.whl", hash = "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0"},
638 | {file = "appnope-0.1.0.tar.gz", hash = "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"},
639 | ]
640 | asgiref = [
641 | {file = "asgiref-3.2.10-py3-none-any.whl", hash = "sha256:9fc6fb5d39b8af147ba40765234fa822b39818b12cc80b35ad9b0cef3a476aed"},
642 | {file = "asgiref-3.2.10.tar.gz", hash = "sha256:7e51911ee147dd685c3c8b805c0ad0cb58d360987b56953878f8c06d2d1c6f1a"},
643 | ]
644 | attrs = [
645 | {file = "attrs-19.3.0-py2.py3-none-any.whl", hash = "sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c"},
646 | {file = "attrs-19.3.0.tar.gz", hash = "sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"},
647 | ]
648 | automat = [
649 | {file = "Automat-20.2.0-py2.py3-none-any.whl", hash = "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"},
650 | {file = "Automat-20.2.0.tar.gz", hash = "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33"},
651 | ]
652 | backcall = [
653 | {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
654 | {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"},
655 | ]
656 | cffi = [
657 | {file = "cffi-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:66dd45eb9530e3dde8f7c009f84568bc7cac489b93d04ac86e3111fb46e470c2"},
658 | {file = "cffi-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:4f53e4128c81ca3212ff4cf097c797ab44646a40b42ec02a891155cd7a2ba4d8"},
659 | {file = "cffi-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:833401b15de1bb92791d7b6fb353d4af60dc688eaa521bd97203dcd2d124a7c1"},
660 | {file = "cffi-1.14.1-cp27-cp27m-win32.whl", hash = "sha256:26f33e8f6a70c255767e3c3f957ccafc7f1f706b966e110b855bfe944511f1f9"},
661 | {file = "cffi-1.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:b87dfa9f10a470eee7f24234a37d1d5f51e5f5fa9eeffda7c282e2b8f5162eb1"},
662 | {file = "cffi-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:effd2ba52cee4ceff1a77f20d2a9f9bf8d50353c854a282b8760ac15b9833168"},
663 | {file = "cffi-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bac0d6f7728a9cc3c1e06d4fcbac12aaa70e9379b3025b27ec1226f0e2d404cf"},
664 | {file = "cffi-1.14.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d6033b4ffa34ef70f0b8086fd4c3df4bf801fee485a8a7d4519399818351aa8e"},
665 | {file = "cffi-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8416ed88ddc057bab0526d4e4e9f3660f614ac2394b5e019a628cdfff3733849"},
666 | {file = "cffi-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:892daa86384994fdf4856cb43c93f40cbe80f7f95bb5da94971b39c7f54b3a9c"},
667 | {file = "cffi-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:c991112622baee0ae4d55c008380c32ecfd0ad417bcd0417ba432e6ba7328caa"},
668 | {file = "cffi-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fcf32bf76dc25e30ed793145a57426064520890d7c02866eb93d3e4abe516948"},
669 | {file = "cffi-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f960375e9823ae6a07072ff7f8a85954e5a6434f97869f50d0e41649a1c8144f"},
670 | {file = "cffi-1.14.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a6d28e7f14ecf3b2ad67c4f106841218c8ab12a0683b1528534a6c87d2307af3"},
671 | {file = "cffi-1.14.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:cda422d54ee7905bfc53ee6915ab68fe7b230cacf581110df4272ee10462aadc"},
672 | {file = "cffi-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:4a03416915b82b81af5502459a8a9dd62a3c299b295dcdf470877cb948d655f2"},
673 | {file = "cffi-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:4ce1e995aeecf7cc32380bc11598bfdfa017d592259d5da00fc7ded11e61d022"},
674 | {file = "cffi-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e23cb7f1d8e0f93addf0cae3c5b6f00324cccb4a7949ee558d7b6ca973ab8ae9"},
675 | {file = "cffi-1.14.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ddff0b2bd7edcc8c82d1adde6dbbf5e60d57ce985402541cd2985c27f7bec2a0"},
676 | {file = "cffi-1.14.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f90c2267101010de42f7273c94a1f026e56cbc043f9330acd8a80e64300aba33"},
677 | {file = "cffi-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:3cd2c044517f38d1b577f05927fb9729d3396f1d44d0c659a445599e79519792"},
678 | {file = "cffi-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fa72a52a906425416f41738728268072d5acfd48cbe7796af07a923236bcf96"},
679 | {file = "cffi-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:267adcf6e68d77ba154334a3e4fc921b8e63cbb38ca00d33d40655d4228502bc"},
680 | {file = "cffi-1.14.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:d3148b6ba3923c5850ea197a91a42683f946dba7e8eb82dfa211ab7e708de939"},
681 | {file = "cffi-1.14.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:98be759efdb5e5fa161e46d404f4e0ce388e72fbf7d9baf010aff16689e22abe"},
682 | {file = "cffi-1.14.1-cp38-cp38-win32.whl", hash = "sha256:6923d077d9ae9e8bacbdb1c07ae78405a9306c8fd1af13bfa06ca891095eb995"},
683 | {file = "cffi-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:b1d6ebc891607e71fd9da71688fcf332a6630b7f5b7f5549e6e631821c0e5d90"},
684 | {file = "cffi-1.14.1.tar.gz", hash = "sha256:b2a2b0d276a136146e012154baefaea2758ef1f56ae9f4e01c612b0831e0bd2f"},
685 | ]
686 | colorama = [
687 | {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"},
688 | {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"},
689 | ]
690 | constantly = [
691 | {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"},
692 | {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"},
693 | ]
694 | cryptography = [
695 | {file = "cryptography-3.0-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ab49edd5bea8d8b39a44b3db618e4783ef84c19c8b47286bf05dfdb3efb01c83"},
696 | {file = "cryptography-3.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:124af7255ffc8e964d9ff26971b3a6153e1a8a220b9a685dc407976ecb27a06a"},
697 | {file = "cryptography-3.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:51e40123083d2f946794f9fe4adeeee2922b581fa3602128ce85ff813d85b81f"},
698 | {file = "cryptography-3.0-cp27-cp27m-win32.whl", hash = "sha256:dea0ba7fe6f9461d244679efa968d215ea1f989b9c1957d7f10c21e5c7c09ad6"},
699 | {file = "cryptography-3.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8ecf9400d0893836ff41b6f977a33972145a855b6efeb605b49ee273c5e6469f"},
700 | {file = "cryptography-3.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c608ff4d4adad9e39b5057de43657515c7da1ccb1807c3a27d4cf31fc923b4b"},
701 | {file = "cryptography-3.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:bec7568c6970b865f2bcebbe84d547c52bb2abadf74cefce396ba07571109c67"},
702 | {file = "cryptography-3.0-cp35-abi3-macosx_10_10_x86_64.whl", hash = "sha256:0cbfed8ea74631fe4de00630f4bb592dad564d57f73150d6f6796a24e76c76cd"},
703 | {file = "cryptography-3.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:a09fd9c1cca9a46b6ad4bea0a1f86ab1de3c0c932364dbcf9a6c2a5eeb44fa77"},
704 | {file = "cryptography-3.0-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:ce82cc06588e5cbc2a7df3c8a9c778f2cb722f56835a23a68b5a7264726bb00c"},
705 | {file = "cryptography-3.0-cp35-cp35m-win32.whl", hash = "sha256:9367d00e14dee8d02134c6c9524bb4bd39d4c162456343d07191e2a0b5ec8b3b"},
706 | {file = "cryptography-3.0-cp35-cp35m-win_amd64.whl", hash = "sha256:384d7c681b1ab904fff3400a6909261cae1d0939cc483a68bdedab282fb89a07"},
707 | {file = "cryptography-3.0-cp36-cp36m-win32.whl", hash = "sha256:4d355f2aee4a29063c10164b032d9fa8a82e2c30768737a2fd56d256146ad559"},
708 | {file = "cryptography-3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:45741f5499150593178fc98d2c1a9c6722df88b99c821ad6ae298eff0ba1ae71"},
709 | {file = "cryptography-3.0-cp37-cp37m-win32.whl", hash = "sha256:8ecef21ac982aa78309bb6f092d1677812927e8b5ef204a10c326fc29f1367e2"},
710 | {file = "cryptography-3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4b9303507254ccb1181d1803a2080a798910ba89b1a3c9f53639885c90f7a756"},
711 | {file = "cryptography-3.0-cp38-cp38-win32.whl", hash = "sha256:8713ddb888119b0d2a1462357d5946b8911be01ddbf31451e1d07eaa5077a261"},
712 | {file = "cryptography-3.0-cp38-cp38-win_amd64.whl", hash = "sha256:bea0b0468f89cdea625bb3f692cd7a4222d80a6bdafd6fb923963f2b9da0e15f"},
713 | {file = "cryptography-3.0.tar.gz", hash = "sha256:8e924dbc025206e97756e8903039662aa58aa9ba357d8e1d8fc29e3092322053"},
714 | ]
715 | cssselect = [
716 | {file = "cssselect-1.1.0-py2.py3-none-any.whl", hash = "sha256:f612ee47b749c877ebae5bb77035d8f4202c6ad0f0fc1271b3c18ad6c4468ecf"},
717 | {file = "cssselect-1.1.0.tar.gz", hash = "sha256:f95f8dedd925fd8f54edb3d2dfb44c190d9d18512377d3c1e2388d16126879bc"},
718 | ]
719 | decorator = [
720 | {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"},
721 | {file = "decorator-4.4.2.tar.gz", hash = "sha256:e3a62f0520172440ca0dcc823749319382e377f37f140a0b99ef45fecb84bfe7"},
722 | ]
723 | django = [
724 | {file = "Django-3.1-py3-none-any.whl", hash = "sha256:1a63f5bb6ff4d7c42f62a519edc2adbb37f9b78068a5a862beff858b68e3dc8b"},
725 | {file = "Django-3.1.tar.gz", hash = "sha256:2d390268a13c655c97e0e2ede9d117007996db692c1bb93eabebd4fb7ea7012b"},
726 | ]
727 | django-filter = [
728 | {file = "django-filter-2.3.0.tar.gz", hash = "sha256:11e63dd759835d9ba7a763926ffb2662cf8a6dcb4c7971a95064de34dbc7e5af"},
729 | {file = "django_filter-2.3.0-py3-none-any.whl", hash = "sha256:616848eab6fc50193a1b3730140c49b60c57a3eda1f7fc57fa8505ac156c6c75"},
730 | ]
731 | djangorestframework = [
732 | {file = "djangorestframework-3.11.1-py3-none-any.whl", hash = "sha256:8b1ac62c581dbc5799b03e535854b92fc4053ecfe74bad3f9c05782063d4196b"},
733 | {file = "djangorestframework-3.11.1.tar.gz", hash = "sha256:6dd02d5a4bd2516fb93f80360673bf540c3b6641fec8766b1da2870a5aa00b32"},
734 | ]
735 | flake8 = [
736 | {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"},
737 | {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"},
738 | ]
739 | gunicorn = [
740 | {file = "gunicorn-20.0.4-py2.py3-none-any.whl", hash = "sha256:cd4a810dd51bf497552cf3f863b575dabd73d6ad6a91075b65936b151cbf4f9c"},
741 | {file = "gunicorn-20.0.4.tar.gz", hash = "sha256:1904bb2b8a43658807108d59c3f3d56c2b6121a701161de0ddf9ad140073c626"},
742 | ]
743 | hyperlink = [
744 | {file = "hyperlink-19.0.0-py2.py3-none-any.whl", hash = "sha256:ab4a308feb039b04f855a020a6eda3b18ca5a68e6d8f8c899cbe9e653721d04f"},
745 | {file = "hyperlink-19.0.0.tar.gz", hash = "sha256:4288e34705da077fada1111a24a0aa08bb1e76699c9ce49876af722441845654"},
746 | ]
747 | idna = [
748 | {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
749 | {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
750 | ]
751 | incremental = [
752 | {file = "incremental-17.5.0-py2.py3-none-any.whl", hash = "sha256:717e12246dddf231a349175f48d74d93e2897244939173b01974ab6661406b9f"},
753 | {file = "incremental-17.5.0.tar.gz", hash = "sha256:7b751696aaf36eebfab537e458929e194460051ccad279c72b755a167eebd4b3"},
754 | ]
755 | ipython = [
756 | {file = "ipython-7.16.1-py3-none-any.whl", hash = "sha256:2dbcc8c27ca7d3cfe4fcdff7f45b27f9a8d3edfa70ff8024a71c7a8eb5f09d64"},
757 | {file = "ipython-7.16.1.tar.gz", hash = "sha256:9f4fcb31d3b2c533333893b9172264e4821c1ac91839500f31bd43f2c59b3ccf"},
758 | ]
759 | ipython-genutils = [
760 | {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"},
761 | {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"},
762 | ]
763 | itemadapter = [
764 | {file = "itemadapter-0.1.0-py3-none-any.whl", hash = "sha256:de3f6f567446122a48d496c10b5d830d7909f2399bc7ad51d61163c57967f480"},
765 | {file = "itemadapter-0.1.0.tar.gz", hash = "sha256:52159b4f97d82aa2968000ee8371b2114af56a2f44e4cd9142580d46eea39020"},
766 | ]
767 | jedi = [
768 | {file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"},
769 | {file = "jedi-0.17.2.tar.gz", hash = "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"},
770 | ]
771 | lxml = [
772 | {file = "lxml-4.5.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:74f48ec98430e06c1fa8949b49ebdd8d27ceb9df8d3d1c92e1fdc2773f003f20"},
773 | {file = "lxml-4.5.2-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e70d4e467e243455492f5de463b72151cc400710ac03a0678206a5f27e79ddef"},
774 | {file = "lxml-4.5.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:7ad7906e098ccd30d8f7068030a0b16668ab8aa5cda6fcd5146d8d20cbaa71b5"},
775 | {file = "lxml-4.5.2-cp27-cp27m-win32.whl", hash = "sha256:92282c83547a9add85ad658143c76a64a8d339028926d7dc1998ca029c88ea6a"},
776 | {file = "lxml-4.5.2-cp27-cp27m-win_amd64.whl", hash = "sha256:05a444b207901a68a6526948c7cc8f9fe6d6f24c70781488e32fd74ff5996e3f"},
777 | {file = "lxml-4.5.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:94150231f1e90c9595ccc80d7d2006c61f90a5995db82bccbca7944fd457f0f6"},
778 | {file = "lxml-4.5.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bea760a63ce9bba566c23f726d72b3c0250e2fa2569909e2d83cda1534c79443"},
779 | {file = "lxml-4.5.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c3f511a3c58676147c277eff0224c061dd5a6a8e1373572ac817ac6324f1b1e0"},
780 | {file = "lxml-4.5.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:59daa84aef650b11bccd18f99f64bfe44b9f14a08a28259959d33676554065a1"},
781 | {file = "lxml-4.5.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:c9d317efde4bafbc1561509bfa8a23c5cab66c44d49ab5b63ff690f5159b2304"},
782 | {file = "lxml-4.5.2-cp35-cp35m-win32.whl", hash = "sha256:9dc9006dcc47e00a8a6a029eb035c8f696ad38e40a27d073a003d7d1443f5d88"},
783 | {file = "lxml-4.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:08fc93257dcfe9542c0a6883a25ba4971d78297f63d7a5a26ffa34861ca78730"},
784 | {file = "lxml-4.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:121b665b04083a1e85ff1f5243d4a93aa1aaba281bc12ea334d5a187278ceaf1"},
785 | {file = "lxml-4.5.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5591c4164755778e29e69b86e425880f852464a21c7bb53c7ea453bbe2633bbe"},
786 | {file = "lxml-4.5.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:cc411ad324a4486b142c41d9b2b6a722c534096963688d879ea6fa8a35028258"},
787 | {file = "lxml-4.5.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1fa21263c3aba2b76fd7c45713d4428dbcc7644d73dcf0650e9d344e433741b3"},
788 | {file = "lxml-4.5.2-cp36-cp36m-win32.whl", hash = "sha256:786aad2aa20de3dbff21aab86b2fb6a7be68064cbbc0219bde414d3a30aa47ae"},
789 | {file = "lxml-4.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e1cacf4796b20865789083252186ce9dc6cc59eca0c2e79cca332bdff24ac481"},
790 | {file = "lxml-4.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:80a38b188d20c0524fe8959c8ce770a8fdf0e617c6912d23fc97c68301bb9aba"},
791 | {file = "lxml-4.5.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ecc930ae559ea8a43377e8b60ca6f8d61ac532fc57efb915d899de4a67928efd"},
792 | {file = "lxml-4.5.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a76979f728dd845655026ab991df25d26379a1a8fc1e9e68e25c7eda43004bed"},
793 | {file = "lxml-4.5.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cfd7c5dd3c35c19cec59c63df9571c67c6d6e5c92e0fe63517920e97f61106d1"},
794 | {file = "lxml-4.5.2-cp37-cp37m-win32.whl", hash = "sha256:5a9c8d11aa2c8f8b6043d845927a51eb9102eb558e3f936df494e96393f5fd3e"},
795 | {file = "lxml-4.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:4b4a111bcf4b9c948e020fd207f915c24a6de3f1adc7682a2d92660eb4e84f1a"},
796 | {file = "lxml-4.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5dd20538a60c4cc9a077d3b715bb42307239fcd25ef1ca7286775f95e9e9a46d"},
797 | {file = "lxml-4.5.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2b30aa2bcff8e958cd85d907d5109820b01ac511eae5b460803430a7404e34d7"},
798 | {file = "lxml-4.5.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:aa8eba3db3d8761db161003e2d0586608092e217151d7458206e243be5a43843"},
799 | {file = "lxml-4.5.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f0ec6b9b3832e0bd1d57af41f9238ea7709bbd7271f639024f2fc9d3bb01293"},
800 | {file = "lxml-4.5.2-cp38-cp38-win32.whl", hash = "sha256:107781b213cf7201ec3806555657ccda67b1fccc4261fb889ef7fc56976db81f"},
801 | {file = "lxml-4.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:f161af26f596131b63b236372e4ce40f3167c1b5b5d459b29d2514bd8c9dc9ee"},
802 | {file = "lxml-4.5.2.tar.gz", hash = "sha256:cdc13a1682b2a6241080745b1953719e7fe0850b40a5c71ca574f090a1391df6"},
803 | ]
804 | mccabe = [
805 | {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
806 | {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
807 | ]
808 | parsel = [
809 | {file = "parsel-1.6.0-py2.py3-none-any.whl", hash = "sha256:9e1fa8db1c0b4a878bf34b35c043d89c9d1cbebc23b4d34dbc3c0ec33f2e087d"},
810 | {file = "parsel-1.6.0.tar.gz", hash = "sha256:70efef0b651a996cceebc69e55a85eb2233be0890959203ba7c3a03c72725c79"},
811 | ]
812 | parso = [
813 | {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"},
814 | {file = "parso-0.7.1.tar.gz", hash = "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"},
815 | ]
816 | pexpect = [
817 | {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
818 | {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
819 | ]
820 | pickleshare = [
821 | {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"},
822 | {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
823 | ]
824 | prompt-toolkit = [
825 | {file = "prompt_toolkit-3.0.5-py3-none-any.whl", hash = "sha256:df7e9e63aea609b1da3a65641ceaf5bc7d05e0a04de5bd45d05dbeffbabf9e04"},
826 | {file = "prompt_toolkit-3.0.5.tar.gz", hash = "sha256:563d1a4140b63ff9dd587bda9557cffb2fe73650205ab6f4383092fb882e7dc8"},
827 | ]
828 | protego = [
829 | {file = "Protego-0.1.16.tar.gz", hash = "sha256:a682771bc7b51b2ff41466460896c1a5a653f9a1e71639ef365a72e66d8734b4"},
830 | ]
831 | ptyprocess = [
832 | {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"},
833 | {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"},
834 | ]
835 | pyasn1 = [
836 | {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"},
837 | {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"},
838 | {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"},
839 | {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"},
840 | {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"},
841 | {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"},
842 | {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"},
843 | {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"},
844 | {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"},
845 | {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"},
846 | {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"},
847 | {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"},
848 | {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
849 | ]
850 | pyasn1-modules = [
851 | {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"},
852 | {file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"},
853 | {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"},
854 | {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"},
855 | {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"},
856 | {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"},
857 | {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"},
858 | {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"},
859 | {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"},
860 | {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"},
861 | {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"},
862 | {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"},
863 | {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"},
864 | ]
865 | pycodestyle = [
866 | {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"},
867 | {file = "pycodestyle-2.6.0.tar.gz", hash = "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"},
868 | ]
869 | pycparser = [
870 | {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"},
871 | {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"},
872 | ]
873 | pydispatcher = [
874 | {file = "PyDispatcher-2.0.5.tar.gz", hash = "sha256:5570069e1b1769af1fe481de6dd1d3a388492acddd2cdad7a3bde145615d5caf"},
875 | {file = "PyDispatcher-2.0.5.zip", hash = "sha256:5be4a8be12805ef7d712dd9a93284fb8bc53f309867e573f653a72e5fd10e433"},
876 | ]
877 | pyflakes = [
878 | {file = "pyflakes-2.2.0-py2.py3-none-any.whl", hash = "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92"},
879 | {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"},
880 | ]
881 | pygments = [
882 | {file = "Pygments-2.6.1-py3-none-any.whl", hash = "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"},
883 | {file = "Pygments-2.6.1.tar.gz", hash = "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44"},
884 | ]
885 | pyhamcrest = [
886 | {file = "PyHamcrest-2.0.2-py3-none-any.whl", hash = "sha256:7ead136e03655af85069b6f47b23eb7c3e5c221aa9f022a4fbb499f5b7308f29"},
887 | {file = "PyHamcrest-2.0.2.tar.gz", hash = "sha256:412e00137858f04bde0729913874a48485665f2d36fe9ee449f26be864af9316"},
888 | ]
889 | pyopenssl = [
890 | {file = "pyOpenSSL-19.1.0-py2.py3-none-any.whl", hash = "sha256:621880965a720b8ece2f1b2f54ea2071966ab00e2970ad2ce11d596102063504"},
891 | {file = "pyOpenSSL-19.1.0.tar.gz", hash = "sha256:9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507"},
892 | ]
893 | pypydispatcher = [
894 | {file = "PyPyDispatcher-2.1.2.tar.gz", hash = "sha256:b6bec5dfcff9d2535bca2b23c80eae367b1ac250a645106948d315fcfa9130f2"},
895 | ]
896 | pytz = [
897 | {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"},
898 | {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"},
899 | ]
900 | queuelib = [
901 | {file = "queuelib-1.5.0-py2.py3-none-any.whl", hash = "sha256:ff43b5b74b9266f8df4232a8f768dc4d67281a271905e2ed4a3689d4d304cd02"},
902 | {file = "queuelib-1.5.0.tar.gz", hash = "sha256:42b413295551bdc24ed9376c1a2cd7d0b1b0fa4746b77b27ca2b797a276a1a17"},
903 | ]
904 | scrapy = [
905 | {file = "Scrapy-2.2.1-py2.py3-none-any.whl", hash = "sha256:d9d898739f199bd9f9e2258770d5bfeeb754b6ed4eb84a41c04fd52e9649266d"},
906 | {file = "Scrapy-2.2.1.tar.gz", hash = "sha256:6a09beb5190bfdee2d72cf261822eae5d92fe8a86ac9ee1f55fc44b4864ca583"},
907 | ]
908 | service-identity = [
909 | {file = "service_identity-18.1.0-py2.py3-none-any.whl", hash = "sha256:001c0707759cb3de7e49c078a7c0c9cd12594161d3bf06b9c254fdcb1a60dc36"},
910 | {file = "service_identity-18.1.0.tar.gz", hash = "sha256:0858a54aabc5b459d1aafa8a518ed2081a285087f349fe3e55197989232e2e2d"},
911 | ]
912 | six = [
913 | {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
914 | {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
915 | ]
916 | sqlparse = [
917 | {file = "sqlparse-0.3.1-py2.py3-none-any.whl", hash = "sha256:022fb9c87b524d1f7862b3037e541f68597a730a8843245c349fc93e1643dc4e"},
918 | {file = "sqlparse-0.3.1.tar.gz", hash = "sha256:e162203737712307dfe78860cc56c8da8a852ab2ee33750e33aeadf38d12c548"},
919 | ]
920 | traitlets = [
921 | {file = "traitlets-4.3.3-py2.py3-none-any.whl", hash = "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44"},
922 | {file = "traitlets-4.3.3.tar.gz", hash = "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"},
923 | ]
924 | twisted = [
925 | {file = "Twisted-20.3.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:cdbc4c7f0cd7a2218b575844e970f05a1be1861c607b0e048c9bceca0c4d42f7"},
926 | {file = "Twisted-20.3.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d267125cc0f1e8a0eed6319ba4ac7477da9b78a535601c49ecd20c875576433a"},
927 | {file = "Twisted-20.3.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:356e8d8dd3590e790e3dba4db139eb8a17aca64b46629c622e1b1597a4a92478"},
928 | {file = "Twisted-20.3.0-cp27-cp27m-win32.whl", hash = "sha256:ca3a0b8c9110800e576d89b5337373e52018b41069bc879f12fa42b7eb2d0274"},
929 | {file = "Twisted-20.3.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cd1dc5c85b58494138a3917752b54bb1daa0045d234b7c132c37a61d5483ebad"},
930 | {file = "Twisted-20.3.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:94ac3d55a58c90e2075c5fe1853f2aa3892b73e3bf56395f743aefde8605eeaa"},
931 | {file = "Twisted-20.3.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7408c6635ee1b96587289283ebe90ee15dbf9614b05857b446055116bc822d29"},
932 | {file = "Twisted-20.3.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c09c47ff9750a8e3aa60ad169c4b95006d455a29b80ad0901f031a103b2991cd"},
933 | {file = "Twisted-20.3.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:158ddb80719a4813d292293ac44ba41d8b56555ed009d90994a278237ee63d2c"},
934 | {file = "Twisted-20.3.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:040eb6641125d2a9a09cf198ec7b83dd8858c6f51f6770325ed9959c00f5098f"},
935 | {file = "Twisted-20.3.0-cp35-cp35m-win32.whl", hash = "sha256:147780b8caf21ba2aef3688628eaf13d7e7fe02a86747cd54bfaf2140538f042"},
936 | {file = "Twisted-20.3.0-cp35-cp35m-win_amd64.whl", hash = "sha256:25ffcf37944bdad4a99981bc74006d735a678d2b5c193781254fbbb6d69e3b22"},
937 | {file = "Twisted-20.3.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:a58e61a2a01e5bcbe3b575c0099a2bcb8d70a75b1a087338e0c48dd6e01a5f15"},
938 | {file = "Twisted-20.3.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:7c547fd0215db9da8a1bc23182b309e84a232364cc26d829e9ee196ce840b114"},
939 | {file = "Twisted-20.3.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2182000d6ffc05d269e6c03bfcec8b57e20259ca1086180edaedec3f1e689292"},
940 | {file = "Twisted-20.3.0-cp36-cp36m-win32.whl", hash = "sha256:70952c56e4965b9f53b180daecf20a9595cf22b8d0935cd3bd664c90273c3ab2"},
941 | {file = "Twisted-20.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3281d9ce889f7b21bdb73658e887141aa45a102baf3b2320eafcfba954fcefec"},
942 | {file = "Twisted-20.3.0-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:e92703bed0cc21d6cb5c61d66922b3b1564015ca8a51325bd164a5e33798d504"},
943 | {file = "Twisted-20.3.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f058bd0168271de4dcdc39845b52dd0a4a2fecf5f1246335f13f5e96eaebb467"},
944 | {file = "Twisted-20.3.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:894f6f3cfa57a15ea0d0714e4283913a5f2511dbd18653dd148eba53b3919797"},
945 | {file = "Twisted-20.3.0-cp37-cp37m-win32.whl", hash = "sha256:f3c19e5bd42bbe4bf345704ad7c326c74d3fd7a1b3844987853bef180be638d4"},
946 | {file = "Twisted-20.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d95803193561a243cb0401b0567c6b7987d3f2a67046770e1dccd1c9e49a9780"},
947 | {file = "Twisted-20.3.0.tar.bz2", hash = "sha256:d72c55b5d56e176563b91d11952d13b01af8725c623e498db5507b6614fc1e10"},
948 | ]
949 | w3lib = [
950 | {file = "w3lib-1.22.0-py2.py3-none-any.whl", hash = "sha256:0161d55537063e00d95a241663ede3395c4c6d7b777972ba2fd58bbab2001e53"},
951 | {file = "w3lib-1.22.0.tar.gz", hash = "sha256:0ad6d0203157d61149fd45aaed2e24f53902989c32fc1dccc2e2bfba371560df"},
952 | ]
953 | wcwidth = [
954 | {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
955 | {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
956 | ]
957 | "zope.interface" = [
958 | {file = "zope.interface-5.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:645a7092b77fdbc3f68d3cc98f9d3e71510e419f54019d6e282328c0dd140dcd"},
959 | {file = "zope.interface-5.1.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:d1fe9d7d09bb07228650903d6a9dc48ea649e3b8c69b1d263419cc722b3938e8"},
960 | {file = "zope.interface-5.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:a744132d0abaa854d1aad50ba9bc64e79c6f835b3e92521db4235a1991176813"},
961 | {file = "zope.interface-5.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:461d4339b3b8f3335d7e2c90ce335eb275488c587b61aca4b305196dde2ff086"},
962 | {file = "zope.interface-5.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:269b27f60bcf45438e8683269f8ecd1235fa13e5411de93dae3b9ee4fe7f7bc7"},
963 | {file = "zope.interface-5.1.0-cp27-cp27m-win32.whl", hash = "sha256:6874367586c020705a44eecdad5d6b587c64b892e34305bb6ed87c9bbe22a5e9"},
964 | {file = "zope.interface-5.1.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8149ded7f90154fdc1a40e0c8975df58041a6f693b8f7edcd9348484e9dc17fe"},
965 | {file = "zope.interface-5.1.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:0103cba5ed09f27d2e3de7e48bb320338592e2fabc5ce1432cf33808eb2dfd8b"},
966 | {file = "zope.interface-5.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:b0becb75418f8a130e9d465e718316cd17c7a8acce6fe8fe07adc72762bee425"},
967 | {file = "zope.interface-5.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:fb55c182a3f7b84c1a2d6de5fa7b1a05d4660d866b91dbf8d74549c57a1499e8"},
968 | {file = "zope.interface-5.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4f98f70328bc788c86a6a1a8a14b0ea979f81ae6015dd6c72978f1feff70ecda"},
969 | {file = "zope.interface-5.1.0-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:af2c14efc0bb0e91af63d00080ccc067866fb8cbbaca2b0438ab4105f5e0f08d"},
970 | {file = "zope.interface-5.1.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:f68bf937f113b88c866d090fea0bc52a098695173fc613b055a17ff0cf9683b6"},
971 | {file = "zope.interface-5.1.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d7804f6a71fc2dda888ef2de266727ec2f3915373d5a785ed4ddc603bbc91e08"},
972 | {file = "zope.interface-5.1.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:74bf0a4f9091131de09286f9a605db449840e313753949fe07c8d0fe7659ad1e"},
973 | {file = "zope.interface-5.1.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:ba4261c8ad00b49d48bbb3b5af388bb7576edfc0ca50a49c11dcb77caa1d897e"},
974 | {file = "zope.interface-5.1.0-cp35-cp35m-win32.whl", hash = "sha256:ebb4e637a1fb861c34e48a00d03cffa9234f42bef923aec44e5625ffb9a8e8f9"},
975 | {file = "zope.interface-5.1.0-cp35-cp35m-win_amd64.whl", hash = "sha256:911714b08b63d155f9c948da2b5534b223a1a4fc50bb67139ab68b277c938578"},
976 | {file = "zope.interface-5.1.0-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:e74671e43ed4569fbd7989e5eecc7d06dc134b571872ab1d5a88f4a123814e9f"},
977 | {file = "zope.interface-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b1d2ed1cbda2ae107283befd9284e650d840f8f7568cb9060b5466d25dc48975"},
978 | {file = "zope.interface-5.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ef739fe89e7f43fb6494a43b1878a36273e5924869ba1d866f752c5812ae8d58"},
979 | {file = "zope.interface-5.1.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:eb9b92f456ff3ec746cd4935b73c1117538d6124b8617bc0fe6fda0b3816e345"},
980 | {file = "zope.interface-5.1.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:dcefc97d1daf8d55199420e9162ab584ed0893a109f45e438b9794ced44c9fd0"},
981 | {file = "zope.interface-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:f40db0e02a8157d2b90857c24d89b6310f9b6c3642369852cdc3b5ac49b92afc"},
982 | {file = "zope.interface-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:14415d6979356629f1c386c8c4249b4d0082f2ea7f75871ebad2e29584bd16c5"},
983 | {file = "zope.interface-5.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5e86c66a6dea8ab6152e83b0facc856dc4d435fe0f872f01d66ce0a2131b7f1d"},
984 | {file = "zope.interface-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:39106649c3082972106f930766ae23d1464a73b7d30b3698c986f74bf1256a34"},
985 | {file = "zope.interface-5.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8cccf7057c7d19064a9e27660f5aec4e5c4001ffcf653a47531bde19b5aa2a8a"},
986 | {file = "zope.interface-5.1.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:562dccd37acec149458c1791da459f130c6cf8902c94c93b8d47c6337b9fb826"},
987 | {file = "zope.interface-5.1.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:da2844fba024dd58eaa712561da47dcd1e7ad544a257482392472eae1c86d5e5"},
988 | {file = "zope.interface-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:1ae4693ccee94c6e0c88a4568fb3b34af8871c60f5ba30cf9f94977ed0e53ddd"},
989 | {file = "zope.interface-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:dd98c436a1fc56f48c70882cc243df89ad036210d871c7427dc164b31500dc11"},
990 | {file = "zope.interface-5.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b87ed2dc05cb835138f6a6e3595593fea3564d712cb2eb2de963a41fd35758c"},
991 | {file = "zope.interface-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:558a20a0845d1a5dc6ff87cd0f63d7dac982d7c3be05d2ffb6322a87c17fa286"},
992 | {file = "zope.interface-5.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b726194f938791a6691c7592c8b9e805fc6d1b9632a833b9c0640828cd49cbc"},
993 | {file = "zope.interface-5.1.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:60a207efcd8c11d6bbeb7862e33418fba4e4ad79846d88d160d7231fcb42a5ee"},
994 | {file = "zope.interface-5.1.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b054eb0a8aa712c8e9030065a59b5e6a5cf0746ecdb5f087cca5ec7685690c19"},
995 | {file = "zope.interface-5.1.0-cp38-cp38-win32.whl", hash = "sha256:27d287e61639d692563d9dab76bafe071fbeb26818dd6a32a0022f3f7ca884b5"},
996 | {file = "zope.interface-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:a5f8f85986197d1dd6444763c4a15c991bfed86d835a1f6f7d476f7198d5f56a"},
997 | {file = "zope.interface-5.1.0.tar.gz", hash = "sha256:40e4c42bd27ed3c11b2c983fecfb03356fae1209de10686d03c02c8696a1d90e"},
998 | ]
999 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "dm_extracts"
3 | version = "0.1.0"
4 | description = ""
5 | authors = ["Yevgeny Bulochnik"]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.7"
9 | ipython = "^7.16.1"
10 | scrapy = "^2.2.1"
11 | djangorestframework = "^3.11.1"
12 | django-filter = "^2.3.0"
13 | asgiref = "^3.2.10"
14 | flake8 = "^3.8.3"
15 | gunicorn = "^20.0.4"
16 |
17 | [tool.poetry.dev-dependencies]
18 |
19 | [build-system]
20 | requires = ["poetry>=0.12"]
21 | build-backend = "poetry.masonry.api"
22 |
--------------------------------------------------------------------------------
/scraper/scraper/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/coderxio/dailymed-api/90fe5f8b40a854ff7543ec9b8c4fc0232fdd0dfc/scraper/scraper/__init__.py
--------------------------------------------------------------------------------
/scraper/scraper/items.py:
--------------------------------------------------------------------------------
1 | # Define here the models for your scraped items
2 | #
3 | # See documentation in:
4 | # https://docs.scrapy.org/en/latest/topics/items.html
5 |
6 | from scrapy import Item, Field
7 | from scrapy.loader.processors import TakeFirst
8 |
9 |
class SplItem(Item):
    """Top-level item for one SPL document scraped from a local XML file."""

    # Single-valued fields: TakeFirst keeps only the first extracted value.
    id = Field(output_processor=TakeFirst())
    set_id = Field(output_processor=TakeFirst())
    labeler = Field(output_processor=TakeFirst())
    schedule = Field(output_processor=TakeFirst())
    # List of nested ProductItem values (no processor: keep them all).
    products = Field()
24 |
25 |
class ProductItem(Item):
    """One manufactured product inside an SPL document."""

    # Single-valued fields keep only the first extracted value.
    code = Field(output_processor=TakeFirst())
    name = Field(output_processor=TakeFirst())
    # Multi-valued fields: collected as lists of nested items/values.
    active_ingredients = Field()
    inactive_ingredients = Field()
    packages = Field()
    schedule = Field(output_processor=TakeFirst())
39 |
40 |
class InactiveIngredient(Item):
    """An inactive ingredient: display name plus its substance code (unii)."""

    name = Field(output_processor=TakeFirst())
    unii = Field(output_processor=TakeFirst())
48 |
49 |
class PackageItem(Item):
    """A container package; only its package code is captured."""

    code = Field(output_processor=TakeFirst())
54 |
--------------------------------------------------------------------------------
/scraper/scraper/middlewares.py:
--------------------------------------------------------------------------------
1 | # Define here the models for your spider middleware
2 | #
3 | # See documentation in:
4 | # https://docs.scrapy.org/en/latest/topics/spider-middleware.html
5 |
6 | from scrapy import signals
7 |
8 | # useful for handling different item types with a single interface
9 | # from itemadapter import is_item, ItemAdapter
10 |
11 |
class ScraperSpiderMiddleware:
    """Spider middleware with explicit pass-through behaviour.

    Scrapy treats undefined hooks as "do not modify"; every hook here is
    implemented but deliberately leaves requests, responses and items
    untouched, so this class is a template for future customisation.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this to build the middleware; we also subscribe to
        # the spider_opened signal for the log line below.
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened
        )
        return middleware

    def process_spider_input(self, response, spider):
        # Accept every response unchanged; None means "continue".
        return None

    def process_spider_output(self, response, result, spider):
        # Forward every request/item produced by the spider untouched.
        yield from result

    def process_spider_exception(self, response, exception, spider):
        # No recovery: returning None lets other middleware handle it.
        return None

    def process_start_requests(self, start_requests, spider):
        # Forward the spider's start requests untouched (requests only,
        # never items — there is no response at this stage).
        yield from start_requests

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
57 |
58 |
class ScraperDownloaderMiddleware:
    """Downloader middleware with explicit pass-through behaviour.

    As with the spider middleware, every hook is implemented but does not
    alter requests or responses; the class exists as a customisation point.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy calls this to build the middleware; subscribe to the
        # spider_opened signal so we can log when crawling starts.
        middleware = cls()
        crawler.signals.connect(
            middleware.spider_opened, signal=signals.spider_opened
        )
        return middleware

    def process_request(self, request, spider):
        # None means "keep processing this request through the chain".
        # (Returning a Response/Request, or raising IgnoreRequest, would
        # short-circuit the download instead.)
        return None

    def process_response(self, request, response, spider):
        # Hand the downloaded response back unchanged.
        return response

    def process_exception(self, request, exception, spider):
        # None lets other middleware's process_exception() run.
        return None

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
104 |
--------------------------------------------------------------------------------
/scraper/scraper/pipelines.py:
--------------------------------------------------------------------------------
1 | # Define your item pipelines here
2 | #
3 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
4 | # See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
5 |
6 |
7 | # useful for handling different item types with a single interface
8 | import csv
9 |
10 | from scraper.utils import get_rxnorm
11 | from dailymed.models import Set, InactiveIngredient
12 |
13 |
class ScraperPipeline:
    """Persist scraped SPL items into the Django database.

    process_item() stores one SplItem (with nested products, inactive
    ingredients and packages); close_spider() bulk-loads the RxNorm
    mapping file once crawling has finished.
    """

    def process_item(self, item, spider):
        """Create Set/Spl/Product/Package rows for one scraped SplItem.

        Pops the nested structures off *item* and writes them through the
        Django related managers; returns the (mutated) item as Scrapy
        pipelines are expected to.
        """
        set_id = item.pop('set_id')
        products_data = item.pop('products')
        # Renamed from `set` — don't shadow the builtin.
        set_obj = Set.objects.create(id=set_id)
        spl = set_obj.spls.create(**item)

        for product_data in products_data:
            packages_data = product_data.pop('packages')
            inactive_ingredients_list = []

            # inactive_ingredients is optional on a product.
            if 'inactive_ingredients' in product_data:
                for ingredient_data in product_data.pop(
                    'inactive_ingredients'
                ):
                    # get_or_create replaces the previous broad
                    # try/except-Exception get-then-create dance; ingredients
                    # are shared across products, so reuse existing rows.
                    ingredient, _ = InactiveIngredient.objects.get_or_create(
                        **ingredient_data
                    )
                    inactive_ingredients_list.append(ingredient)

            product = spl.products.create(**product_data)

            product.inactive_ingredients.add(*inactive_ingredients_list)

            for package_data in packages_data:
                product.packages.create(**package_data)

        return item

    def close_spider(self, spider):
        """Load pipe-delimited RxNorm rows and attach them to known Sets."""
        print("loading rxnorm data")
        columns = ['setId', 'splVersion', 'rxcui', 'rxstring', 'rxtty']
        with open(get_rxnorm()[0], newline='') as csvfile:
            rxreader = csv.reader(csvfile, delimiter='|')
            for row in rxreader:
                data = dict(zip(columns, row))
                data.pop('splVersion')  # version is not stored
                set_id = data.pop('setId')
                try:
                    set_obj = Set.objects.get(id=set_id)
                except Set.DoesNotExist:
                    # RxNorm rows for SPLs we never scraped are skipped;
                    # narrowed from a blanket `except Exception`.
                    continue
                set_obj.rxnorms.create(**data)
64 |
--------------------------------------------------------------------------------
/scraper/scraper/settings.py:
--------------------------------------------------------------------------------
# Scrapy settings for scraper project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html

# noqa: E501 Settings to allow django model imports, workaround should use setup scripts to make these installable
import sys
import os
import django
from pathlib import Path

BASE_DIR = Path(__file__).resolve(strict=True).parent.parent.parent
DJANGO_DIR = BASE_DIR / 'api'

# sys.path entries are documented as strings; appending a pathlib.Path
# object is not officially supported, so convert explicitly.
sys.path.append(str(DJANGO_DIR))
os.environ['DJANGO_SETTINGS_MODULE'] = 'api.settings'
django.setup()


BOT_NAME = 'scraper'

SPIDER_MODULES = ['scraper.spiders']
NEWSPIDER_MODULE = 'scraper.spiders'


# noqa: E501 Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'scraper (+http://www.yourdomain.com)'

# Obey robots.txt rules
ROBOTSTXT_OBEY = False

# Configure maximum concurrent requests performed by Scrapy (default: 16)
# CONCURRENT_REQUESTS = 32

# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
# DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
# CONCURRENT_REQUESTS_PER_DOMAIN = 16
# CONCURRENT_REQUESTS_PER_IP = 16

# Disable cookies (enabled by default)
# COOKIES_ENABLED = False

# Disable Telnet Console (enabled by default)
# TELNETCONSOLE_ENABLED = False

# Override the default request headers:
# DEFAULT_REQUEST_HEADERS = {
#   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#   'Accept-Language': 'en',
# }

# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# SPIDER_MIDDLEWARES = {
#    'scraper.middlewares.ScraperSpiderMiddleware': 543,
# }

# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# DOWNLOADER_MIDDLEWARES = {
#    'scraper.middlewares.ScraperDownloaderMiddleware': 543,
# }

# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
# EXTENSIONS = {
#    'scrapy.extensions.telnet.TelnetConsole': None,
# }

# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
ITEM_PIPELINES = {
    'scraper.pipelines.ScraperPipeline': 300,
}

# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
# AUTOTHROTTLE_ENABLED = True
# The initial download delay
# AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
# AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
# AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
# AUTOTHROTTLE_DEBUG = False

# Enable and configure HTTP caching (disabled by default)
# noqa: E501 See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
# HTTPCACHE_ENABLED = True
# HTTPCACHE_EXPIRATION_SECS = 0
# HTTPCACHE_DIR = 'httpcache'
# HTTPCACHE_IGNORE_HTTP_CODES = []
# HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
--------------------------------------------------------------------------------
/scraper/scraper/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
--------------------------------------------------------------------------------
/scraper/scraper/spiders/basic_extract.py:
--------------------------------------------------------------------------------
1 | import scrapy
2 | # from scrapy.loader import ItemLoader
3 | from scraper.utils import get_spls
4 |
5 |
class InactiveSpider(scrapy.Spider):
    """Flat-extraction spider: yields one dict per container-package code."""

    name = 'basic_extract'
    # NOTE: evaluated at import time; requires the data/spls dir to exist.
    start_urls = get_spls()

    def parse(self, response):
        """Yield a flat record for every containerPackagedProduct code."""
        response.selector.remove_namespaces()
        document = response.xpath("//document")

        # Document-level values are identical for every record yielded
        # from this response, so look them up once before the loops.
        set_id = document.xpath('./setId/@root').get()
        spl_id = document.xpath('./id/@root').get()
        org = document.xpath(
            './/representedOrganization/name/text()'
        ).get()
        schedule = document.xpath(
            './/policy/code/@displayName'
        ).get()

        products = document.xpath(
            './/subject/manufacturedProduct/manufacturedProduct'
        )
        for product in products:
            # Product-level values are shared by all of its packages.
            name = product.xpath('./name/text()').get()
            active = product.xpath('.//ingredient[starts-with(@classCode, "ACT")]\
                //name/text()').get()
            inactive = product.xpath('.//ingredient[@classCode="IACT"]\
                //name/text()').getall()

            ndc_codes = product.xpath(
                './/containerPackagedProduct/code/@code'
            ).getall()
            for ndc in ndc_codes:
                yield {
                    'set_id': set_id,
                    'spl_id': spl_id,
                    'org': org,
                    'ndc': ndc,
                    'schedule': schedule,
                    'name': name,
                    'active': active,
                    'inactive': inactive,
                }
39 |
--------------------------------------------------------------------------------
/scraper/scraper/spiders/json_extract.py:
--------------------------------------------------------------------------------
1 | import scrapy
2 | from scrapy.loader import ItemLoader
3 | from scraper.utils import get_spls
4 | from scraper.items import SplItem, ProductItem, PackageItem, InactiveIngredient
5 |
6 |
class JsonSpider(scrapy.Spider):
    """Spider that assembles one nested SplItem per local SPL XML file.

    start_urls comes from get_spls(), so every "response" is a local
    file:// document rather than a network page.
    """

    name = 'json_extract'
    # NOTE: evaluated at import time; requires the data/spls dir to exist.
    start_urls = get_spls()

    def parse(self, response):
        """Build and return one SplItem with nested products, inactive
        ingredients and packages for this document.
        """
        # The XML uses a default namespace; strip it so the plain tag
        # names in the XPath expressions below match.
        response.selector.remove_namespaces()
        document = response.xpath('//document')
        manu_products = document.xpath('.//subject/manufacturedProduct')

        # Top-level SPL fields (TakeFirst processors on the item keep
        # only the first match for each).
        spl_il = ItemLoader(item=SplItem(), selector=document)
        spl_il.add_xpath('id', './id/@root')
        spl_il.add_xpath('set_id', './setId/@root')
        spl_il.add_xpath('labeler', './/representedOrganization/name/text()')

        for product in manu_products:
            product_il = ItemLoader(item=ProductItem(), selector=product)
            product_il.add_xpath('code', './manufacturedProduct/code/@code')
            product_il.add_xpath('name', './manufacturedProduct/name/text()')
            product_il.add_xpath(
                'schedule',
                './/policy[@classCode="DEADrugSchedule"]/code/@displayName'
            )

            inactive_ingredients = product.xpath(
                './/ingredient[starts-with(@classCode, "IACT")]'
            )

            # Each inactive ingredient becomes a nested InactiveIngredient
            # item appended to the product's list field.
            for inactive_ingredient in inactive_ingredients:
                inactive_il = ItemLoader(
                    item=InactiveIngredient(),
                    selector=inactive_ingredient,
                )
                inactive_il.add_xpath(
                    'name',
                    './ingredientSubstance/name/text()',
                )
                inactive_il.add_xpath(
                    'unii',
                    './ingredientSubstance/code/@code',
                )

                product_il.add_value(
                    'inactive_ingredients',
                    inactive_il.load_item(),
                )

            for package in product.xpath('.//containerPackagedProduct'):
                package_il = ItemLoader(item=PackageItem(), selector=package)
                package_il.add_xpath('code', './code/@code')

                # Skip package nodes that yielded no code at all
                # (an empty loaded item is falsy).
                if not package_il.load_item():
                    continue

                product_il.add_value('packages', package_il.load_item())

            spl_il.add_value('products', product_il.load_item())

        return spl_il.load_item()
65 |
--------------------------------------------------------------------------------
/scraper/scraper/utils.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 |
def get_spls():
    """Return ``file://`` URLs for every file in the data/spls directory.

    The result is sorted: ``Path.iterdir`` yields entries in arbitrary,
    OS-dependent order, and sorting makes the crawl order deterministic.
    """
    spls_dir = Path(__file__).parent.parent.parent.absolute() / 'data' / 'spls'
    # iterdir() is already iterable — no need to materialize a list first.
    return [f'file://{path}' for path in sorted(spls_dir.iterdir())]
10 |
11 |
def get_rxnorm():
    """Return absolute path strings for every file in data/rxnorm.

    Sorted so that callers taking ``get_rxnorm()[0]`` (the pipeline does)
    get a deterministic file instead of whichever the OS lists first.
    """
    rxnorm_dir = (
        Path(__file__).parent.parent.parent.absolute() / 'data' / 'rxnorm'
    )
    return [str(path) for path in sorted(rxnorm_dir.iterdir())]
19 |
--------------------------------------------------------------------------------
/scraper/scrapy.cfg:
--------------------------------------------------------------------------------
1 | # Automatically created by: scrapy startproject
2 | #
3 | # For more information about the [deploy] section see:
4 | # https://scrapyd.readthedocs.io/en/latest/deploy.html
5 |
6 | [settings]
7 | default = scraper.settings
8 |
9 | [deploy]
10 | #url = http://localhost:6800/
11 | project = scraper
12 |
--------------------------------------------------------------------------------