├── .gitignore
├── .pre-commit-config.yaml
├── .ruff.toml
├── .travis.yml
├── CHANGES.md
├── LICENSE
├── MANIFEST.in
├── README.rst
├── crossref
├── __init__.py
├── restful.py
├── utils.py
└── validators.py
├── poetry.lock
├── pyproject.toml
└── tests
├── __init__.py
├── test_restful.py
└── test_validators.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 |
49 | # Translations
50 | *.mo
51 | *.pot
52 |
53 | # Django stuff:
54 | *.log
55 | local_settings.py
56 |
57 | # Flask stuff:
58 | instance/
59 | .webassets-cache
60 |
61 | # Scrapy stuff:
62 | .scrapy
63 |
64 | # Sphinx documentation
65 | docs/_build/
66 |
67 | # PyBuilder
68 | target/
69 |
70 | # Jupyter Notebook
71 | .ipynb_checkpoints
72 |
73 | # pyenv
74 | .python-version
75 |
76 | # celery beat schedule file
77 | celerybeat-schedule
78 |
79 | # SageMath parsed files
80 | *.sage.py
81 |
82 | # dotenv
83 | .env
84 |
85 | # virtualenv
86 | .venv*
87 | venv/
88 | ENV/
89 |
90 | # Spyder project settings
91 | .spyderproject
92 | .spyproject
93 |
94 | # Rope project settings
95 | .ropeproject
96 |
97 | # mkdocs documentation
98 | /site
99 |
100 | # mypy
101 | .mypy_cache/
102 |
103 | # pycharm
104 | .idea
105 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/astral-sh/ruff-pre-commit
3 | rev: v0.0.277
4 | hooks:
5 | - id: ruff
--------------------------------------------------------------------------------
/.ruff.toml:
--------------------------------------------------------------------------------
1 | select = [
2 | "E", # pycodestyle errors
3 | "F", # pyflakes
4 | "B", # flake8-bugbear
5 | "W", # pycodestyle warnings
6 | "N", # pep8 naming
7 |     "D300", "D301", "D419", # pydocstyle
8 | "UP", # pyupgrade
9 | "YTT", # flake-2020
10 | "S", # flake8-bandit
11 | # "BLE", # flake8-blind-except
12 | # "FBT", # flake8-boolean-trap
13 | "A", # flake8-builtins
14 | "COM", # flake8-commas
15 | "C4", # flake8-comprehensions
16 | "DTZ", # flake8-datetimez
17 | "T10", # flake8-debugger
18 | "EM", # flake8-errmsg
19 | "EXE", # flake8-executable
20 | "ISC", # flake8-implicit-str-concat
21 | "ICN", # flake8-import-conventions
22 | "G", # flake8-logging-format
23 | "INP", # flake8-no-pep420
24 | "PIE", # flake8-pie
25 | "T20", # flake8-print
26 | "PT", # flake8-pytest-style
27 | "Q", # flake8-quotes
28 | "RET", # flake8-return
29 | "SIM", # flake8-simplify
30 | "TID", # flake8-tidy-imports
31 | # "TCH", # flake8-type-checking -> maybe
32 | # "ARG", # flake8-unused-arguments -> No, doesn't work with pytest, django, celery...
33 | "PTH", # flake8-use-pathlib
34 | # "ERA", # eradicate -> No
35 | "PGH", # pygrep-hooks
36 | "PL", # pylint
37 | "TRY", # tryceratops
38 | "RSE", # flake8-raise
39 | "RUF", # ruff-specific
40 | # "C90", # mccabe for cyclomatic complexity -> maybe
41 | "I", # isort
42 | ]
43 |
44 | ignore = [
45 | "S101", # Use of assert detected
46 | "PLR2004", # Magic values are ok
47 | ]
48 |
49 | target-version = "py311"
50 |
51 | line-length = 100
52 |
53 | extend-exclude = [
54 | ".idea",
55 | "migrations",
56 | ]
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: bionic
2 | arch: arm64
3 | language: python
4 | matrix:
5 | include:
6 | - python: 3.8
7 | - python: 3.9
8 | - python: 3.11
9 | before_install:
10 | - pip install poetry==1.5.1
11 | install:
12 | - poetry install --no-interaction --no-ansi -vvv
13 | script:
14 | - ruff tests
15 | - ruff crossref
16 | - pytest
--------------------------------------------------------------------------------
/CHANGES.md:
--------------------------------------------------------------------------------
1 | # 1.6.1
2 |
3 | * Fix Depositor including timeout attribute to it
4 |
5 | # 1.6.0
6 |
7 | * Major packaging restructuring
8 | * Using poetry
9 | * No changes in the source code
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 2-Clause License
2 |
3 | Copyright (c) 2017, Fabio Batalha
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include articlemeta *.thrift
2 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | -------------------
2 | Crossref API Client
3 | -------------------
4 |
5 | Library with functions to iterate through the Crossref API.
6 |
7 | .. image:: https://travis-ci.org/fabiobatalha/crossrefapi.svg?branch=master
8 | :target: https://travis-ci.org/fabiobatalha/crossrefapi
9 |
10 | --------------
11 | How to Install
12 | --------------
13 |
14 | .. code-block:: shell
15 |
16 | pip install crossrefapi
17 |
18 | ----------
19 | How to Use
20 | ----------
21 |
22 | Works
23 | -----
24 |
25 | Agency
26 | ``````
27 |
28 | .. code-block:: python
29 |
30 | In [1]: from crossref.restful import Works
31 |
32 | In [2]: works = Works()
33 |
34 | In [3]: works.agency('10.1590/0102-311x00133115')
35 | Out[3]:
36 | {'DOI': '10.1590/0102-311x00133115',
37 | 'agency': {'id': 'crossref', 'label': 'CrossRef'}}
38 |
39 | Sample
40 | ``````
41 |
42 | .. code-block:: python
43 |
44 | In [1]: from crossref.restful import Works
45 |
46 | In [2]: works = Works()
47 |
48 | In [3]: for item in works.sample(2):
49 | ...: print(item['title'])
50 | ...:
51 | ['On the Origin of the Color-Magnitude Relation in the Virgo Cluster']
52 | ['Biopsychosocial Wellbeing among Women with Gynaecological Cancer']
53 |
54 |
55 | Query
56 | `````
57 |
58 | See valid parameters in :code:`Works.FIELDS_QUERY`
59 |
60 | .. code-block:: python
61 |
62 | In [1]: from crossref.restful import Works
63 |
64 | In [2]: works = Works()
65 |
66 | In [3]: w1 = works.query(bibliographic='zika', author='johannes', publisher_name='Wiley-Blackwell')
67 |
68 | In [4]: for item in w1:
69 | ...: print(item['title'])
70 | ...:
71 | ...:
72 | ['Inactivation and removal of Zika virus during manufacture of plasma-derived medicinal products']
73 | ['Harmonization of nucleic acid testing for Zika virus: development of the 1st\n World Health Organization International Standard']
74 |
75 | Doi
76 | ```
77 |
78 | .. code-block:: python
79 |
80 | In [1]: from crossref.restful import Works
81 |
82 | In [2]: works = Works()
83 |
84 | In [3]: works.doi('10.1590/0102-311x00133115')
85 | Out[3]:
86 | {'DOI': '10.1590/0102-311x00133115',
87 | 'ISSN': ['0102-311X'],
88 | 'URL': 'http://dx.doi.org/10.1590/0102-311x00133115',
89 | 'alternative-id': ['S0102-311X2016001107002'],
90 | 'author': [{'affiliation': [{'name': 'Surin Rajabhat University, Thailand'}],
91 | 'family': 'Wiwanitki',
92 | 'given': 'Viroj'}],
93 | 'container-title': ['Cadernos de Saúde Pública'],
94 | 'content-domain': {'crossmark-restriction': False, 'domain': []},
95 | 'created': {'date-parts': [[2016, 12, 7]],
96 | 'date-time': '2016-12-07T21:52:08Z',
97 | 'timestamp': 1481147528000},
98 | 'deposited': {'date-parts': [[2017, 5, 24]],
99 | 'date-time': '2017-05-24T01:57:26Z',
100 | 'timestamp': 1495591046000},
101 | 'indexed': {'date-parts': [[2017, 5, 24]],
102 | 'date-time': '2017-05-24T22:39:11Z',
103 | 'timestamp': 1495665551858},
104 | 'is-referenced-by-count': 0,
105 | 'issn-type': [{'type': 'electronic', 'value': '0102-311X'}],
106 | 'issue': '11',
107 | 'issued': {'date-parts': [[2016, 11]]},
108 | 'member': '530',
109 | 'original-title': [],
110 | 'prefix': '10.1590',
111 | 'published-print': {'date-parts': [[2016, 11]]},
112 | 'publisher': 'FapUNIFESP (SciELO)',
113 | 'reference-count': 3,
114 | 'references-count': 3,
115 | 'relation': {},
116 | 'score': 1.0,
117 | 'short-container-title': ['Cad. Saúde Pública'],
118 | 'short-title': [],
119 | 'source': 'Crossref',
120 | 'subject': ['Medicine(all)'],
121 | 'subtitle': [],
122 | 'title': ['Congenital Zika virus syndrome'],
123 | 'type': 'journal-article',
124 | 'volume': '32'}
125 |
126 | Filter
127 | ``````
128 |
129 | See valid parameters in :code:`Works.FILTER_VALIDATOR`. Replace :code:`.` with :code:`__` and :code:`-` with :code:`_` when using parameters.
130 |
131 | .. code-block:: python
132 |
133 |     In [1]: from crossref.restful import Works
134 |
135 | In [2]: works = Works()
136 |
137 | In [3]: for i in works.filter(license__url='https://creativecommons.org/licenses/by', from_pub_date='2016').sample(5).select('title'):
138 | ...: print(i)
139 | ...:
140 | {'title': ['Vers une économie circulaire... de proximité ? Une spatialité à géométrie variable']}
141 | {'title': ['The stakeholders of the Olympic System']}
142 | {'title': ["Un cas de compensation écologique dans le secteur minier : la réserve forestière Dékpa (Côte d'Ivoire) au secours des forêts et des populations locales"]}
143 | {'title': ['A simple extension of FFT-based methods to strain gradient loadings - Application to the homogenization of beams and plates with linear and non-linear behaviors']}
144 | {'title': ['Gestion des déchets ménagers dans la ville de Kinshasa : Enquête sur la perception des habitants et propositions']}
145 |
146 | Select
147 | ``````
148 |
149 | See valid parameters in :code:`Works.FIELDS_SELECT`
150 |
151 | .. code-block:: python
152 |
153 | In [1]: from crossref.restful import Works
154 |
155 | In [2]: works = Works()
156 |
157 | In [3]: for i in works.filter(has_funder='true', has_license='true').sample(5).select('DOI, prefix'):
158 | ...: print(i)
159 | ...:
160 | {'DOI': '10.1111/str.12144', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1111'}
161 | {'DOI': '10.1002/admi.201400154', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1002'}
162 | {'DOI': '10.1016/j.surfcoat.2010.10.057', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
163 | {'DOI': '10.1007/s10528-015-9707-8', 'member': 'http://id.crossref.org/member/297', 'prefix': '10.1007'}
164 | {'DOI': '10.1016/j.powtec.2016.04.009', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
165 |
166 | In [4]: for i in works.filter(has_funder='true', has_license='true').sample(5).select(['DOI', 'prefix']):
167 | ...: print(i)
168 | ...:
169 | {'DOI': '10.1002/jgrd.50059', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1002'}
170 | {'DOI': '10.1111/ajt.13880', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1111'}
171 | {'DOI': '10.1016/j.apgeochem.2015.05.006', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
172 | {'DOI': '10.1016/j.triboint.2015.01.023', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
173 | {'DOI': '10.1007/s10854-016-4649-4', 'member': 'http://id.crossref.org/member/297', 'prefix': '10.1007'}
174 |
175 | In [5]: for i in works.filter(has_funder='true', has_license='true').sample(5).select('DOI').select('prefix'):
176 | ...: print(i)
177 | ...:
178 | {'DOI': '10.1002/mrm.25790', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1002'}
179 | {'DOI': '10.1016/j.istruc.2016.11.001', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
180 | {'DOI': '10.1002/anie.201505015', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1002'}
181 | {'DOI': '10.1016/j.archoralbio.2010.11.011', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
182 | {'DOI': '10.1145/3035918.3064012', 'member': 'http://id.crossref.org/member/320', 'prefix': '10.1145'}
183 |
184 | In [6]: for i in works.filter(has_funder='true', has_license='true').sample(5).select('DOI', 'prefix'):
185 | ...: print(i)
186 | ...:
187 | {'DOI': '10.1016/j.cplett.2015.11.062', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
188 | {'DOI': '10.1016/j.bjp.2015.06.001', 'member': 'http://id.crossref.org/member/78', 'prefix': '10.1016'}
189 | {'DOI': '10.1111/php.12613', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1111'}
190 | {'DOI': '10.1002/cfg.144', 'member': 'http://id.crossref.org/member/98', 'prefix': '10.1155'}
191 | {'DOI': '10.1002/alr.21987', 'member': 'http://id.crossref.org/member/311', 'prefix': '10.1002'}
192 |
193 | Facet
194 | `````
195 |
196 | .. code-block:: python
197 |
198 | In [1]: from crossref.restful import Works, Prefixes
199 |
200 | In [2]: works = Works()
201 |
202 | In [3]: works.facet('issn', 10)
203 | Out[3]:
204 | {'issn': {'value-count': 10,
205 | 'values': {'http://id.crossref.org/issn/0009-2975': 306546,
206 | 'http://id.crossref.org/issn/0028-0836': 395353,
207 | 'http://id.crossref.org/issn/0140-6736': 458909,
208 | 'http://id.crossref.org/issn/0302-9743': 369955,
209 | 'http://id.crossref.org/issn/0931-7597': 487523,
210 | 'http://id.crossref.org/issn/0959-8138': 392754,
211 | 'http://id.crossref.org/issn/1095-9203': 253978,
212 | 'http://id.crossref.org/issn/1468-5833': 388355,
213 | 'http://id.crossref.org/issn/1556-5068': 273653,
214 | 'http://id.crossref.org/issn/1611-3349': 329573}}}
215 |
216 | In [4]: prefixes = Prefixes()
217 |
218 | In [5]: prefixes.works('10.1590').facet('issn', 10)
219 | Out[5]:
220 | {'issn': {'value-count': 10,
221 | 'values': {'http://id.crossref.org/issn/0004-282X': 7712,
222 | 'http://id.crossref.org/issn/0034-8910': 4752,
223 | 'http://id.crossref.org/issn/0037-8682': 4179,
224 | 'http://id.crossref.org/issn/0074-0276': 7941,
225 | 'http://id.crossref.org/issn/0100-204X': 3946,
226 | 'http://id.crossref.org/issn/0100-4042': 4198,
227 | 'http://id.crossref.org/issn/0102-311X': 6548,
228 | 'http://id.crossref.org/issn/0103-8478': 6607,
229 | 'http://id.crossref.org/issn/1413-8123': 4658,
230 | 'http://id.crossref.org/issn/1516-3598': 4678}}}
231 |
232 | In [6]: prefixes.works('10.1590').query('zika').facet('issn', 10)
233 | Out[6]:
234 | {'issn': {'value-count': 10,
235 | 'values': {'http://id.crossref.org/issn/0004-282X': 4,
236 | 'http://id.crossref.org/issn/0036-4665': 4,
237 | 'http://id.crossref.org/issn/0037-8682': 7,
238 | 'http://id.crossref.org/issn/0074-0276': 7,
239 | 'http://id.crossref.org/issn/0102-311X': 12,
240 | 'http://id.crossref.org/issn/0103-7331': 2,
241 | 'http://id.crossref.org/issn/0104-4230': 3,
242 | 'http://id.crossref.org/issn/1519-3829': 7,
243 | 'http://id.crossref.org/issn/1679-4508': 2,
244 | 'http://id.crossref.org/issn/1806-8324': 2}}}
245 |
246 | Journals
247 | --------
248 |
249 | Exemplifying the use of API Library to retrieve data from Journals endpoint.
250 |
251 | .. code-block:: python
252 |
253 | In [1]: from crossref.restful import Journals
254 |
255 | In [2]: journals = Journals()
256 |
257 | In [3]: journals.journal('0102-311X')
258 | Out[3]:
259 | {'ISSN': ['0102-311X', '0102-311X'],
260 | 'breakdowns': {'dois-by-issued-year': [[2013, 462],
261 | [2007, 433],
262 | [2008, 416],
263 | [2009, 347],
264 | [2006, 344],
265 | [2014, 292],
266 | [2004, 275],
267 | [2012, 273],
268 | [2011, 270],
269 | [2010, 270],
270 | [2005, 264],
271 | [2003, 257],
272 | [2001, 220],
273 | [2002, 219],
274 | [1998, 187],
275 | [2000, 169],
276 | [1997, 142],
277 | [1999, 136],
278 | [1994, 110],
279 | [1995, 104],
280 | [1996, 103],
281 | [1993, 99],
282 | [2015, 93],
283 | [1992, 65],
284 | [1986, 63],
285 | [1985, 53],
286 | [1990, 49],
287 | [1988, 49],
288 | [1991, 48],
289 | [1987, 46],
290 | [1989, 45]]},
291 | 'counts': {'backfile-dois': 5565, 'current-dois': 335, 'total-dois': 5900},
292 | 'coverage': {'award-numbers-backfile': 0.0,
293 | 'award-numbers-current': 0.0,
294 | 'funders-backfile': 0.0,
295 | 'funders-current': 0.0,
296 | 'licenses-backfile': 0.0,
297 | 'licenses-current': 0.0,
298 | 'orcids-backfile': 0.0,
299 | 'orcids-current': 0.0,
300 | 'references-backfile': 0.0,
301 | 'references-current': 0.0,
302 | 'resource-links-backfile': 0.0,
303 | 'resource-links-current': 0.0,
304 | 'update-policies-backfile': 0.0,
305 | 'update-policies-current': 0.0},
306 | 'flags': {'deposits': True,
307 | 'deposits-articles': True,
308 | 'deposits-award-numbers-backfile': False,
309 | 'deposits-award-numbers-current': False,
310 | 'deposits-funders-backfile': False,
311 | 'deposits-funders-current': False,
312 | 'deposits-licenses-backfile': False,
313 | 'deposits-licenses-current': False,
314 | 'deposits-orcids-backfile': False,
315 | 'deposits-orcids-current': False,
316 | 'deposits-references-backfile': False,
317 | 'deposits-references-current': False,
318 | 'deposits-resource-links-backfile': False,
319 | 'deposits-resource-links-current': False,
320 | 'deposits-update-policies-backfile': False,
321 | 'deposits-update-policies-current': False},
322 | 'last-status-check-time': 1459491023622,
323 | 'publisher': 'SciELO',
324 | 'title': 'Cadernos de Saúde Pública'}
325 |
326 | In [4]: journals.journal_exists('0102-311X')
327 | Out[4]: True
328 |
329 | In [5]: journals.query('Cadernos').url
330 | Out[5]: 'https://api.crossref.org/journals?query=Cadernos'
331 |
332 | In [6]: journals.query('Cadernos').count()
333 | Out[6]: 60
334 |
335 | In [7]: journals.works('0102-311X').query('zika').url
336 | Out[7]: 'https://api.crossref.org/journals/0102-311X/works?query=zika'
337 |
338 | In [8]: journals.works('0102-311X').query('zika').count()
339 | Out[8]: 12
340 |
341 | In [9]: journals.works('0102-311X').query('zika').query(author='Diniz').url
342 | Out[9]: 'https://api.crossref.org/journals/0102-311X/works?query.author=Diniz&query=zika'
343 |
344 | In [10]: journals.works('0102-311X').query('zika').query(author='Diniz').count()
345 | Out[10]: 1
346 |
347 | Base Methods
348 | ------------
349 |
350 | The base methods could be used along with the query, filter, sort, order and facet methods.
351 |
352 | Version
353 | ```````
354 |
355 | This method returns the Crossref API version.
356 |
357 | .. code-block:: python
358 |
359 | In [1]: from crossref.restful import Journals
360 |
361 | In [2]: journals = Journals()
362 |
363 | In [3]: journals.version
364 | Out[3]: '1.0.0'
365 |
366 | Count
367 | `````
368 | This method returns the total number of items a query result should retrieve. This method will not
369 | iterate through and retrieve the API documents. This method will fetch 0 documents
370 | and retrieve the value of **total-result** attribute.
371 |
372 | .. code-block:: python
373 |
374 | In [1]: from crossref.restful import Works
375 |
376 | In [2]: works = Works()
377 |
378 | In [3]: works.query('zika').count()
379 | Out[3]: 3597
380 |
381 | In [4]: works.query('zika').filter(from_online_pub_date='2017').count()
382 | Out[4]: 444
383 |
384 | Url
385 | ```
386 |
387 | This method returns the url that will be used to query the Crossref API.
388 |
389 | .. code-block:: python
390 |
391 | In [1]: from crossref.restful import Works
392 |
393 | In [2]: works = Works()
394 |
395 | In [3]: works.query('zika').url
396 | Out[3]: 'https://api.crossref.org/works?query=zika'
397 |
398 | In [4]: works.query('zika').filter(from_online_pub_date='2017').url
399 | Out[4]: 'https://api.crossref.org/works?query=zika&filter=from-online-pub-date%3A2017'
400 |
401 | In [5]: works.query('zika').filter(from_online_pub_date='2017').query(author='Mari').url
402 | Out[5]: 'https://api.crossref.org/works?query.author=Mari&filter=from-online-pub-date%3A2017&query=zika'
403 |
404 | In [6]: works.query('zika').filter(from_online_pub_date='2017').query(author='Mari').sort('published').url
405 | Out[6]: 'https://api.crossref.org/works?query.author=Mari&query=zika&filter=from-online-pub-date%3A2017&sort=published'
406 |
407 | In [7]: works.query('zika').filter(from_online_pub_date='2017').query(author='Mari').sort('published').order('asc').url
408 | Out[7]: 'https://api.crossref.org/works?filter=from-online-pub-date%3A2017&query.author=Mari&order=asc&query=zika&sort=published'
409 |
410 | In [8]: from crossref.restful import Prefixes
411 |
412 | In [9]: prefixes = Prefixes()
413 |
414 | In [10]: prefixes.works('10.1590').query('zike').url
415 | Out[10]: 'https://api.crossref.org/prefixes/10.1590/works?query=zike'
416 |
417 | In [11]: from crossref.restful import Journals
418 |
419 | In [12]: journals = Journals()
420 |
421 | In [13]: journals.url
422 | Out[13]: 'https://api.crossref.org/journals'
423 |
424 | In [14]: journals.works('0102-311X').url
425 | Out[14]: 'https://api.crossref.org/journals/0102-311X/works'
426 |
427 | In [15]: journals.works('0102-311X').query('zika').url
428 | Out[15]: 'https://api.crossref.org/journals/0102-311X/works?query=zika'
429 |
430 | In [16]: journals.works('0102-311X').query('zika').count()
431 | Out[16]: 12
432 |
433 | All
434 | ```
435 |
436 | This method returns all items of an endpoint. It will use the limit offset
437 | parameters to iterate through the endpoints Journals, Types, Members and Prefixes.
438 |
439 | For the **works** endpoint, the library will make use of the **cursor** to paginate through
440 | API until it is totally consumed.
441 |
442 | .. code-block:: python
443 |
444 | In [1]: from crossref.restful import Journals
445 |
446 | In [2]: journals = Journals()
447 |
448 | In [3]: for item in journals.all():
449 | ...: print(item['title'])
450 | ...:
451 | JNSM
452 | New Comprehensive Biochemistry
453 | New Frontiers in Ophthalmology
454 | Oral Health Case Reports
455 | Orbit A Journal of American Literature
456 | ORDO
457 |
458 | Support for Polite Requests (Etiquette)
459 | ---------------------------------------
460 |
461 | Respecting the Crossref API policies for polite requests, this library allows users
462 | to set up an Etiquette object to be used in the HTTP requests.
463 |
464 | .. code-block:: python
465 |
466 | In [1]: from crossref.restful import Works, Etiquette
467 |
468 | In [2]: my_etiquette = Etiquette('My Project Name', 'My Project version', 'My Project URL', 'My contact email')
469 |
470 | In [3]: str(my_etiquette)
471 | Out[3]: 'My Project Name/My Project version (My Project URL; mailto:My contact email) BasedOn: CrossrefAPI/1.1.0'
472 |
473 | In [4]: my_etiquette = Etiquette('My Project Name', '0.2alpha', 'https://myalphaproject.com', 'anonymous@myalphaproject.com')
474 |
475 | In [5]: str(my_etiquette)
476 | Out[5]: 'My Project Name/0.2alpha (https://myalphaproject.com; mailto:anonymous@myalphaproject.com) BasedOn: CrossrefAPI/1.1.0'
477 |
478 | In [6]: works = Works(etiquette=my_etiquette)
479 |
480 | In [7]: for i in works.sample(5).select('DOI'):
481 | ...: print(i)
482 | ...:
483 |
484 | {'DOI': '10.1016/j.ceramint.2014.10.086'}
485 | {'DOI': '10.1016/j.biomaterials.2012.02.034'}
486 | {'DOI': '10.1001/jamaoto.2013.6450'}
487 | {'DOI': '10.1016/s0021-9290(17)30138-0'}
488 | {'DOI': '10.1109/ancs.2011.11'}
489 |
490 | Voilá!!! The requests made for the Crossref API, were made setting the user-agent as: 'My Project Name/0.2alpha (https://myalphaproject.com; mailto:anonymous@myalphaproject.com) BasedOn: CrossrefAPI/1.1.0'
491 |
492 | Depositing Metadata to Crossref
493 | -------------------------------
494 |
495 | This library implements the deposit operation "doMDUpload", which means you are able to submit Digital Object Metadata to Crossref. See more at: https://support.crossref.org/hc/en-us/articles/214960123
496 |
497 | To do that, you must have an active publisher account with crossref.org.
498 |
499 | First of all, you need a valid XML following the crossref DTD.
500 |
501 | .. code-block:: xml
502 |
503 |
504 |
505 |
506 | c5473e12dc8e4f36a40f76f8eae15280
507 | 20171009132847
508 |
509 | SciELO
510 | crossref@scielo.org
511 |
512 | SciELO
513 |
514 |
515 |
516 |
517 | Revista Brasileira de Ciência Avícola
518 | Rev. Bras. Cienc. Avic.
519 | 1516-635X
520 |
521 |
522 |
523 | 09
524 | 2017
525 |
526 |
527 | 19
528 |
529 | 3
530 |
531 |
532 |
533 | Climatic Variation: Effects on Stress Levels, Feed Intake, and Bodyweight of Broilers
534 |
535 |
536 |
537 | R
538 | Osti
539 | Huazhong Agricultural University, China
540 |
541 |
542 | D
543 | Bhattarai
544 | Huazhong Agricultural University, China
545 |
546 |
547 | D
548 | Zhou
549 | Huazhong Agricultural University, China
550 |
551 |
552 |
553 | 09
554 | 2017
555 |
556 |
557 | 489
558 | 496
559 |
560 |
561 | S1516-635X2017000300489
562 |
563 |
564 | 10.1590/1806-9061-2017-0494
565 | http://www.scielo.br/scielo.php?script=sci_arttext&pid=S1516-635X2017000300489&lng=en&tlng=en
566 |
567 |
568 |
569 | Journal of Agriculture Science
570 | Alade O
571 | 5
572 | 176
573 | 2013
574 | Perceived effect of climate variation on poultry production in Oke Ogun area of Oyo State
575 |
576 |
577 | ...
578 |
579 |
580 | Poultry Science
581 | Zulkifli I
582 | 88
583 | 471
584 | 2009
585 | Crating and heat stress influence blood parameters and heat shock protein 70 expression in broiler chickens showing short or long tonic immobility reactions
586 |
587 |
588 |
589 |
590 |
591 |
592 |
593 | Second! Using the library
594 |
595 | .. code-block:: python
596 |
597 | In [1]: from crossref.restful import Depositor
598 |
599 | In [2]: request_xml = open('tests/fixtures/deposit_xml_sample.xml', 'r').read()
600 |
601 | In [3]: depositor = Depositor('your prefix', 'your crossref user', 'your crossref password')
602 |
603 | In [4]: response = depositor.register_doi('testing_20171011', request_xml)
604 |
605 | In [5]: response.status_code
606 | Out[5]: 200
607 |
608 | In [6]: response.text
609 | Out[6]: '\n\n\n\n\n
SUCCESS\n\n\nSUCCESS
\nYour batch submission was successfully received.
\n\n\n'
610 |
611 | In [7]: response = depositor.request_doi_status_by_filename('testing_20171011.xml')
612 |
613 | In [8]: response.text
614 | Out[8]: '\n\r\n 1415653976\r\n \r\n'
615 |
616 | In [9]: response = depositor.request_doi_status_by_filename('testing_20171011.xml')
617 |
618 | In [10]: response.text
619 | Out[10]: '\n\r\n 1415653976\r\n \r\n'
620 |
621 | In [11]: response = depositor.request_doi_status_by_filename('testing_20171011.xml', data_type='result')
622 |
623 | In [12]: response.text
624 | Out[12]: '\n\r\n 1415653976\r\n \r\n'
625 |
626 | In [13]: response = depositor.request_doi_status_by_filename('testing_20171011.xml', data_type='contents')
627 |
628 | In [14]: response.text
629 | Out[14]: '\n\n \n c5473e12dc8e4f36a40f76f8eae15280\n 20171009132847\n \n SciELO\n crossref@scielo.org\n \n SciELO\n \n \n \n \n Revista Brasileira de Ciência Avícola\n Rev. Bras. Cienc. Avic.\n 1516-635X\n \n \n \n 09\n 2017\n \n \n 19\n \n 3\n \n \n \n Climatic Variation: Effects on Stress Levels, Feed Intake, and Bodyweight of Broilers\n \n \n \n R\n Osti\n Huazhong Agricultural University, China\n \n \n D\n Bhattarai\n Huazhong Agricultural University, China\n \n \n D\n Zhou\n Huazhong Agricultural University, China\n \n \n \n 09\n 2017\n \n \n 489\n 496\n \n \n S1516-635X2017000300489\n \n'
630 |
631 | In [15]: response = depositor.request_doi_status_by_filename('testing_20171011.xml', data_type='result')
632 |
633 | In [16]: response.text
634 | Out[16]:
635 |
636 | 1415649102
637 | 9112073c7f474394adc01b82e27ea2a8
638 |
639 | 10.1590/0037-8682-0216-2016
640 | Successfully updated
641 |
642 | 10.1590/0037-8682-0284-2014
643 | 10.1371/journal.pone.0090237
644 | 10.1093/infdis/172.6.1561
645 | 10.1016/j.ijpara.2011.01.005
646 | 10.1016/j.rvsc.2013.01.006
647 | 10.1093/trstmh/tru113
648 | 10.1590/0074-02760150459
649 |
650 |
651 |
652 | 1
653 | 1
654 | 0
655 | 0
656 |
657 |
658 |
659 | Explaining the code
660 | ```````````````````
661 |
662 | **Line 1:** Importing the Depositor Class
663 |
664 | **Line 2:** Loading a valid XML for deposit
665 |
666 | **Line 3:** Creating an instance of Depositor. You should use your crossref credentials at this point. If you want to be polite, you should also give an etiquette object at this moment.
667 |
668 | .. code-block:: python
669 |
670 | etiquette = Etiquette('My Project Name', 'My Project version', 'My Project URL', 'My contact email')
671 | Depositor('your prefix', 'your crossref user', 'your crossref password', etiquette)
672 |
673 | **Line 4:** Requesting the DOI (it does not mean your DOI was registered; it is just a DOI request)
674 |
675 | **Line 5:** Checking the DOI request response.
676 |
677 | **Line 6:** Printing the DOI request response body.
678 |
679 | **Line 7:** Requesting the DOI registering status.
680 |
681 | **Line 8:** Checking the DOI registering status, reading the body of the response. You should parse this XML to have the current status of the DOI registering request. You should do this until a success or error status is retrieved.
682 |
683 | **Line 9-12:** Rechecking the request status. It is still in queue. You can also set the response type between ['result', 'contents'], where result will retrieve the status of the DOI registering process, and contents will retrieve the submitted XML content while requesting the DOI.
684 |
685 | **Line 13-14:** Checking the content submitted passing the attribute data_type='contents'.
686 |
687 | **Line 15-16:** After a while, the success status was received.
688 |
689 |
690 |
691 |
--------------------------------------------------------------------------------
/crossref/__init__.py:
--------------------------------------------------------------------------------
from importlib import metadata

# Version of the installed "crossrefapi" distribution, resolved from the
# package metadata at import time (raises PackageNotFoundError when the
# package is not installed). Imported by crossref.restful as VERSION.
VERSION = metadata.version("crossrefapi")
4 |
--------------------------------------------------------------------------------
/crossref/restful.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import contextlib
4 | import typing
5 | from time import sleep
6 |
7 | import requests
8 |
9 | from crossref import VERSION, validators
10 |
11 | LIMIT = 100
12 | MAXOFFSET = 10000
13 | FACETS_MAX_LIMIT = 1000
14 |
15 | API = "api.crossref.org"
16 |
17 |
class CrossrefAPIError(Exception):
    """Base class for every error raised by the Crossref API client."""
20 |
21 |
class MaxOffsetError(CrossrefAPIError):
    """Raised when offset-based deep paging exceeds the API's maximum offset."""
24 |
25 |
class UrlSyntaxError(CrossrefAPIError, ValueError):
    """Raised when a request URL would be built from invalid parameters."""
28 |
29 |
class HTTPRequest:
    """Thin wrapper around ``requests`` that optionally throttles calls
    according to the ``x-rate-limit-*`` headers advertised by the Crossref API.
    """

    def __init__(self, throttle=True):
        # When True, sleep between requests based on the server's rate limits.
        self.throttle = throttle
        # Conservative defaults, used until the server reports its own limits.
        self.rate_limits = {"x-rate-limit-limit": 50, "x-rate-limit-interval": 1}

    def _update_rate_limits(self, headers):
        """Refresh ``self.rate_limits`` from a response's headers.

        The interval header looks like ``1s``/``2m``/``1h`` and is converted
        to seconds. Malformed values are ignored, keeping the previous limits.
        """
        with contextlib.suppress(ValueError):
            self.rate_limits["x-rate-limit-limit"] = int(headers.get("x-rate-limit-limit", 50))

        with contextlib.suppress(ValueError):
            interval_value = int(headers.get("x-rate-limit-interval", "1s")[:-1])

            interval_scope = headers.get("x-rate-limit-interval", "1s")[-1]

            if interval_scope == "m":
                interval_value = interval_value * 60

            if interval_scope == "h":
                interval_value = interval_value * 60 * 60

            self.rate_limits["x-rate-limit-interval"] = interval_value

    @property
    def throttling_time(self):
        """Seconds to sleep between requests (interval / allowed requests)."""
        return self.rate_limits["x-rate-limit-interval"] / self.rate_limits["x-rate-limit-limit"]

    def do_http_request(  # noqa: PLR0913
        self,
        method,
        endpoint,
        data=None,
        files=None,
        timeout=100,
        only_headers=False,
        custom_header=None,
        throttle=None,
    ):
        """Perform an HTTP request against ``endpoint``.

        method: "post" for a POST request, anything else performs a GET.
        data: query parameters (GET) or form payload (POST).
        files: multipart payload, POST only.
        timeout: request timeout in seconds.
        only_headers: when True, issue a HEAD request and return immediately.
        custom_header: headers dict; defaults to a polite user-agent.
        throttle: optional per-call override of ``self.throttle``. ``None``
            (the default) keeps the instance-level behavior. This parameter
            makes calls such as ``do_http_request(..., throttle=False)``
            (used for rate-limit probing) valid.

        return: the ``requests.Response`` object.
        """
        if only_headers is True:
            return requests.head(endpoint, timeout=2)

        action = requests.post if method == "post" else requests.get

        headers = custom_header if custom_header else {"user-agent": str(Etiquette())}
        if method == "post":
            result = action(endpoint, data=data, files=files, timeout=timeout, headers=headers)
        else:
            result = action(endpoint, params=data, timeout=timeout, headers=headers)

        # A per-call override takes precedence over the instance setting.
        effective_throttle = self.throttle if throttle is None else throttle
        if effective_throttle is True:
            self._update_rate_limits(result.headers)
            sleep(self.throttling_time)

        return result
85 |
86 |
def build_url_endpoint(endpoint, context=None):
    """Compose the full Crossref API URL for *endpoint*, optionally nested
    under *context* (e.g. ``members/10``)."""
    fragments = [fragment for fragment in (context, endpoint) if fragment]
    path = "/".join(fragments)
    return "https://{}/{}".format(API, path)
91 |
92 |
class Etiquette:
    """Identification data sent to Crossref in the ``user-agent`` header,
    following the API's "be polite" guidelines."""

    def __init__(
        self,
        application_name="undefined",
        application_version="undefined",
        application_url="undefined",
        contact_email="anonymous",
    ):
        self.application_name = application_name
        self.application_version = application_version
        self.application_url = application_url
        self.contact_email = contact_email

    def __str__(self):
        # Rendered as the user-agent string for every request.
        return (
            f"{self.application_name}/{self.application_version}"
            f" ({self.application_url}; mailto:{self.contact_email})"
            f" BasedOn: CrossrefAPI/{VERSION}"
        )
114 |
115 |
class Endpoint:
    """Base class for the Crossref API routes.

    Instances are lazy: the fluent methods defined in subclasses only
    accumulate request parameters, and the HTTP request happens on
    iteration or through explicit accessors such as ``count``/``version``.
    """

    # When True, iteration pages with the API's cursor mechanism instead of
    # offset paging (which is capped at MAXOFFSET records).
    CURSOR_AS_ITER_METHOD = False

    def __init__(  # noqa: PLR0913
        self,
        request_url=None,
        request_params=None,
        context=None,
        etiquette=None,
        throttle=True,
        crossref_plus_token=None,
        timeout=30,
    ):
        """
        request_url: full URL for the route; derived from ENDPOINT/context
            when not given.
        request_params: initial query parameters.
        context: route prefix (e.g. ``members/10``) used when building URLs.
        etiquette: Etiquette instance used for the user-agent header.
        throttle: when True, respect the API's advertised rate limits.
        crossref_plus_token: Crossref Plus authentication token, sent as the
            ``Crossref-Plus-API-Token`` header when present.
        timeout: request timeout in seconds.
        """
        self.do_http_request = HTTPRequest(throttle=throttle).do_http_request
        self.etiquette = etiquette or Etiquette()
        self.custom_header = {"user-agent": str(self.etiquette)}
        self.crossref_plus_token = crossref_plus_token
        if crossref_plus_token:
            self.custom_header["Crossref-Plus-API-Token"] = self.crossref_plus_token
        self.request_url = request_url or build_url_endpoint(self.ENDPOINT, context)
        self.request_params = request_params or {}
        self.context = context or ""
        self.timeout = timeout

    @property
    def _rate_limits(self):
        # Probe the current rate-limit headers with a HEAD request,
        # explicitly disabling throttling for the probe itself.
        request_url = str(self.request_url)

        result = self.do_http_request(
            "get",
            request_url,
            only_headers=True,
            custom_header=self.custom_header,
            timeout=self.timeout,
            throttle=False,
        )

        return {
            "x-rate-limit-limit": result.headers.get("x-rate-limit-limit", "undefined"),
            "x-rate-limit-interval": result.headers.get("x-rate-limit-interval", "undefined"),
        }

    def _escaped_pagging(self):
        # Return a copy of the request parameters without the paging keys;
        # used when a request (url/sample) must not carry offset/rows.
        escape_pagging = ["offset", "rows"]
        request_params = dict(self.request_params)

        for item in escape_pagging:
            with contextlib.suppress(KeyError):
                del request_params[item]

        return request_params

    @property
    def version(self):
        """
        This attribute retrieve the API version.

        >>> Works().version
        '1.0.0'
        """
        request_params = dict(self.request_params)
        request_url = str(self.request_url)

        result = self.do_http_request(
            "get",
            request_url,
            data=request_params,
            custom_header=self.custom_header,
            timeout=self.timeout,
        ).json()

        return result["message-version"]

    @property
    def x_rate_limit_limit(self):
        """Requests allowed per interval, as reported by the server."""
        return self._rate_limits.get("x-rate-limit-limit", "undefined")

    @property
    def x_rate_limit_interval(self):
        """Rate-limit interval, as reported by the server."""
        return self._rate_limits.get("x-rate-limit-interval", "undefined")

    def count(self):
        """
        This method retrieve the total of records resulting from a given query.

        This attribute can be used compounded with query, filter,
        sort, order and facet methods.

        Examples:
            >>> from crossref.restful import Works
            >>> Works().query('zika').count()
            3597
            >>> Works().query('zika').filter(prefix='10.1590').count()
            61
            >>> Works().query('zika').filter(prefix='10.1590').sort('published') \
                .order('desc').filter(has_abstract='true').count()
            14
            >>> Works().query('zika').filter(prefix='10.1590').sort('published') \
                .order('desc').filter(has_abstract='true').query(author='Marli').count()
            1
        """
        request_params = dict(self.request_params)
        request_url = str(self.request_url)
        # rows=0 asks only for the result metadata, not the items themselves.
        request_params["rows"] = 0

        result = self.do_http_request(
            "get",
            request_url,
            data=request_params,
            custom_header=self.custom_header,
            timeout=self.timeout,
        ).json()

        return int(result["message"]["total-results"])

    @property
    def url(self):
        """
        This attribute retrieve the url that will be used as a HTTP request to
        the Crossref API.

        This attribute can be used compounded with query, filter,
        sort, order and facet methods.

        Examples:
            >>> from crossref.restful import Works
            >>> Works().query('zika').url
            'https://api.crossref.org/works?query=zika'
            >>> Works().query('zika').filter(prefix='10.1590').url
            'https://api.crossref.org/works?query=zika&filter=prefix%3A10.1590'
            >>> Works().query('zika').filter(prefix='10.1590').sort('published') \
                .order('desc').url
            'https://api.crossref.org/works?sort=published
            &order=desc&query=zika&filter=prefix%3A10.1590'
            >>> Works().query('zika').filter(prefix='10.1590').sort('published') \
                .order('desc').filter(has_abstract='true').query(author='Marli').url
            'https://api.crossref.org/works?sort=published
            &filter=prefix%3A10.1590%2Chas-abstract%3Atrue&query=zika&order=desc&query.author=Marli'
        """
        request_params = self._escaped_pagging()

        # Sorting yields a deterministic URL for identical parameter sets.
        sorted_request_params = sorted([(k, v) for k, v in request_params.items()])
        req = requests.Request("get", self.request_url, params=sorted_request_params).prepare()

        return req.url

    def all(self, request_params: dict | None = None):  # noqa: A003
        """Iterate over every record of this route.

        request_params: optional dict of extra request parameters; defaults
            to None (no extra parameters), so ``.all()`` can be called with
            no arguments.

        return: iterator over the route's records.
        """
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT, context)

        if request_params is None:
            request_params = {}

        return iter(
            self.__class__(
                request_url=request_url,
                request_params=request_params,
                context=context,
                etiquette=self.etiquette,
                crossref_plus_token=self.crossref_plus_token,
                timeout=self.timeout,
            ),
        )

    def __iter__(self):  # noqa: PLR0912 - To many branches is not a problem.
        request_url = str(self.request_url)

        # A "sample" request returns a single random page: no paging at all.
        if "sample" in self.request_params:
            request_params = self._escaped_pagging()
            result = self.do_http_request(
                "get",
                self.request_url,
                data=request_params,
                custom_header=self.custom_header,
                timeout=self.timeout,
            )

            if result.status_code == 404:
                return

            result = result.json()

            for item in result["message"]["items"]:
                yield item

            return

        if self.CURSOR_AS_ITER_METHOD is True:
            # Cursor-based deep paging: follow "next-cursor" until exhausted.
            request_params = dict(self.request_params)
            request_params["cursor"] = "*"
            request_params["rows"] = LIMIT
            while True:
                result = self.do_http_request(
                    "get",
                    request_url,
                    data=request_params,
                    custom_header=self.custom_header,
                    timeout=self.timeout,
                )

                if result.status_code == 404:
                    return

                result = result.json()

                if len(result["message"]["items"]) == 0:
                    return

                for item in result["message"]["items"]:
                    yield item

                request_params["cursor"] = result["message"]["next-cursor"]
        else:
            # Offset-based paging, capped by the API at MAXOFFSET records.
            request_params = dict(self.request_params)
            request_params["offset"] = 0
            request_params["rows"] = LIMIT
            while True:
                result = self.do_http_request(
                    "get",
                    request_url,
                    data=request_params,
                    custom_header=self.custom_header,
                    timeout=self.timeout,
                )

                if result.status_code == 404:
                    return

                result = result.json()

                if len(result["message"]["items"]) == 0:
                    return

                for item in result["message"]["items"]:
                    yield item

                request_params["offset"] += LIMIT

                if request_params["offset"] >= MAXOFFSET:
                    msg = "Offset exceded the max offset of %d"
                    raise MaxOffsetError(msg, MAXOFFSET)
359 |
360 |
class Works(Endpoint):
    """Endpoint for the Crossref ``/works`` route."""

    # /works supports cursor-based deep paging.
    CURSOR_AS_ITER_METHOD = True

    ENDPOINT = "works"

    # Values accepted by order().
    ORDER_VALUES = ("asc", "desc", "1", "-1")

    # Field names accepted by sort().
    SORT_VALUES = (
        "created",
        "deposited",
        "indexed",
        "is-referenced-by-count",
        "issued",
        "published",
        "published-online",
        "published-print",
        "references-count",
        "relevance",
        "score",
        "submitted",
        "updated",
    )

    # Keyword names accepted by query(); underscores are mapped to hyphens
    # when building the "query.<field>" URL parameter.
    FIELDS_QUERY = (
        "affiliation",
        "author",
        "bibliographic",
        "chair",
        "container_title",
        "contributor",
        "editor",
        "event_acronym",
        "event_location",
        "event_name",
        "event_sponsor",
        "event_theme",
        "funder_name",
        "publisher_location",
        "publisher_name",
        "translator",
    )

    # Metadata field names accepted by select().
    FIELDS_SELECT = (
        "DOI",
        "ISBN",
        "ISSN",
        "URL",
        "abstract",
        "accepted",
        "alternative-id",
        "approved",
        "archive",
        "article-number",
        "assertion",
        "author",
        "chair",
        "clinical-trial-number",
        "container-title",
        "content-created",
        "content-domain",
        "created",
        "degree",
        "deposited",
        "editor",
        "event",
        "funder",
        "group-title",
        "indexed",
        "is-referenced-by-count",
        "issn-type",
        "issue",
        "issued",
        "license",
        "link",
        "member",
        "original-title",
        "page",
        "posted",
        "prefix",
        "published",
        "published-online",
        "published-print",
        "publisher",
        "publisher-location",
        "reference",
        "references-count",
        "relation",
        "score",
        "short-container-title",
        "short-title",
        "standards-body",
        "subject",
        "subtitle",
        "title",
        "translator",
        "type",
        "update-policy",
        "update-to",
        "updated-by",
        "volume",
    )

    # Filters accepted by filter(); the value is an optional validator
    # callable applied to the filter's value (None means no validation).
    FILTER_VALIDATOR: typing.ClassVar[dict] = {
        "alternative_id": None,
        "archive": validators.archive,
        "article_number": None,
        "assertion": None,
        "assertion-group": None,
        "award.funder": None,
        "award.number": None,
        "category-name": None,
        "clinical-trial-number": None,
        "container-title": None,
        "content-domain": None,
        "directory": validators.directory,
        "doi": None,
        "from-accepted-date": validators.is_date,
        "from-created-date": validators.is_date,
        "from-deposit-date": validators.is_date,
        "from-event-end-date": validators.is_date,
        "from-event-start-date": validators.is_date,
        "from-index-date": validators.is_date,
        "from-issued-date": validators.is_date,
        "from-online-pub-date": validators.is_date,
        "from-posted-date": validators.is_date,
        "from-print-pub-date": validators.is_date,
        "from-pub-date": validators.is_date,
        "from-update-date": validators.is_date,
        "full-text.application": None,
        "full-text.type": None,
        "full-text.version": None,
        "funder": None,
        "funder-doi-asserted-by": None,
        "group-title": None,
        "has-abstract": validators.is_bool,
        "has-affiliation": validators.is_bool,
        "has-archive": validators.is_bool,
        "has-assertion": validators.is_bool,
        "has-authenticated-orcid": validators.is_bool,
        "has-award": validators.is_bool,
        "has-clinical-trial-number": validators.is_bool,
        "has-content-domain": validators.is_bool,
        "has-domain-restriction": validators.is_bool,
        "has-event": validators.is_bool,
        "has-full-text": validators.is_bool,
        "has-funder": validators.is_bool,
        "has-funder-doi": validators.is_bool,
        "has-license": validators.is_bool,
        "has-orcid": validators.is_bool,
        "has-references": validators.is_bool,
        "has-relation": validators.is_bool,
        "has-update": validators.is_bool,
        "has-update-policy": validators.is_bool,
        "is-update": validators.is_bool,
        "isbn": None,
        "issn": None,
        "license.delay": validators.is_integer,
        "license.url": None,
        "license.version": None,
        "location": None,
        "member": validators.is_integer,
        "orcid": None,
        "prefix": None,
        "relation.object": None,
        "relation.object-type": None,
        "relation.type": None,
        "type": validators.document_type,
        "type-name": None,
        "until-accepted-date": validators.is_date,
        "until-created-date": validators.is_date,
        "until-deposit-date": validators.is_date,
        "until-event-end-date": validators.is_date,
        "until-event-start-date": validators.is_date,
        "until-index-date": validators.is_date,
        "until-issued-date": validators.is_date,
        "until-online-pub-date": validators.is_date,
        "until-posted-date": validators.is_date,
        "until-print-pub-date": validators.is_date,
        "until-pub-date": validators.is_date,
        "until-update-date": validators.is_date,
        "update-type": None,
        "updates": None,
    }

    # Facets accepted by facet(); the value is the per-facet maximum count
    # enforced by the API (None means no specific cap).
    FACET_VALUES: typing.ClassVar[dict] = {
        "archive": None,
        "affiliation": None,
        "assertion": None,
        "assertion-group": None,
        "category-name": None,
        "container-title": 1000,
        "license": None,
        "funder-doi": None,
        "funder-name": None,
        "issn": 1000,
        "orcid": 1000,
        "published": None,
        "publisher-name": None,
        "relation-type": None,
        "source": None,
        "type-name": None,
        "update-type": None,
    }
564 |
565 | def order(self, order="asc"):
566 | """
567 | This method retrieve an iterable object that implements the method
568 | __iter__. The arguments given will compose the parameters in the
569 | request url.
570 |
571 | This method can be used compounded with query, filter,
572 | sort and facet methods.
573 |
574 | kwargs: valid SORT_VALUES arguments.
575 |
576 | return: iterable object of Works metadata
577 |
578 | Example 1:
579 | >>> from crossref.restful import Works
580 | >>> works.query('zika').sort('deposited').order('asc').url
581 | 'https://api.crossref.org/works?sort=deposited&query=zika&order=asc'
582 | >>> query = works.query('zika').sort('deposited').order('asc')
583 | >>> for item in query:
584 | ... print(item['title'], item['deposited']['date-time'])
585 | ...
586 | ['A Facile Preparation of ... an-1-one'] 2007-02-13T20:56:13Z
587 | ['Contributions to the F ... Vermont, III'] 2007-02-13T20:56:13Z
588 | ['Pilularia americana A. Braun in Klamath County, Oregon'] 2007-02-13T20:56:13Z
589 | ...
590 |
591 | Example 2:
592 | >>> from crossref.restful import Works
593 | >>> works.query('zika').sort('deposited').order('desc').url
594 | 'https://api.crossref.org/works?sort=deposited&query=zika&order=desc'
595 | >>> query = works.query('zika').sort('deposited').order('desc')
596 | >>> for item in query:
597 | ... print(item['title'], item['deposited']['date-time'])
598 | ...
599 | ["Planning for the unexpected: ... , Zika virus, what's next?"] 2017-05-29T12:55:53Z
600 | ['Sensitivity of RT-PCR method ... or competence studies'] 2017-05-29T12:53:54Z
601 | ['Re-evaluation of routine den ... a of Zika virus emergence'] 2017-05-29T10:46:11Z
602 | ...
603 | """
604 |
605 | context = str(self.context)
606 | request_url = build_url_endpoint(self.ENDPOINT, context)
607 | request_params = dict(self.request_params)
608 |
609 | if order not in self.ORDER_VALUES:
610 | msg = "Sort order specified as {} but must be one of: {}".format(str(order), ", ".join(
611 | self.ORDER_VALUES))
612 | raise UrlSyntaxError(
613 | msg,
614 | )
615 |
616 | request_params["order"] = order
617 |
618 | return self.__class__(
619 | request_url=request_url,
620 | request_params=request_params,
621 | context=context,
622 | etiquette=self.etiquette,
623 | timeout=self.timeout,
624 | )
625 |
626 | def select(self, *args):
627 | """
628 | This method retrieve an iterable object that implements the method
629 | __iter__. The arguments given will compose the parameters in the
630 | request url.
631 |
632 | This method can be used compounded with query, filter,
633 | sort and facet methods.
634 |
635 | args: valid FIELDS_SELECT arguments.
636 |
637 | return: iterable object of Works metadata
638 |
639 | Example 1:
640 | >>> from crossref.restful import Works
641 | >>> works = Works()
642 | >>> for i in works.filter(has_funder='true', has_license='true') \
643 | .sample(5).select('DOI, prefix'):
644 | ... print(i)
645 | ...
646 | {'DOI': '10.1016/j.jdiacomp.2016.06.005', 'prefix': '10.1016'}
647 | {'DOI': '10.1016/j.mssp.2015.07.076', 'prefix': '10.1016'}
648 | {'DOI': '10.1002/slct.201700168', 'prefix': '10.1002'}
649 | {'DOI': '10.1016/j.actbio.2017.01.034', 'prefix': '10.1016'}
650 | {'DOI': '10.1016/j.optcom.2013.11.013', 'prefix': '10.1016'}
651 | ...
652 | Example 2:
653 | >>> from crossref.restful import Works
654 | >>> works = Works()
655 |
656 | >>> for i in works.filter(has_funder='true', has_license='true') \
657 | .sample(5).select('DOI').select('prefix'):
658 | >>> print(i)
659 | ...
660 | {'DOI': '10.1016/j.sajb.2016.03.010', 'prefix': '10.1016'}
661 | {'DOI': '10.1016/j.jneumeth.2009.08.017', 'prefix': '10.1016'}
662 | {'DOI': '10.1016/j.tetlet.2016.05.058', 'prefix': '10.1016'}
663 | {'DOI': '10.1007/s00170-017-0689-z', 'prefix': '10.1007'}
664 | {'DOI': '10.1016/j.dsr.2016.03.004', 'prefix': '10.1016'}
665 | ...
666 | Example: 3:
667 | >>> from crossref.restful import Works
668 | >>> works = Works()
669 | >>>: for i in works.filter(has_funder='true', has_license='true') \
670 | .sample(5).select(['DOI', 'prefix']):
671 | >>> print(i)
672 | ...
673 | {'DOI': '10.1111/zoj.12146', 'prefix': '10.1093'}
674 | {'DOI': '10.1016/j.bios.2014.04.018', 'prefix': '10.1016}
675 | {'DOI': '10.1016/j.cej.2016.10.011', 'prefix': '10.1016'}
676 | {'DOI': '10.1016/j.dci.2017.08.001', 'prefix': '10.1016'}
677 | {'DOI': '10.1016/j.icheatmasstransfer.2016.09.012', 'prefix': '10.1016'}
678 | ...
679 | Example: 4:
680 | >>> from crossref.restful import Works
681 | >>> works = Works()
682 | >>>: for i in works.filter(has_funder='true', has_license='true') \
683 | .sample(5).select('DOI', 'prefix'):
684 | >>> print(i)
685 | ...
686 | {'DOI': '10.1111/zoj.12146', 'prefix': '10.1093'}
687 | {'DOI': '10.1016/j.bios.2014.04.018', 'prefix': '10.1016'}
688 | {'DOI': '10.1016/j.cej.2016.10.011', 'prefix': '10.1016'}
689 | {'DOI': '10.1016/j.dci.2017.08.001', 'prefix': '10.1016'}
690 | {'DOI': '10.1016/j.icheatmasstransfer.2016.09.012', 'prefix': '10.1016'}
691 | ...
692 | """
693 |
694 | context = str(self.context)
695 | request_url = build_url_endpoint(self.ENDPOINT, context)
696 | request_params = dict(self.request_params)
697 |
698 | select_args = []
699 |
700 | invalid_select_args = []
701 | for item in args:
702 | if isinstance(item, list):
703 | select_args += [i.strip() for i in item]
704 |
705 | if isinstance(item, str):
706 | select_args += [i.strip() for i in item.split(",")]
707 |
708 | invalid_select_args = set(select_args) - set(self.FIELDS_SELECT)
709 |
710 | if len(invalid_select_args) != 0:
711 | msg = "Select field's specified as ({}) but must be one of: {}".format(
712 | ", ".join(invalid_select_args), ", ".join(self.FIELDS_SELECT))
713 | raise UrlSyntaxError(
714 | msg,
715 | )
716 |
717 | request_params["select"] = ",".join(
718 | sorted(
719 | [i for i in set(request_params.get("select", "").split(",") + select_args) if i]),
720 | )
721 |
722 | return self.__class__(
723 | request_url=request_url,
724 | request_params=request_params,
725 | context=context,
726 | etiquette=self.etiquette,
727 | timeout=self.timeout,
728 | )
729 |
730 | def sort(self, sort="score"):
731 | """
732 | This method retrieve an iterable object that implements the method
733 | __iter__. The arguments given will compose the parameters in the
734 | request url.
735 |
736 | This method can be used compounded with query, filter,
737 | order and facet methods.
738 |
739 | kwargs: valid SORT_VALUES arguments.
740 |
741 | return: iterable object of Works metadata
742 |
743 | Example 1:
744 | >>> from crossref.restful import Works
745 | >>> works = Works()
746 | >>> query = works.sort('deposited')
747 | >>> for item in query:
748 | ... print(item['title'])
749 | ...
750 | ['Integralidade e transdisciplinaridade em ... multiprofissionais na saúde coletiva']
751 | ['Aprendizagem em grupo operativo de diabetes: uma abordagem etnográfica']
752 | ['A rotatividade de enfermeiros e médicos: ... da Estratégia de Saúde da Família']
753 | ...
754 |
755 | Example 2:
756 | >>> from crossref.restful import Works
757 | >>> works = Works()
758 | >>> query = works.sort('relevance')
759 | >>> for item in query:
760 | ... print(item['title'])
761 | ...
762 | ['Proceedings of the American Physical Society']
763 | ['Annual Meeting of the Research Society on Alcoholism']
764 | ['Local steroid injections: ... hip and on the letter by Swezey']
765 | ['Intraventricular neurocytoma']
766 | ['Mammography accreditation']
767 | ['Temporal lobe necrosis in nasopharyngeal carcinoma: Pictorial essay']
768 | ...
769 | """
770 | context = str(self.context)
771 | request_url = build_url_endpoint(self.ENDPOINT, context)
772 | request_params = dict(self.request_params)
773 |
774 | if sort not in self.SORT_VALUES:
775 | msg = "Sort field specified as {} but must be one of: {}".format(str(sort), ", ".join(
776 | self.SORT_VALUES))
777 | raise UrlSyntaxError(
778 | msg,
779 | )
780 |
781 | request_params["sort"] = sort
782 |
783 | return self.__class__(
784 | request_url=request_url,
785 | request_params=request_params,
786 | context=context,
787 | etiquette=self.etiquette,
788 | timeout=self.timeout,
789 | )
790 |
791 | def filter(self, **kwargs): # noqa: A003
792 | """
793 | This method retrieve an iterable object that implements the method
794 | __iter__. The arguments given will compose the parameters in the
795 | request url.
796 |
797 | This method can be used compounded and recursively with query, filter,
798 | order, sort and facet methods.
799 |
800 | kwargs: valid FILTER_VALIDATOR arguments. Replace `.` with `__` and
801 | `-` with `_` when using parameters.
802 |
803 | return: iterable object of Works metadata
804 |
805 | Example:
806 | >>> from crossref.restful import Works
807 | >>> works = Works()
808 | >>> query = works.filter(has_funder='true', has_license='true')
809 | >>> for item in query:
810 | ... print(item['title'])
811 | ...
812 | ['Design of smiling-face-shaped band-notched UWB antenna']
813 | ['Phase I clinical and pharmacokinetic ... tients with advanced solid tumors']
814 | ...
815 | """
816 | context = str(self.context)
817 | request_url = build_url_endpoint(self.ENDPOINT, context)
818 | request_params = dict(self.request_params)
819 |
820 | for fltr, value in kwargs.items():
821 | decoded_fltr = fltr.replace("__", ".").replace("_", "-")
822 | if decoded_fltr not in self.FILTER_VALIDATOR.keys():
823 | msg = (
824 | f"Filter {decoded_fltr!s} specified but there is no such filter for"
825 | f" this route. Valid filters for this route"
826 | f" are: {', '.join(self.FILTER_VALIDATOR.keys())}"
827 | )
828 | raise UrlSyntaxError(
829 | msg,
830 | )
831 |
832 | if self.FILTER_VALIDATOR[decoded_fltr] is not None:
833 | self.FILTER_VALIDATOR[decoded_fltr](str(value))
834 |
835 | if "filter" not in request_params:
836 | request_params["filter"] = decoded_fltr + ":" + str(value)
837 | else:
838 | request_params["filter"] += "," + decoded_fltr + ":" + str(value)
839 |
840 | return self.__class__(
841 | request_url=request_url,
842 | request_params=request_params,
843 | context=context,
844 | etiquette=self.etiquette,
845 | timeout=self.timeout,
846 | )
847 |
848 | def facet(self, facet_name, facet_count=100):
849 | context = str(self.context)
850 | request_url = build_url_endpoint(self.ENDPOINT, context)
851 | request_params = dict(self.request_params)
852 | request_params["rows"] = 0
853 |
854 | if facet_name not in self.FACET_VALUES.keys():
855 | msg = (
856 | f"Facet {facet_name} specified but there is no such facet for this route."
857 | f" Valid facets for this route are: *, affiliation, funder-name, funder-doi,"
858 | f" publisher-name, orcid, container-title, assertion, archive, update-type,"
859 | f" issn, published, source, type-name, license, category-name, relation-type,"
860 | f" assertion-group"
861 | )
862 | raise UrlSyntaxError((
863 | msg
864 | ),
865 | ", ".join(self.FACET_VALUES.keys()),
866 | )
867 |
868 | facet_count = (
869 | self.FACET_VALUES[facet_name]
870 | if self.FACET_VALUES[facet_name] is not None
871 | and self.FACET_VALUES[facet_name] <= facet_count
872 | else facet_count
873 | )
874 |
875 | request_params["facet"] = f"{facet_name}:{facet_count}"
876 | result = self.do_http_request(
877 | "get",
878 | request_url,
879 | data=request_params,
880 | custom_header=self.custom_header,
881 | timeout=self.timeout,
882 | ).json()
883 |
884 | return result["message"]["facets"]
885 |
886 | def query(self, *args, **kwargs):
887 | """
888 | This method retrieve an iterable object that implements the method
889 | __iter__. The arguments given will compose the parameters in the
890 | request url.
891 |
892 | This method can be used compounded and recursively with query, filter,
893 | order, sort and facet methods.
894 |
895 | args: strings (String)
896 |
897 | kwargs: valid FIELDS_QUERY arguments.
898 |
899 | return: iterable object of Works metadata
900 |
901 | Example:
902 | >>> from crossref.restful import Works
903 | >>> works = Works()
904 | >>> query = works.query('Zika Virus')
905 | >>> query.url
906 | 'https://api.crossref.org/works?query=Zika+Virus'
907 | >>> for item in query:
908 | ... print(item['title'])
909 | ...
910 | ['Zika Virus']
911 | ['Zika virus disease']
912 | ['Zika Virus: Laboratory Diagnosis']
913 | ['Spread of Zika virus disease']
914 | ['Carditis in Zika Virus Infection']
915 | ['Understanding Zika virus']
916 | ['Zika Virus: History and Infectology']
917 | ...
918 | """
919 | context = str(self.context)
920 | request_url = build_url_endpoint(self.ENDPOINT, context)
921 | request_params = dict(self.request_params)
922 |
923 | if args:
924 | request_params["query"] = " ".join([str(i) for i in args])
925 |
926 | for field, value in kwargs.items():
927 | if field not in self.FIELDS_QUERY:
928 | msg = (
929 | f"Field query {field!s} specified but there is no such field query for"
930 | " this route."
931 | f" Valid field queries for this route are: {', '.join(self.FIELDS_QUERY)}"
932 | )
933 | raise UrlSyntaxError(
934 | msg,
935 | )
936 | request_params["query.%s" % field.replace("_", "-")] = value
937 |
938 | return self.__class__(request_url=request_url,
939 | request_params=request_params,
940 | context=context,
941 | etiquette=self.etiquette,
942 | timeout=self.timeout)
943 |
944 | def sample(self, sample_size=20):
945 | """
946 | This method retrieve an iterable object that implements the method
947 | __iter__. The arguments given will compose the parameters in the
948 | request url.
949 |
950 | kwargs: sample_size (Integer) between 0 and 100.
951 |
952 | return: iterable object of Works metadata
953 |
954 | Example:
955 | >>> from crossref.restful import Works
956 | >>> works = Works()
957 | >>> works.sample(2).url
958 | 'https://api.crossref.org/works?sample=2'
959 | >>> [i['title'] for i in works.sample(2)]
960 | [['A study on the hemolytic properties ofPrevotella nigrescens'],
961 | ['The geometry and the radial ... of carbon nanotubes: beyond the ideal behaviour']]
962 | """
963 | context = str(self.context)
964 | request_url = build_url_endpoint(self.ENDPOINT, context)
965 | request_params = dict(self.request_params)
966 |
967 | try:
968 | if sample_size > 100:
969 | msg = (
970 | f"Integer specified as {sample_size!s} but"
971 | " must be a positive integer less than or equal to 100."
972 | )
973 | raise UrlSyntaxError(msg) # noqa: TRY301
974 | except TypeError as exc:
975 | msg = (
976 | f"Integer specified as {sample_size!s} but"
977 | " must be a positive integer less than or equal to 100."
978 | )
979 | raise UrlSyntaxError(msg) from exc
980 |
981 | request_params["sample"] = sample_size
982 |
983 | return self.__class__(
984 | request_url=request_url,
985 | request_params=request_params,
986 | context=context,
987 | etiquette=self.etiquette,
988 | timeout=self.timeout,
989 | )
990 |
991 | def doi(self, doi, only_message=True):
992 | """
993 | This method retrieve the DOI metadata related to a given DOI
994 | number.
995 |
996 | args: Crossref DOI id (String)
997 |
998 | return: JSON
999 |
1000 | Example:
1001 | >>> from crossref.restful import Works
1002 | >>> works = Works()
1003 | >>> works.doi('10.1590/S0004-28032013005000001')
1004 | {'is-referenced-by-count': 6, 'reference-count': 216,
1005 | 'DOI': '10.1590/s0004-28032013005000001',
1006 | 'subtitle': [], 'issued': {'date-parts': [[2013, 4, 19]]}, 'source': 'Crossref',
1007 | 'short-container-title': ['Arq. Gastroenterol.'], 'references-count': 216,
1008 | 'short-title': [],
1009 | 'deposited': {'timestamp': 1495911725000, 'date-time': '2017-05-27T19:02:05Z',
1010 | 'date-parts': [[2017, 5, 27]]}, 'ISSN': ['0004-2803'], 'type': 'journal-article',
1011 | 'URL': 'http://dx.doi.org/10.1590/s0004-28032013005000001',
1012 | 'indexed': {'timestamp': 1496034748592, 'date-time': '2017-05-29T05:12:28Z',
1013 | 'date-parts': [[2017, 5, 29]]}, 'content-domain': {'crossmark-restriction': False,
1014 | 'domain': []},
1015 | 'created': {'timestamp': 1374613284000, 'date-time': '2013-07-23T21:01:24Z',
1016 | 'date-parts': [[2013, 7, 23]]}, 'issn-type': [{'value': '0004-2803',
1017 | 'type': 'electronic'}],
1018 | 'page': '81-96', 'volume': '50', 'original-title': [], 'subject': ['Gastroenterology'],
1019 | 'relation': {}, 'container-title': ['Arquivos de Gastroenterologia'], 'member': '530',
1020 | 'prefix': '10.1590', 'published-print': {'date-parts': [[2013, 4, 19]]},
1021 | 'title': ['3rd BRAZILIAN CONSENSUS ON Helicobacter pylori'],
1022 | 'publisher': 'FapUNIFESP (SciELO)', 'alternative-id': ['S0004-28032013000200081'],
1023 | 'abstract': 'Significant abstract data..... .',
1024 | 'author': [{'affiliation': [{'name': 'Universidade Federal de Minas Gerais, BRAZIL'}],
1025 | 'family': 'Coelho', 'given': 'Luiz Gonzaga'}, {'affiliation': [
1026 | {'name': 'Universidade Federal do Rio Grande do Sul, Brazil'}], 'family': 'Maguinilk',
1027 | 'given': 'Ismael'}, {'affiliation': [
1028 | {'name': 'Presidente de Honra do Núcleo ... para Estudo do Helicobacter, Brazil'}],
1029 | 'family': 'Zaterka', 'given': 'Schlioma'}, {'affiliation': [
1030 | {'name': 'Universidade Federal do Piauí, Brasil'}], 'family': 'Parente',
1031 | 'given': 'José Miguel'},
1032 | {'affiliation': [{'name': 'Universidade Federal de Minas Gerais, BRAZIL'}],
1033 | 'family': 'Passos', 'given': 'Maria do Carmo Friche'}, {'affiliation': [
1034 | {'name': 'Universidade de São Paulo, Brasil'}], 'family': 'Moraes-Filho',
1035 | 'given': 'Joaquim Prado P.'}], 'score': 1.0, 'issue': '2'}
1036 | """
1037 | request_url = build_url_endpoint("/".join([self.ENDPOINT, doi]))
1038 | request_params = {}
1039 | result = self.do_http_request(
1040 | "get",
1041 | request_url,
1042 | data=request_params,
1043 | custom_header=self.custom_header,
1044 | timeout=self.timeout,
1045 | )
1046 |
1047 | if result.status_code == 404:
1048 | return None
1049 | result = result.json()
1050 |
1051 | return result["message"] if only_message is True else result
1052 |
1053 | def agency(self, doi, only_message=True):
1054 | """
1055 | This method retrieve the DOI Agency metadata related to a given DOI
1056 | number.
1057 |
1058 | args: Crossref DOI id (String)
1059 |
1060 | return: JSON
1061 |
1062 | Example:
1063 | >>> from crossref.restful import Works
1064 | >>> works = Works()
1065 | >>> works.agency('10.1590/S0004-28032013005000001')
1066 | {'DOI': '10.1590/s0004-2...5000001', 'agency': {'label': 'CrossRef', 'id': 'crossref'}}
1067 | """
1068 | request_url = build_url_endpoint("/".join([self.ENDPOINT, doi, "agency"]))
1069 | request_params = {}
1070 |
1071 | result = self.do_http_request(
1072 | "get",
1073 | request_url,
1074 | data=request_params,
1075 | custom_header=self.custom_header,
1076 | timeout=self.timeout,
1077 | )
1078 |
1079 | if result.status_code == 404:
1080 | return None
1081 |
1082 | result = result.json()
1083 |
1084 | return result["message"] if only_message is True else result
1085 |
1086 | def doi_exists(self, doi):
1087 | """
1088 | This method retrieve a boolean according to the existence of a crossref
1089 | DOI number. It returns False if the API results a 404 status code.
1090 |
1091 | args: Crossref DOI id (String)
1092 |
1093 | return: Boolean
1094 |
1095 | Example 1:
1096 | >>> from crossref.restful import Works
1097 | >>> works = Works()
1098 | >>> works.doi_exists('10.1590/S0004-28032013005000001')
1099 | True
1100 |
1101 | Example 2:
1102 | >>> from crossref.restful import Works
1103 | >>> works = Works()
1104 | >>> works.doi_exists('10.1590/S0004-28032013005000001_invalid_doi')
1105 | False
1106 | """
1107 | request_url = build_url_endpoint("/".join([self.ENDPOINT, doi]))
1108 | request_params = {}
1109 |
1110 | result = self.do_http_request(
1111 | "get",
1112 | request_url,
1113 | data=request_params,
1114 | only_headers=True,
1115 | custom_header=self.custom_header,
1116 | timeout=self.timeout,
1117 | )
1118 |
1119 | if result.status_code == 404:
1120 | return False
1121 |
1122 | return True
1123 |
1124 |
class Funders(Endpoint):
    """Client for the Crossref ``/funders`` route."""

    CURSOR_AS_ITER_METHOD = False

    ENDPOINT = "funders"

    # Filters accepted by this route. A value of None means the filter
    # value only needs to be stringified, with no extra validation.
    FILTER_VALIDATOR: typing.ClassVar[dict] = {"location": None}

    def query(self, *args):
        """
        This method retrieve an iterable object that implements the method
        __iter__. The arguments given will compose the parameters in the
        request url.

        args: free-text query terms (String)

        return: iterable object of Funders metadata

        Example:
            >>> from crossref.restful import Funders
            >>> funders = Funders()
            >>> funders.query('ABBEY').url
            'https://api.crossref.org/funders?query=ABBEY'
            >>> next(iter(funders.query('ABBEY')))
            {'alt-names': ['Abbey'], 'location': 'United Kingdom', 'replaced-by': [],
            'replaces': [], 'name': 'ABBEY AWARDS', 'id': '501100000314',
            'tokens': ['abbey', 'awards', 'abbey'],
            'uri': 'http://dx.doi.org/10.13039/501100000314'}
        """
        # Capture and propagate the current context so chained calls
        # (e.g. funders.filter(...).query(...)) keep operating on the same
        # route, mirroring Members.query and Journals.query.
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT)
        request_params = dict(self.request_params)

        if args:
            request_params["query"] = " ".join([str(i) for i in args])

        return self.__class__(
            request_url=request_url,
            request_params=request_params,
            context=context,
            etiquette=self.etiquette,
            timeout=self.timeout,
        )

    def filter(self, **kwargs):  # noqa: A003
        """
        This method retrieve an iterable object that implements the method
        __iter__. The arguments given will compose the parameters in the
        request url.

        This method can be used compounded and recursively with query, filter,
        order, sort and facet methods.

        kwargs: valid FILTER_VALIDATOR arguments.

        return: iterable object of Funders metadata

        raises UrlSyntaxError: when a keyword does not map to a valid
        filter for this route.

        Example:
            >>> from crossref.restful import Funders
            >>> funders = Funders()
            >>> query = funders.filter(location='Japan')
            >>> for item in query:
            ...     print(item['name'], item['location'])
            ...
            (u'Central Research Institute, Fukuoka University', u'Japan')
            (u'Tohoku University', u'Japan')
            ...
        """
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT, context)
        request_params = dict(self.request_params)

        for fltr, value in kwargs.items():
            # Keyword arguments can't contain "." or "-", so the
            # convention is "__" -> "." and "_" -> "-".
            decoded_fltr = fltr.replace("__", ".").replace("_", "-")
            if decoded_fltr not in self.FILTER_VALIDATOR.keys():
                msg = (
                    f"Filter {decoded_fltr!s} specified but there is no such filter for this route."
                    f" Valid filters for this route are: {', '.join(self.FILTER_VALIDATOR.keys())}"
                )
                raise UrlSyntaxError(msg)

            if self.FILTER_VALIDATOR[decoded_fltr] is not None:
                self.FILTER_VALIDATOR[decoded_fltr](str(value))

            # Multiple filters are comma-separated in a single
            # "filter" query parameter.
            if "filter" not in request_params:
                request_params["filter"] = decoded_fltr + ":" + str(value)
            else:
                request_params["filter"] += "," + decoded_fltr + ":" + str(value)

        return self.__class__(
            request_url=request_url,
            request_params=request_params,
            context=context,
            etiquette=self.etiquette,
            timeout=self.timeout,
        )

    def funder(self, funder_id, only_message=True):
        """
        This method retrieve a crossref funder metadata related to the
        given funder_id.

        args: Funder ID (Integer)

        only_message: when True (default) return only the "message"
        portion of the API response, otherwise the full JSON payload.

        return: JSON dict, or None when the funder is not found (HTTP 404).

        Example:
            >>> from crossref.restful import Funders
            >>> funders = Funders()
            >>> funders.funder('501100000314')
            {'hierarchy': {'501100000314': {}}, 'alt-names': ['Abbey'],
            'work-count': 3, 'name': 'ABBEY AWARDS',
            'id': '501100000314', 'tokens': ['abbey', 'awards', 'abbey']}
        """
        request_url = build_url_endpoint("/".join([self.ENDPOINT, str(funder_id)]))
        request_params = {}

        result = self.do_http_request(
            "get",
            request_url,
            data=request_params,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if result.status_code == 404:
            return None

        result = result.json()

        return result["message"] if only_message is True else result

    def funder_exists(self, funder_id):
        """
        This method retrieve a boolean according to the existence of a crossref
        funder. It returns False if the API results a 404 status code.

        args: Crossref Funder id (Integer)

        return: Boolean

        Example 1:
            >>> from crossref.restful import Funders
            >>> funders = Funders()
            >>> funders.funder_exists('501100000314')
            True

        Example 2:
            >>> from crossref.restful import Funders
            >>> funders = Funders()
            >>> funders.funder_exists('999999999999')
            False
        """
        request_url = build_url_endpoint("/".join([self.ENDPOINT, str(funder_id)]))
        request_params = {}

        # only_headers: we only need the status code, not the body.
        result = self.do_http_request(
            "get",
            request_url,
            data=request_params,
            only_headers=True,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if result.status_code == 404:
            return False

        return True

    def works(self, funder_id):
        """
        This method retrieve a iterable of Works of the given funder.

        args: Crossref Funder ID (Integer)

        return: Works()
        """
        context = f"{self.ENDPOINT}/{funder_id!s}"
        return Works(context=context)
1306 |
1307 |
class Members(Endpoint):
    """Client for the Crossref ``/members`` route."""

    CURSOR_AS_ITER_METHOD = False

    ENDPOINT = "members"

    FILTER_VALIDATOR: typing.ClassVar[dict] = {
        "prefix": None,
        "has-public-references": validators.is_bool,
        "backfile-doi-count": validators.is_integer,
        "current-doi-count": validators.is_integer,
    }

    def query(self, *args):
        """
        Return a new Members instance whose request carries a free-text
        query composed from *args*. The result is iterable and yields
        member metadata dicts.

        args: query terms (String)

        return: iterable object of Members metadata

        Example:
            >>> from crossref.restful import Members
            >>> members = Members()
            >>> next(iter(members.query('Korean Association')))
            {'prefix': [{'value': '10.20433', 'public-references': False,
            'name': 'The New Korean Philosophical Association'}], 'id': 8334,
            'primary-name': 'The New Korean Philosophical Association', ...}
        """
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT)
        request_params = dict(self.request_params)

        if args:
            request_params["query"] = " ".join(str(term) for term in args)

        return self.__class__(
            request_url=request_url,
            request_params=request_params,
            context=context,
            etiquette=self.etiquette,
            timeout=self.timeout,
        )

    def filter(self, **kwargs):  # noqa: A003
        """
        Return a new Members instance with the given filters appended to
        the request parameters. May be chained with query, filter, order,
        sort and facet.

        kwargs: valid FILTER_VALIDATOR arguments.

        return: iterable object of Members metadata

        raises UrlSyntaxError: when a keyword does not map to a valid
        filter for this route.

        Example:
            >>> from crossref.restful import Members
            >>> members = Members()
            >>> query = members.filter(has_public_references='true')
            >>> for item in query:
            ...     print(item['prefix'])
        """
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT, context)
        request_params = dict(self.request_params)

        for name, value in kwargs.items():
            # Keyword names can't carry "." or "-": "__" -> ".", "_" -> "-".
            decoded_fltr = name.replace("__", ".").replace("_", "-")
            if decoded_fltr not in self.FILTER_VALIDATOR:
                msg = (
                    f"Filter {decoded_fltr!s} specified but there is no such filter for this route."
                    f" Valid filters for this route are: {', '.join(self.FILTER_VALIDATOR.keys())}"
                )
                raise UrlSyntaxError(msg)

            validator = self.FILTER_VALIDATOR[decoded_fltr]
            if validator is not None:
                validator(str(value))

            # All filters share one comma-separated "filter" parameter.
            entry = decoded_fltr + ":" + str(value)
            if "filter" in request_params:
                request_params["filter"] += "," + entry
            else:
                request_params["filter"] = entry

        return self.__class__(
            request_url=request_url,
            request_params=request_params,
            context=context,
            etiquette=self.etiquette,
            timeout=self.timeout,
        )

    def member(self, member_id, only_message=True):
        """
        Retrieve the Crossref metadata of a single member.

        args: Member ID (Integer)

        only_message: when True (default) return only the "message"
        portion of the API response, otherwise the full JSON payload.

        return: JSON dict, or None when the member is not found (HTTP 404).

        Example:
            >>> from crossref.restful import Members
            >>> members = Members()
            >>> members.member(101)
            {'prefixes': ['10.1024', '10.1027', '10.1026'], 'id': 101,
            'primary-name': 'Hogrefe Publishing Group', ...}
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(member_id)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if response.status_code == 404:
            return None

        payload = response.json()

        return payload["message"] if only_message is True else payload

    def member_exists(self, member_id):
        """
        Check whether a member id exists in the Crossref database.

        Issues a headers-only request and treats HTTP 404 as absence.

        args: Member ID (Integer)

        return: Boolean

        Example 1:
            >>> from crossref.restful import Members
            >>> members = Members()
            >>> members.member_exists(101)
            True

        Example 2:
            >>> from crossref.restful import Members
            >>> members = Members()
            >>> members.member_exists(88888)
            False
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(member_id)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            only_headers=True,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        return response.status_code != 404

    def works(self, member_id):
        """
        Return a Works instance scoped to the given member.

        args: Member ID (Integer)

        return: Works()
        """
        return Works(context=f"{self.ENDPOINT}/{member_id!s}")
1556 |
1557 |
class Types(Endpoint):
    """Client for the Crossref ``/types`` route."""

    CURSOR_AS_ITER_METHOD = False

    ENDPOINT = "types"

    def type(self, type_id, only_message=True):  # noqa: A003
        """
        Retrieve the metadata of the Crossref document type *type_id*.

        args: Crossref allowed document Types (String)

        only_message: when True (default) return only the "message"
        portion of the API response, otherwise the full JSON payload.

        return: JSON dict, or None when the type is unknown (HTTP 404).

        Example:
            >>> types.type('journal-article')
            {'label': 'Journal Article', 'id': 'journal-article'}
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(type_id)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if response.status_code == 404:
            return None

        payload = response.json()

        return payload["message"] if only_message is True else payload

    def all(self):  # noqa: A003
        """
        Yield every document type available in the Crossref API.

        return: iterator of crossref document types

        Example:
            >>> from crossref.restful import Types
            >>> types = Types()
            >>> [i for i in types.all()]
            [{'label': 'Book Section', 'id': 'book-section'},
            {'label': 'Monograph', 'id': 'monograph'},
            ...]
        """
        endpoint_url = build_url_endpoint(self.ENDPOINT, self.context)

        response = self.do_http_request(
            "get",
            endpoint_url,
            data=dict(self.request_params),
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        # On 404 the generator simply yields nothing.
        if response.status_code == 404:
            return

        yield from response.json()["message"]["items"]

    def type_exists(self, type_id):
        """
        Check whether a document type exists in the Crossref database.

        Issues a headers-only request and treats HTTP 404 as absence.

        args: Crossref allowed document Type (String)

        return: Boolean

        Example 1:
            >>> from crossref.restful import Types
            >>> types = Types()
            >>> types.type_exists('journal-article')
            True

        Example 2:
            >>> from crossref.restful import Types
            >>> types = Types()
            >>> types.type_exists('unavailable type')
            False
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(type_id)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            only_headers=True,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        return response.status_code != 404

    def works(self, type_id):
        """
        Return a Works instance scoped to the given document type.

        args: Crossref allowed document Types (String)

        return: Works()
        """
        return Works(context=f"{self.ENDPOINT}/{type_id!s}")
1677 |
1678 |
class Prefixes(Endpoint):
    """Client for the Crossref ``/prefixes`` route."""

    CURSOR_AS_ITER_METHOD = False

    ENDPOINT = "prefixes"

    def prefix(self, prefix_id, only_message=True):
        """
        Retrieve the metadata of a DOI prefix.

        args: Crossref Prefix (String)

        only_message: when True (default) return only the "message"
        portion of the API response, otherwise the full JSON payload.

        return: JSON dict, or None when the prefix is unknown (HTTP 404).

        Example:
            >>> from crossref.restful import Prefixes
            >>> prefixes = Prefixes()
            >>> prefixes.prefix('10.1590')
            {'name': 'FapUNIFESP (SciELO)', 'member': 'http://id.crossref.org/member/530',
            'prefix': 'http://id.crossref.org/prefix/10.1590'}
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(prefix_id)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if response.status_code == 404:
            return None

        payload = response.json()

        return payload["message"] if only_message is True else payload

    def works(self, prefix_id):
        """
        Return a Works instance scoped to the given prefix.

        args: Crossref Prefix (String)

        return: Works()
        """
        return Works(context=f"{self.ENDPOINT}/{prefix_id!s}")
1727 |
1728 |
class Journals(Endpoint):
    """Client for the Crossref ``/journals`` route."""

    CURSOR_AS_ITER_METHOD = False

    ENDPOINT = "journals"

    def query(self, *args):
        """
        Return a new Journals instance whose request carries a free-text
        query composed from *args*. The result is iterable and yields
        journal metadata dicts.

        args: query terms (String)

        return: iterable object of Journals metadata

        Example:
            >>> from crossref.restful import Journals
            >>> journals = Journals().query('Public Health', 'Health Science')
            >>> journals.url
            'https://api.crossref.org/journals?query=Public+Health+Health+Science'
            >>> next(iter(journals))
            {'publisher': 'ScopeMed International Medical Journal Managment and Indexing System',
            'ISSN': ['2320-4664', '2277-338X'],
            'title': 'International Journal of Medical Science and Public Health', ...}
        """
        context = str(self.context)
        request_url = build_url_endpoint(self.ENDPOINT)
        request_params = dict(self.request_params)

        if args:
            request_params["query"] = " ".join(str(term) for term in args)

        return self.__class__(
            request_url=request_url,
            request_params=request_params,
            context=context,
            etiquette=self.etiquette,
            timeout=self.timeout,
        )

    def journal(self, issn, only_message=True):
        """
        Retrieve the metadata of a journal identified by its ISSN.

        args: Journal ISSN (String)

        only_message: when True (default) return only the "message"
        portion of the API response, otherwise the full JSON payload.

        return: JSON dict, or None when the ISSN is unknown (HTTP 404).

        Example:
            >>> from crossref.restful import Journals
            >>> journals = Journals()
            >>> journals.journal('2277-338X')
            {'publisher': 'ScopeMed International Medical Journal Managment and Indexing System',
            'ISSN': ['2320-4664', '2277-338X'],
            'title': 'International Journal of Medical Science and Public Health', ...}
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(issn)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        if response.status_code == 404:
            return None

        payload = response.json()

        return payload["message"] if only_message is True else payload

    def journal_exists(self, issn):
        """
        Check whether a journal ISSN exists in the Crossref database.

        Issues a headers-only request and treats HTTP 404 as absence.

        args: Journal ISSN (String)

        return: Boolean

        Example 1:
            >>> from crossref.restful import Journals
            >>> journals = Journals()
            >>> journals.journal_exists('2277-338X')
            True

        Example 2:
            >>> from crossref.restful import Journals
            >>> journals = Journals()
            >>> journals.journal_exists('9999-AAAA')
            False
        """
        endpoint_url = build_url_endpoint("/".join([self.ENDPOINT, str(issn)]))

        response = self.do_http_request(
            "get",
            endpoint_url,
            data={},
            only_headers=True,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

        return response.status_code != 404

    def works(self, issn):
        """
        Return a Works instance scoped to the given journal.

        args: Journal ISSN (String)

        return: Works()
        """
        return Works(context=f"{self.ENDPOINT}/{issn!s}")
1855 |
1856 |
class Depositor:
    """
    Client for the Crossref deposit system (doi.crossref.org), used to
    register or update DOIs and to query the status of submissions.
    """

    def __init__(  # noqa: PLR0913
        self, prefix, api_user, api_key, etiquette=None, use_test_server=False, timeout=100,
    ):
        """
        prefix: DOI prefix the deposits belong to.

        api_user / api_key: Crossref deposit credentials.

        etiquette: optional Etiquette instance used to build the
        user-agent header; a default one is created when omitted.

        use_test_server: when True, requests go to test.crossref.org
        instead of the production server.

        timeout: per-request timeout, in seconds.
        """
        # Deposits are not subject to the public-API rate limits, so the
        # request helper is created with throttling disabled.
        self.do_http_request = HTTPRequest(throttle=False).do_http_request
        self.etiquette = etiquette or Etiquette()
        self.custom_header = {"user-agent": str(self.etiquette)}
        self.prefix = prefix
        self.api_user = api_user
        self.api_key = api_key
        self.use_test_server = use_test_server
        self.timeout = timeout

    def get_endpoint(self, verb):
        """Return the full servlet URL for *verb* on the configured server."""
        subdomain = "test" if self.use_test_server else "doi"
        return f"https://{subdomain}.crossref.org/servlet/{verb}"

    def register_doi(self, submission_id, request_xml):
        """
        This method registry a new DOI number in Crossref or update some DOI
        metadata.

        submission_id: Will be used as the submission file name. The file name
        could be used in future requests to retrieve the submission status.

        request_xml: The XML with the document metadata. It must be under
        compliance with the Crossref Submission Schema.

        return: the HTTP response of the deposit request.
        """
        endpoint = self.get_endpoint("deposit")

        # The submission id doubles as the deposit file name, so it can be
        # used later with request_doi_status_by_filename.
        files = {"mdFile": (f"{submission_id}.xml", request_xml)}

        params = {
            "operation": "doMDUpload",
            "login_id": self.api_user,
            "login_passwd": self.api_key,
        }

        return self.do_http_request(
            "post",
            endpoint,
            data=params,
            files=files,
            custom_header=self.custom_header,
            timeout=self.timeout,
        )

    def request_doi_status_by_filename(self, file_name, data_type="result"):
        """
        This method retrieve the DOI requests status.

        file_name: Used as unique ID to identify a deposit.

        data_type: [contents, result]
            contents - retrieve the XML submited by the publisher
            result - retrieve a JSON with the status of the submission

        return: the HTTP response of the status request.
        """
        endpoint = self.get_endpoint("submissionDownload")

        params = {
            "usr": self.api_user,
            "pwd": self.api_key,
            "file_name": file_name,
            "type": data_type,
        }

        return self.do_http_request(
            "get", endpoint, data=params, custom_header=self.custom_header, timeout=self.timeout,
        )

    def request_doi_status_by_batch_id(self, doi_batch_id, data_type="result"):
        """
        This method retrieve the DOI requests status.

        doi_batch_id: Used as unique ID to identify a deposit.

        data_type: [contents, result]
            contents - retrieve the XML submited by the publisher
            result - retrieve a XML with the status of the submission

        return: the HTTP response of the status request.
        """
        endpoint = self.get_endpoint("submissionDownload")

        params = {
            "usr": self.api_user,
            "pwd": self.api_key,
            "doi_batch_id": doi_batch_id,
            "type": data_type,
        }

        return self.do_http_request(
            "get", endpoint, data=params, custom_header=self.custom_header, timeout=self.timeout,
        )
1952 |
--------------------------------------------------------------------------------
/crossref/utils.py:
--------------------------------------------------------------------------------
1 |
truthy = frozenset(("t", "true", "y", "yes", "on", "1"))


def asbool(s):
    """Coerce ``s`` to a boolean.

    ``True`` and ``False`` pass through unchanged; ``None`` maps to
    ``False``. Anything else is stringified and considered ``True``
    exactly when its stripped, lowercased form is one of the
    :term:`truthy string` values ("t", "true", "y", "yes", "on", "1").
    """
    if isinstance(s, bool):
        return s
    if s is None:
        return False
    return str(s).strip().lower() in truthy
15 |
--------------------------------------------------------------------------------
/crossref/validators.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 |
def directory(value):
    """Validate ``value`` as a supported directory name.

    Returns True when the (stringified) value is an accepted
    directory (currently only "DOAJ"); raises ValueError otherwise.
    """
    # Must be a tuple: the original compared against the bare string
    # "DOAJ", turning the check into a substring test (e.g. "OA"
    # validated) and making ", ".join() emit "D, O, A, J".
    expected = ("DOAJ",)

    if str(value) in expected:
        return True

    msg = "Directory specified as {} but must be one of: {}".format(str(value), ", ".join(expected))
    raise ValueError(
        msg,
    )
14 |
15 |
def archive(value):
    """Validate ``value`` as a supported archive name.

    Returns True for one of "Portico", "CLOCKSS" or "DWT";
    raises ValueError for anything else.
    """
    allowed = ("Portico", "CLOCKSS", "DWT")
    name = str(value)

    if name not in allowed:
        msg = "Archive specified as {} but must be one of: {}".format(name, ", ".join(allowed))
        raise ValueError(
            msg,
        )

    return True
26 |
27 |
def document_type(value):
    """Validate ``value`` against the set of known Crossref work types.

    Returns True when the (stringified) value is a recognized type;
    raises ValueError listing the accepted types otherwise.
    """
    accepted = (
        "book-section",
        "monograph",
        "report",
        "book-track",
        "journal-article",
        "book-part",
        "other",
        "book",
        "journal-volume",
        "book-set",
        "reference-entry",
        "proceedings-article",
        "journal",
        "component",
        "book-chapter",
        "report-series",
        "proceedings",
        "standard",
        "reference-book",
        "posted-content",
        "journal-issue",
        "dissertation",
        "dataset",
        "book-series",
        "edited-book",
        "standard-series",
    )

    kind = str(value)
    if kind in accepted:
        return True

    msg = "Type specified as {} but must be one of: {}".format(kind, ", ".join(accepted))
    raise ValueError(
        msg,
    )
66 |
67 |
def is_bool(value):
    """Validate ``value`` as a boolean-like string.

    Accepts the lowercase literals "t", "true", "1", "f", "false"
    and "0" (the value is stringified first). Returns True when the
    value is acceptable; raises ValueError otherwise.
    """
    expected = ("t", "true", "1", "f", "false", "0")

    if str(value) in expected:
        return True

    # Fixed message: the original read "Boolean specified {} True but
    # must be one of", a garbled sentence inconsistent with the other
    # validators in this module.
    msg = "Boolean specified as {} but must be one of: {}".format(str(value), ", ".join(expected))
    raise ValueError(
        msg,
    )
79 |
80 |
def is_date(value):
    """Validate ``value`` as a date string.

    Accepts "YYYY", "YYYY-MM" or "YYYY-MM-DD". Returns True on the
    first format that parses; raises ValueError for anything else.
    """
    last_error = None
    for fmt in ("%Y", "%Y-%m", "%Y-%m-%d"):
        try:
            datetime.strptime(value, fmt)  # noqa: DTZ007
        except ValueError as exc:
            last_error = exc
        else:
            return True
    msg = f"Invalid date {value}."
    raise ValueError(msg) from last_error
94 |
95 |
def is_integer(value):
    """Validate ``value`` as a non-negative integer.

    The value may be anything ``int()`` accepts (e.g. ``10``, ``"10"``).
    Returns True for integers >= 0; raises ValueError otherwise.
    """
    try:
        value = int(value)
        if value >= 0:
            return True
    except (ValueError, TypeError):
        # int(None) or int([]) raise TypeError, which previously escaped
        # this validator instead of producing the uniform error below.
        pass

    raise ValueError("Integer specified as %s but must be a positive integer." % str(value))
106 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
2 |
3 | [[package]]
4 | name = "asttokens"
5 | version = "2.4.1"
6 | description = "Annotate AST trees with source code positions"
7 | optional = false
8 | python-versions = "*"
9 | files = [
10 | {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
11 | {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
12 | ]
13 |
14 | [package.dependencies]
15 | six = ">=1.12.0"
16 |
17 | [package.extras]
18 | astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
19 | test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
20 |
21 | [[package]]
22 | name = "certifi"
23 | version = "2024.8.30"
24 | description = "Python package for providing Mozilla's CA Bundle."
25 | optional = false
26 | python-versions = ">=3.6"
27 | files = [
28 | {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
29 | {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
30 | ]
31 |
32 | [[package]]
33 | name = "cfgv"
34 | version = "3.4.0"
35 | description = "Validate configuration and produce human readable error messages."
36 | optional = false
37 | python-versions = ">=3.8"
38 | files = [
39 | {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
40 | {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
41 | ]
42 |
43 | [[package]]
44 | name = "charset-normalizer"
45 | version = "3.3.2"
46 | description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
47 | optional = false
48 | python-versions = ">=3.7.0"
49 | files = [
50 | {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
51 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
52 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
53 | {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
54 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
55 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
56 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
57 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
58 | {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
59 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
60 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
61 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
62 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
63 | {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
64 | {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
65 | {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
66 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
67 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
68 | {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
69 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
70 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
71 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
72 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
73 | {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
74 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
75 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
76 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
77 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
78 | {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
79 | {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
80 | {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
81 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
82 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
83 | {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
84 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
85 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
86 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
87 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
88 | {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
89 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
90 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
91 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
92 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
93 | {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
94 | {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
95 | {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
96 | {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
97 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
98 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
99 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
100 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
101 | {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
102 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
103 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
104 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
105 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
106 | {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
107 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
108 | {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
109 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
110 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
111 | {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
112 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
113 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
114 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
115 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
116 | {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
117 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
118 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
119 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
120 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
121 | {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
122 | {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
123 | {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
124 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
125 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
126 | {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
127 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
128 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
129 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
130 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
131 | {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
132 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
133 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
134 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
135 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
136 | {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
137 | {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
138 | {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
139 | {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
140 | ]
141 |
142 | [[package]]
143 | name = "colorama"
144 | version = "0.4.6"
145 | description = "Cross-platform colored terminal text."
146 | optional = false
147 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
148 | files = [
149 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
150 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
151 | ]
152 |
153 | [[package]]
154 | name = "decorator"
155 | version = "5.1.1"
156 | description = "Decorators for Humans"
157 | optional = false
158 | python-versions = ">=3.5"
159 | files = [
160 | {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
161 | {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
162 | ]
163 |
164 | [[package]]
165 | name = "distlib"
166 | version = "0.3.8"
167 | description = "Distribution utilities"
168 | optional = false
169 | python-versions = "*"
170 | files = [
171 | {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
172 | {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
173 | ]
174 |
175 | [[package]]
176 | name = "exceptiongroup"
177 | version = "1.2.2"
178 | description = "Backport of PEP 654 (exception groups)"
179 | optional = false
180 | python-versions = ">=3.7"
181 | files = [
182 | {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
183 | {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
184 | ]
185 |
186 | [package.extras]
187 | test = ["pytest (>=6)"]
188 |
189 | [[package]]
190 | name = "executing"
191 | version = "2.1.0"
192 | description = "Get the currently executing AST node of a frame, and other information"
193 | optional = false
194 | python-versions = ">=3.8"
195 | files = [
196 | {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"},
197 | {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"},
198 | ]
199 |
200 | [package.extras]
201 | tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
202 |
203 | [[package]]
204 | name = "filelock"
205 | version = "3.16.1"
206 | description = "A platform independent file lock."
207 | optional = false
208 | python-versions = ">=3.8"
209 | files = [
210 | {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
211 | {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
212 | ]
213 |
214 | [package.extras]
215 | docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
216 | testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
217 | typing = ["typing-extensions (>=4.12.2)"]
218 |
219 | [[package]]
220 | name = "identify"
221 | version = "2.6.1"
222 | description = "File identification library for Python"
223 | optional = false
224 | python-versions = ">=3.8"
225 | files = [
226 | {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
227 | {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
228 | ]
229 |
230 | [package.extras]
231 | license = ["ukkonen"]
232 |
233 | [[package]]
234 | name = "idna"
235 | version = "3.10"
236 | description = "Internationalized Domain Names in Applications (IDNA)"
237 | optional = false
238 | python-versions = ">=3.6"
239 | files = [
240 | {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
241 | {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
242 | ]
243 |
244 | [package.extras]
245 | all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
246 |
247 | [[package]]
248 | name = "iniconfig"
249 | version = "2.0.0"
250 | description = "brain-dead simple config-ini parsing"
251 | optional = false
252 | python-versions = ">=3.7"
253 | files = [
254 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
255 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
256 | ]
257 |
258 | [[package]]
259 | name = "ipython"
260 | version = "8.28.0"
261 | description = "IPython: Productive Interactive Computing"
262 | optional = false
263 | python-versions = ">=3.10"
264 | files = [
265 | {file = "ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35"},
266 | {file = "ipython-8.28.0.tar.gz", hash = "sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a"},
267 | ]
268 |
269 | [package.dependencies]
270 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
271 | decorator = "*"
272 | exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
273 | jedi = ">=0.16"
274 | matplotlib-inline = "*"
275 | pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""}
276 | prompt-toolkit = ">=3.0.41,<3.1.0"
277 | pygments = ">=2.4.0"
278 | stack-data = "*"
279 | traitlets = ">=5.13.0"
280 | typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
281 |
282 | [package.extras]
283 | all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
284 | black = ["black"]
285 | doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"]
286 | kernel = ["ipykernel"]
287 | matplotlib = ["matplotlib"]
288 | nbconvert = ["nbconvert"]
289 | nbformat = ["nbformat"]
290 | notebook = ["ipywidgets", "notebook"]
291 | parallel = ["ipyparallel"]
292 | qtconsole = ["qtconsole"]
293 | test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
294 | test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"]
295 |
296 | [[package]]
297 | name = "jedi"
298 | version = "0.19.1"
299 | description = "An autocompletion tool for Python that can be used for text editors."
300 | optional = false
301 | python-versions = ">=3.6"
302 | files = [
303 | {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
304 | {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
305 | ]
306 |
307 | [package.dependencies]
308 | parso = ">=0.8.3,<0.9.0"
309 |
310 | [package.extras]
311 | docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
312 | qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
313 | testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
314 |
315 | [[package]]
316 | name = "matplotlib-inline"
317 | version = "0.1.7"
318 | description = "Inline Matplotlib backend for Jupyter"
319 | optional = false
320 | python-versions = ">=3.8"
321 | files = [
322 | {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
323 | {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
324 | ]
325 |
326 | [package.dependencies]
327 | traitlets = "*"
328 |
329 | [[package]]
330 | name = "nodeenv"
331 | version = "1.9.1"
332 | description = "Node.js virtual environment builder"
333 | optional = false
334 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
335 | files = [
336 | {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
337 | {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
338 | ]
339 |
340 | [[package]]
341 | name = "packaging"
342 | version = "24.1"
343 | description = "Core utilities for Python packages"
344 | optional = false
345 | python-versions = ">=3.8"
346 | files = [
347 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
348 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
349 | ]
350 |
351 | [[package]]
352 | name = "parso"
353 | version = "0.8.4"
354 | description = "A Python Parser"
355 | optional = false
356 | python-versions = ">=3.6"
357 | files = [
358 | {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
359 | {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
360 | ]
361 |
362 | [package.extras]
363 | qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
364 | testing = ["docopt", "pytest"]
365 |
366 | [[package]]
367 | name = "pexpect"
368 | version = "4.9.0"
369 | description = "Pexpect allows easy control of interactive console applications."
370 | optional = false
371 | python-versions = "*"
372 | files = [
373 | {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
374 | {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
375 | ]
376 |
377 | [package.dependencies]
378 | ptyprocess = ">=0.5"
379 |
380 | [[package]]
381 | name = "platformdirs"
382 | version = "4.3.6"
383 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
384 | optional = false
385 | python-versions = ">=3.8"
386 | files = [
387 | {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
388 | {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
389 | ]
390 |
391 | [package.extras]
392 | docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
393 | test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
394 | type = ["mypy (>=1.11.2)"]
395 |
396 | [[package]]
397 | name = "pluggy"
398 | version = "1.5.0"
399 | description = "plugin and hook calling mechanisms for python"
400 | optional = false
401 | python-versions = ">=3.8"
402 | files = [
403 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
404 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
405 | ]
406 |
407 | [package.extras]
408 | dev = ["pre-commit", "tox"]
409 | testing = ["pytest", "pytest-benchmark"]
410 |
411 | [[package]]
412 | name = "pre-commit"
413 | version = "3.8.0"
414 | description = "A framework for managing and maintaining multi-language pre-commit hooks."
415 | optional = false
416 | python-versions = ">=3.9"
417 | files = [
418 | {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"},
419 | {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"},
420 | ]
421 |
422 | [package.dependencies]
423 | cfgv = ">=2.0.0"
424 | identify = ">=1.0.0"
425 | nodeenv = ">=0.11.1"
426 | pyyaml = ">=5.1"
427 | virtualenv = ">=20.10.0"
428 |
429 | [[package]]
430 | name = "prompt-toolkit"
431 | version = "3.0.48"
432 | description = "Library for building powerful interactive command lines in Python"
433 | optional = false
434 | python-versions = ">=3.7.0"
435 | files = [
436 | {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"},
437 | {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"},
438 | ]
439 |
440 | [package.dependencies]
441 | wcwidth = "*"
442 |
443 | [[package]]
444 | name = "ptyprocess"
445 | version = "0.7.0"
446 | description = "Run a subprocess in a pseudo terminal"
447 | optional = false
448 | python-versions = "*"
449 | files = [
450 | {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
451 | {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
452 | ]
453 |
454 | [[package]]
455 | name = "pure-eval"
456 | version = "0.2.3"
457 | description = "Safely evaluate AST nodes without side effects"
458 | optional = false
459 | python-versions = "*"
460 | files = [
461 | {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
462 | {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
463 | ]
464 |
465 | [package.extras]
466 | tests = ["pytest"]
467 |
468 | [[package]]
469 | name = "pygments"
470 | version = "2.18.0"
471 | description = "Pygments is a syntax highlighting package written in Python."
472 | optional = false
473 | python-versions = ">=3.8"
474 | files = [
475 | {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
476 | {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
477 | ]
478 |
479 | [package.extras]
480 | windows-terminal = ["colorama (>=0.4.6)"]
481 |
482 | [[package]]
483 | name = "pytest"
484 | version = "7.4.4"
485 | description = "pytest: simple powerful testing with Python"
486 | optional = false
487 | python-versions = ">=3.7"
488 | files = [
489 | {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"},
490 | {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"},
491 | ]
492 |
493 | [package.dependencies]
494 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
495 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
496 | iniconfig = "*"
497 | packaging = "*"
498 | pluggy = ">=0.12,<2.0"
499 | tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
500 |
501 | [package.extras]
502 | testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
503 |
504 | [[package]]
505 | name = "pyyaml"
506 | version = "6.0.2"
507 | description = "YAML parser and emitter for Python"
508 | optional = false
509 | python-versions = ">=3.8"
510 | files = [
511 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
512 | {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
513 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
514 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
515 | {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
516 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
517 | {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
518 | {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
519 | {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
520 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
521 | {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
522 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
523 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
524 | {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
525 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
526 | {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
527 | {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
528 | {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
529 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
530 | {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
531 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
532 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
533 | {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
534 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
535 | {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
536 | {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
537 | {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
538 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
539 | {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
540 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
541 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
542 | {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
543 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
544 | {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
545 | {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
546 | {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
547 | {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
548 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
549 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
550 | {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
551 | {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
552 | {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
553 | {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
554 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
555 | {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
556 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
557 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
558 | {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
559 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
560 | {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
561 | {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
562 | {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
563 | {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
564 | ]
565 |
566 | [[package]]
567 | name = "requests"
568 | version = "2.32.3"
569 | description = "Python HTTP for Humans."
570 | optional = false
571 | python-versions = ">=3.8"
572 | files = [
573 | {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
574 | {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
575 | ]
576 |
577 | [package.dependencies]
578 | certifi = ">=2017.4.17"
579 | charset-normalizer = ">=2,<4"
580 | idna = ">=2.5,<4"
581 | urllib3 = ">=1.21.1,<3"
582 |
583 | [package.extras]
584 | socks = ["PySocks (>=1.5.6,!=1.5.7)"]
585 | use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
586 |
587 | [[package]]
588 | name = "ruff"
589 | version = "0.0.277"
590 | description = "An extremely fast Python linter, written in Rust."
591 | optional = false
592 | python-versions = ">=3.7"
593 | files = [
594 | {file = "ruff-0.0.277-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:3250b24333ef419b7a232080d9724ccc4d2da1dbbe4ce85c4caa2290d83200f8"},
595 | {file = "ruff-0.0.277-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:3e60605e07482183ba1c1b7237eca827bd6cbd3535fe8a4ede28cbe2a323cb97"},
596 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7baa97c3d7186e5ed4d5d4f6834d759a27e56cf7d5874b98c507335f0ad5aadb"},
597 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:74e4b206cb24f2e98a615f87dbe0bde18105217cbcc8eb785bb05a644855ba50"},
598 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:479864a3ccd8a6a20a37a6e7577bdc2406868ee80b1e65605478ad3b8eb2ba0b"},
599 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:468bfb0a7567443cec3d03cf408d6f562b52f30c3c29df19927f1e0e13a40cd7"},
600 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f32ec416c24542ca2f9cc8c8b65b84560530d338aaf247a4a78e74b99cd476b4"},
601 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14a7b2f00f149c5a295f188a643ac25226ff8a4d08f7a62b1d4b0a1dc9f9b85c"},
602 | {file = "ruff-0.0.277-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9879f59f763cc5628aa01c31ad256a0f4dc61a29355c7315b83c2a5aac932b5"},
603 | {file = "ruff-0.0.277-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f612e0a14b3d145d90eb6ead990064e22f6f27281d847237560b4e10bf2251f3"},
604 | {file = "ruff-0.0.277-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:323b674c98078be9aaded5b8b51c0d9c424486566fb6ec18439b496ce79e5998"},
605 | {file = "ruff-0.0.277-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3a43fbe026ca1a2a8c45aa0d600a0116bec4dfa6f8bf0c3b871ecda51ef2b5dd"},
606 | {file = "ruff-0.0.277-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:734165ea8feb81b0d53e3bf523adc2413fdb76f1264cde99555161dd5a725522"},
607 | {file = "ruff-0.0.277-py3-none-win32.whl", hash = "sha256:88d0f2afb2e0c26ac1120e7061ddda2a566196ec4007bd66d558f13b374b9efc"},
608 | {file = "ruff-0.0.277-py3-none-win_amd64.whl", hash = "sha256:6fe81732f788894a00f6ade1fe69e996cc9e485b7c35b0f53fb00284397284b2"},
609 | {file = "ruff-0.0.277-py3-none-win_arm64.whl", hash = "sha256:2d4444c60f2e705c14cd802b55cd2b561d25bf4311702c463a002392d3116b22"},
610 | {file = "ruff-0.0.277.tar.gz", hash = "sha256:2dab13cdedbf3af6d4427c07f47143746b6b95d9e4a254ac369a0edb9280a0d2"},
611 | ]
612 |
613 | [[package]]
614 | name = "six"
615 | version = "1.16.0"
616 | description = "Python 2 and 3 compatibility utilities"
617 | optional = false
618 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
619 | files = [
620 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
621 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
622 | ]
623 |
624 | [[package]]
625 | name = "stack-data"
626 | version = "0.6.3"
627 | description = "Extract data from python stack frames and tracebacks for informative displays"
628 | optional = false
629 | python-versions = "*"
630 | files = [
631 | {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
632 | {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
633 | ]
634 |
635 | [package.dependencies]
636 | asttokens = ">=2.1.0"
637 | executing = ">=1.2.0"
638 | pure-eval = "*"
639 |
640 | [package.extras]
641 | tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
642 |
643 | [[package]]
644 | name = "tomli"
645 | version = "2.0.2"
646 | description = "A lil' TOML parser"
647 | optional = false
648 | python-versions = ">=3.8"
649 | files = [
650 | {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
651 | {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
652 | ]
653 |
654 | [[package]]
655 | name = "traitlets"
656 | version = "5.14.3"
657 | description = "Traitlets Python configuration system"
658 | optional = false
659 | python-versions = ">=3.8"
660 | files = [
661 | {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"},
662 | {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"},
663 | ]
664 |
665 | [package.extras]
666 | docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
667 | test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"]
668 |
669 | [[package]]
670 | name = "typing-extensions"
671 | version = "4.12.2"
672 | description = "Backported and Experimental Type Hints for Python 3.8+"
673 | optional = false
674 | python-versions = ">=3.8"
675 | files = [
676 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
677 | {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
678 | ]
679 |
680 | [[package]]
681 | name = "urllib3"
682 | version = "2.2.3"
683 | description = "HTTP library with thread-safe connection pooling, file post, and more."
684 | optional = false
685 | python-versions = ">=3.8"
686 | files = [
687 | {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
688 | {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
689 | ]
690 |
691 | [package.extras]
692 | brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
693 | h2 = ["h2 (>=4,<5)"]
694 | socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
695 | zstd = ["zstandard (>=0.18.0)"]
696 |
697 | [[package]]
698 | name = "virtualenv"
699 | version = "20.26.6"
700 | description = "Virtual Python Environment builder"
701 | optional = false
702 | python-versions = ">=3.7"
703 | files = [
704 | {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
705 | {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
706 | ]
707 |
708 | [package.dependencies]
709 | distlib = ">=0.3.7,<1"
710 | filelock = ">=3.12.2,<4"
711 | platformdirs = ">=3.9.1,<5"
712 |
713 | [package.extras]
714 | docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
715 | test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
716 |
717 | [[package]]
718 | name = "wcwidth"
719 | version = "0.2.13"
720 | description = "Measures the displayed width of unicode strings in a terminal"
721 | optional = false
722 | python-versions = "*"
723 | files = [
724 | {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
725 | {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
726 | ]
727 |
728 | [metadata]
729 | lock-version = "2.0"
730 | python-versions = "^3.10"
731 | content-hash = "0da5aa4acc06d8b5853e375f1313784cb22519528806b62b296086efae886b48"
732 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "crossrefapi"
3 | version = "1.6.1"
4 | description = "Library that implements the endpoints of the Crossref API"
5 | authors = ["Fabio Batalha "]
6 | packages = [
7 | { include = "crossref", from="."}
8 | ]
9 |
10 | [tool.poetry.dependencies]
11 | python = "^3.10"
12 | requests = "^2.32.3"
13 | urllib3 = "^2.2.3"
14 | ipython = "^8.28.0"
15 |
16 | [tool.poetry.group.dev.dependencies]
17 | pytest = "^7.4.0"
18 | ruff = "^0.0.277"
19 | pre-commit = "^3.3.3"
20 | ipython = "^8.20.0"
21 |
22 | [build-system]
23 | requires = ["poetry-core"]
24 | build-backend = "poetry.core.masonry.api"
25 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/tests/test_restful.py:
--------------------------------------------------------------------------------
1 |
2 | import unittest
3 |
4 | from crossref import VERSION, restful
5 |
6 |
7 | class RestfulTest(unittest.TestCase):
8 |     """
9 |     These tests are testing the API live integration; the main purpose of these
10 |     tests is to validate the JSON structure of the API results.
11 |     These tests may lead to connectivity errors if the Crossref API is temporarily
12 |     out of service.
13 |     """
14 |
15 |     def setUp(self):
16 |
17 |         self.etiquette = restful.Etiquette(
18 |             application_name="UnitTest CrossrefAPI",
19 |             application_version=VERSION,
20 |             application_url="https://github.com/fabiobatalha/crossrefapi",
21 |             contact_email="undefined",
22 |         )
23 |
24 |     def test_work_agency_message(self):
25 |         """
26 |         Testing the base structure for the /works/{DOI}/agency endpoint.
27 |         If the full base structure is present, this test will not raise dict
28 |         KeyError exceptions.
29 |         """
30 |         works = restful.Works(etiquette=self.etiquette)
31 |
32 |         result = works.agency("10.1590/S0102-09352010000200002")
33 |
34 |         assert result["agency"]["id"] == "crossref"
35 |
36 |     def test_work_agency_header(self):
37 |         """
38 |         Testing the base structure for the /works/{DOI}/agency endpoint.
39 |         If the full base structure is present, this test will not raise dict
40 |         KeyError exceptions.
41 |         """
42 |         works = restful.Works(etiquette=self.etiquette)
43 |
44 |         result = works.agency("10.1590/S0102-09352010000200002", only_message=False)
45 |
46 |         assert result["message-type"] == "work-agency"
47 |
48 |     def test_work_select_fields(self):
49 |         result = restful.Works(etiquette=self.etiquette).select("DOI").url
50 |
51 |         assert result == "https://api.crossref.org/works?select=DOI"
52 |
53 |     def test_work_select_fields_multiple_parameter_and_array(self):
54 |         result = restful.Works(etiquette=self.etiquette) \
55 |             .select("DOI", "title").select("subject").select(["relation", "editor"]) \
56 |             .select("relation, editor").url
57 |
58 |         assert result == "https://api.crossref.org/works?select=DOI%2Ceditor%2Crelation%2Csubject%2Ctitle"
59 |
60 |     def test_work_with_sample(self):
61 |         result = restful.Works(etiquette=self.etiquette).sample(5).url
62 |
63 |         assert result == "https://api.crossref.org/works?sample=5"
64 |
65 |     def test_work_with_sample_and_filters(self):
66 |         result = restful.Works(
67 |             etiquette=self.etiquette).filter(type="journal-article").sample(5).url
68 |
69 |         assert result == "https://api.crossref.org/works?filter=type%3Ajournal-article&sample=5"
70 |
71 |     def test_members_filters(self):
72 |         result = restful.Members(
73 |             etiquette=self.etiquette).filter(has_public_references="true").url
74 |
75 |         assert result == "https://api.crossref.org/members?filter=has-public-references%3Atrue"
76 |
77 |     def test_funders_filters(self):
78 |         result = restful.Funders(etiquette=self.etiquette).filter(location="Japan").url
79 |
80 |         assert result == "https://api.crossref.org/funders?filter=location%3AJapan"
81 |
82 |
83 | class HTTPRequestTest(unittest.TestCase):
84 |
85 | def setUp(self):
86 |
87 | self.httprequest = restful.HTTPRequest()
88 |
89 | def test_default_rate_limits(self):
90 |
91 | expected = {"x-rate-limit-interval": 1, "x-rate-limit-limit": 50}
92 |
93 | assert self.httprequest.rate_limits == expected
94 |
95 | def test_update_rate_limits_seconds(self):
96 |
97 | headers = {"x-rate-limit-interval": "2s", "x-rate-limit-limit": 50}
98 |
99 | self.httprequest._update_rate_limits(headers)
100 |
101 | expected = {"x-rate-limit-interval": 2, "x-rate-limit-limit": 50}
102 |
103 | assert self.httprequest.rate_limits == expected
104 |
105 | def test_update_rate_limits_minutes(self):
106 |
107 | headers = {"x-rate-limit-interval": "2m", "x-rate-limit-limit": 50}
108 |
109 | self.httprequest._update_rate_limits(headers)
110 |
111 | expected = {"x-rate-limit-interval": 120, "x-rate-limit-limit": 50}
112 |
113 | assert self.httprequest.rate_limits == expected
114 |
115 | def test_update_rate_limits_hours(self):
116 |
117 | headers = {"x-rate-limit-interval": "2h", "x-rate-limit-limit": 50}
118 |
119 | self.httprequest._update_rate_limits(headers)
120 |
121 | expected = {"x-rate-limit-interval": 7200, "x-rate-limit-limit": 50}
122 |
123 | assert self.httprequest.rate_limits == expected
124 |
--------------------------------------------------------------------------------
/tests/test_validators.py:
--------------------------------------------------------------------------------
1 |
2 | import unittest
3 |
4 | from crossref import validators
5 |
6 |
7 | class ValidatorsTest(unittest.TestCase):
8 |
9 | def test_directory_1(self):
10 |
11 | result = validators.directory("DOAJ")
12 |
13 | assert result
14 |
15 | def test_directory_2(self):
16 |
17 | with self.assertRaises(ValueError):
18 | validators.directory("any invalid archive")
19 |
20 | def test_archive_1(self):
21 |
22 | result = validators.archive("CLOCKSS")
23 |
24 | assert result
25 |
26 | def test_archive_2(self):
27 |
28 | with self.assertRaises(ValueError):
29 | validators.archive("any invalid archive")
30 |
31 | def test_document_type_1(self):
32 |
33 | result = validators.document_type("book-chapter")
34 |
35 | assert result
36 |
37 | def test_document_type_2(self):
38 |
39 | with self.assertRaises(ValueError):
40 | validators.document_type("any invalid type")
41 |
42 | def test_is_bool_3(self):
43 |
44 | result = validators.is_bool("true")
45 |
46 | assert result
47 |
48 | def test_is_bool_4(self):
49 |
50 | result = validators.is_bool("false")
51 |
52 | assert result
53 |
54 | def test_is_bool_5(self):
55 |
56 | result = validators.is_bool("1")
57 |
58 | assert result
59 |
60 | def test_is_bool_5(self):
61 |
62 | with self.assertRaises(ValueError):
63 | validators.is_bool("jljlj")
64 |
65 | def test_is_date_1(self):
66 |
67 | result = validators.is_date("2017")
68 |
69 | assert result
70 |
71 | def test_is_date_2(self):
72 |
73 | result = validators.is_date("2017-12")
74 |
75 | assert result
76 |
77 | def test_is_date_3(self):
78 |
79 | result = validators.is_date("2017-12-31")
80 |
81 | assert result
82 |
83 | def test_is_date_4(self):
84 |
85 | with self.assertRaises(ValueError):
86 | validators.is_date("asas")
87 |
88 | def test_is_date_5(self):
89 |
90 | with self.assertRaises(ValueError):
91 | validators.is_date("2017-30")
92 |
93 | def test_is_date_6(self):
94 |
95 | with self.assertRaises(ValueError):
96 | validators.is_date("2017-12-00")
97 |
98 | def test_is_integer_1(self):
99 |
100 | result = validators.is_integer("10")
101 |
102 | assert result
103 |
104 | def test_is_integer_1(self):
105 |
106 | with self.assertRaises(ValueError):
107 | validators.is_integer("-1")
108 |
109 | def test_is_integer_3(self):
110 |
111 | with self.assertRaises(ValueError):
112 | validators.is_integer("dd")
113 |
--------------------------------------------------------------------------------