├── .gitignore
├── .travis.yml
├── CHANGELOG.md
├── CONTRIBUTING.rst
├── LICENSE
├── MANIFEST.in
├── README.rst
├── README.txt
├── docs
├── Makefile
├── conf.py
├── index.rst
└── make.bat
├── lib
├── c14n
│ ├── Canonicalize.py
│ ├── LICENSE
│ ├── NumberToJson.py
│ └── __init__.py
└── pyld
│ ├── __about__.py
│ ├── __init__.py
│ ├── context_resolver.py
│ ├── documentloader
│ ├── __init__.py
│ ├── aiohttp.py
│ └── requests.py
│ ├── jsonld.py
│ └── resolved_context.py
├── requirements.txt
├── setup.py
└── tests
└── runtests.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[co]
2 | *.sw[op]
3 | *~
4 | .coverage
5 | .project
6 | .pydevproject
7 | .settings
8 | MANIFEST
9 | build
10 | cover
11 | dist
12 | docs/_build
13 | lib/PyLD.egg-info
14 | profiler
15 | tests/test_caching.py
16 | tests/data/test_caching.json
17 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: xenial
2 | language: python
3 | cache: pip
4 | python:
5 | - "3.4"
6 | - "3.5"
7 | - "3.6"
8 | - "3.7"
9 | - "3.8"
10 | - "pypy3"
11 | sudo: false
12 |
13 | # Define document loaders
14 | env:
15 | - LOADER=requests
16 | - LOADER=aiohttp
17 |
18 | matrix:
19 | exclude:
20 | - python: "3.4"
21 | env: LOADER=aiohttp
22 | allow_failures:
23 | - python: "3.4"
24 | - python: "3.5"
25 |
26 | install:
27 | - pip install -r requirements.txt
28 | - git clone --depth 1 https://github.com/w3c/json-ld-api.git _json-ld-api
29 | - git clone --depth 1 https://github.com/w3c/json-ld-framing.git _json-ld-framing
30 | - git clone --depth 1 https://github.com/json-ld/normalization.git _normalization
31 |
32 | # Download test suite and run tests... submodule? meta testing project with
33 | # all of the reference implementations?
34 | script:
35 | - python tests/runtests.py ./_json-ld-api/tests -l $LOADER
36 | - python tests/runtests.py ./_json-ld-framing/tests -l $LOADER
37 | - python tests/runtests.py ./_normalization/tests -l $LOADER
38 |
39 | notifications:
40 | email:
41 | on_success: change
42 | on_failure: change
43 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # pyld ChangeLog
2 |
3 | ## 2.0.4 - 2024-02-16
4 |
5 | ### Fixed
6 | - Use explicit `None` or `False` for context checks. Fixes an issue while
7 | framing with an empty context.
8 |
9 | ## 2.0.3 - 2020-08-06
10 |
11 | ### Fixed
12 | - Fix deprecation warnings due to invalid escape sequences.
13 |
14 | ## 2.0.2 - 2020-04-20
15 |
16 | ### Fixed
17 | - Fix inverse context cache indexing to use the uuid field.
18 |
19 | ## 2.0.1 - 2020-04-15
20 |
21 | ### Changed
22 | - Improve EARL output.
23 |
24 | ## 2.0.0 - 2020-04-15
25 |
26 | ### Notes
27 | - This release adds JSON-LD 1.1 support. Significant thanks goes to Gregg
28 | Kellogg!
29 | - **BREAKING**: It is highly recommended to do proper testing when upgrading
30 | from the previous version. The framing API in particular now follows the 1.1
31 | spec and some of the defaults changed.
32 |
33 | ### Changed
34 | - **BREAKING**: Versions of Python before 3.6 are no longer supported.
35 | - Update conformance docs.
36 | - Add all keywords and update options.
37 | - Default `processingMode` to `json-ld-1.1`.
38 | - Implement logic for marking tests as pending, so that it will fail if a
39 | pending test passes.
40 | - Consolidate `documentLoader` option and defaults into a `load_document` method
41 | to also handle JSON (eventually HTML) parsing.
42 | - Add support for `rel=alternate` for non-JSON-LD docs.
43 | - Use `lxml.html` to load HTML and parse in `load_html`.
44 | - For HTML, the API base option can be updated from base element.
45 | - Context processing:
46 | - Support `@propagate` in context processing and propagate option.
47 | - Support for `@import`. (Some issues confusing recursion errors for invalid
48 | contexts).
49 | - Make `override_protected` and `propagate` optional arguments to
50 | `_create_term_definition` and `_process_context` instead of using option
51 | argument.
52 | - Improve management of previous contexts.
53 | - Imported contexts must resolve to an object.
54 | - Do remote context processing from within `_process_contexts`, as logic is
55 | too complicated for pre-loading. Removes `_find_context_urls` and
56 | `_retrieve_context_urls`.
57 | - Added a `ContextResolver` which can use a shared LRU cache for storing
58 | externally retrieved contexts, and the result of processing them relative
59 | to a particular active context.
60 | - Return a `frozendict` from context processing and reduce deepcopies.
61 | - Store inverse context in an LRU cache rather than trying to modify a frozen context.
62 | - Don't set `@base` in initial context and don't resolve a relative IRI
63 | when setting `@base` in a context, so that the document location can
64 | be kept separate from the context itself.
65 | - Use static initial contexts composed of just `mappings` and `processingMode`
66 | to enhance preprocessed context cachability.
67 | - Create Term Definition:
68 | - Allow `@type` as a term under certain circumstances.
69 | - Reject and warn on keyword-like terms.
70 | - Support protected term definitions.
71 | - Look for keyword patterns and warn/return.
72 | - Look for terms that are compact IRIs that don't expand to the same thing.
73 | - Basic support for `@json` and `@none` as values of `@type`.
74 | - If `@container` includes `@type`, `@type` must be `@id` or `@vocab`.
75 | - Support `@index` and `@direction`.
76 | - Corner-case checking for `@prefix`.
77 | - Validate scoped contexts even if not used.
78 | - Support relative vocabulary IRIs.
79 | - Fix check that term has the form of an IRI.
80 | - Delay adding mapping to end of `_create_term_definition`.
81 | - If a scoped context is null, wrap it in an array so it doesn't seem to be
82 | undefined.
83 | - IRI Expansion:
84 | - Find keyword patterns.
85 | - Don't treat terms starting with a colon as IRIs.
86 | - Only return a resulting IRI if it is absolute.
87 | - Fix `_is_absolute_iri` to use a reasonable regular expression and some
88 | other `_expand_iri issues`.
89 | - Fix to detecting relative IRIs.
90 | - Fix special case where relative path should not have a leading '/'
91 | - Pass in document location (through 'base' option) and use when resolving
92 | document-relative IRIs.
93 | - IRI Compaction:
94 | - Pass in document location (through 'base' option) and use when compacting
95 | document-relative IRIs.
96 | - Compaction:
97 | - Compact `@direction`.
98 | - Compact `@type`: `@none`.
99 | - Compact `@included`.
100 | - Honor `@container`: `@set` on `@type`.
101 | - Lists of Lists.
102 | - Improve handling of scoped contexts and propagate.
103 | - Improve map compaction, including indexed properties.
104 | - Catch Absolute IRI confused with prefix.
105 | - Expansion:
106 | - Updates to expansion algorithm.
107 | - `_expand_value` adds `@direction` from term definition.
108 | - JSON Literals.
109 | - Support `@direction` when expanding.
110 | - Support lists of lists.
111 | - Support property indexes.
112 | - Improve graph container expansion.
113 | - Order types when applying scoped contexts.
114 | - Use `type_scoped_ctx` when expanding values of `@type`.
115 | - Use propagate and `override_protected` properly when creating expansion
116 | contexts.
117 | - Flattening:
118 | - Rewrite `_create_node_map` based on 1.1 algorithm.
119 | - Flatten `@included`.
120 | - Flatten lists of lists.
121 | - Update `merge_node_maps` for `@type`.
122 | - Framing:
123 | - Change default for `requireAll` from True to False.
124 | - Change default for 'embed' from '@last' to '@once'.
125 | - Add defaults for `omitGraph` and `pruneBlankNodeIdentifiers`
126 | based on processing mode.
127 | - Change `_remove_preserve` to `_cleanup_preserve` which happens before
128 | compaction.
129 | - Add `_cleanup_null` which happens after compaction.
130 | - Update frame matching to 1.1 spec.
131 | - Support `@included`.
132 | - ToRdf:
133 | - Support for I18N direction.
134 | - Support for Lists of Lists.
135 | - Partial support for JSON canonicalization of JSON literals.
136 | - Includes local copy of JCS library, but doesn't load.
137 | - Lists of Lists.
138 | - Text Direction 'i18n-datatype'.
139 | - Testing
140 | - Switched to argparse.
141 | - **BREAKING**: Removed `-d` and `-m` test runner options in favor of just
142 | listing as arguments.
143 | - If no test manifests or directories are specified, default to sibling
144 | directories for json-ld-api, json-ld-framing, and normalization.
145 |
146 | ## 1.0.5 - 2019-05-09
147 |
148 | ### Fixed
149 | - Use `return` instead of `raise StopIteration` to terminate generator.
150 |
151 | ## 1.0.4 - 2018-12-11
152 |
153 | ### Fixed
154 | - Accept N-Quads upper case language tag.
155 |
156 | ## 1.0.3 - 2018-03-09
157 |
158 | ### Fixed
159 | - Reorder code to avoid undefined symbols.
160 |
161 | ## 1.0.2 - 2018-03-08
162 |
163 | ### Fixed
164 | - Missing error parameter.
165 |
166 | ## 1.0.1 - 2018-03-06
167 |
168 | ### Fixed
169 | - Include document loaders in distribution.
170 |
171 | ## 1.0.0 - 2018-03-06
172 |
173 | ### Notes
174 | - **1.0.0**!
175 | - [Semantic Versioning](https://semver.org/) is now past the "initial
176 | development" 0.x.y stage (after 6+ years!).
177 | - [Conformance](README.rst#conformance):
178 | - JSON-LD 1.0 + JSON-LD 1.0 errata
179 | - JSON-LD 1.1 drafts
180 | - Thanks to the JSON-LD and related communities and the many many people over
181 | the years who contributed ideas, code, bug reports, and support!
182 |
183 | ### Fixed
184 | - Don't always use arrays for `@graph`. Fixes 1.0 compatibility issue.
185 | - Process @type term contexts before key iteration.
186 |
187 | ### Changed
188 | - **BREAKING**: A dependency of pyld will not pull in [Requests][] anymore.
189 | One needs to define a dependency to `pyld[requests]` or create an
190 | explicit dependency on `requests` separately. Use `pyld[aiohttp]` for
191 | [aiohttp][].
192 | - The default document loader is set to `requests_document_loader`. If
193 | [Requests][] is not available, `aiohttp_document_loader` is used. When
194 | [aiohttp][] is not available, a `dummy_document_loader` is used.
195 | - Use the W3C standard MIME type for N-Quads of "application/n-quads". Accept
196 | "application/nquads" for compatibility.
197 |
198 | ### Added
199 | - Support for asynchronous document loader library [aiohttp][].
200 | - Added `dummy_document_loader` which allows libraries to depend on
201 | pyld without depending on [Requests][] or [aiohttp][].
202 | - The test runner contains an additional parameter `-l` to specify the
203 | default document loader.
204 | - Expansion and Compaction using scoped contexts on property and `@type` terms.
205 | - Expansion and Compaction of nested properties.
206 | - Index graph containers using `@id` and `@index`, with `@set` variations.
207 | - Index node objects using `@id` and `@type`, with `@set` variations.
208 | - Framing default and named graphs in addition to merged graph.
209 | - Value patterns when framing, allowing a subset of values to appear in the
210 | output.
211 |
212 | ## 0.8.2 - 2017-10-24
213 |
214 | ### Fixed
215 | - Use default document loader for older exposed `load_document` API.
216 |
217 | ## 0.8.1 - 2017-10-24
218 |
219 | ### Fixed
220 | - Use `__about__.py` to hold versioning and other meta data. Load file in
221 | `setup.py` and `jsonld.py`. Fixes testing and installation issues.
222 |
223 | ## 0.8.0 - 2017-10-20
224 |
225 | ### Fixed
226 | - **BREAKING**: Default http (80) and https (443) ports removed from URLs. This
227 | matches test suite behavior and other processing libs such as [jsonld.js][].
228 | - **BREAKING**: Fix path normalization to pass test suite RFC 3986 tests. This
229 | could change output for various relative URL edge cases.
230 | - Allow empty lists to be compacted to any `@list` container term. (Port from
231 | [jsonld.js][])
232 |
233 | ### Changed
234 | - **BREAKING**: Remove older document loader code. SSL/SNI support wasn't
235 | working well with newer Pythons.
236 | - **BREAKING**: Switch to [Requests][] for document loading. Some behavior
237 | could slightly change. Better supported in Python 2 and Python 3.
238 |
239 | ### Added
240 | - Support for test suite using http or https.
241 | - Easier to create a custom Requests document loader with the
242 | `requests_document_loader` call. Adds a `secure` flag to always use HTTPS.
243 | Can pass in keywords that [Requests][] understands. `verify` to disable SSL
244 | verification or use custom cert bundles. `cert` to use client certs.
245 | `timeout` to fail on timeouts (important for production use!). See
246 | [Requests][] docs for more info.
247 |
248 | ## Before 0.8.0
249 |
250 | - See git history for changes.
251 |
252 | [jsonld.js]: https://github.com/digitalbazaar/jsonld.js
253 | [Requests]: http://docs.python-requests.org/
254 | [aiohttp]: https://docs.aiohttp.org/
255 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | Contributing to PyLD
2 | ====================
3 |
4 | Want to contribute to PyLD? Great! Here are a few notes:
5 |
6 | Code
7 | ----
8 |
9 | * In general, follow the common `PEP 8 Style Guide`_.
10 | * Try to make the code pass flake8_ checks.
11 |
12 | * ``flake8 lib/pyld/jsonld.py``
13 |
14 | * Use version X.Y.Z-dev in dev mode.
15 | * Use version X.Y.Z for releases.
16 |
17 | Versioning
18 | ----------
19 |
20 | * Follow the `Semantic Versioning`_ guidelines.
21 |
22 | Release Process
23 | ---------------
24 |
25 | * ``$EDITOR CHANGELOG.md``: update CHANGELOG with new notes, version, and date.
26 | * commit changes
27 | * ``$EDITOR lib/pyld/__about__.py``: update to release version and remove ``-dev``
28 | suffix.
29 | * ``git commit CHANGELOG.md lib/pyld/__about__.py -m "Release {version}."``
30 | * ``git tag {version}``
31 | * ``$EDITOR lib/pyld/__about__.py``: update to next version and add ``-dev`` suffix.
32 | * ``git commit lib/pyld/__about__.py -m "Start {next-version}."``
33 | * ``git push --tags``
34 |
35 | To ensure a clean `package <https://pypi.org/project/PyLD/>`_ upload to PyPI_,
36 | use a clean checkout, and run the following:
37 |
38 | * For more info, look at the packaging
39 | `guide <https://packaging.python.org/tutorials/packaging-projects/>`_.
40 | * Setup an `API token <https://pypi.org/help/#apitoken>`_. Recommend using a
41 | specific "PyLD" token and set it up as a "repository" in your
42 | `~/.pypirc <https://packaging.python.org/specifications/pypirc/>`_
43 | for use in the upload command.
44 | * The below builds and uploads a sdist and wheel. Adjust as needed depending
45 | on how you manage and clean "dist/" dir files.
46 | * ``git checkout {version}``
47 | * ``python3 -m build``
48 | * ``twine check dist/*``
49 | * ``twine upload -r PyLD dist/*``
50 |
51 | Implementation Report Process
52 | -----------------------------
53 |
54 | As of early 2020, the process to generate an EARL report for the official
55 | `JSON-LD Processor Conformance`_ page is:
56 |
57 | * Run the tests on the ``json-ld-api`` and ``json-ld-framing`` test repos to
58 | generate a ``.jsonld`` test report:
59 |
60 | * ``python tests/runtests.py ../json-ld-api/tests/ ../json-ld-framing/tests/ -e pyld-earl.jsonld``
61 |
62 | * Use the rdf_ tool to generate a ``.ttl``:
63 |
64 | * ``rdf serialize pyld-earl.jsonld --output-format turtle -o pyld-earl.ttl``
65 |
66 | * Optionally follow the `report instructions`_ to generate the HTML report for
67 | inspection.
68 | * Submit a PR to the `json-ld-api repository`_ with at least the ``.ttl``:
69 |
70 | .. _JSON-LD Processor Conformance: https://w3c.github.io/json-ld-api/reports/
71 | .. _PEP 8 Style Guide: https://www.python.org/dev/peps/pep-0008/
72 | .. _Semantic Versioning: https://semver.org/
73 | .. _flake8: https://pypi.python.org/pypi/flake8
74 | .. _json-ld-api repository: https://github.com/w3c/json-ld-api/pulls
75 | .. _rdf: https://rubygems.org/gems/rdf
76 | .. _report instructions: https://github.com/w3c/json-ld-api/tree/master/reports
77 | .. _PyPI: https://pypi.org/
78 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | You may use the PyLD project under the terms of the New BSD License.
2 |
3 | The New BSD License is recommended for most projects. It is simple and easy
4 | to understand and it places almost no restrictions on what you can do with
5 | the PyLD project.
6 |
7 | You are free to use this project in commercial projects as long as the
8 | copyright header is left intact.
9 |
10 | If you are a commercial entity and use this set of libraries in your
11 | commercial software then reasonable payment to Digital Bazaar, if you can
12 | afford it, is not required but is expected and would be appreciated. If this
13 | library saves you time, then it's saving you money. The cost of developing
14 | the PyLD software was on the order of several hundred hours and tens of
15 | thousands of dollars. We are attempting to strike a balance between helping
16 | the development community while not being taken advantage of by lucrative
17 | commercial entities for our efforts.
18 |
19 | -------------------------------------------------------------------------------
20 | New BSD License (3-clause)
21 | Copyright (c) 2011, Digital Bazaar, Inc.
22 | All rights reserved.
23 |
24 | Redistribution and use in source and binary forms, with or without
25 | modification, are permitted provided that the following conditions are met:
26 | * Redistributions of source code must retain the above copyright
27 | notice, this list of conditions and the following disclaimer.
28 | * Redistributions in binary form must reproduce the above copyright
29 | notice, this list of conditions and the following disclaimer in the
30 | documentation and/or other materials provided with the distribution.
31 | * Neither the name of Digital Bazaar, Inc. nor the
32 | names of its contributors may be used to endorse or promote products
33 | derived from this software without specific prior written permission.
34 |
35 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
36 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
37 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
38 | DISCLAIMED. IN NO EVENT SHALL DIGITAL BAZAAR BE LIABLE FOR ANY
39 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
40 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
41 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
42 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
43 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
44 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
45 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst README.txt LICENSE CHANGELOG.md
2 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | PyLD
2 | ====
3 |
4 | .. image:: https://travis-ci.org/digitalbazaar/pyld.png?branch=master
5 | :target: https://travis-ci.org/digitalbazaar/pyld
6 | :alt: Build Status
7 |
8 | Introduction
9 | ------------
10 |
11 | This library is an implementation of the JSON-LD_ specification in Python_.
12 |
13 | JSON, as specified in RFC7159_, is a simple language for representing
14 | objects on the Web. Linked Data is a way of describing content across
15 | different documents or Web sites. Web resources are described using
16 | IRIs, and typically are dereferencable entities that may be used to find
17 | more information, creating a "Web of Knowledge". JSON-LD_ is intended
18 | to be a simple publishing method for expressing not only Linked Data in
19 | JSON, but for adding semantics to existing JSON.
20 |
21 | JSON-LD is designed as a light-weight syntax that can be used to express
22 | Linked Data. It is primarily intended to be a way to express Linked Data
23 | in JavaScript and other Web-based programming environments. It is also
24 | useful when building interoperable Web Services and when storing Linked
25 | Data in JSON-based document storage engines. It is practical and
26 | designed to be as simple as possible, utilizing the large number of JSON
27 | parsers and existing code that is in use today. It is designed to be
28 | able to express key-value pairs, RDF data, RDFa_ data,
29 | Microformats_ data, and Microdata_. That is, it supports every
30 | major Web-based structured data model in use today.
31 |
32 | The syntax does not require many applications to change their JSON, but
33 | easily add meaning by adding context in a way that is either in-band or
34 | out-of-band. The syntax is designed to not disturb already deployed
35 | systems running on JSON, but provide a smooth migration path from JSON
36 | to JSON with added semantics. Finally, the format is intended to be fast
37 | to parse, fast to generate, stream-based and document-based processing
38 | compatible, and require a very small memory footprint in order to operate.
39 |
40 | Conformance
41 | -----------
42 |
43 | This library aims to conform with the following:
44 |
45 | - `JSON-LD 1.1 <https://www.w3.org/TR/json-ld11/>`_,
46 | W3C Candidate Recommendation,
47 | 2019-12-12 or `newer <https://w3c.github.io/json-ld-syntax/>`_
48 | - `JSON-LD 1.1 Processing Algorithms and API <https://www.w3.org/TR/json-ld11-api/>`_,
49 | W3C Candidate Recommendation,
50 | 2019-12-12 or `newer <https://w3c.github.io/json-ld-api/>`_
51 | - `JSON-LD 1.1 Framing <https://www.w3.org/TR/json-ld11-framing/>`_,
52 | W3C Candidate Recommendation,
53 | 2019-12-12 or `newer <https://w3c.github.io/json-ld-framing/>`_
54 | - Working Group `test suite <https://github.com/w3c/json-ld-api/tree/master/tests>`_
55 |
56 | The `test runner`_ is often updated to note or skip newer tests that are not
57 | yet supported.
58 |
59 | Requirements
60 | ------------
61 |
62 | - Python_ (3.6 or later)
63 | - Requests_ (optional)
64 | - aiohttp_ (optional, Python 3.5 or later)
65 |
66 | Installation
67 | ------------
68 |
69 | PyLD can be installed with a pip_ `package <https://pypi.org/project/PyLD/>`_
70 |
71 | .. code-block:: bash
72 |
73 | pip install PyLD
74 |
75 | Defining a dependency on pyld will not pull in Requests_ or aiohttp_. If you
76 | need one of these for a `Document Loader`_ then either depend on the desired
77 | external library directly or define the requirement as ``PyLD[requests]`` or
78 | ``PyLD[aiohttp]``.
79 |
80 | Quick Examples
81 | --------------
82 |
83 | .. code-block:: Python
84 |
85 | from pyld import jsonld
86 | import json
87 |
88 | doc = {
89 | "http://schema.org/name": "Manu Sporny",
90 | "http://schema.org/url": {"@id": "http://manu.sporny.org/"},
91 | "http://schema.org/image": {"@id": "http://manu.sporny.org/images/manu.png"}
92 | }
93 |
94 | context = {
95 | "name": "http://schema.org/name",
96 | "homepage": {"@id": "http://schema.org/url", "@type": "@id"},
97 | "image": {"@id": "http://schema.org/image", "@type": "@id"}
98 | }
99 |
100 | # compact a document according to a particular context
101 | # see: https://json-ld.org/spec/latest/json-ld/#compacted-document-form
102 | compacted = jsonld.compact(doc, context)
103 |
104 | print(json.dumps(compacted, indent=2))
105 | # Output:
106 | # {
107 | # "@context": {...},
108 | # "image": "http://manu.sporny.org/images/manu.png",
109 | # "homepage": "http://manu.sporny.org/",
110 | # "name": "Manu Sporny"
111 | # }
112 |
113 | # compact using URLs
114 | jsonld.compact('http://example.org/doc', 'http://example.org/context')
115 |
116 | # expand a document, removing its context
117 | # see: https://json-ld.org/spec/latest/json-ld/#expanded-document-form
118 | expanded = jsonld.expand(compacted)
119 |
120 | print(json.dumps(expanded, indent=2))
121 | # Output:
122 | # [{
123 | # "http://schema.org/image": [{"@id": "http://manu.sporny.org/images/manu.png"}],
124 | # "http://schema.org/name": [{"@value": "Manu Sporny"}],
125 | # "http://schema.org/url": [{"@id": "http://manu.sporny.org/"}]
126 | # }]
127 |
128 | # expand using URLs
129 | jsonld.expand('http://example.org/doc')
130 |
131 | # flatten a document
132 | # see: https://json-ld.org/spec/latest/json-ld/#flattened-document-form
133 | flattened = jsonld.flatten(doc)
134 | # all deep-level trees flattened to the top-level
135 |
136 | # frame a document
137 | # see: https://json-ld.org/spec/latest/json-ld-framing/#introduction
138 | framed = jsonld.frame(doc, frame)
139 | # document transformed into a particular tree structure per the given frame
140 |
141 | # normalize a document using the RDF Dataset Normalization Algorithm
142 | # (URDNA2015), see: https://www.w3.org/TR/rdf-canon/
143 | normalized = jsonld.normalize(
144 | doc, {'algorithm': 'URDNA2015', 'format': 'application/n-quads'})
145 | # normalized is a string that is a canonical representation of the document
146 | # that can be used for hashing, comparison, etc.
147 |
148 | Document Loader
149 | ---------------
150 |
151 | The default document loader for PyLD uses Requests_. In a production
152 | environment you may want to setup a custom loader that, at a minimum, sets a
153 | timeout value. You can also force requests to use https, set client certs,
154 | disable verification, or set other Requests_ parameters.
155 |
156 | .. code-block:: Python
157 |
158 | jsonld.set_document_loader(jsonld.requests_document_loader(timeout=...))
159 |
160 | An asynchronous document loader using aiohttp_ is also available. Please note
161 | that this document loader limits asynchronicity to fetching documents only.
162 | The processing loops remain synchronous.
163 |
164 | .. code-block:: Python
165 |
166 | jsonld.set_document_loader(jsonld.aiohttp_document_loader(timeout=...))
167 |
168 | When no document loader is specified, the default loader is set to Requests_.
169 | If Requests_ is not available, the loader is set to aiohttp_. The fallback
170 | document loader is a dummy document loader that raises an exception on every
171 | invocation.
172 |
173 | Commercial Support
174 | ------------------
175 |
176 | Commercial support for this library is available upon request from
177 | `Digital Bazaar`_: support@digitalbazaar.com.
178 |
179 | Source
180 | ------
181 |
182 | The source code for the Python implementation of the JSON-LD API
183 | is available at:
184 |
185 | https://github.com/digitalbazaar/pyld
186 |
187 | Tests
188 | -----
189 |
190 | This library includes a sample testing utility which may be used to verify
191 | that changes to the processor maintain the correct output.
192 |
193 | To run the sample tests you will need to get the test suite files by cloning
194 | the ``json-ld-api``, ``json-ld-framing``, and ``normalization`` repositories
195 | hosted on GitHub:
196 |
197 | - https://github.com/w3c/json-ld-api
198 | - https://github.com/w3c/json-ld-framing
199 | - https://github.com/json-ld/normalization
200 |
201 | If the suites repositories are available as sibling directories of the PyLD
202 | source directory, then all the tests can be run with the following:
203 |
204 | .. code-block:: bash
205 |
206 | python tests/runtests.py
207 |
208 | If you want to test individual manifest ``.jsonld`` files or directories
209 | containing a ``manifest.jsonld``, then you can supply these files or
210 | directories as arguments:
211 |
212 | .. code-block:: bash
213 |
214 | python tests/runtests.py TEST_PATH [TEST_PATH...]
215 |
216 | The test runner supports different document loaders by setting ``-l requests``
217 | or ``-l aiohttp``. The default document loader is set to Requests_.
218 |
219 | An EARL report can be generated using the ``-e`` or ``--earl`` option.
220 |
221 |
222 | .. _Digital Bazaar: https://digitalbazaar.com/
223 |
224 | .. _JSON-LD WG 1.1 API: https://www.w3.org/TR/json-ld11-api/
225 | .. _JSON-LD WG 1.1 Framing: https://www.w3.org/TR/json-ld11-framing/
226 | .. _JSON-LD WG 1.1: https://www.w3.org/TR/json-ld11/
227 |
228 | .. _JSON-LD WG API latest: https://w3c.github.io/json-ld-api/
229 | .. _JSON-LD WG Framing latest: https://w3c.github.io/json-ld-framing/
230 | .. _JSON-LD WG latest: https://w3c.github.io/json-ld-syntax/
231 |
232 | .. _JSON-LD Benchmarks: https://json-ld.org/benchmarks/
233 | .. _JSON-LD WG: https://www.w3.org/2018/json-ld-wg/
234 | .. _JSON-LD: https://json-ld.org/
235 | .. _Microdata: http://www.w3.org/TR/microdata/
236 | .. _Microformats: http://microformats.org/
237 | .. _Python: https://www.python.org/
238 | .. _Requests: http://docs.python-requests.org/
239 | .. _aiohttp: https://aiohttp.readthedocs.io/
240 | .. _RDFa: http://www.w3.org/TR/rdfa-core/
241 | .. _RFC7159: http://tools.ietf.org/html/rfc7159
242 | .. _WG test suite: https://github.com/w3c/json-ld-api/tree/master/tests
243 | .. _errata: http://www.w3.org/2014/json-ld-errata
244 | .. _pip: http://www.pip-installer.org/
245 | .. _test runner: https://github.com/digitalbazaar/pyld/blob/master/tests/runtests.py
246 | .. _test suite: https://github.com/json-ld/json-ld.org/tree/master/test-suite
247 |
--------------------------------------------------------------------------------
/README.txt:
--------------------------------------------------------------------------------
1 | README.rst
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # Internal variables.
11 | PAPEROPT_a4 = -D latex_paper_size=a4
12 | PAPEROPT_letter = -D latex_paper_size=letter
13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
14 |
15 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
16 |
17 | help:
18 | @echo "Please use \`make <target>' where <target> is one of"
19 | @echo " html to make standalone HTML files"
20 | @echo " dirhtml to make HTML files named index.html in directories"
21 | @echo " singlehtml to make a single large HTML file"
22 | @echo " pickle to make pickle files"
23 | @echo " json to make JSON files"
24 | @echo " htmlhelp to make HTML files and a HTML help project"
25 | @echo " qthelp to make HTML files and a qthelp project"
26 | @echo " devhelp to make HTML files and a Devhelp project"
27 | @echo " epub to make an epub"
28 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
29 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
30 | @echo " text to make text files"
31 | @echo " man to make manual pages"
32 | @echo " changes to make an overview of all changed/added/deprecated items"
33 | @echo " linkcheck to check all external links for integrity"
34 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
35 |
36 | clean:
37 | -rm -rf $(BUILDDIR)/*
38 |
39 | html:
40 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
41 | @echo
42 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
43 |
44 | dirhtml:
45 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
46 | @echo
47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
48 |
49 | singlehtml:
50 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
51 | @echo
52 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
53 |
54 | pickle:
55 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
56 | @echo
57 | @echo "Build finished; now you can process the pickle files."
58 |
59 | json:
60 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
61 | @echo
62 | @echo "Build finished; now you can process the JSON files."
63 |
64 | htmlhelp:
65 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
66 | @echo
67 | @echo "Build finished; now you can run HTML Help Workshop with the" \
68 | ".hhp project file in $(BUILDDIR)/htmlhelp."
69 |
70 | qthelp:
71 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
72 | @echo
73 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
74 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
75 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyLD.qhcp"
76 | @echo "To view the help file:"
77 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyLD.qhc"
78 |
79 | devhelp:
80 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
81 | @echo
82 | @echo "Build finished."
83 | @echo "To view the help file:"
84 | @echo "# mkdir -p $$HOME/.local/share/devhelp/PyLD"
85 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/PyLD"
86 | @echo "# devhelp"
87 |
88 | epub:
89 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
90 | @echo
91 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
92 |
93 | latex:
94 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
95 | @echo
96 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
97 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
98 | "(use \`make latexpdf' here to do that automatically)."
99 |
100 | latexpdf:
101 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
102 | @echo "Running LaTeX files through pdflatex..."
103 | make -C $(BUILDDIR)/latex all-pdf
104 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
105 |
106 | text:
107 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
108 | @echo
109 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
110 |
111 | man:
112 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
113 | @echo
114 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
115 |
116 | changes:
117 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
118 | @echo
119 | @echo "The overview file is in $(BUILDDIR)/changes."
120 |
121 | linkcheck:
122 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
123 | @echo
124 | @echo "Link check complete; look for any errors in the above output " \
125 | "or in $(BUILDDIR)/linkcheck/output.txt."
126 |
127 | doctest:
128 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
129 | @echo "Testing of doctests in the sources finished, look at the " \
130 | "results in $(BUILDDIR)/doctest/output.txt."
131 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # PyLD documentation build configuration file, created by
4 | # sphinx-quickstart on Mon Aug 29 15:25:28 2011.
5 | #
6 | # This file is execfile()d with the current directory set to its containing dir.
7 | #
8 | # Note that not all possible configuration values are present in this
9 | # autogenerated file.
10 | #
11 | # All configuration values have a default; values that are commented out
12 | # serve to show the default.
13 |
14 | import sys, os
15 |
16 | # If extensions (or modules to document with autodoc) are in another directory,
17 | # add these directories to sys.path here. If the directory is relative to the
18 | # documentation root, use os.path.abspath to make it absolute, like shown here.
19 | #sys.path.insert(0, os.path.abspath('.'))
20 |
21 | current_path = os.path.abspath(os.path.dirname(__file__))
22 | path = os.path.join(current_path, '..')
23 |
24 | sys.path[0:0] = [
25 | os.path.join(path, 'lib'),
26 | ]
27 |
28 | # -- General configuration -----------------------------------------------------
29 |
30 | # If your documentation needs a minimal Sphinx version, state it here.
31 | #needs_sphinx = '1.0'
32 |
33 | # Add any Sphinx extension module names here, as strings. They can be extensions
34 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
35 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode']
36 |
37 | # Add any paths that contain templates here, relative to this directory.
38 | templates_path = ['_templates']
39 |
40 | # The suffix of source filenames.
41 | source_suffix = '.rst'
42 |
43 | # The encoding of source files.
44 | #source_encoding = 'utf-8-sig'
45 |
46 | # The master toctree document.
47 | master_doc = 'index'
48 |
49 | # General information about the project.
50 | project = u'PyLD'
51 | copyright = u'2011, Digital Bazaar'
52 |
53 | # The version info for the project you're documenting, acts as replacement for
54 | # |version| and |release|, also used in various other places throughout the
55 | # built documents.
56 | #
57 | # The short X.Y version.
58 | version = '0.0'
59 | # The full version, including alpha/beta/rc tags.
60 | release = '0.0.1'
61 |
62 | # The language for content autogenerated by Sphinx. Refer to documentation
63 | # for a list of supported languages.
64 | #language = None
65 |
66 | # There are two options for replacing |today|: either, you set today to some
67 | # non-false value, then it is used:
68 | #today = ''
69 | # Else, today_fmt is used as the format for a strftime call.
70 | #today_fmt = '%B %d, %Y'
71 |
72 | # List of patterns, relative to source directory, that match files and
73 | # directories to ignore when looking for source files.
74 | exclude_patterns = ['_build']
75 |
76 | # The reST default role (used for this markup: `text`) to use for all documents.
77 | #default_role = None
78 |
79 | # If true, '()' will be appended to :func: etc. cross-reference text.
80 | #add_function_parentheses = True
81 |
82 | # If true, the current module name will be prepended to all description
83 | # unit titles (such as .. function::).
84 | #add_module_names = True
85 |
86 | # If true, sectionauthor and moduleauthor directives will be shown in the
87 | # output. They are ignored by default.
88 | #show_authors = False
89 |
90 | # The name of the Pygments (syntax highlighting) style to use.
91 | pygments_style = 'sphinx'
92 |
93 | # A list of ignored prefixes for module index sorting.
94 | #modindex_common_prefix = []
95 |
96 |
97 | # -- Options for HTML output ---------------------------------------------------
98 |
99 | # The theme to use for HTML and HTML Help pages. See the documentation for
100 | # a list of builtin themes.
101 | html_theme = 'default'
102 |
103 | # Theme options are theme-specific and customize the look and feel of a theme
104 | # further. For a list of options available for each theme, see the
105 | # documentation.
106 | #html_theme_options = {}
107 |
108 | # Add any paths that contain custom themes here, relative to this directory.
109 | #html_theme_path = []
110 |
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
113 | #html_title = None
114 |
115 | # A shorter title for the navigation bar. Default is the same as html_title.
116 | #html_short_title = None
117 |
118 | # The name of an image file (relative to this directory) to place at the top
119 | # of the sidebar.
120 | #html_logo = None
121 |
122 | # The name of an image file (within the static path) to use as favicon of the
123 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
124 | # pixels large.
125 | #html_favicon = None
126 |
127 | # Add any paths that contain custom static files (such as style sheets) here,
128 | # relative to this directory. They are copied after the builtin static files,
129 | # so a file named "default.css" will overwrite the builtin "default.css".
130 | html_static_path = ['_static']
131 |
132 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
133 | # using the given strftime format.
134 | #html_last_updated_fmt = '%b %d, %Y'
135 |
136 | # If true, SmartyPants will be used to convert quotes and dashes to
137 | # typographically correct entities.
138 | #html_use_smartypants = True
139 |
140 | # Custom sidebar templates, maps document names to template names.
141 | #html_sidebars = {}
142 |
143 | # Additional templates that should be rendered to pages, maps page names to
144 | # template names.
145 | #html_additional_pages = {}
146 |
147 | # If false, no module index is generated.
148 | #html_domain_indices = True
149 |
150 | # If false, no index is generated.
151 | #html_use_index = True
152 |
153 | # If true, the index is split into individual pages for each letter.
154 | #html_split_index = False
155 |
156 | # If true, links to the reST sources are added to the pages.
157 | #html_show_sourcelink = True
158 |
159 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
160 | #html_show_sphinx = True
161 |
162 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
163 | #html_show_copyright = True
164 |
165 | # If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
167 | # base URL from which the finished HTML is served.
168 | #html_use_opensearch = ''
169 |
170 | # This is the file name suffix for HTML files (e.g. ".xhtml").
171 | #html_file_suffix = None
172 |
173 | # Output file base name for HTML help builder.
174 | htmlhelp_basename = 'PyLDdoc'
175 |
176 |
177 | # -- Options for LaTeX output --------------------------------------------------
178 |
179 | # The paper size ('letter' or 'a4').
180 | #latex_paper_size = 'letter'
181 |
182 | # The font size ('10pt', '11pt' or '12pt').
183 | #latex_font_size = '10pt'
184 |
185 | # Grouping the document tree into LaTeX files. List of tuples
186 | # (source start file, target name, title, author, documentclass [howto/manual]).
187 | latex_documents = [
188 | ('index', 'PyLD.tex', u'PyLD Documentation',
189 | u'Digital Bazaar', 'manual'),
190 | ]
191 |
192 | # The name of an image file (relative to this directory) to place at the top of
193 | # the title page.
194 | #latex_logo = None
195 |
196 | # For "manual" documents, if this is true, then toplevel headings are parts,
197 | # not chapters.
198 | #latex_use_parts = False
199 |
200 | # If true, show page references after internal links.
201 | #latex_show_pagerefs = False
202 |
203 | # If true, show URL addresses after external links.
204 | #latex_show_urls = False
205 |
206 | # Additional stuff for the LaTeX preamble.
207 | #latex_preamble = ''
208 |
209 | # Documents to append as an appendix to all manuals.
210 | #latex_appendices = []
211 |
212 | # If false, no module index is generated.
213 | #latex_domain_indices = True
214 |
215 |
216 | # -- Options for manual page output --------------------------------------------
217 |
218 | # One entry per manual page. List of tuples
219 | # (source start file, name, description, authors, manual section).
220 | man_pages = [
221 | ('index', 'pyld', u'PyLD Documentation',
222 | [u'Digital Bazaar'], 1)
223 | ]
224 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. PyLD documentation master file, created by
2 | sphinx-quickstart on Mon Aug 29 15:25:28 2011.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to PyLD!
7 | ================
8 |
9 | PyLD is a `Python`_ implementation of a `JSON-LD`_ processor.
10 |
11 |
12 | API Reference
13 | -------------
14 |
15 | .. toctree::
16 | :maxdepth: 2
17 |
18 | .. module:: pyld.jsonld
19 | .. autofunction:: compact
20 | .. autofunction:: expand
21 | .. autofunction:: flatten
22 | .. autofunction:: frame
23 | .. autofunction:: normalize
24 |
25 | Indices and tables
26 | ------------------
27 |
28 | * :ref:`genindex`
29 | * :ref:`modindex`
30 | * :ref:`search`
31 |
32 | Requirements
33 | ------------
34 |
PyLD is compatible with `Python`_ 3.4 and newer.
36 |
37 | Credits
38 | -------
39 |
40 | Thanks to `Digital Bazaar`_, the JavaScript JSON-LD parser, and the `JSON-LD`_ community.
41 |
42 | Contribute
43 | ----------
44 |
45 | Source code is available:
46 |
47 | https://github.com/digitalbazaar/pyld
48 |
49 | License
50 | -------
51 |
52 | PyLD is licensed under a `BSD 3-Clause license`_.
53 |
54 | .. _JSON-LD: https://json-ld.org/
55 | .. _Digital Bazaar: https://digitalbazaar.com/
56 | .. _Python: https://www.python.org/
57 | .. _BSD 3-Clause License: https://opensource.org/licenses/BSD-3-Clause
58 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | if NOT "%PAPER%" == "" (
11 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
12 | )
13 |
14 | if "%1" == "" goto help
15 |
16 | if "%1" == "help" (
17 | :help
18 | echo.Please use `make ^` where ^ is one of
19 | echo. html to make standalone HTML files
20 | echo. dirhtml to make HTML files named index.html in directories
21 | echo. singlehtml to make a single large HTML file
22 | echo. pickle to make pickle files
23 | echo. json to make JSON files
24 | echo. htmlhelp to make HTML files and a HTML help project
25 | echo. qthelp to make HTML files and a qthelp project
26 | echo. devhelp to make HTML files and a Devhelp project
27 | echo. epub to make an epub
28 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
29 | echo. text to make text files
30 | echo. man to make manual pages
31 | echo. changes to make an overview over all changed/added/deprecated items
32 | echo. linkcheck to check all external links for integrity
33 | echo. doctest to run all doctests embedded in the documentation if enabled
34 | goto end
35 | )
36 |
37 | if "%1" == "clean" (
38 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
39 | del /q /s %BUILDDIR%\*
40 | goto end
41 | )
42 |
43 | if "%1" == "html" (
44 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
45 | if errorlevel 1 exit /b 1
46 | echo.
47 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
48 | goto end
49 | )
50 |
51 | if "%1" == "dirhtml" (
52 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
53 | if errorlevel 1 exit /b 1
54 | echo.
55 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
56 | goto end
57 | )
58 |
59 | if "%1" == "singlehtml" (
60 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
61 | if errorlevel 1 exit /b 1
62 | echo.
63 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
64 | goto end
65 | )
66 |
67 | if "%1" == "pickle" (
68 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
69 | if errorlevel 1 exit /b 1
70 | echo.
71 | echo.Build finished; now you can process the pickle files.
72 | goto end
73 | )
74 |
75 | if "%1" == "json" (
76 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
77 | if errorlevel 1 exit /b 1
78 | echo.
79 | echo.Build finished; now you can process the JSON files.
80 | goto end
81 | )
82 |
83 | if "%1" == "htmlhelp" (
84 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
85 | if errorlevel 1 exit /b 1
86 | echo.
87 | echo.Build finished; now you can run HTML Help Workshop with the ^
88 | .hhp project file in %BUILDDIR%/htmlhelp.
89 | goto end
90 | )
91 |
92 | if "%1" == "qthelp" (
93 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
94 | if errorlevel 1 exit /b 1
95 | echo.
96 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
97 | .qhcp project file in %BUILDDIR%/qthelp, like this:
98 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PyLD.qhcp
99 | echo.To view the help file:
100 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PyLD.ghc
101 | goto end
102 | )
103 |
104 | if "%1" == "devhelp" (
105 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
106 | if errorlevel 1 exit /b 1
107 | echo.
108 | echo.Build finished.
109 | goto end
110 | )
111 |
112 | if "%1" == "epub" (
113 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
114 | if errorlevel 1 exit /b 1
115 | echo.
116 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
117 | goto end
118 | )
119 |
120 | if "%1" == "latex" (
121 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
122 | if errorlevel 1 exit /b 1
123 | echo.
124 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
125 | goto end
126 | )
127 |
128 | if "%1" == "text" (
129 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
130 | if errorlevel 1 exit /b 1
131 | echo.
132 | echo.Build finished. The text files are in %BUILDDIR%/text.
133 | goto end
134 | )
135 |
136 | if "%1" == "man" (
137 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
138 | if errorlevel 1 exit /b 1
139 | echo.
140 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
141 | goto end
142 | )
143 |
144 | if "%1" == "changes" (
145 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
146 | if errorlevel 1 exit /b 1
147 | echo.
148 | echo.The overview file is in %BUILDDIR%/changes.
149 | goto end
150 | )
151 |
152 | if "%1" == "linkcheck" (
153 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
154 | if errorlevel 1 exit /b 1
155 | echo.
156 | echo.Link check complete; look for any errors in the above output ^
157 | or in %BUILDDIR%/linkcheck/output.txt.
158 | goto end
159 | )
160 |
161 | if "%1" == "doctest" (
162 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
163 | if errorlevel 1 exit /b 1
164 | echo.
165 | echo.Testing of doctests in the sources finished, look at the ^
166 | results in %BUILDDIR%/doctest/output.txt.
167 | goto end
168 | )
169 |
170 | :end
171 |
--------------------------------------------------------------------------------
/lib/c14n/Canonicalize.py:
--------------------------------------------------------------------------------
1 | ##############################################################################
2 | # #
3 | # Copyright 2006-2019 WebPKI.org (http://webpki.org). #
4 | # #
5 | # Licensed under the Apache License, Version 2.0 (the "License"); #
6 | # you may not use this file except in compliance with the License. #
7 | # You may obtain a copy of the License at #
8 | # #
9 | # https://www.apache.org/licenses/LICENSE-2.0 #
10 | # #
11 | # Unless required by applicable law or agreed to in writing, software #
12 | # distributed under the License is distributed on an "AS IS" BASIS, #
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 | # See the License for the specific language governing permissions and #
15 | # limitations under the License. #
16 | # #
17 | ##############################################################################
18 |
19 | #################################################
20 | # JCS compatible JSON serializer for Python 3.x #
21 | #################################################
22 |
23 | import re
24 |
25 | from c14n.NumberToJson import convert2Es6Format
26 |
# Optional C accelerators from CPython's private _json module; each name
# falls back to None so the pure-Python implementations below are used.
try:
    from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
    c_encode_basestring_ascii = None
try:
    from _json import encode_basestring as c_encode_basestring
except ImportError:
    c_encode_basestring = None
try:
    from _json import make_encoder as c_make_encoder
except ImportError:
    c_make_encoder = None

# Characters that must be escaped inside a JSON string literal.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(b'[\x80-\xff]')

# Every control character defaults to its \uXXXX form; the short
# two-character escapes then override the ones that have them.
ESCAPE_DCT = {chr(code): '\\u{0:04x}'.format(code) for code in range(0x20)}
ESCAPE_DCT.update({
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
})

INFINITY = float('inf')


def py_encode_basestring(s):
    """Pure-Python fallback: return *s* as a quoted JSON string literal."""
    return '"' + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + '"'


encode_basestring = c_encode_basestring if c_encode_basestring else py_encode_basestring


def py_encode_basestring_ascii(s):
    """Pure-Python fallback: return an ASCII-only quoted JSON string."""
    def escape_match(match):
        ch = match.group(0)
        mapped = ESCAPE_DCT.get(ch)
        if mapped is not None:
            return mapped
        code = ord(ch)
        if code < 0x10000:
            return '\\u{0:04x}'.format(code)
        # Characters beyond the BMP are emitted as a UTF-16 surrogate pair.
        code -= 0x10000
        high = 0xd800 | ((code >> 10) & 0x3ff)
        low = 0xdc00 | (code & 0x3ff)
        return '\\u{0:04x}\\u{1:04x}'.format(high, low)
    return '"' + ESCAPE_ASCII.sub(escape_match, s) + '"'


encode_basestring_ascii = (
    py_encode_basestring_ascii if c_encode_basestring_ascii is None
    else c_encode_basestring_ascii)
93 |
class JSONEncoder(object):
    """Extensible JSON encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python | JSON |
    +===================+===============+
    | dict | object |
    +-------------------+---------------+
    | list, tuple | array |
    +-------------------+---------------+
    | str | string |
    +-------------------+---------------+
    | int, float | number |
    +-------------------+---------------+
    | True | true |
    +-------------------+---------------+
    | False | false |
    +-------------------+---------------+
    | None | null |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    NOTE(review): copy of the standard-library encoder with defaults tuned
    for canonical (JCS-style) output -- ``sort_keys=True``, compact
    ``separators=(',', ':')``, ``ensure_ascii=False`` -- and with numbers
    serialized via ``convert2Es6Format`` (see ``_make_iterencode`` below).

    """
    # Class-level fallbacks only: __init__ overwrites both whenever a
    # ``separators`` tuple is supplied, and its default always is one.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, *, skipkeys=False, ensure_ascii=False,
            check_circular=True, allow_nan=True, sort_keys=True,
            indent=None, separators=(',', ':'), default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming non-ASCII characters escaped. If
        ensure_ascii is false, the output can contain non-ASCII characters.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level. An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be an (item_separator, key_separator)
        tuple. The default here is (',', ':'), which eliminates whitespace
        and yields the most compact JSON representation.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            self.item_separator = ','
        if default is not None:
            self.default = default

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                # Let the base class default method raise the TypeError
                return JSONEncoder.default(self, o)

        """
        raise TypeError("Object of type '%s' is not JSON serializable" %
                o.__class__.__name__)

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from json.encoder import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, str):
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        # _one_shot=False deliberately bypasses the C accelerator branch in
        # iterencode(), so the Python path (with convert2Es6Format number
        # formatting) is always used.
        chunks = self.iterencode(o, _one_shot=False)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring

        def floatstr(o, allow_nan=self.allow_nan,
                _repr=float.__repr__, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on the
            # internals.

            # NaN is the only float that compares unequal to itself.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))

            return text


        # The C encoder is only reachable in one-shot mode; encode() above
        # always passes _one_shot=False, so it takes the Python path below.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
279 |
280 | def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
281 | _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
282 | ## HACK: hand-optimized bytecode; turn globals into locals
283 | ValueError=ValueError,
284 | dict=dict,
285 | float=float,
286 | id=id,
287 | int=int,
288 | isinstance=isinstance,
289 | list=list,
290 | str=str,
291 | tuple=tuple,
292 | _intstr=int.__str__,
293 | ):
294 |
295 | if _indent is not None and not isinstance(_indent, str):
296 | _indent = ' ' * _indent
297 |
298 | def _iterencode_list(lst, _current_indent_level):
299 | if not lst:
300 | yield '[]'
301 | return
302 | if markers is not None:
303 | markerid = id(lst)
304 | if markerid in markers:
305 | raise ValueError("Circular reference detected")
306 | markers[markerid] = lst
307 | buf = '['
308 | if _indent is not None:
309 | _current_indent_level += 1
310 | newline_indent = '\n' + _indent * _current_indent_level
311 | separator = _item_separator + newline_indent
312 | buf += newline_indent
313 | else:
314 | newline_indent = None
315 | separator = _item_separator
316 | first = True
317 | for value in lst:
318 | if first:
319 | first = False
320 | else:
321 | buf = separator
322 | if isinstance(value, str):
323 | yield buf + _encoder(value)
324 | elif value is None:
325 | yield buf + 'null'
326 | elif value is True:
327 | yield buf + 'true'
328 | elif value is False:
329 | yield buf + 'false'
330 | elif isinstance(value, int):
331 | # Subclasses of int/float may override __str__, but we still
332 | # want to encode them as integers/floats in JSON. One example
333 | # within the standard library is IntEnum.
334 | yield buf + convert2Es6Format(value)
335 | elif isinstance(value, float):
336 | # see comment above for int
337 | yield buf + convert2Es6Format(value)
338 | else:
339 | yield buf
340 | if isinstance(value, (list, tuple)):
341 | chunks = _iterencode_list(value, _current_indent_level)
342 | elif isinstance(value, dict):
343 | chunks = _iterencode_dict(value, _current_indent_level)
344 | else:
345 | chunks = _iterencode(value, _current_indent_level)
346 | yield from chunks
347 | if newline_indent is not None:
348 | _current_indent_level -= 1
349 | yield '\n' + _indent * _current_indent_level
350 | yield ']'
351 | if markers is not None:
352 | del markers[markerid]
353 |
    def _iterencode_dict(dct, _current_indent_level):
        # Emit the JSON text for a dict as a stream of string chunks.
        # _encoder, _indent, _sort_keys, _skipkeys, _item_separator,
        # _key_separator and markers are closure variables supplied by the
        # enclosing _make_iterencode call.
        if not dct:
            yield '{}'
            return
        # Circular-reference detection: remember this dict by id() while its
        # contents are being encoded, remove the marker when done.
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + _indent * _current_indent_level
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            # Canonical key order: sort by the keys' UTF-16 big-endian code
            # units so ordering matches JavaScript string comparison (as the
            # JSON Canonicalization Scheme requires).
            items = sorted(dct.items(), key=lambda kv: kv[0].encode('utf-16_be'))
        else:
            items = dct.items()
        for key, value in items:
            if isinstance(key, str):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                # see comment for int/float in _make_iterencode
                key = convert2Es6Format(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, int):
                # see comment for int/float in _make_iterencode
                key = convert2Es6Format(key)
            elif _skipkeys:
                # silently drop unconvertible keys when skipkeys=True
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            # Inline fast paths for scalar values; containers recurse below.
            if isinstance(value, str):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, int):
                # see comment for int/float in _make_iterencode
                yield convert2Es6Format(value)
            elif isinstance(value, float):
                # see comment for int/float in _make_iterencode
                yield convert2Es6Format(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                yield from chunks
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + _indent * _current_indent_level
        yield '}'
        if markers is not None:
            del markers[markerid]
432 |
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: yield the JSON chunks for any supported value.
        if isinstance(o, str):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, int):
            # see comment for int/float in _make_iterencode
            yield convert2Es6Format(o)
        elif isinstance(o, float):
            # see comment for int/float in _make_iterencode
            yield convert2Es6Format(o)
        elif isinstance(o, (list, tuple)):
            yield from _iterencode_list(o, _current_indent_level)
        elif isinstance(o, dict):
            yield from _iterencode_dict(o, _current_indent_level)
        else:
            # Unknown type: let the user-supplied default() convert it,
            # guarding against a default() that returns a cyclic structure.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            yield from _iterencode(o, _current_indent_level)
            if markers is not None:
                del markers[markerid]
    return _iterencode
463 |
def canonicalize(obj, utf8=True):
    """
    Serialize ``obj`` with this module's ``JSONEncoder`` using sorted keys,
    producing a canonical serialization.

    :param obj: the value to serialize.
    :param utf8: when True (default) return UTF-8 encoded bytes,
        otherwise return a str.
    """
    text = JSONEncoder(sort_keys=True).encode(obj)
    return text.encode() if utf8 else text
469 |
def serialize(obj, utf8=True):
    """
    Serialize ``obj`` with this module's ``JSONEncoder`` without sorting
    keys (non-canonical serialization).

    :param obj: the value to serialize.
    :param utf8: when True (default) return UTF-8 encoded bytes,
        otherwise return a str.
    """
    text = JSONEncoder(sort_keys=False).encode(obj)
    return text.encode() if utf8 else text
475 |
--------------------------------------------------------------------------------
/lib/c14n/LICENSE:
--------------------------------------------------------------------------------
1 | A. HISTORY OF THE SOFTWARE
2 | ==========================
3 |
4 | Python was created in the early 1990s by Guido van Rossum at Stichting
5 | Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
6 | as a successor of a language called ABC. Guido remains Python's
7 | principal author, although it includes many contributions from others.
8 |
9 | In 1995, Guido continued his work on Python at the Corporation for
10 | National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
11 | in Reston, Virginia where he released several versions of the
12 | software.
13 |
14 | In May 2000, Guido and the Python core development team moved to
15 | BeOpen.com to form the BeOpen PythonLabs team. In October of the same
16 | year, the PythonLabs team moved to Digital Creations, which became
17 | Zope Corporation. In 2001, the Python Software Foundation (PSF, see
18 | https://www.python.org/psf/) was formed, a non-profit organization
19 | created specifically to own Python-related Intellectual Property.
20 | Zope Corporation was a sponsoring member of the PSF.
21 |
22 | All Python releases are Open Source (see http://www.opensource.org for
23 | the Open Source Definition). Historically, most, but not all, Python
24 | releases have also been GPL-compatible; the table below summarizes
25 | the various releases.
26 |
27 | Release Derived Year Owner GPL-
28 | from compatible? (1)
29 |
30 | 0.9.0 thru 1.2 1991-1995 CWI yes
31 | 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
32 | 1.6 1.5.2 2000 CNRI no
33 | 2.0 1.6 2000 BeOpen.com no
34 | 1.6.1 1.6 2001 CNRI yes (2)
35 | 2.1 2.0+1.6.1 2001 PSF no
36 | 2.0.1 2.0+1.6.1 2001 PSF yes
37 | 2.1.1 2.1+2.0.1 2001 PSF yes
38 | 2.1.2 2.1.1 2002 PSF yes
39 | 2.1.3 2.1.2 2002 PSF yes
40 | 2.2 and above 2.1.1 2001-now PSF yes
41 |
42 | Footnotes:
43 |
44 | (1) GPL-compatible doesn't mean that we're distributing Python under
45 | the GPL. All Python licenses, unlike the GPL, let you distribute
46 | a modified version without making your changes open source. The
47 | GPL-compatible licenses make it possible to combine Python with
48 | other software that is released under the GPL; the others don't.
49 |
50 | (2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
51 | because its license has a choice of law clause. According to
52 | CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
53 | is "not incompatible" with the GPL.
54 |
55 | Thanks to the many outside volunteers who have worked under Guido's
56 | direction to make these releases possible.
57 |
58 |
59 | B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
60 | ===============================================================
61 |
62 | PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
63 | --------------------------------------------
64 |
65 | 1. This LICENSE AGREEMENT is between the Python Software Foundation
66 | ("PSF"), and the Individual or Organization ("Licensee") accessing and
67 | otherwise using this software ("Python") in source or binary form and
68 | its associated documentation.
69 |
70 | 2. Subject to the terms and conditions of this License Agreement, PSF hereby
71 | grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
72 | analyze, test, perform and/or display publicly, prepare derivative works,
73 | distribute, and otherwise use Python alone or in any derivative version,
74 | provided, however, that PSF's License Agreement and PSF's notice of copyright,
75 | i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
76 | 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018 Python Software Foundation; All
77 | Rights Reserved" are retained in Python alone or in any derivative version
78 | prepared by Licensee.
79 |
80 | 3. In the event Licensee prepares a derivative work that is based on
81 | or incorporates Python or any part thereof, and wants to make
82 | the derivative work available to others as provided herein, then
83 | Licensee hereby agrees to include in any such work a brief summary of
84 | the changes made to Python.
85 |
86 | 4. PSF is making Python available to Licensee on an "AS IS"
87 | basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
88 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
89 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
90 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
91 | INFRINGE ANY THIRD PARTY RIGHTS.
92 |
93 | 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
94 | FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
95 | A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
96 | OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
97 |
98 | 6. This License Agreement will automatically terminate upon a material
99 | breach of its terms and conditions.
100 |
101 | 7. Nothing in this License Agreement shall be deemed to create any
102 | relationship of agency, partnership, or joint venture between PSF and
103 | Licensee. This License Agreement does not grant permission to use PSF
104 | trademarks or trade name in a trademark sense to endorse or promote
105 | products or services of Licensee, or any third party.
106 |
107 | 8. By copying, installing or otherwise using Python, Licensee
108 | agrees to be bound by the terms and conditions of this License
109 | Agreement.
110 |
111 |
112 | BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
113 | -------------------------------------------
114 |
115 | BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
116 |
117 | 1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
118 | office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
119 | Individual or Organization ("Licensee") accessing and otherwise using
120 | this software in source or binary form and its associated
121 | documentation ("the Software").
122 |
123 | 2. Subject to the terms and conditions of this BeOpen Python License
124 | Agreement, BeOpen hereby grants Licensee a non-exclusive,
125 | royalty-free, world-wide license to reproduce, analyze, test, perform
126 | and/or display publicly, prepare derivative works, distribute, and
127 | otherwise use the Software alone or in any derivative version,
128 | provided, however, that the BeOpen Python License is retained in the
129 | Software, alone or in any derivative version prepared by Licensee.
130 |
131 | 3. BeOpen is making the Software available to Licensee on an "AS IS"
132 | basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
133 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
134 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
135 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
136 | INFRINGE ANY THIRD PARTY RIGHTS.
137 |
138 | 4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
139 | SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
140 | AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
141 | DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
142 |
143 | 5. This License Agreement will automatically terminate upon a material
144 | breach of its terms and conditions.
145 |
146 | 6. This License Agreement shall be governed by and interpreted in all
147 | respects by the law of the State of California, excluding conflict of
148 | law provisions. Nothing in this License Agreement shall be deemed to
149 | create any relationship of agency, partnership, or joint venture
150 | between BeOpen and Licensee. This License Agreement does not grant
151 | permission to use BeOpen trademarks or trade names in a trademark
152 | sense to endorse or promote products or services of Licensee, or any
153 | third party. As an exception, the "BeOpen Python" logos available at
154 | http://www.pythonlabs.com/logos.html may be used according to the
155 | permissions granted on that web page.
156 |
157 | 7. By copying, installing or otherwise using the software, Licensee
158 | agrees to be bound by the terms and conditions of this License
159 | Agreement.
160 |
161 |
162 | CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
163 | ---------------------------------------
164 |
165 | 1. This LICENSE AGREEMENT is between the Corporation for National
166 | Research Initiatives, having an office at 1895 Preston White Drive,
167 | Reston, VA 20191 ("CNRI"), and the Individual or Organization
168 | ("Licensee") accessing and otherwise using Python 1.6.1 software in
169 | source or binary form and its associated documentation.
170 |
171 | 2. Subject to the terms and conditions of this License Agreement, CNRI
172 | hereby grants Licensee a nonexclusive, royalty-free, world-wide
173 | license to reproduce, analyze, test, perform and/or display publicly,
174 | prepare derivative works, distribute, and otherwise use Python 1.6.1
175 | alone or in any derivative version, provided, however, that CNRI's
176 | License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
177 | 1995-2001 Corporation for National Research Initiatives; All Rights
178 | Reserved" are retained in Python 1.6.1 alone or in any derivative
179 | version prepared by Licensee. Alternately, in lieu of CNRI's License
180 | Agreement, Licensee may substitute the following text (omitting the
181 | quotes): "Python 1.6.1 is made available subject to the terms and
182 | conditions in CNRI's License Agreement. This Agreement together with
183 | Python 1.6.1 may be located on the Internet using the following
184 | unique, persistent identifier (known as a handle): 1895.22/1013. This
185 | Agreement may also be obtained from a proxy server on the Internet
186 | using the following URL: http://hdl.handle.net/1895.22/1013".
187 |
188 | 3. In the event Licensee prepares a derivative work that is based on
189 | or incorporates Python 1.6.1 or any part thereof, and wants to make
190 | the derivative work available to others as provided herein, then
191 | Licensee hereby agrees to include in any such work a brief summary of
192 | the changes made to Python 1.6.1.
193 |
194 | 4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
195 | basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
196 | IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
197 | DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
198 | FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
199 | INFRINGE ANY THIRD PARTY RIGHTS.
200 |
201 | 5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
202 | 1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
203 | A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
204 | OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
205 |
206 | 6. This License Agreement will automatically terminate upon a material
207 | breach of its terms and conditions.
208 |
209 | 7. This License Agreement shall be governed by the federal
210 | intellectual property law of the United States, including without
211 | limitation the federal copyright law, and, to the extent such
212 | U.S. federal law does not apply, by the law of the Commonwealth of
213 | Virginia, excluding Virginia's conflict of law provisions.
214 | Notwithstanding the foregoing, with regard to derivative works based
215 | on Python 1.6.1 that incorporate non-separable material that was
216 | previously distributed under the GNU General Public License (GPL), the
217 | law of the Commonwealth of Virginia shall govern this License
218 | Agreement only as to issues arising under or with respect to
219 | Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
220 | License Agreement shall be deemed to create any relationship of
221 | agency, partnership, or joint venture between CNRI and Licensee. This
222 | License Agreement does not grant permission to use CNRI trademarks or
223 | trade name in a trademark sense to endorse or promote products or
224 | services of Licensee, or any third party.
225 |
226 | 8. By clicking on the "ACCEPT" button where indicated, or by copying,
227 | installing or otherwise using Python 1.6.1, Licensee agrees to be
228 | bound by the terms and conditions of this License Agreement.
229 |
230 | ACCEPT
231 |
232 |
233 | CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
234 | --------------------------------------------------
235 |
236 | Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
237 | The Netherlands. All rights reserved.
238 |
239 | Permission to use, copy, modify, and distribute this software and its
240 | documentation for any purpose and without fee is hereby granted,
241 | provided that the above copyright notice appear in all copies and that
242 | both that copyright notice and this permission notice appear in
243 | supporting documentation, and that the name of Stichting Mathematisch
244 | Centrum or CWI not be used in advertising or publicity pertaining to
245 | distribution of the software without specific, written prior
246 | permission.
247 |
248 | STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
249 | THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
250 | FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
251 | FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
252 | WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
253 | ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
254 | OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
255 |
--------------------------------------------------------------------------------
/lib/c14n/NumberToJson.py:
--------------------------------------------------------------------------------
1 | ##############################################################################
2 | # #
3 | # Copyright 2006-2019 WebPKI.org (http://webpki.org). #
4 | # #
5 | # Licensed under the Apache License, Version 2.0 (the "License"); #
6 | # you may not use this file except in compliance with the License. #
7 | # You may obtain a copy of the License at #
8 | # #
9 | # https://www.apache.org/licenses/LICENSE-2.0 #
10 | # #
11 | # Unless required by applicable law or agreed to in writing, software #
12 | # distributed under the License is distributed on an "AS IS" BASIS, #
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
14 | # See the License for the specific language governing permissions and #
15 | # limitations under the License. #
16 | # #
17 | ##############################################################################
18 |
19 |
20 | ##################################################################
21 | # Convert a Python double/float into an ES6/V8 compatible string #
22 | ##################################################################
def convert2Es6Format(value):
    """
    Convert a Python number into the string an ES6/V8 JavaScript engine
    would produce for it (the ``Number::toString`` algorithm).

    :param value: an int or float (ints are first coerced to float, so
        values outside IEEE-754 exact range lose precision, as in JS).

    :return: the ES6-compatible decimal string.

    :raises ValueError: if the value is NaN or +/-Infinity, which are not
        valid JSON numbers.
    """
    fvalue = float(value)
    # Zero is a special case; this also normalizes -0.0 to '0'.
    if fvalue == 0:
        return '0'
    # The rest of the algorithm works on the textual representation only.
    text = str(fvalue)
    # str() yields 'inf', '-inf' or 'nan' for non-finite values; all of
    # them contain the letter 'n'.
    if 'n' in text:
        raise ValueError("Invalid JSON number: " + text)
    # Save the sign separately; it plays no role in the algorithm.
    sign = ''
    if text.startswith('-'):
        sign = '-'
        text = text[1:]
    # Split off the exponent part ('e+NN' / 'e-NN'), suppressing a leading
    # zero digit in it ('e-06' -> 'e-6') to match ES6 output.
    exp_str = ''
    exp_val = 0
    e_pos = text.find('e')
    if e_pos > 0:
        exp_str = text[e_pos:]
        if exp_str[2:3] == '0':
            exp_str = exp_str[:2] + exp_str[3:]
        text = text[:e_pos]
        exp_val = int(exp_str[1:])
    # Split the mantissa into: first + dot + last.
    first, dot, last = text.partition('.')
    # Always remove a trailing '.0' (Python prints 5.0 where ES6 prints 5).
    if last == '0':
        dot = ''
        last = ''
    if 0 < exp_val < 21:
        # Integers are shown in full with up to 21 digits.
        first += last
        first += '0' * max(0, exp_val - len(first) + 1)
        dot = ''
        last = ''
        exp_str = ''
    elif -7 < exp_val < 0:
        # Small magnitudes are shown as 0.000...digits, with 1e-6 as the
        # lower limit before ES6 switches to exponent notation.
        last = '0' * (-exp_val - 1) + first + last
        first = '0'
        dot = '.'
        exp_str = ''
    # Concatenate the resulting sub-strings.
    return sign + first + dot + last + exp_str
113 |
--------------------------------------------------------------------------------
/lib/c14n/__init__.py:
--------------------------------------------------------------------------------
1 | """ JSON Canonicalization. """
2 | from .Canonicalize import canonicalize
3 |
4 | __all__ = ['canonicalize']
5 |
--------------------------------------------------------------------------------
/lib/pyld/__about__.py:
--------------------------------------------------------------------------------
1 | # PyLD JSON-LD meta data
2 |
3 | __all__ = [
4 | '__copyright__', '__license__', '__version__'
5 | ]
6 |
7 | __copyright__ = 'Copyright (c) 2011-2024 Digital Bazaar, Inc.'
8 | __license__ = 'New BSD license'
9 | __version__ = '2.0.5-dev'
10 |
--------------------------------------------------------------------------------
/lib/pyld/__init__.py:
--------------------------------------------------------------------------------
1 | """ The PyLD module is used to process JSON-LD. """
2 | from . import jsonld
3 | from .context_resolver import ContextResolver
4 |
5 | __all__ = ['jsonld', 'ContextResolver']
6 |
--------------------------------------------------------------------------------
/lib/pyld/context_resolver.py:
--------------------------------------------------------------------------------
1 | """
2 | Context Resolver for managing remote contexts.
3 |
4 | .. module:: context_resolver
5 | :synopsis: Creates a ContextResolver
6 |
7 | .. moduleauthor:: Dave Longley
8 | .. moduleauthor:: Gregg Kellogg
9 | """
10 |
11 | from frozendict import frozendict
12 | from c14n.Canonicalize import canonicalize
13 | from pyld import jsonld
14 | from .resolved_context import ResolvedContext
15 |
16 | MAX_CONTEXT_URLS = 10
17 |
class ContextResolver:
    """
    Resolves and caches remote contexts.
    """
    def __init__(self, shared_cache, document_loader):
        """
        Creates a ContextResolver.

        :param shared_cache: a cache shared across processing operations,
            mapping context keys to tag maps of resolved contexts.
        :param document_loader: the loader used to fetch remote contexts.
        """
        # cache of contexts resolved during a single processing operation
        self.per_op_cache = {}
        self.shared_cache = shared_cache
        self.document_loader = document_loader

    def resolve(self, active_ctx, context, base, cycles=None):
        """
        Resolve a context.

        :param active_ctx: the current active context.
        :param context: the context to resolve.
        :param base: the absolute URL to use for making url absolute.
        :param cycles: the set of context URLs already fetched during this
            resolve operation, used for cycle detection (default: empty set).

        :return: a list of ResolvedContext instances.
        """
        if cycles is None:
            cycles = set()

        # process `@context`
        if isinstance(context, (dict, frozendict)) and '@context' in context:
            context = context['@context']

        # context is one or more contexts
        if not isinstance(context, list):
            context = [context]

        # resolve each context in the array
        all_resolved = []
        for ctx in context:
            if isinstance(ctx, str):
                resolved = self._get(ctx)
                if not resolved:
                    resolved = self._resolve_remote_context(
                        active_ctx, ctx, base, cycles)

                # add to output and continue
                if isinstance(resolved, list):
                    all_resolved.extend(resolved)
                else:
                    all_resolved.append(resolved)
            elif ctx is None or ctx is False:
                # `null`/`false` resets the active context
                all_resolved.append(ResolvedContext(False))
            elif not isinstance(ctx, (dict, frozendict)):
                raise jsonld.JsonLdError(
                    'Invalid JSON-LD syntax; @context must be an object.',
                    'jsonld.SyntaxError', {'context': ctx},
                    code='invalid local context')
            else:
                # context is an object: get/create a `ResolvedContext` for
                # it, keyed by its canonical JSON serialization
                key = canonicalize(dict(ctx)).decode('UTF-8')
                resolved = self._get(key)
                if not resolved:
                    # create a new static `ResolvedContext` and cache it
                    resolved = ResolvedContext(ctx)
                    self._cache_resolved_context(key, resolved, 'static')
                all_resolved.append(resolved)

        return all_resolved

    def _get(self, key):
        """
        Look up a cached resolved context, preferring the per-operation
        cache and falling back to the 'static' tag of the shared cache
        (promoting a shared hit into the per-operation cache).

        :param key: a context URL or canonicalized inline context.

        :return: the cached resolved context, or None when absent.
        """
        resolved = self.per_op_cache.get(key)
        if not resolved:
            tag_map = self.shared_cache.get(key)
            if tag_map:
                resolved = tag_map.get('static')
                if resolved:
                    self.per_op_cache[key] = resolved
        return resolved

    def _cache_resolved_context(self, key, resolved, tag):
        """
        Cache a resolved context in the per-operation cache and, when `tag`
        is truthy, in the shared cache under that tag.

        :param key: a context URL or canonicalized inline context.
        :param resolved: the resolved context to cache.
        :param tag: the shared-cache tag (e.g. 'static'), or a falsy value
            to cache only for this operation.

        :return: `resolved`, for call chaining.
        """
        self.per_op_cache[key] = resolved
        if tag:
            tag_map = self.shared_cache.get(key)
            if not tag_map:
                tag_map = {}
                self.shared_cache[key] = tag_map
            tag_map[tag] = resolved
        return resolved

    def _resolve_remote_context(self, active_ctx, url, base, cycles):
        """
        Fetch, resolve, and cache the context at a (possibly relative) URL.

        :param active_ctx: the current active context.
        :param url: the context URL, resolved against `base`.
        :param base: the base IRI for making the URL absolute.
        :param cycles: the set of context URLs fetched so far.

        :return: the resolved context (a list of ResolvedContext).
        """
        # resolve relative URL and fetch context
        url = jsonld.prepend_base(base, url)
        context, remote_doc = self._fetch_context(active_ctx, url, cycles)

        # update base according to remote document and resolve any relative URLs
        base = remote_doc.get('documentUrl', url)
        self._resolve_context_urls(context, base)

        # resolve, cache, and return context
        resolved = self.resolve(active_ctx, context, base, cycles)
        self._cache_resolved_context(url, resolved, remote_doc.get('tag'))
        return resolved

    def _fetch_context(self, active_ctx, url, cycles):
        """
        Dereference a context URL, validating the result and enforcing the
        fetch limit and cycle detection.

        :return: a (context, remote_doc) tuple.

        :raises jsonld.JsonLdError: on overflow, cycles, load failures, or
            non-object remote contexts.
        """
        # check for max context URLs fetched during a resolve operation
        if len(cycles) > MAX_CONTEXT_URLS:
            raise jsonld.JsonLdError(
                'Maximum number of @context URLs exceeded.',
                'jsonld.ContextUrlError', {'max': MAX_CONTEXT_URLS},
                code=('loading remote context failed'
                      if active_ctx.get('processingMode') == 'json-ld-1.0'
                      else 'context overflow'))

        # check for context URL cycle
        # shortcut to avoid extra work that would eventually hit the max above
        if url in cycles:
            raise jsonld.JsonLdError(
                'Cyclical @context URLs detected.',
                'jsonld.ContextUrlError', {'url': url},
                code=('recursive context inclusion'
                      if active_ctx.get('processingMode') == 'json-ld-1.0'
                      else 'context overflow'))

        # track cycles
        cycles.add(url)

        try:
            remote_doc = jsonld.load_document(url,
                {'documentLoader': self.document_loader},
                requestProfile='http://www.w3.org/ns/json-ld#context')
            context = remote_doc.get('document', url)
        except Exception as cause:
            raise jsonld.JsonLdError(
                'Dereferencing a URL did not result in a valid JSON-LD object. ' +
                'Possible causes are an inaccessible URL perhaps due to ' +
                'a same-origin policy (ensure the server uses CORS if you are ' +
                'using client-side JavaScript), too many redirects, a ' +
                'non-JSON response, or more than one HTTP Link Header was ' +
                'provided for a remote context.',
                'jsonld.InvalidUrl',
                {'url': url, 'cause': cause},
                code='loading remote context failed')

        # ensure ctx is an object
        if not isinstance(context, (dict, frozendict)):
            raise jsonld.JsonLdError(
                'Dereferencing a URL did not result in a JSON object. The ' +
                'response was valid JSON, but it was not a JSON object.',
                'jsonld.InvalidUrl',
                {'url': url},
                code='invalid remote context')

        # use empty context if no @context key is present
        if '@context' not in context:
            context = {'@context': {}}
        else:
            context = {'@context': context['@context']}

        # append @context URL to context if given
        if remote_doc['contextUrl']:
            if not isinstance(context['@context'], list):
                context['@context'] = [context['@context']]
            context['@context'].append(remote_doc['contextUrl'])

        return (context, remote_doc)


    def _resolve_context_urls(self, context, base):
        """
        Resolve all relative `@context` URLs in the given context by inline
        replacing them with absolute URLs.

        :param context: the context.
        :param base: the base IRI to use to resolve relative IRIs.
        """
        if not isinstance(context, (dict, frozendict)):
            return

        ctx = context.get('@context')

        if isinstance(ctx, str):
            context['@context'] = jsonld.prepend_base(base, ctx)
            return

        if isinstance(ctx, list):
            for num, element in enumerate(ctx):
                if isinstance(element, str):
                    ctx[num] = jsonld.prepend_base(base, element)
                elif isinstance(element, (dict, frozendict)):
                    self._resolve_context_urls({'@context': element}, base)
            return

        if not isinstance(ctx, (dict, frozendict)):
            # no @context URLs can be found in non-object
            return

        # ctx is an object: recurse into each term definition, which may
        # itself carry an embedded `@context`
        for definition in ctx.values():
            self._resolve_context_urls(definition, base)
216 |
--------------------------------------------------------------------------------
/lib/pyld/documentloader/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/digitalbazaar/pyld/2c6b0a65bee700b42c8d0806364f4fc4ebddcc52/lib/pyld/documentloader/__init__.py
--------------------------------------------------------------------------------
/lib/pyld/documentloader/aiohttp.py:
--------------------------------------------------------------------------------
1 | """
2 | Remote document loader using aiohttp.
3 |
4 | .. module:: jsonld.documentloader.aiohttp
5 | :synopsis: Remote document loader using aiohttp
6 |
7 | .. moduleauthor:: Olaf Conradi
8 | """
9 |
10 | import string
11 | import urllib.parse as urllib_parse
12 |
13 | from pyld.jsonld import (JsonLdError, parse_link_header, LINK_HEADER_REL)
14 |
15 |
def aiohttp_document_loader(loop=None, secure=False, **kwargs):
    """
    Create an Asynchronous document loader using aiohttp.

    :param loop: the event loop used for processing HTTP requests.
    :param secure: require all requests to use HTTPS (default: False).
    :param **kwargs: extra keyword args for the aiohttp request get() call.

    :return: the RemoteDocument loader function.
    """
    import asyncio
    # FIX: `re` and `jsonld` were previously referenced below without being
    # imported anywhere in this module, raising NameError whenever a
    # response carried an 'alternate' JSON-LD link header.
    import re

    import aiohttp

    from pyld import jsonld

    if loop is None:
        loop = asyncio.get_event_loop()

    async def async_loader(url, headers):
        """
        Retrieves JSON-LD at the given URL asynchronously.

        :param url: the URL to retrieve.
        :param headers: the HTTP request headers to send.

        :return: the RemoteDocument.
        """
        try:
            # validate URL: http(s) scheme and a hostname restricted to
            # ASCII letters, digits, '-', '.' and ':'
            pieces = urllib_parse.urlparse(url)
            if (not all([pieces.scheme, pieces.netloc]) or
                    pieces.scheme not in ['http', 'https'] or
                    set(pieces.netloc) > set(
                        string.ascii_letters + string.digits + '-.:')):
                raise JsonLdError(
                    'URL could not be dereferenced; '
                    'only "http" and "https" URLs are supported.',
                    'jsonld.InvalidUrl', {'url': url},
                    code='loading document failed')
            if secure and pieces.scheme != 'https':
                raise JsonLdError(
                    'URL could not be dereferenced; '
                    'secure mode enabled and '
                    'the URL\'s scheme is not "https".',
                    'jsonld.InvalidUrl', {'url': url},
                    code='loading document failed')
            async with aiohttp.ClientSession(loop=loop) as session:
                async with session.get(url,
                                       headers=headers,
                                       **kwargs) as response:
                    # Allow any content_type in trying to parse json
                    # similar to requests library
                    json_body = await response.json(content_type=None)
                    content_type = response.headers.get('content-type')
                    if not content_type:
                        content_type = 'application/octet-stream'
                    doc = {
                        'contentType': content_type,
                        'contextUrl': None,
                        'documentUrl': response.url.human_repr(),
                        'document': json_body
                    }
                    link_header = response.headers.get('link')
                    if link_header:
                        linked_context = parse_link_header(link_header).get(
                            LINK_HEADER_REL)
                        # only 1 related link header permitted
                        if linked_context and content_type != 'application/ld+json':
                            if isinstance(linked_context, list):
                                raise JsonLdError(
                                    'URL could not be dereferenced, '
                                    'it has more than one '
                                    'associated HTTP Link Header.',
                                    'jsonld.LoadDocumentError',
                                    {'url': url},
                                    code='multiple context link headers')
                            doc['contextUrl'] = linked_context['target']
                        linked_alternate = parse_link_header(link_header).get('alternate')
                        # if not JSON-LD, alternate may point there
                        if (linked_alternate and
                                linked_alternate.get('type') == 'application/ld+json' and
                                not re.match(r'^application\/(\w*\+)?json$', content_type)):
                            doc['contentType'] = 'application/ld+json'
                            doc['documentUrl'] = jsonld.prepend_base(url, linked_alternate['target'])

                    return doc
        except JsonLdError as e:
            raise e
        except Exception as cause:
            raise JsonLdError(
                'Could not retrieve a JSON-LD document from the URL.',
                'jsonld.LoadDocumentError', code='loading document failed',
                cause=cause)

    def loader(url, options=None):
        """
        Retrieves JSON-LD at the given URL.

        :param url: the URL to retrieve.
        :param options: options dict; may supply custom request 'headers'
            (default: None, which requests JSON-LD/JSON).

        :return: the RemoteDocument.
        """
        # avoid the shared-mutable-default pitfall of `options={}`
        if options is None:
            options = {}
        return loop.run_until_complete(
            async_loader(url,
                options.get('headers', {'Accept': 'application/ld+json, application/json'})))

    return loader
120 |
--------------------------------------------------------------------------------
/lib/pyld/documentloader/requests.py:
--------------------------------------------------------------------------------
1 | """
2 | Remote document loader using Requests.
3 |
4 | .. module:: jsonld.documentloader.requests
5 | :synopsis: Remote document loader using Requests
6 |
7 | .. moduleauthor:: Dave Longley
8 | .. moduleauthor:: Mike Johnson
9 | .. moduleauthor:: Tim McNamara
10 | .. moduleauthor:: Olaf Conradi
11 | """
12 | import string
13 | import urllib.parse as urllib_parse
14 |
15 | from pyld.jsonld import (JsonLdError, parse_link_header, LINK_HEADER_REL)
16 |
17 |
def requests_document_loader(secure=False, **kwargs):
    """
    Create a Requests document loader.

    Can be used to setup extra Requests args such as verify, cert, timeout,
    or others.

    :param secure: require all requests to use HTTPS (default: False).
    :param **kwargs: extra keyword args for Requests get() call.

    :return: the RemoteDocument loader function.
    """
    # function-scope imports: `re` and `prepend_base` were previously used
    # below without being imported anywhere in this module, raising a
    # NameError whenever a response carried an "alternate" link header
    import re

    import requests

    from pyld.jsonld import prepend_base

    def loader(url, options=None):
        """
        Retrieves JSON-LD at the given URL.

        :param url: the URL to retrieve.
        :param options: extra options; a 'headers' dict overrides the
            default Accept header.

        :return: the RemoteDocument.
        """
        # avoid a shared mutable default argument
        if options is None:
            options = {}
        try:
            # validate URL: require an http(s) scheme and restrict the host
            # to a conservative character set
            pieces = urllib_parse.urlparse(url)
            if (not all([pieces.scheme, pieces.netloc]) or
                    pieces.scheme not in ['http', 'https'] or
                    set(pieces.netloc) > set(
                        string.ascii_letters + string.digits + '-.:')):
                raise JsonLdError(
                    'URL could not be dereferenced; only "http" and "https" '
                    'URLs are supported.',
                    'jsonld.InvalidUrl', {'url': url},
                    code='loading document failed')
            if secure and pieces.scheme != 'https':
                raise JsonLdError(
                    'URL could not be dereferenced; secure mode enabled and '
                    'the URL\'s scheme is not "https".',
                    'jsonld.InvalidUrl', {'url': url},
                    code='loading document failed')
            headers = options.get('headers')
            if headers is None:
                headers = {
                    'Accept': 'application/ld+json, application/json'
                }
            response = requests.get(url, headers=headers, **kwargs)

            content_type = response.headers.get('content-type')
            if not content_type:
                content_type = 'application/octet-stream'
            doc = {
                'contentType': content_type,
                'contextUrl': None,
                'documentUrl': response.url,
                'document': response.json()
            }
            link_header = response.headers.get('link')
            if link_header:
                linked_context = parse_link_header(link_header).get(
                    LINK_HEADER_REL)
                # only 1 related link header permitted
                if linked_context and content_type != 'application/ld+json':
                    if isinstance(linked_context, list):
                        raise JsonLdError(
                            'URL could not be dereferenced, '
                            'it has more than one '
                            'associated HTTP Link Header.',
                            'jsonld.LoadDocumentError',
                            {'url': url},
                            code='multiple context link headers')
                    doc['contextUrl'] = linked_context['target']
                linked_alternate = parse_link_header(link_header).get(
                    'alternate')
                # if not JSON-LD, alternate may point there
                if (linked_alternate and
                        linked_alternate.get('type') ==
                        'application/ld+json' and
                        not re.match(
                            r'^application\/(\w*\+)?json$', content_type)):
                    doc['contentType'] = 'application/ld+json'
                    doc['documentUrl'] = prepend_base(
                        url, linked_alternate['target'])
            return doc
        except JsonLdError as e:
            # pass JSON-LD specific errors through unchanged
            raise e
        except Exception as cause:
            # wrap transport/parse failures in a JsonLdError
            raise JsonLdError(
                'Could not retrieve a JSON-LD document from the URL.',
                'jsonld.LoadDocumentError', code='loading document failed',
                cause=cause)

    return loader
106 |
--------------------------------------------------------------------------------
/lib/pyld/resolved_context.py:
--------------------------------------------------------------------------------
1 | """
2 | Representation for a resolved Context.
3 |
4 | .. module:: resolved_context
5 | :synopsis: Creates a ContextResolver
6 |
7 | .. moduleauthor:: Dave Longley
8 | .. moduleauthor:: Gregg Kellogg
9 | """
10 |
11 | from cachetools import LRUCache
12 |
13 | MAX_ACTIVE_CONTEXTS = 10
14 |
class ResolvedContext:
    """
    A cached context document, with a cache of processed contexts keyed by
    the referencing active context.
    """
    def __init__(self, document):
        """
        Creates a ResolvedContext that caches contexts processed relative
        to some other active context.
        """
        # the raw (unprocessed) context document
        self.document = document
        # processed-context cache, keyed by the active context's '_uuid'
        self.cache = LRUCache(maxsize=MAX_ACTIVE_CONTEXTS)

    def get_processed(self, active_ctx):
        """
        Returns the processed context cached for the given active context,
        or None when nothing has been cached for it yet.
        """
        key = active_ctx['_uuid']
        return self.cache.get(key)

    def set_processed(self, active_ctx, processed_ctx):
        """
        Caches a processed context for this resolved context relative to
        the given active context.
        """
        key = active_ctx['_uuid']
        self.cache[key] = processed_ctx
39 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | aiohttp; python_version >= '3.5'
3 | lxml
4 | cachetools
5 | frozendict
6 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
PyLD
====

PyLD_ is a Python JSON-LD_ library.

.. _PyLD: https://github.com/digitalbazaar/pyld
.. _JSON-LD: https://json-ld.org/
"""

# use setuptools: distutils.core.setup() silently ignores the
# setuptools-only keywords used below (install_requires, extras_require),
# so the declared dependencies were never actually installed
from setuptools import setup
import os

# get meta data (defines __version__ and friends)
about = {}
with open(os.path.join(
        os.path.dirname(__file__), 'lib', 'pyld', '__about__.py')) as fp:
    exec(fp.read(), about)

with open('README.rst') as fp:
    long_description = fp.read()

setup(
    name='PyLD',
    version=about['__version__'],
    description='Python implementation of the JSON-LD API',
    long_description=long_description,
    author='Digital Bazaar',
    author_email='support@digitalbazaar.com',
    url='https://github.com/digitalbazaar/pyld',
    packages=[
        'c14n',
        'pyld',
        'pyld.documentloader',
    ],
    package_dir={'': 'lib'},
    license='BSD 3-Clause license',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries',
    ],
    install_requires=[
        'cachetools',
        'frozendict',
        'lxml',
    ],
    # optional document loaders (and mirrors of hard requirements)
    extras_require={
        'requests': ['requests'],
        'aiohttp': ['aiohttp'],
        'cachetools': ['cachetools'],
        'frozendict': ['frozendict'],
    }
)
62 |
--------------------------------------------------------------------------------
/tests/runtests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | """
3 | Test runner for JSON-LD.
4 |
5 | .. module:: runtests
6 | :synopsis: Test harness for pyld
7 |
8 | .. moduleauthor:: Dave Longley
9 | .. moduleauthor:: Olaf Conradi
10 | """
11 |
12 | import datetime
13 | import json
14 | import os
15 | import sys
16 | import traceback
17 | import unittest
18 | import re
19 | from argparse import ArgumentParser
20 | from unittest import TextTestResult
21 |
22 | sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'lib'))
23 | from pyld import jsonld
24 |
25 | __copyright__ = 'Copyright (c) 2011-2013 Digital Bazaar, Inc.'
26 | __license__ = 'New BSD license'
27 |
28 | ROOT_MANIFEST_DIR = None
29 | SKIP_TESTS = []
30 | ONLY_IDENTIFIER = None
31 |
32 | LOCAL_BASES = [
33 | 'https://w3c.github.io/json-ld-api/tests',
34 | 'https://w3c.github.io/json-ld-framing/tests',
35 | 'https://github.com/json-ld/normalization/tests'
36 | ]
37 |
class TestRunner(unittest.TextTestRunner):
    """
    Loads test manifests and runs tests.
    """

    def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1):
        unittest.TextTestRunner.__init__(
            self, stream, descriptions, verbosity)

        # command line args (populated by main())
        self.options = {}
        self.parser = ArgumentParser()

    def _makeResult(self):
        # use a result class that also records outcomes in an EARL report
        return EarlTestResult(self.stream, self.descriptions, self.verbosity)

    def main(self):
        """
        Parses command line options, assembles a synthetic root manifest
        from the given test files/directories, runs the resulting suite,
        optionally writes an EARL report, and exits with status 1 if any
        test failed.
        """
        print('PyLD Tests')
        print('Use -h or --help to view options.\n')

        # add program options
        self.parser.add_argument('tests', metavar='TEST', nargs='*',
            help='A manifest or directory to test')
        self.parser.add_argument('-e', '--earl', dest='earl',
            help='The filename to write an EARL report to')
        self.parser.add_argument('-b', '--bail', dest='bail',
            action='store_true', default=False,
            help='Bail out as soon as any test fails')
        self.parser.add_argument('-l', '--loader', dest='loader',
            default='requests',
            help='The remote URL document loader: requests, aiohttp '
            '[default: %(default)s]')
        self.parser.add_argument('-n', '--number', dest='number',
            help='Limit tests to those containing the specified test identifier')
        self.parser.add_argument('-v', '--verbose', dest='verbose',
            action='store_true', default=False,
            help='Print verbose test data')

        # parse command line args
        self.options = self.parser.parse_args()

        # Set a default JSON-LD document loader
        if self.options.loader == 'requests':
            jsonld._default_document_loader = jsonld.requests_document_loader()
        elif self.options.loader == 'aiohttp':
            jsonld._default_document_loader = jsonld.aiohttp_document_loader()

        # config runner
        self.failfast = self.options.bail

        # Global for saving test numbers to focus on
        global ONLY_IDENTIFIER
        if self.options.number:
            ONLY_IDENTIFIER = self.options.number

        if len(self.options.tests):
            # tests given on command line
            test_targets = self.options.tests
        else:
            # default to find known sibling test dirs
            test_targets = []
            sibling_dirs = [
                '../json-ld-api/tests/',
                '../json-ld-framing/tests/',
                '../normalization/tests/',
            ]
            for dir in sibling_dirs:
                if os.path.exists(dir):
                    print('Test dir found', dir)
                    test_targets.append(dir)
                else:
                    print('Test dir not found', dir)

        # ensure a manifest or a directory was specified
        if len(test_targets) == 0:
            raise Exception('No test manifest or directory specified.')

        # make root manifest with target files and dirs
        root_manifest = {
            '@context': 'https://w3c.github.io/tests/context.jsonld',
            '@id': '',
            '@type': 'mf:Manifest',
            'description': 'Top level PyLD test manifest',
            'name': 'PyLD',
            'sequence': [],
            'filename': '/'
        }
        # each target is either a manifest file or a directory holding one
        for test in test_targets:
            if os.path.isfile(test):
                root, ext = os.path.splitext(test)
                if ext in ['.json', '.jsonld']:
                    root_manifest['sequence'].append(os.path.abspath(test))
                    #root_manifest['sequence'].append(test)
                else:
                    raise Exception('Unknown test file ext', root, ext)
            elif os.path.isdir(test):
                filename = os.path.join(test, 'manifest.jsonld')
                if os.path.exists(filename):
                    root_manifest['sequence'].append(os.path.abspath(filename))
                else:
                    raise Exception('Manifest not found', filename)
            else:
                raise Exception('Unknown test target.', test)

        # load root manifest
        global ROOT_MANIFEST_DIR
        #ROOT_MANIFEST_DIR = os.path.dirname(root_manifest['filename'])
        ROOT_MANIFEST_DIR = root_manifest['filename']
        suite = Manifest(root_manifest, root_manifest['filename']).load()

        # run tests
        result = self.run(suite)

        # output earl report if specified
        if self.options.earl:
            filename = os.path.abspath(self.options.earl)
            print('Writing EARL report to: %s' % filename)
            result.writeReport(filename)

        if not result.wasSuccessful():
            exit(1)
159 |
160 |
class Manifest:
    """
    Wraps a JSON-LD test manifest and builds a unittest suite from it.
    """
    def __init__(self, data, filename):
        self.data = data
        self.suite = unittest.TestSuite()
        self.filename = filename
        self.dirname = os.path.dirname(self.filename)

    def load(self):
        """
        Builds and returns the TestSuite for this manifest, recursing into
        nested manifests and honoring the ONLY_IDENTIFIER focus filter.
        """
        # 'entries' and 'sequence' are aliases; 'include' names sibling
        # manifests by basename (add the .jsonld extension)
        entries = list(get_jsonld_values(self.data, 'entries'))
        entries += get_jsonld_values(self.data, 'sequence')
        entries += [name + '.jsonld'
                    for name in get_jsonld_values(self.data, 'include')]

        for entry in entries:
            if isinstance(entry, str):
                # string entries name JSON files relative to this manifest
                entry_filename = os.path.join(self.dirname, entry)
                entry = read_json(entry_filename)
            else:
                entry_filename = self.filename

            if is_jsonld_type(entry, 'mf:Manifest'):
                # nested manifest: merge its suite into ours
                nested = Manifest(entry, entry_filename).load()
                self.suite = unittest.TestSuite([self.suite, nested])
            elif not ONLY_IDENTIFIER or ONLY_IDENTIFIER in entry['@id']:
                # plain test entry, unless excluded by the focus filter
                self.suite.addTest(Test(self, entry, entry_filename))

        return self.suite
199 |
200 |
class Test(unittest.TestCase):
    """
    A single JSON-LD test case built from a manifest entry.
    """
    def __init__(self, manifest, data, filename):
        unittest.TestCase.__init__(self)
        #self.maxDiff = None
        self.manifest = manifest
        self.data = data
        self.filename = filename
        self.dirname = os.path.dirname(filename)
        # classify the test by its declared JSON-LD test type
        self.is_positive = is_jsonld_type(data, 'jld:PositiveEvaluationTest')
        self.is_negative = is_jsonld_type(data, 'jld:NegativeEvaluationTest')
        self.is_syntax = is_jsonld_type(data, 'jld:PositiveSyntaxTest')
        self.test_type = None
        # pending == expected to fail (set in setUp from TEST_TYPES config)
        self.pending = False
        global TEST_TYPES
        # first matching supported type wins
        for t in TEST_TYPES.keys():
            if is_jsonld_type(data, t):
                self.test_type = t
                break

    def __str__(self):
        # "<manifest name>: <test id>: <test label>" for runner output
        manifest = self.manifest.data.get(
            'name', self.manifest.data.get('label'))
        test_id = self.data.get('id', self.data.get('@id'))
        label = self.data.get(
            'purpose', self.data.get('name', self.data.get('label')))

        return ('%s: %s: %s' % (manifest, test_id, label))

    def _get_expect_property(self):
        '''Find the expected output property or raise error.'''
        if 'expect' in self.data:
            return 'expect'
        elif 'result' in self.data:
            return 'result'
        else:
            raise Exception('No expected output property found')

    def _get_expect_error_code_property(self):
        '''Find the expectErrorCode property.'''
        if 'expectErrorCode' in self.data:
            return 'expectErrorCode'
        else:
            raise Exception('No expectErrorCode property found')

    def setUp(self):
        """
        Applies the skip/pending rules configured in TEST_TYPES and
        expands the test's @id and input base against the manifest baseIri.
        """
        data = self.data
        manifest = self.manifest
        # skip unknown and explicitly skipped test types
        global SKIP_TESTS
        types = []
        types.extend(get_jsonld_values(data, '@type'))
        types.extend(get_jsonld_values(data, 'type'))
        if self.test_type is None or self.test_type in SKIP_TESTS:
            self.skipTest('Test type of %s' % types)

        global TEST_TYPES
        test_info = TEST_TYPES[self.test_type]

        # expand @id and input base
        if 'baseIri' in manifest.data:
            data['@id'] = (
                manifest.data['baseIri'] +
                os.path.basename(str.replace(manifest.filename, '.jsonld', '')) + data['@id'])
            self.base = self.manifest.data['baseIri'] + data['input']

        # skip based on id regular expression
        skip_id_re = test_info.get('skip', {}).get('idRegex', [])
        for regex in skip_id_re:
            if re.match(regex, data.get('@id', data.get('id', ''))):
                self.skipTest('Test with id regex %s' % regex)

        # mark tests as pending, meaning that they are expected to fail
        pending_id_re = test_info.get('pending', {}).get('idRegex', [])
        for regex in pending_id_re:
            if re.match(regex, data.get('@id', data.get('id', ''))):
                self.pending = 'Test with id regex %s' % regex

        # skip based on description regular expression
        skip_description_re = test_info.get('skip', {}).get(
            'descriptionRegex', [])
        for regex in skip_description_re:
            if re.match(regex, data.get('description', '')):
                self.skipTest('Test with description regex %s' % regex)

        # skip based on processingMode
        skip_pm = test_info.get('skip', {}).get('processingMode', [])
        data_pm = data.get('option', {}).get('processingMode', None)
        if data_pm in skip_pm:
            self.skipTest('Test with processingMode %s' % data_pm)

        # skip based on specVersion
        skip_sv = test_info.get('skip', {}).get('specVersion', [])
        data_sv = data.get('option', {}).get('specVersion', None)
        if data_sv in skip_sv:
            self.skipTest('Test with specVersion %s' % data_sv)

        # mark tests to run with local loader
        run_remote_re = test_info.get('runLocal', [])
        for regex in run_remote_re:
            if re.match(regex, data.get('@id', data.get('id', ''))):
                data['runLocal'] = True

    def runTest(self):
        """
        Invokes the configured jsonld API function and checks the result,
        honoring positive/negative/syntax semantics and pending status.
        """
        data = self.data
        global TEST_TYPES
        test_info = TEST_TYPES[self.test_type]
        fn = test_info['fn']
        params = test_info['params']
        # each param entry is a reader function applied to this test
        params = [param(self) for param in params]
        result = None
        if self.is_negative:
            expect = data[self._get_expect_error_code_property()]
        elif self.is_syntax:
            expect = None
        else:
            expect = read_test_property(self._get_expect_property())(self)

        try:
            result = getattr(jsonld, fn)(*params)
            if self.is_negative and not self.pending:
                raise AssertionError('Expected an error; one was not raised')
            if self.is_syntax and not self.pending:
                self.assertTrue(True)
            elif self.test_type == 'jld:ToRDFTest':
                # Test normalized results
                result = jsonld.normalize(result, {
                    'algorithm': 'URGNA2012',
                    'inputFormat': 'application/n-quads',
                    'format': 'application/n-quads'
                })
                expect = jsonld.normalize(expect, {
                    'algorithm': 'URGNA2012',
                    'inputFormat': 'application/n-quads',
                    'format': 'application/n-quads'
                })
                if result == expect:
                    self.assertTrue(True)
                else:
                    print('\nEXPECTED: ', expect)
                    print('ACTUAL: ', result)
                    raise AssertionError('results differ')
            elif not self.is_negative:
                # Perform order-independent equivalence test
                if equalUnordered(result, expect):
                    self.assertTrue(True)
                else:
                    print('\nEXPECTED: ', json.dumps(expect, indent=2))
                    print('ACTUAL: ', json.dumps(result, indent=2))
                    raise AssertionError('results differ')
            else:
                self.assertEqual(result, expect)
            # a pending positive test that passed should be un-pended
            if self.pending and not self.is_negative:
                raise AssertionError('pending positive test passed')
        except AssertionError as e:
            if e.args[0] == 'pending positive test passed':
                print(e)
                raise e
            elif not self.is_negative and not self.pending:
                print('\nEXPECTED: ', json.dumps(expect, indent=2))
                print('ACTUAL: ', json.dumps(result, indent=2))
                raise e
            elif not self.is_negative:
                # pending positive test failed as expected
                print('pending')
            elif self.is_negative and self.pending:
                print('pending')
            else:
                raise e
        except Exception as e:
            if not self.is_negative and not self.pending:
                print('\n')
                traceback.print_exc(file=sys.stdout)
                raise e
            # negative (or pending) test: compare the raised error code
            result = get_jsonld_error_code(e)
            if self.pending and result == expect:
                print('pending negative test passed')
                raise AssertionError('pending negative test passed')
            elif self.pending:
                print('pending')
            else:
                #import pdb; pdb.set_trace()
                self.assertEqual(result, expect)
382 |
def equalUnordered(result, expect):
    """
    Compares two JSON values, treating arrays as order-insensitive.

    Dicts must have the same size and every key/value of ``result`` must
    match in ``expect``; lists must have the same length and each item of
    ``result`` must match some item of ``expect``; any other values are
    compared with plain equality.
    """
    both_lists = isinstance(result, list) and isinstance(expect, list)
    both_dicts = isinstance(result, dict) and isinstance(expect, dict)
    if both_lists:
        if len(result) != len(expect):
            return False
        return all(
            any(equalUnordered(item, candidate) for candidate in expect)
            for item in result)
    if both_dicts:
        if len(result) != len(expect):
            return False
        return all(
            key in expect and equalUnordered(value, expect[key])
            for key, value in result.items())
    return result == expect
393 |
def is_jsonld_type(node, type_):
    """
    Returns True if the JSON-LD node declares any of the given type(s)
    under either '@type' or 'type'.
    """
    wanted = type_ if isinstance(type_, list) else [type_]
    declared = []
    declared.extend(get_jsonld_values(node, '@type'))
    declared.extend(get_jsonld_values(node, 'type'))
    return bool(set(declared) & set(wanted))
400 |
401 |
def get_jsonld_values(node, property):
    """
    Returns node's values for property, normalized to a list.

    A missing property yields []; a single (non-list) value is wrapped.
    """
    if property not in node:
        return []
    value = node[property]
    return value if isinstance(value, list) else [value]
409 |
410 |
def get_jsonld_error_code(err):
    """
    Returns the most specific error code for a JsonLdError, following the
    cause chain; falls back to the error's string form.
    """
    if isinstance(err, jsonld.JsonLdError):
        if err.code:
            return err.code
        if err.cause:
            return get_jsonld_error_code(err.cause)
    return str(err)
418 |
419 |
def read_json(filename):
    """Reads the file at filename and parses it as JSON."""
    with open(filename) as handle:
        return json.load(handle)
423 |
424 |
def read_file(filename):
    """
    Reads a text file and returns its contents as a str.

    The Python 2 fallback branch was dead code in this Python 3-only
    runner; the suite's fixture files are UTF-8, so decode explicitly
    rather than relying on the platform's locale default encoding.
    """
    with open(filename, encoding='utf-8') as f:
        return f.read()
431 |
432 |
def read_test_url(property):
    """
    Returns a reader that yields the test's property value, prefixed with
    the manifest's baseIri when one is declared; None if the property is
    absent.
    """
    def read(test):
        if property not in test.data:
            return None
        value = test.data[property]
        manifest_data = test.manifest.data
        if 'baseIri' in manifest_data:
            return manifest_data['baseIri'] + value
        return value
    return read
442 |
443 |
def read_test_property(property):
    """
    Returns a reader that loads the file referenced by the test's
    property, parsing it as JSON when the filename ends in .jsonld;
    None if the property is absent.
    """
    def read(test):
        if property not in test.data:
            return None
        path = os.path.join(test.dirname, test.data[property])
        # .jsonld fixtures are parsed; everything else is raw text
        loader = read_json if path.endswith('.jsonld') else read_file
        return loader(path)
    return read
454 |
455 |
def create_test_options(opts=None):
    """
    Returns a factory that builds jsonld API options for a test: the
    test's own options minus the HTTP-simulation keys, plus a per-test
    document loader, overlaid with any opts given here.
    """
    def create(test):
        # these keys drive the fake-HTTP behavior of the local loader and
        # must not leak into the jsonld API options
        http_options = ['contentType', 'httpLink', 'httpStatus', 'redirectTo']
        options = {
            k: v for k, v in test.data.get('option', {}).items()
            if k not in http_options}
        options['documentLoader'] = create_document_loader(test)
        options.update(opts or {})
        if 'expandContext' in options:
            filename = os.path.join(test.dirname, options['expandContext'])
            options['expandContext'] = read_json(filename)
        return options
    return create
471 |
472 |
def create_document_loader(test):
    """
    Creates a per-test document loader.

    Test-suite URLs (LOCAL_BASES) are served from the local checkout,
    simulating redirects and Link headers declared in the test's options;
    all other URLs fall through to the default remote loader.
    """
    loader = jsonld.get_document_loader()

    def is_test_suite_url(url):
        # True for URLs under any known remote test-suite base
        return any(url.startswith(base) for base in LOCAL_BASES)

    def strip_base(url):
        # return the path portion of a test-suite URL
        for base in LOCAL_BASES:
            if url.startswith(base):
                return url[len(base):]
        # fixed typo in error message ('unkonwn' -> 'unknown')
        raise Exception('unknown base')

    def strip_fragment(url):
        if '#' in url:
            return url[:url.index('#')]
        else:
            return url

    def load_locally(url):
        """
        Builds a RemoteDocument from a local file, applying the test's
        simulated HTTP options (contentType, redirectTo, httpLink).
        """
        options = test.data.get('option', {})
        content_type = options.get('contentType')

        # infer a content type from the file extension when not declared
        url_no_frag = strip_fragment(url)
        if not content_type and url_no_frag.endswith('.jsonld'):
            content_type = 'application/ld+json'
        if not content_type and url_no_frag.endswith('.json'):
            content_type = 'application/json'
        if not content_type and url_no_frag.endswith('.html'):
            content_type = 'text/html'
        if not content_type:
            content_type = 'application/octet-stream'
        doc = {
            'contentType': content_type,
            'contextUrl': None,
            'documentUrl': url,
            'document': None
        }
        # HTTP simulation only applies to the test's own base document
        if options and url == test.base:
            # NOTE(review): assumes httpStatus accompanies redirectTo;
            # a redirectTo without httpStatus would raise TypeError here
            if ('redirectTo' in options and options.get('httpStatus') >= 300):
                doc['documentUrl'] = (
                    test.manifest.data['baseIri'] + options['redirectTo'])
            elif 'httpLink' in options:
                link_header = options.get('httpLink', '')
                if isinstance(link_header, list):
                    link_header = ','.join(link_header)
                linked_context = jsonld.parse_link_header(
                    link_header).get('http://www.w3.org/ns/json-ld#context')
                # only one context link header is permitted
                if linked_context and content_type != 'application/ld+json':
                    if isinstance(linked_context, list):
                        raise Exception('multiple context link headers')
                    doc['contextUrl'] = linked_context['target']
                linked_alternate = jsonld.parse_link_header(
                    link_header).get('alternate')
                # if not JSON-LD, alternate may point there
                if (linked_alternate and
                        linked_alternate.get('type') == 'application/ld+json' and
                        not re.match(r'^application\/(\w*\+)?json$', content_type)):
                    doc['contentType'] = 'application/ld+json'
                    doc['documentUrl'] = jsonld.prepend_base(url, linked_alternate['target'])
        global ROOT_MANIFEST_DIR
        # a URL with no scheme is relative to the root manifest dir
        if doc['documentUrl'].find(':') == -1:
            filename = os.path.join(ROOT_MANIFEST_DIR, doc['documentUrl'])
            doc['documentUrl'] = 'file://' + filename
        else:
            filename = test.dirname + strip_fragment(strip_base(doc['documentUrl']))
        try:
            doc['document'] = read_file(filename)
        except:
            raise Exception('loading document failed')
        return doc

    def local_loader(url, headers):
        # always load remote-doc tests remotely
        # (some skipped due to lack of reasonable HTTP header support)
        if (test.manifest.data.get('name') == 'Remote document' and
                not test.data.get('runLocal')):
            return loader(url)

        # always load non-base tests remotely
        if not is_test_suite_url(url) and url.find(':') != -1:
            return loader(url)

        # attempt to load locally
        return load_locally(url)

    return local_loader
559 |
560 |
class EarlTestResult(TextTestResult):
    """
    TextTestResult that also records every outcome in an EARL report.
    """
    def __init__(self, stream, descriptions, verbosity):
        super().__init__(stream, descriptions, verbosity)
        self.report = EarlReport()

    def addError(self, test, err):
        super().addError(test, err)
        self.report.add_assertion(test, False)

    def addFailure(self, test, err):
        super().addFailure(test, err)
        self.report.add_assertion(test, False)

    def addSuccess(self, test):
        super().addSuccess(test)
        self.report.add_assertion(test, True)

    def writeReport(self, filename):
        # delegate serialization to the EARL report itself
        self.report.write(filename)
580 |
581 |
class EarlReport():
    """
    Generates an EARL report.
    """

    def __init__(self):
        # read the project version from lib/pyld/__about__.py
        about = {}
        with open(os.path.join(
            os.path.dirname(__file__), '..', 'lib', 'pyld', '__about__.py')) as fp:
            exec(fp.read(), about)
        # single timestamp shared by every assertion in this run
        self.now = datetime.datetime.utcnow().replace(microsecond=0)
        self.report = {
            '@context': {
                'doap': 'http://usefulinc.com/ns/doap#',
                'foaf': 'http://xmlns.com/foaf/0.1/',
                'dc': 'http://purl.org/dc/terms/',
                'earl': 'http://www.w3.org/ns/earl#',
                'xsd': 'http://www.w3.org/2001/XMLSchema#',
                'doap:homepage': {'@type': '@id'},
                'doap:license': {'@type': '@id'},
                'dc:creator': {'@type': '@id'},
                'foaf:homepage': {'@type': '@id'},
                'subjectOf': {'@reverse': 'earl:subject'},
                'earl:assertedBy': {'@type': '@id'},
                'earl:mode': {'@type': '@id'},
                'earl:test': {'@type': '@id'},
                'earl:outcome': {'@type': '@id'},
                'dc:date': {'@type': 'xsd:date'},
                'doap:created': {'@type': 'xsd:date'}
            },
            '@id': 'https://github.com/digitalbazaar/pyld',
            '@type': [
                'doap:Project',
                'earl:TestSubject',
                'earl:Software'
            ],
            'doap:name': 'PyLD',
            'dc:title': 'PyLD',
            'doap:homepage': 'https://github.com/digitalbazaar/pyld',
            'doap:license': 'https://github.com/digitalbazaar/pyld/blob/master/LICENSE',
            'doap:description': 'A JSON-LD processor for Python',
            'doap:programming-language': 'Python',
            'dc:creator': 'https://github.com/dlongley',
            'doap:developer': {
                '@id': 'https://github.com/dlongley',
                '@type': [
                    'foaf:Person',
                    'earl:Assertor'
                ],
                'foaf:name': 'Dave Longley',
                'foaf:homepage': 'https://github.com/dlongley'
            },
            'doap:release': {
                'doap:name': 'PyLD ' + about['__version__'],
                'doap:revision': about['__version__'],
                'doap:created': self.now.strftime('%Y-%m-%d')
            },
            'subjectOf': []
        }

    def add_assertion(self, test, success):
        """
        Appends an earl:Assertion for the given test with a passed or
        failed outcome.

        :return: self, to allow chaining.
        """
        self.report['subjectOf'].append({
            '@type': 'earl:Assertion',
            'earl:assertedBy': self.report['doap:developer']['@id'],
            'earl:mode': 'earl:automatic',
            'earl:test': test.data.get('id', test.data.get('@id')),
            'earl:result': {
                '@type': 'earl:TestResult',
                'dc:date': self.now.isoformat() + 'Z',
                'earl:outcome': 'earl:passed' if success else 'earl:failed'
            }
        })
        return self

    def write(self, filename):
        """
        Writes the report to filename as pretty-printed JSON.
        """
        # the with-statement closes the file; the explicit close() that
        # used to follow inside this block was redundant
        with open(filename, 'w') as f:
            f.write(json.dumps(self.report, indent=2))
660 |
661 |
# supported test types
#
# Maps each test-class IRI used in the W3C manifests to a driver entry:
#   'fn':     name of the pyld API function to invoke for tests of this type
#   'params': callables that build the positional arguments for 'fn' from
#             a manifest test entry
# NOTE(review): the remaining keys are consumed by runner code outside this
# view; from the names they appear to mean — confirm against the runner:
#   'pending':  tests expected to fail (matched by 'idRegex' patterns)
#   'skip':     tests to skip, by 'specVersion' and/or 'idRegex'
#   'runLocal': tests that must be run against local files
TEST_TYPES = {
    'jld:CompactTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'skip': {
            # skip tests where behavior changed for a 1.1 processor
            # see JSON-LD 1.0 Errata
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
            ]
        },
        'fn': 'compact',
        'params': [
            read_test_url('input'),
            read_test_property('context'),
            create_test_options()
        ]
    },
    'jld:ExpandTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'runLocal': [
            '.*remote-doc-manifest#t0003$',
            '.*remote-doc-manifest#t0004$',
            '.*remote-doc-manifest#t0005$',
            '.*remote-doc-manifest#t0006$',
            '.*remote-doc-manifest#t0007$',
            '.*remote-doc-manifest#t0009$',
            '.*remote-doc-manifest#t0010$',
            '.*remote-doc-manifest#t0011$',
            '.*remote-doc-manifest#t0012$',
            '.*remote-doc-manifest#t0013$',
            '.*remote-doc-manifest#tla01$',
            '.*remote-doc-manifest#tla02$',
            '.*remote-doc-manifest#tla03$',
            '.*remote-doc-manifest#tla04$',
            '.*remote-doc-manifest#tla05$',
        ],
        'skip': {
            # skip tests where behavior changed for a 1.1 processor
            # see JSON-LD 1.0 Errata
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
            ]
        },
        'fn': 'expand',
        'params': [
            read_test_url('input'),
            create_test_options()
        ]
    },
    'jld:FlattenTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'skip': {
            # skip tests where behavior changed for a 1.1 processor
            # see JSON-LD 1.0 Errata
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
            ]
        },
        'fn': 'flatten',
        'params': [
            read_test_url('input'),
            read_test_property('context'),
            create_test_options()
        ]
    },
    'jld:FrameTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'skip': {
            # skip tests where behavior changed for a 1.1 processor
            # see JSON-LD 1.0 Errata
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
            ]
        },
        'fn': 'frame',
        'params': [
            read_test_url('input'),
            read_test_property('frame'),
            create_test_options()
        ]
    },
    'jld:FromRDFTest': {
        'skip': {
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
                # direction (compound-literal)
                '.*fromRdf-manifest#tdi11$',
                '.*fromRdf-manifest#tdi12$',
            ]
        },
        'fn': 'from_rdf',
        'params': [
            read_test_property('input'),
            create_test_options({'format': 'application/n-quads'})
        ]
    },
    'jld:NormalizeTest': {
        'skip': {},
        'fn': 'normalize',
        'params': [
            read_test_property('input'),
            create_test_options({'format': 'application/n-quads'})
        ]
    },
    'jld:ToRDFTest': {
        'pending': {
            'idRegex': [
                # blank node property
                '.*toRdf-manifest#te075$',
                '.*toRdf-manifest#te122$',
                # rel vocab
                '.*toRdf-manifest#te111$',
                '.*toRdf-manifest#te112$',
                # number fixes
                '.*toRdf-manifest#trt01$',
                # type:none
                '.*toRdf-manifest#ttn02$',
                # well formed
                '.*toRdf-manifest#twf05$',
                '.*toRdf-manifest#twf06$',
            ]
        },
        'skip': {
            # skip tests where behavior changed for a 1.1 processor
            # see JSON-LD 1.0 Errata
            'specVersion': ['json-ld-1.0'],
            'idRegex': [
                # node object direction
                '.*toRdf-manifest#tdi11$',
                '.*toRdf-manifest#tdi12$',
            ]
        },
        'fn': 'to_rdf',
        'params': [
            read_test_url('input'),
            create_test_options({'format': 'application/n-quads'})
        ]
    },
    'rdfn:Urgna2012EvalTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'skip': {
            'idRegex': [
                '.*manifest-urgna2012#test060$',
            ]
        },
        'fn': 'normalize',
        'params': [
            read_test_property('action'),
            create_test_options({
                'algorithm': 'URGNA2012',
                'inputFormat': 'application/n-quads',
                'format': 'application/n-quads'
            })
        ]
    },
    'rdfn:Urdna2015EvalTest': {
        'pending': {
            'idRegex': [
            ]
        },
        'skip': {
            'idRegex': [
                '.*manifest-urdna2015#test059$',
                '.*manifest-urdna2015#test060$',
            ]
        },
        'fn': 'normalize',
        'params': [
            read_test_property('action'),
            create_test_options({
                'algorithm': 'URDNA2015',
                'inputFormat': 'application/n-quads',
                'format': 'application/n-quads'
            })
        ]
    }
}
855 |
856 |
# Script entry point: run the test suite with verbose output.
if __name__ == '__main__':
    runner = TestRunner(verbosity=2)
    runner.main()
859 |
--------------------------------------------------------------------------------