├── .circleci
└── config.yml
├── .gitignore
├── LICENSE
├── README.md
├── TYPES.md
├── json_syntax
├── __init__.py
├── action_v1.py
├── attrs.py
├── cache.py
├── errors.py
├── extras
│ ├── README.md
│ ├── __init__.py
│ ├── dynamodb.py
│ ├── flags.py
│ └── loose_dates.py
├── helpers.py
├── pattern.py
├── product.py
├── ruleset.py
├── std.py
├── string.py
├── types.py
└── unions.py
├── poetry.lock
├── pyproject.toml
├── setup.cfg
└── tests
├── __init__.py
├── _strategies.py
├── common.py
├── extras
├── __init__.py
├── test_dynamodb.py
├── test_flags.py
└── test_loose_dates.py
├── test_attrs.py
├── test_cache.py
├── test_errors.py
├── test_helpers.py
├── test_std.py
├── test_std_ruleset.py
├── test_types.py
├── test_union.py
├── test_union_prop.py
├── type_strategies.py
├── types_attrs_ann.py
├── types_attrs_common.py
├── types_std_ann.py
└── types_std_noann.py
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 | jobs:
3 | test-34:
4 | docker:
5 | - image: circleci/python:3.4
6 | environment:
7 | &std_env
8 | TERM: xterm
9 | LANG: en_US.UTF-8
10 | PIP_DISABLE_PIP_VERSION_CHECK: 1
11 | working_directory: ~/json-syntax
12 | steps:
13 | &steps34
14 | - checkout
15 | - run:
16 | name: Set up virtualenv
17 | command: |
18 | pip install --user 'poetry>=1'
19 | python -m poetry install
20 |
21 | - run:
22 | name: Run tests
23 | command: |
24 | python -m poetry run pytest tests/
25 |
26 | - store_artifacts: # If a property test fails, this contains the example that failed.
27 | path: ".hypothesis"
28 | destination: ".hypothesis"
29 | test-35:
30 | docker:
31 | - image: circleci/python:3.5
32 | environment: *std_env
33 | steps: *steps34
34 | working_directory: ~/json-syntax
35 | test-36:
36 | docker:
37 | - image: circleci/python:3.6
38 | environment: *std_env
39 | working_directory: ~/json-syntax
40 | steps:
41 | &steps36
42 | - checkout
43 | - run:
44 | name: Set up virtualenv
45 | command: |
46 | pip install --user 'poetry>=1'
47 | python -m poetry install
48 |
49 | - run:
50 | name: Run tests
51 | command: |
52 | python -m poetry run pytest --doctest-modules json_syntax/ tests/
53 |
54 | - store_artifacts: # If a property test fails, this contains the example that failed.
55 | path: ".hypothesis"
56 | destination: ".hypothesis"
57 | test-37:
58 | docker:
59 | - image: circleci/python:3.7
60 | environment: *std_env
61 | steps: *steps36
62 | working_directory: ~/json-syntax
63 | test-38:
64 | docker:
65 | - image: circleci/python:3.8
66 | environment: *std_env
67 | steps: *steps36
68 | working_directory: ~/json-syntax
69 |
70 | workflows:
71 | test:
72 | jobs:
73 | - test-34
74 | - test-35
75 | - test-36
76 | - test-37
77 | - test-38
78 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /*.egg-info
2 | /build/
3 | /dist/
4 | /.python-version
5 | /.coverage
6 | /.hypothesis
7 | /htmlcov
8 | /pip-wheel-metadata
9 | setup.py
10 | requirements.txt
11 | .tox/
12 | README.rst
13 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Ben Samuel
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # United Income at Capital One created this project with the intention of it helping us with JSON parsing. This project has not gained wide adoption. As such, we have stopped providing updates to this project and archived it as of June 28th, 2021.
2 |
3 | # json-syntax
4 |
5 | A Python library to translate between JSON compatible structures and native Python
6 | classes using customizable rules.
7 |
8 | ## Use case
9 |
10 | If you're like the authors, you tried writing an encoding function that attempted to
11 | encode and decode by interrogating the types at runtime, maybe calling some method like
12 | `asdict`. This works fine for generating JSON, but it gets sketchy[1](#f1) when trying to decode the same JSON.
14 |
15 | Further, we have annotations in Python 3! Even if you're not using a type checker, just
16 | labeling the types of fields makes complex data structures far more comprehensible.
17 |
18 | This library is aimed at projects that have a complex JSON schema that they're trying to
19 | structure using libraries like [attrs][].
20 |
21 | * It exploits [gradual typing][] via annotations, [typing][] and [dataclasses][]
22 | * It expects classes to be *statically* described using types
23 | * But a fallback can be provided to handle data described at runtime
24 | * It provides hooks to normalize legacy inputs
25 | * It makes it trivial to extend the library with your own rules
26 | * Actions and Rules are simply functions
27 | * Encoders and decoders can be pickled
28 | * The library has no dependencies of its own on python 3.7+
29 | * It does not read or write JSON
30 |
31 | ### Supported types
32 |
33 | * Atoms including `None`, `bool`, `int`, `float`, `str`.
34 | * Floats may optionally be represented as strings.
35 | * The `decimal.Decimal` class, represented as itself or in string form.
36 | * The `datetime.date` and `datetime.datetime` classes, represented in ISO8601 form.
37 | * Preliminary support for `datetime.timedelta` as ISO8601 time durations.
38 | * Subclasses of `enum.Enum`, represented by the string names.
39 | * Also, a `faux_enums` rule will accept an Enum type if you just use strings in your
40 | code.
41 | * The `typing.Optional[E]` type allows a JSON `null` to be substituted for a value.
42 | * Collections including `typing.List[E]`, `typing.Tuple[E, ...]`, `typing.Set[E]` and
43 | `typing.FrozenSet[E]`.
44 |   * The `...` is [a literal][ellipsis] and indicates a homogeneous tuple, essentially a
45 | frozen list.
46 | * The `typing.Dict[K, V]` type allows a JSON object to represent a homogeneous `dict`.
47 | * Restriction: the keys must be strings, ints, enums or dates.
48 | * **New**: The `typing.TypedDict` type allows a JSON object to represent a `dict` with specific
49 | keys.
50 | * Python classes implemented using `attrs.attrs`, `dataclasses.dataclass` are
51 | represented as JSON dicts and
52 | * Named tuples via `typing.NamedTuple` and heterogeneous tuples via `typing.Tuple`.
53 | * Though, you should consider converting these to `dataclass`.
54 | * The `typing.Union[A, B, C]` rule will recognize alternate types by inspection.
55 |
56 | In addition, `dataclass` and `attrs` classes support hooks to let you completely customize
57 | their JSON representation.
58 |
59 | ### Extras
60 |
61 | These were originally intended as examples for how to use the package, but they're potentially
62 | useful in their own right.
63 |
64 | * [A ruleset][extras ddb] for use with AWS DynamoDB is included with basic facilities.
65 | * Restriction: No general support for `typing.Union`, only `Optional`.
66 | * Restriction: No general support for `Set`, only the special cases that are native to DynamoDB.
67 | * [A `Flag` pseudo-type][extras flag] allows you to use regular strings directly as flags.
68 | * [A rule][extras loose] that will accept a complete `datetime` and return a `date` by truncating the timestamp.
69 |
70 | ## Usage
71 |
72 | This example is also implemented in unit tests. First, let's declare some classes.
73 |
74 | ```python
75 | import json_syntax as syn
76 | from typing import List  # needed for the List['Trans'] annotation below
76 | from dataclasses import dataclass # attrs works too
77 | from decimal import Decimal
78 | from datetime import date
79 | from enum import Enum
80 |
81 | @dataclass
82 | class Account:
83 | user: str
84 | transactions: List['Trans'] # Forward references work!
85 | balance: Decimal = Decimal()
86 |
87 | class TransType(Enum):
88 | withdraw = 0
89 | deposit = 1
90 |
91 | @dataclass
92 | class Trans:
93 | type: TransType
94 | amount: Decimal
95 | stamp: date
96 | ```
97 |
98 | We'll next set up a RuleSet and use it to construct an encoder. The `std_ruleset`
99 | function is a one-liner with some reasonable overrides. Here, we've decided that because
100 | some intermediate services don't reliably retain decimal values, we're going to
101 | represent them in JSON as strings.
102 |
103 | ```python
104 | >>> rules = syn.std_ruleset(decimals=syn.decimals_as_str)
105 | >>> encode_account = rules.python_to_json(typ=Account)
106 | >>> decode_account = rules.json_to_python(typ=Account)
107 | ```
108 |
109 | The RuleSet examines the type and verb, searches its list of Rules, and then uses the
110 | first one that handles that type and verb to produce an Action.
111 |
112 | For example, `attrs_classes` is a Rule that recognizes the verbs `python_to_json` and
113 | `json_to_python` and will accept any class decorated with `@attr.s` or `@dataclass`.
114 |
115 | It will scan the fields and ask the RuleSet how to encode them. So when it sees
116 | `Account.user`, the `atoms` rule will match and report that converting a `str` to JSON
117 | can be accomplished by simply calling `str` on it. The action it returns will literally
118 | be the `str` builtin.
119 |
120 | Thus `attrs_classes` will build a list of attributes on `Account` and actions to convert
121 | them, and constructs an action to represent them.
122 |
123 | ```python
124 | >>> sample_value = Account(
125 | ... 'bob', [
126 | ... Trans(TransType.withdraw, Decimal('523.33'), date(2019, 4, 4))
127 | ... ], Decimal('77.00')
128 | ... )
129 |
130 | >>> encode_account(sample_value)
131 | {
132 | 'user': 'bob',
133 | 'transactions': [
134 | {
135 | 'type': 'withdraw',
136 | 'amount': '523.33',
137 | 'stamp': '2019-04-04'
138 | }
139 | ], 'balance': '77.00'
140 | }
141 | ```
142 |
143 | #### Encoding and decoding
144 |
145 | The aim of all this is to enable reliable usage with your preferred JSON library:
146 |
147 | ```python
148 | with open('myfile.json', 'r') as fh:
149 | my_account = decode_account(json.load(fh))
150 |
151 | with open('myfile.json', 'w') as fh:
152 | json.dump(encode_account(my_account))
153 | ```
154 |
155 | ### Using generic types
156 |
157 | Generally, the [typing][] module simply provides capital letter type names that explicitly
158 | correspond to the internal types. [See TYPES for a more thorough introduction][types].
159 |
160 | And you specify the type of the contents as a parameter in square brackets.
161 |
162 | Thus we have:
163 |
164 | * `list` and `List[E]`
165 | * `set` and `Set[E]`
166 | * `tuple` and `Tuple[E, ...]` is a special case!
167 | * `frozenset` and `FrozenSet[E]`
168 | * `dict` and `Dict[K, V]`
169 |
170 | Tuple is a special case. In Python, they're often used to mean "frozenlist", so
171 | `Tuple[E, ...]` (the `...` is [the Ellipsis object][ellipsis]) indicates all elements have
172 | the type `E`.
173 |
174 | They're also used to represent an unnamed record. In this case, you can use
175 | `Tuple[A, B, C, D]` or however many types. It's generally better to use a `dataclass`.
176 |
177 | The standard rules don't support:
178 |
179 | 1. Using abstract types like `Iterable` or `Mapping`.
180 | 2. Using type variables.
181 | 3. Any kind of callable, coroutine, file handle, etc.
182 |
183 | #### Support for deriving from Generic
184 |
185 | There is experimental support for deriving from `typing.Generic`. An `attrs` or `dataclass`
186 | may declare itself a generic class. If another class invokes it as `YourGeneric[Param,
187 | Param]`, those `Param` types will be substituted into the fields during encoding. This is
188 | useful to construct parameterized container types. Example:
189 |
190 | @attr.s(auto_attribs=True)
191 | class Wrapper(Generic[T, M]):
192 | body: T
193 | count: int
194 | messages: List[M]
195 |
196 | @attr.s(auto_attribs=True)
197 | class Message:
198 | first: Wrapper[str, str]
199 | second: Wrapper[Dict[str, str], int]
200 |
201 | #### Unions
202 |
203 | A union type lets you present alternate types that the converters will attempt in
204 | sequence, e.g. `typing.Union[MyType, int, MyEnum]`.
205 |
206 | This is implemented in the `unions` rule as a so-called[2](#f2)
207 | undiscriminated union. It means the module won't add any additional information to the
208 | value such as some kind of explicit tag.
209 |
210 | When converting from Python to JSON, the checks are generally just using `isinstance`,
211 | but when converting from JSON to Python, the check may be examining strings and `dict`
212 | fields.
213 |
214 | Thus, ambiguous values, especially JSON representations, may confuse the decoder.
215 | See the section on [sharp edges][sharp] for more details.
216 |
217 | ### Hooks
218 |
219 | We'll first examine decode and encode hooks. These let us entirely rewrite the JSON
220 | representation before the normal logic is applied.
221 |
222 | Let's suppose our `Account` class used to name the `balance` field `bal` and we need to
223 | support legacy users.
224 |
225 | ```python
226 | @dataclass
227 | class Account:
228 | @classmethod
229 | def __json_pre_decode__(cls, value):
230 | if 'bal' in value:
231 | value = dict(value)
232 | value['balance'] = value.pop('bal')
233 | return value
234 |
235 | def __json_post_encode__(self, value):
236 | return dict(value, bal=value['balance'])
237 |
238 | ...
239 | ```
240 |
241 | When we decode the value, the following sequence of steps takes place:
242 |
243 | 1. `__json_pre_decode__` is called with `{'user': 'bob', 'bal': '77.0', ...}` and it
244 | returns `{'user': 'bob', 'balance': '77.0', ...}`
245 | 2. Decoders are called against `user` and `balance` and the other fields
246 | 3. The resulting dictionary is passed to `Account(**result)` to construct the instance.
247 |
248 | During encoding, the reverse sequence takes place:
249 |
250 | 1. The instance's fields are read and passed to encoders.
251 | 2. The values are combined into a `dict`.
252 | 3. `__json_post_encode__` is called with `{'user': 'bob', 'balance': '77.0', ...}` and
253 | can adjust the field name to `bal`.
254 |
255 | #### JSON type check hook
256 |
257 | Type checks are only used in _json-syntax_ to support `typing.Union`; in a nutshell, the
258 | `unions` rule will inspect some JSON to see which variant is present.
259 |
260 | If a type-check hook is not defined, `__json_pre_decode__` will be called before the
261 | standard check is completed. (The standard check attempts to determine if required
262 | fields are present and have the correct type.)
263 |
264 | If you have information that can determine the type faster, a check hook can help.
265 |
266 | Going back to our Account example, suppose we decide to support multiple account types
267 | through a special ``class`` field. This is faster and more robust.
268 |
269 | ```python
270 | class AbstractAccount:
271 | @classmethod
272 | def __json_check__(cls, value):
273 | return isinstance(value, dict) and value.get('class') == cls.__name__
274 |
275 | @dataclass
276 | class AccountA(AbstractAccount):
277 | ...
278 |
279 | encode_account = rules.lookup(typ=Union[AccountA, AccountB, AccountC],
280 | verb='python_to_json')
281 | ```
282 |
283 | ### Adding custom rules
284 |
285 | See [the extras][] for details on custom rules, but generally a rule is just a
286 | function. Say, for instance, your type has class methods that encode and decode, this
287 | would be sufficient for many cases:
288 |
289 | ```python
290 | def my_rule(verb, typ, ctx):
291 | if issubclass(typ, MyType):
292 | if verb == 'json_to_python':
293 | return typ.decoder
294 | elif verb == 'python_to_json':
295 | return typ.encoder
296 | ```
297 |
298 | If your rule needs an encoder or decoder for a standard type, it can call
299 | `ctx.lookup(verb=verb, typ=subtype)`. The helper functions defined in `json_syntax.action_v1`
300 | are intended to stay the same so that custom rules can reuse them.
301 |
302 | ### Debugging ambiguous structures
303 |
304 | (May need more docs and some test cases.)
305 |
306 | As _json-syntax_ tries to directly translate your Python types to JSON, it is possible
307 | to write ambiguous structures. To avoid this, there is a handy `is_ambiguous` method:
308 |
309 | ```python
310 | # This is true because both are represented as an array of numbers in JSON.
311 | rules.is_ambiguous(typ=Union[List[int], Set[int]])
312 |
313 | @dataclass
314 | class Account:
315 | user: str
316 | address: str
317 |
318 | # This is true because such a dictionary would always match the contents of the account.
319 | rules.is_ambiguous(typ=Union[Dict[str, str], Account])
320 | ```
321 |
322 | The aim of this is to let you put a check in your unit tests to make sure data can be
323 | reliably expressed given your particular case.
324 |
325 | Internally, this is using the `PATTERN` verb to represent the JSON pattern, so this may
326 | be helpful in understanding how _json-syntax_ is trying to represent your data:
327 |
328 | ```python
329 | print(rules.lookup(typ=MyAmbiguousClass, verb='show_pattern'))
330 | ```
331 |
332 | ### Sharp edges
333 |
334 | _The RuleSet caches encoders._ Construct a new ruleset if you want to change settings.
335 |
336 | _Encoders and decoders do very little checking._ Especially, if you're translating
337 | Python to JSON, it's assumed that your Python classes are correct. The encoders and
338 | decoders may mask subtle issues as they are calling constructors like `str` and `int`
339 | for you. And, by design, if you're translating from JSON to Python, it's assumed you
340 | want to be tolerant of extra data.
341 |
342 | _Everything to do with typing._ It's a bit magical and sort of wasn't designed for this.
343 | [We have a guide to it to try and help][types].
344 |
345 | _Union types._ You can use `typing.Union` to allow a member to be one of some number of
346 | alternates, but there are some caveats. You should use the `.is_ambiguous()` method of
347 | RuleSet to warn you of these.
348 |
349 | _Atom rules accept specific types._ At present, the rules for atomic types (`int`,
350 | `str`, `bool`, `date`, `float`, `Decimal`) must be declared as exactly those types. With
351 | multiple inheritance, it's not clear which rule should apply.
352 |
353 | _Checks are stricter than converters._ For example, a check for `int` will check whether
354 | the value is an integer, whereas the converter simply calls `int` on it. Thus there are
355 | inputs where `MyType` would pass but `Union[MyType, Dummy]` will fail. (Note
356 | that `Optional` is special-cased to look for `None` and doesn't have this problem.)
357 |
358 | _Numbers are hard._ See the rules named `floats`, `floats_nan_str`, `decimals`,
359 | `decimals_as_str` for details on how to get numbers to transmit reliably. There is no rule for
360 | fractions or complex numbers as there's no canonical way to transmit them via JSON.
361 |
362 | ## Maintenance
363 |
364 | This package is maintained via the [poetry][] tool. Some useful commands:
365 |
366 | 1. Setup: `poetry install`
367 | 2. Run tests: `poetry run pytest tests/`
368 | 3. Reformat: `black json_syntax/ tests/`
369 | 4. Generate setup.py: `dephell deps convert -e setup`
370 | 5. Generate requirements.txt: `dephell deps convert -e req`
371 |
372 | ### Running tests via docker
373 |
374 | The environments for 3.4 through 3.9 are in `pyproject.toml`, so just run:
375 |
376 | dephell deps convert -e req # Create requirements.txt
377 | dephell docker run -e test34 pip install -r requirements.txt
378 | dephell docker run -e test34 pytest tests/
379 | dephell docker shell -e test34 pytest tests/
380 | dephell docker destroy -e test34
381 |
382 | ### Notes
383 |
384 | 1: Writing the encoder is deceptively easy because the instances in
385 | Python has complete information. The standard `json` module provides a hook to let
386 | you encode an object, and another hook to recognize `dict`s that have some special
387 | attribute. This can work quite well, but you'll have to encode *all* non-JSON types
388 | with dict-wrappers for the process to work in reverse. [↩](#a1)
389 |
390 | 2: A discriminated union has a tag that identifies the variant, such as
391 | status codes that indicate success and a payload, or some error. Strictly, all unions
392 | must be discriminated in some way if different code paths are executed. In the `unions`
393 | rule, the discriminant is the class information in Python, and the structure of the JSON
394 | data. A less flattering description would be that this is a "poorly" discriminated
395 | union. [↩](#a2)
396 |
397 | [poetry]: https://poetry.eustace.io/docs/#installation
398 | [gradual typing]: https://www.python.org/dev/peps/pep-0483/#summary-of-gradual-typing
399 | [the extras]: https://github.com/UnitedIncome/json-syntax/tree/master/json_syntax/extras
400 | [typing]: https://docs.python.org/3/library/typing.html
401 | [types]: https://github.com/UnitedIncome/json-syntax/blob/master/TYPES.md
402 | [attrs]: https://attrs.readthedocs.io/en/stable/
403 | [dataclasses]: https://docs.python.org/3/library/dataclasses.html
404 | [sharp]: https://github.com/UnitedIncome/json-syntax/blob/master/README.md#sharp-edges
405 | [ellipsis]: https://docs.python.org/3/library/stdtypes.html#the-ellipsis-object
406 | [extras ddb]: https://github.com/UnitedIncome/json-syntax/tree/master/json_syntax/extras/dynamodb.py
407 | [extras flag]: https://github.com/UnitedIncome/json-syntax/tree/master/json_syntax/extras/flags.py
408 | [extras loose]: https://github.com/UnitedIncome/json-syntax/tree/master/json_syntax/extras/loose_dates.py
409 |
--------------------------------------------------------------------------------
/TYPES.md:
--------------------------------------------------------------------------------
1 | # Type hints and generic types for the practitioner
2 |
3 | One pitfall of type hints and generic types is they are different from what Python coders already know. Even if you were diligent and read the entire [tutorial][], they didn't get a mention and the standard library reference has them squirreled away under "development tools." They're obscure, but we need them[1](#f1) so we ought to explain them.
4 |
5 | ## What do they do?
6 |
7 | Type hints are used by static type checkers like [mypy][] and [Pyre][] to prove that functions are passing the correct type of data to each other. They are the same concept as [TypeScript][] and [Flow][] in the Javascript world.
8 |
9 | The premise of "gradual typing" is that it's optional. If code works, leave it alone. If you chase down a `TypeError`, though, you can add a few annotations directly in the source rather than write yet another unit test.
10 |
11 | Generic types are the weird capitalized square bracketed types like `Dict[str, Tuple[int, ...]]` provided by the [typing][] module.
12 |
13 | ## What's the difference between a type, a type hint and a generic type?
14 |
15 | In Python, the primary distinction is that type hints and generic types are not native to the interpreter.
16 |
17 | To summarize them:
18 |
19 | * Types
20 | * The regular `int`, `bool`, `set`, `Decimal` you already know.
21 | * A **value** always has a type, so `5` is implicitly `int`.
22 | * Used extensively by the interpreter.
23 | * Type hints
24 | * Usually looks like `name: hint`.
25 | * Uses either a type or a generic type.
26 | * A **variable** _may_ have a hint.
27 | * Largely ignored by the interpreter.
28 | * (Also an argument to a function or member of a class has a hint.)
29 | * Generic types
30 | * Imported from [typing][].
31 | * Look like `FrozenSet[Decimal]`, `Dict[str, Tuple[int, str]]`.
32 | * Used in hints to describe the type of a variable with greater precision.
33 |
34 | The reason for all this is that if you can nail down what kind of data is coming into a function, your code doesn't have to deal with all kinds of exceptional cases.
35 |
36 | Python doesn't have a problem with a list like `[1, 2, 'three', 'four']`, but if you're trying to sum the elements of the list, it's going to fail because summation is only defined for numbers.
37 |
38 | A generic type like `List[int]` is an assertion that the specific `list` will _only_ contain `int`s. A type checker can scan those assertions and look for contradictions. It's going to scan your code, finding those assertions and try to generate a proof that your code is sound _before_ you run it.
39 |
40 | And just as type checkers can use type hints to generate proofs, json-syntax can unpack such assertions and write a converter based on the structure of the data.
41 |
42 | ## How do I use type hints in my code?
43 |
44 | This document won't go into how type checkers use hints, and [mypy][] and [Pyre][] both have tutorials. In a nutshell, though, you can put hints in your function signatures.
45 |
46 | For what we're trying to do, which is describe your data so you can convert it to and from JSON, the nicest way is through either the [attrs][] package or the (since 3.7) standard [dataclasses][] package. They're similar because `dataclasses` is a standardized `attrs`. It typically looks something like this:
47 |
48 | ```python
49 | @attr.s(auto_attribs=True)
50 | class Employee:
51 | name: str
52 | widgets_made: int
53 |
54 | # Don't actually mix attrs and dataclasses,
55 | # this is just to show they're similar.
56 |
57 | @dataclass
58 | class Department:
59 | name: str
60 | budget: float
61 | staff: List[Employee]
62 |
63 | @property
64 | def widgets_made(self):
65 |         return sum(peon.widgets_made for peon in self.staff)
66 | ```
67 |
68 | And what they do is write the `__dunder__` methods for you:
69 |
70 | ```python
71 | >>> Employee('Bob', 55) # __init__ and __repr__
72 | Employee('Bob', 55)
73 | >>> Employee('Bob', 55) == Employee('Bob', 55) # comparisons
74 | True
75 | >>> {Employee('Bob', 55), Employee('Liz', 56)} # __hash__
76 | {Employee('Bob', 55), Employee('Liz', 56)}
77 | ```
78 |
79 | That said, the type hints don't enforce anything by themselves:
80 |
81 | ```python
82 | >>> Employee(name=123, widgets_made='wat?')
83 | Employee(name=123, widgets_made='wat?')
84 | ```
85 |
86 | But [mypy][mypy-add] and [Pyre][pyre-dc][4](#f4) can use them to check the correctness of your code, and json-syntax uses them to write converters for you.
87 |
88 | ### Are generic types subclasses of their related types?
89 |
90 | Let's ask Python:
91 |
92 | ```python
93 | >>> issubclass(List[int], list)
94 | TypeError: issubclass() arg 1 must be a class
95 |
96 | >>> isinstance([1, 2, 3], List[int])
97 | TypeError: Subscripted generics cannot be used with class and instance checks
98 |
99 | >>> List[int]([1, 2, 3])
100 | TypeError: Type List cannot be instantiated; use list() instead
101 |
102 | >>> type(List[int])
103 | <class 'typing._GenericAlias'>
104 | ```
105 |
106 | Generic types are special objects that _describe_ types, but there's a twist. Let's check the method-resolution order of `List[int]` to list all the known base classes:
107 |
108 | ```python
109 | >>> List[int].mro()
110 | [<class 'list'>, <class 'object'>]
111 | ```
112 |
113 | The `mro` method is only defined on `type`s, and it turns out `List[int]` *does* inherit from `list`. Weirder still:
114 |
115 | ```python
116 | >>> class MyList(List[int]):
117 | ... def average(self):
118 | ... return sum(self) / len(self)
119 |
120 | >>> MyList([1, 2, 3]).average()
121 | 2.0
122 |
123 | >>> MyList.mro()
124 | [<class 'MyList'>, <class 'list'>, <class 'typing.Generic'>, <class 'object'>]
125 | ```
126 |
127 | So it's valid for your own class to inherit from `List[int]`, whereupon it will behave like a `list`.
128 |
129 | Your type checker can then enforce that your code only stores `int`s in that class for you.
130 |
131 | At the time of writing, inheriting from a generic type won't work with json-syntax; we'll have to see if and how people want to use that.
132 |
133 | ## How does it work?
134 |
135 | As an example, let's suppose we have a type hint `Set[date]` and we want to convert that back and forth between the Python representation and a reasonable[2](#f2) JSON representation.
136 |
137 | ```python
138 | >>> json.loads('["2020-02-02", "2020-03-03", "2020-04-04"]')
139 | ['2020-02-02', '2020-03-03', '2020-04-04']
140 | ```
141 |
142 | We want a decoder that will convert this to a Python set. And json-syntax will write us a function to do that based on the type hints:
143 |
144 | ```python
145 | decoder = lookup(verb='json_to_python', typ=Set[date])
146 |
147 | # Should result in equivalent to:
148 |
149 | def decoder(value):
150 |     return {date.fromisoformat(elem) for elem in value}
151 |
152 | # And so we get our desired python values:
153 |
154 | >>> decoder(['2020-02-02', '2020-03-03', '2020-04-04'])
155 | {date(2020, 2, 2), date(2020, 3, 3), date(2020, 4, 4)}
156 | ```
157 |
158 | ### Under the hood
159 |
160 | The algorithm can be visualized as transforming one tree into another.
161 |
162 | ```
163 | Type convert_type
164 | / \ ---> / \
165 | Type Type convert_type convert_type
166 |
167 |
168 | Set convert_set
169 | | ----> |
170 | date convert_date
171 | ```
172 |
173 | We can deconstruct complex types, like an `attrs` class:
174 |
175 | ```python
176 | >>> [(a.name, a.type) for a in attr.fields(Employee)]
177 | [('name', str), ('widgets_made', int)]
178 | ```
179 |
180 | Back to our example:
181 |
182 | ```python
183 | decoder = lookup(verb='json_to_python', typ=Set[date])
184 | ```
185 |
186 | We first need to take apart that generic `Set[date]`:
187 |
188 | ```python
189 | >>> from typing import Set
190 | >>> Set[date].__origin__
191 | set
192 | >>> Set[date].__args__
193 | (date,)
194 | ```
195 |
196 | We know it's a python `set` of something, and that it takes a single argument `date`.
197 |
198 | The `sets` rule catches that we're dealing with a set, but it doesn't know how `date`s work, so it internally calls:
199 |
200 | ```python
201 | inner = lookup(verb='json_to_python', typ=date)
202 | ```
203 |
204 | The `dates` rule knows that `date` is an atom, it has no inner types to deal with. So it can simply return:
205 |
206 | ```python
207 | def convert_date(value):
208 | return date.fromisoformat(value)
209 | ```
210 |
211 | The `date.fromisoformat` method will parse a correctly formatted `str` to a `date`.
212 |
213 | Now we're back in the `sets` rule and it knows that in the JSON representation it will have a `list` of something that it should convert to a `set`. Its action is a little less elegant than our original set comprehension:
214 |
215 | ```python
216 | def convert_set(value, inner):
217 | return set(map(inner, value))
218 | ```
219 |
220 | We use the [functools.partial][functools] builtin[3](#f3) to put this together, and wind up with an expression like:
221 |
222 | ```python
223 | decoder = partial(convert_set, inner=convert_date)
224 |
225 | # Same as:
226 | def decoder(value):
227 | return convert_set(value, inner=convert_date)
228 | ```
229 |
230 | ### What are other generic types in `typing` for?
231 |
232 | Some of the generic types are generic versions of abstract base classes from `collections` and others, which can be used to write custom classes, or to declare as little as possible. In the latter case, if your function just uses `for` to walk through the contents of an argument, it could hint that argument with `Iterable[Whatever]`.
233 |
234 | This package doesn't have any standard rules supporting abstract types, as they seem like they'd suit specific use cases.
235 |
236 | Type variables are used to allow types to change in lockstep. You might define a function `first` like this:
237 |
238 | ```python
239 | T = TypeVar('T')
240 | def first(elems: Iterable[T]) -> T:
241 | for elem in elems:
242 | return elem
243 | ```
244 |
245 | The `T` may be different when the function is invoked in different contexts, but a type checker could infer from this that if `a: Set[str]` and `b = first(a)` that `b`'s type is `str`.
246 |
247 | You can create a generic user-defined class with type variables. This package doesn't support type variables yet.
248 |
249 | ```python
250 | @dataclass
251 | class Pair(Generic[T]):
252 | a: T
253 | b: Set[T]
254 |
255 | @dataclass
256 | class Info:
257 | x: Pair[int]
258 | y: Pair[str]
259 |
260 | # Effectively the same as:
261 |
262 | @dataclass
263 | class PairInt:
264 | a: int
265 | b: Set[int]
266 |
267 | @dataclass
268 | class PairStr:
269 | a: str
270 | b: Set[str]
271 |
272 | @dataclass
273 | class Info:
274 | x: PairInt
275 | y: PairStr
276 | ```
277 |
278 | The `Union` generic type lets you select alternate types, and this is supported by json-syntax. There are some caveats, mentioned in the top level README.
279 |
280 | ## Footnotes
281 |
282 | 1: It's trivial to write an encoder that asks Python types to convert themselves to JSON, and `attrs`, `simplejson` and other libraries support this. Writing the decoder is trickier because you have to reconstruct that information. It can be done, it's how we did it before writing this library, but our experience was that it became a giant kludge over time.[↩](#a1)
283 |
284 | 2: This package defines "reasonable" as representing a set of dates as a JSON array of strings in the common ISO8601 format. You may have different needs, so you can swap in your own rules, and please submit a PR if you think they're addressing a broader need.[↩](#a2)
285 |
286 | 3: Using `partial` ensures that the converter can be pickled; not sure at this time if that's really helpful but it's easy to do. It should also make an `explain` function relatively easy to write.[↩](#a3)
287 |
288 | 4: Pyre only seems to support `dataclasses`.[↩](#a4)
289 |
290 | [tutorial]: https://docs.python.org/3/tutorial/index.html
291 | [dataclasses]: https://docs.python.org/3/library/dataclasses.html
292 | [functools]: https://docs.python.org/3/library/functools.html
293 | [typing]: https://docs.python.org/3/library/typing.html
294 | [attrs]: https://attrs.readthedocs.io/en/stable/
295 | [pyre]: https://pyre-check.org/
296 | [pyre-dc]: https://github.com/facebook/pyre-check/blob/master/plugin/dataClass.ml
297 | [mypy]: http://mypy-lang.org/
298 | [mypy-add]: https://mypy.readthedocs.io/en/stable/additional_features.html
299 | [typescript]: https://www.typescriptlang.org/
300 | [flow]: https://flow.org/
301 |
--------------------------------------------------------------------------------
/json_syntax/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | The JSON syntax library is a combinatorial parser / generator library for managing
3 | conversion of Python objects to and from common JSON types.
4 |
5 | It's not strictly limited to JSON, but that's the major use case.
6 | """
7 |
8 | from .ruleset import RuleSet
9 | from .std import ( # noqa
10 | atoms,
11 | decimals,
12 | decimals_as_str,
13 | floats,
14 | floats_nan_str,
15 | iso_dates,
16 | optional,
17 | enums,
18 | faux_enums,
19 | lists,
20 | sets,
21 | dicts,
22 | )
23 | from .attrs import attrs_classes, named_tuples, tuples
24 | from .unions import unions
25 | from .string import stringify_keys
26 | from .helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN # noqa
27 |
28 |
def std_ruleset(
    floats=floats,
    decimals=decimals,
    dates=iso_dates,
    enums=enums,
    lists=lists,
    sets=sets,
    unions=unions,
    extras=(),
    custom=RuleSet,
    cache=None,
):
    """
    Build a RuleSet from the standard rules.

    Each keyword argument defaults to the stock rule of the same name so a
    single rule can be swapped without restating the rest; e.g. to replace
    ``decimals`` with ``decimals_as_str`` just call
    ``std_ruleset(decimals=decimals_as_str)``.
    """
    rules = (
        enums,
        atoms,
        floats,
        decimals,
        dates,
        optional,
        lists,
        attrs_classes,
        sets,
        named_tuples,
        tuples,
        dicts,
        stringify_keys,
        unions,
    )
    return custom(*(rules + tuple(extras)), cache=cache)
66 |
--------------------------------------------------------------------------------
/json_syntax/action_v1.py:
--------------------------------------------------------------------------------
1 | from .errors import ErrorContext, err_ctx
2 |
3 | from datetime import date, datetime, time, timedelta
4 | from decimal import InvalidOperation
5 | import math
6 | import re
7 |
8 |
def check_parse_error(value, parser, error):
    """Return True when ``parser`` accepts ``value`` without raising ``error``."""
    try:
        parser(value)
    except error:
        return False
    return True
16 |
17 |
def check_isinst(value, typ):
    """Predicate form of ``isinstance``, suitable for ``functools.partial``."""
    return isinstance(value, typ)
20 |
21 |
def check_has_type(value, typ):
    """
    Check that ``value`` is exactly of type ``typ``.

    Unlike ``isinstance``, subclasses don't match: ``True`` is not an ``int``
    here. Identity (``is``) is the idiomatic type comparison and is equivalent
    to ``==`` for classes while avoiding any metaclass ``__eq__`` surprises.
    """
    return type(value) is typ
24 |
25 |
def convert_decimal_str(value):
    """Render a Decimal as a string, refusing to emit signalling NaNs."""
    text = str(value)
    if text == "sNaN":
        raise InvalidOperation("Won't save signalling NaN")
    return text
31 |
32 |
def convert_float(value):
    """
    Coerce to float, mapping the non-finite values onto the JSON-friendly
    strings "NaN", "Infinity" and "-Infinity".
    """
    value = float(value)
    if math.isnan(value):
        return "NaN"
    if math.isinf(value):
        return "-Infinity" if value < 0.0 else "Infinity"
    return value
43 |
44 |
def check_float(value):
    """
    Check whether ``value`` can be read as a float: any int or float, or a
    string naming one of the special non-finite values (case-insensitive).
    """
    if isinstance(value, (int, float)):
        return True
    # Bug fix: the original tuple was missing a comma after "infinity", which
    # silently concatenated it with "-inf" so neither spelling ever matched.
    return isinstance(value, str) and value.lower() in (
        "nan",
        "inf",
        "infinity",
        "-inf",
        "-infinity",
        "+inf",
        "+infinity",
    )
52 |
53 |
def convert_enum_str(value, typ):
    """Coerce ``value`` to the enum ``typ`` and return the member's name."""
    member = typ(value)
    return member.name
56 |
57 |
def convert_none(value):
    """Pass ``None`` through unchanged; reject anything else."""
    if value is None:
        return None
    raise ValueError("Expected None")
62 |
63 |
def check_str_enum(value, typ):
    """Check whether ``value`` is a valid member name of the enum ``typ``."""
    try:
        typ[value]
    except (KeyError, TypeError):
        return False
    return True
71 |
72 |
def convert_str_enum(value, typ):
    """Look up a member of the enum ``typ`` by name."""
    return typ[value]
75 |
76 |
def pass_faux_enum(value, typ):
    """Validate that ``value`` names a member of ``typ``, then pass it through."""
    typ[value]  # raises KeyError for unknown names
    return value
80 |
81 |
# Prefer the stdlib ISO-8601 parsers (available on Python 3.7+); fall back to
# dateutil's strict isoparser on older interpreters where ``fromisoformat`` is
# missing.
if hasattr(datetime, "fromisoformat"):
    convert_date = date.fromisoformat
    convert_datetime = datetime.fromisoformat
    convert_time = time.fromisoformat
else:
    from dateutil.parser import isoparser

    # One shared parser instance; sep="T" matches the strict ISO separator.
    instance = isoparser(sep="T")
    convert_date = instance.parse_isodate
    convert_datetime = instance.isoparse
    convert_time = instance.parse_isotime
    del instance
94 |
95 |
def convert_timedelta_str(dur):
    "Barebones support for storing a timedelta as an ISO8601 duration."
    if dur.microseconds:
        fraction = ".{:06d}".format(dur.microseconds)
    else:
        fraction = ""
    return "P{:d}DT{:d}{}S".format(dur.days, dur.seconds, fraction)
100 |
101 |
# Matches an ISO8601 duration such as "P1DT2H30M"; each group captures one
# component including its designator letter. ``(?!$)`` rejects a bare "P" and
# ``(?=[0-9+-])`` rejects a trailing "T" with no time components after it.
_iso8601_duration = re.compile(
    r"^P(?!$)([-+]?\d+(?:[.,]\d+)?Y)?"
    r"([-+]?\d+(?:[.,]\d+)?M)?"
    r"([-+]?\d+(?:[.,]\d+)?W)?"
    r"([-+]?\d+(?:[.,]\d+)?D)?"
    r"(?:(T)(?=[0-9+-])"
    r"([-+]?\d+(?:[.,]\d+)?H)?"
    r"([-+]?\d+(?:[.,]\d+)?M)?"
    r"([-+]?\d+(?:[.,]\d+)?S)?)?$"
)
# Maps the section letter ("P" = date part, "T" = time part) plus designator
# to the corresponding ``timedelta`` keyword argument. "M" is ambiguous, which
# is why the section letter is part of the key.
_duration_args = {
    "PW": "weeks",
    "PD": "days",
    "TH": "hours",
    "TM": "minutes",
    "TS": "seconds",
}
119 |
120 |
def convert_str_timedelta(dur):
    """
    Parse an ISO8601 duration string into a ``timedelta``.

    Nonzero year or month components are rejected because their length is not
    fixed; zero-valued components are simply dropped.
    """
    if not isinstance(dur, str):
        raise ValueError("Value was not a string.")
    match = _iso8601_duration.match(dur.upper().replace(",", "."))
    if not match:
        raise ValueError("Value was not an ISO8601 duration.")
    section = "P"
    kwargs = {}
    for group in match.groups():
        if group is None:
            continue
        if group == "T":
            # Switch from the date section to the time section.
            section = "T"
            continue
        part = section + group[-1]
        amount = float(group[:-1])
        if not amount:
            continue
        if part in ("PY", "PM"):
            raise ValueError("Year and month durations not supported")
        kwargs[_duration_args[part]] = amount
    return timedelta(**kwargs)
144 |
145 |
def convert_optional(value, inner):
    """Convert via ``inner`` unless the value is None, which passes through."""
    return None if value is None else inner(value)
150 |
151 |
def check_optional(value, inner):
    """A None value always passes; anything else is checked by ``inner``."""
    if value is None:
        return True
    return inner(value)
154 |
155 |
def convert_collection(value, inner, con):
    """Convert each element with ``inner`` and pour the results into ``con``."""

    def generate():
        for index, item in enumerate(value):
            # err_ctx invokes the lambda immediately, so ``item`` is current.
            yield err_ctx("[{}]".format(index), lambda: inner(item))

    return con(generate())
160 |
161 |
def check_collection(value, inner, con):
    """Value must be a ``con`` instance and every element must pass ``inner``."""
    if not isinstance(value, con):
        return False
    return all(
        err_ctx("[{}]".format(index), lambda: inner(item))
        for index, item in enumerate(value)
    )
166 |
167 |
def convert_mapping(value, key, val, con):
    """Convert keys with ``key`` and values with ``val``, building a ``con``."""

    def generate_pairs():
        for k, v in value.items():
            yield err_ctx(k, lambda: (key(k), val(v)))

    return con(generate_pairs())
170 |
171 |
def check_mapping(value, key, val, con):
    """Value must be a ``con`` whose keys and values all pass their checks."""
    if not isinstance(value, con):
        return False
    return all(err_ctx(k, lambda: key(k) and val(v)) for k, v in value.items())
176 |
177 |
def convert_dict_to_attrs(value, pre_hook, inner_map, con):
    """
    Convert a JSON dict into an attrs / dataclass instance.

    ``pre_hook`` may reshape the incoming dict first; each attribute in
    ``inner_map`` is then looked up by name, converted with its inner action
    and collected as keyword arguments for the constructor ``con``.
    Raises KeyError when a required attribute is missing.
    """
    value = pre_hook(value)
    args = {}
    for attr in inner_map:
        with ErrorContext("[{!r}]".format(attr.name)):
            try:
                arg = value[attr.name]
            except KeyError:
                # Optional attributes may simply be absent; a required one
                # gets a clearer KeyError than the bare lookup would raise.
                if attr.is_required:
                    raise KeyError("Missing key {!r}".format(attr.name)) from None
            else:
                args[attr.init_name] = attr.inner(arg)
    return con(**args)
191 |
192 |
def convert_dict_to_dict(value, inner_map, con):
    """
    Convert the known keys of a dict, keeping the original key names, and pass
    the resulting dict to the constructor ``con``. Raises KeyError when a
    required key is missing.
    """
    converted = {}
    for attr in inner_map:
        with ErrorContext("[{!r}]".format(attr.name)):
            try:
                raw = value[attr.name]
            except KeyError:
                if attr.is_required:
                    raise KeyError("Missing key {!r}".format(attr.name)) from None
            else:
                converted[attr.name] = attr.inner(raw)
    return con(converted)
205 |
206 |
def check_dict(value, inner_map, pre_hook):
    """
    Check that ``value`` is a dict whose entries satisfy ``inner_map``.

    ``pre_hook`` is applied first, mirroring ``convert_dict_to_attrs``. A
    missing key only fails the check when the attribute is required.
    """
    value = pre_hook(value)
    if not isinstance(value, dict):
        return False
    for attr in inner_map:
        with ErrorContext("[{!r}]".format(attr.name)):
            try:
                arg = value[attr.name]
            except KeyError:
                if attr.is_required:
                    return False
            else:
                if not attr.inner(arg):
                    return False
    return True
222 |
223 |
def convert_attrs_to_dict(value, post_hook, inner_map):
    """
    Convert an attrs / dataclass instance into a JSON-ready dict.

    Optional attributes still equal to their default are omitted from the
    output. If ``post_hook`` names a method on the instance, it is called last
    to adjust the finished dict.
    """
    out = {}
    for attr in inner_map:
        with ErrorContext("." + attr.name):
            field = getattr(value, attr.name)
            # Skip optional fields left at their default to keep output small.
            if not attr.is_required and field == attr.default:
                continue
            out[attr.name] = attr.inner(field)
    if post_hook is not None:
        out = getattr(value, post_hook)(out)
    return out
235 |
236 |
def convert_tuple_as_list(value, inner, con):
    """Convert a fixed-length tuple elementwise; ``inner`` has one converter per slot."""

    def generate():
        for index, (item, convert) in enumerate(zip(value, inner)):
            yield err_ctx("[{}]".format(index), lambda: convert(item))

    return con(generate())
242 |
243 |
def check_tuple_as_list(value, inner, con):
    """Check container type, exact length, and each element against its own checker."""
    if not isinstance(value, con) or len(value) != len(inner):
        return False
    return all(
        err_ctx("[{}]".format(index), lambda: checker(item))
        for index, (item, checker) in enumerate(zip(value, inner))
    )
253 |
254 |
def check_union(value, steps):
    """True if any of the union's alternatives accepts the value."""
    for accepts, label in steps:
        if err_ctx(label, lambda: accepts(value)):
            return True
    return False
257 |
258 |
def convert_union(value, steps, typename):
    """Convert with the first alternative whose check accepts the value; else raise."""
    for accepts, action, label in steps:
        with ErrorContext(label):
            if accepts(value):
                return action(value)
    raise ValueError("Expected value of type {} got {!r}".format(typename, value))
265 |
--------------------------------------------------------------------------------
/json_syntax/attrs.py:
--------------------------------------------------------------------------------
1 | from .helpers import JSON2PY, PY2JSON, INSP_JSON, INSP_PY, PATTERN, has_origin, identity
2 | from .action_v1 import (
3 | check_dict,
4 | check_isinst,
5 | check_tuple_as_list,
6 | convert_attrs_to_dict,
7 | convert_dict_to_dict,
8 | convert_dict_to_attrs,
9 | convert_tuple_as_list,
10 | )
11 | from . import pattern as pat
12 | from .product import build_attribute_map, build_named_tuple_map, build_typed_dict_map
13 | from .types import is_generic, get_origin, get_argument_map
14 |
15 | from functools import partial
16 |
17 | _SUPPORTED_VERBS = (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN)
18 |
19 |
def attrs_classes(
    verb,
    typ,
    ctx,
    pre_hook="__json_pre_decode__",
    post_hook="__json_post_encode__",
    check="__json_check__",
):
    """
    Handle an ``@attr.s`` or ``@dataclass`` decorated class.

    This rule also implements several hooks to handle complex cases, especially to
    manage backwards compatibility. Hooks should be resilient against invalid data,
    and should not mutate their inputs.

    `__json_pre_decode__` is used by decoders constructed by `RuleSet.json_to_python`.
    It will be called before decoding with the JSON object and may adjust them to fit
    the expected structure, which must be a `dict` with the necessary fields.

    The checker generated by `inspect_json` will also call `__json_pre_decode__` before
    inspecting the value generated.

    `__json_post_encode__` is used by encoders constructed by `RuleSet.python_to_json`.
    It will be called after encoding with the JSON object and may adjust it as
    necessary.

    `__json_check__` may be used to completely override the `inspect_json` check generated
    for this class.
    """
    if verb not in _SUPPORTED_VERBS:
        return
    # A generic instantiation (e.g. MyClass[int]) is split into the bare class
    # plus a map of its type variables to concrete arguments.
    if is_generic(typ):
        typ_args = get_argument_map(typ)
        typ = get_origin(typ)
    else:
        typ_args = None

    # None means this isn't an attrs/dataclass type; let other rules try.
    inner_map = build_attribute_map(verb, typ, ctx, typ_args)
    if inner_map is None:
        return

    if verb == INSP_PY:
        return partial(check_isinst, typ=typ)

    if verb == JSON2PY:
        pre_hook_method = getattr(typ, pre_hook, identity)
        return partial(
            convert_dict_to_attrs,
            pre_hook=pre_hook_method,
            inner_map=inner_map,
            con=typ,
        )
    elif verb == PY2JSON:
        # Only pass the hook name along if the class actually defines it.
        post_hook = post_hook if hasattr(typ, post_hook) else None
        return partial(convert_attrs_to_dict, post_hook=post_hook, inner_map=inner_map)
    elif verb == INSP_JSON:
        # A class-provided __json_check__ completely overrides the generated check.
        check = getattr(typ, check, None)
        if check:
            return check
        pre_hook_method = getattr(typ, pre_hook, identity)
        return partial(check_dict, inner_map=inner_map, pre_hook=pre_hook_method)
    elif verb == PATTERN:
        # Only required attributes participate in the pattern.
        # NOTE(review): ``Unkown`` matches the pattern module's spelling — confirm.
        return pat.Object.exact(
            (pat.String.exact(attr.name), attr.inner or pat.Unkown)
            for attr in inner_map
            if attr.is_required
        )
87 |
88 |
def named_tuples(verb, typ, ctx):
    """
    Handle a ``NamedTuple(name, [('field', type), ('field', type)])`` type.

    Also handles a ``collections.namedtuple`` if you have a fallback handler.

    Warning: there's no clear runtime marker that something is a namedtuple; it's just
    a subclass of ``tuple`` that has some special fields.
    """
    if verb not in _SUPPORTED_VERBS:
        return

    inner_map = build_named_tuple_map(verb, typ, ctx)
    if inner_map is None:
        return

    if verb == INSP_PY:
        return partial(check_isinst, typ=typ)
    if verb == JSON2PY:
        return partial(
            convert_dict_to_attrs, pre_hook=identity, inner_map=inner_map, con=typ
        )
    if verb == PY2JSON:
        return partial(convert_attrs_to_dict, post_hook=None, inner_map=inner_map)
    if verb == INSP_JSON:
        return partial(check_dict, pre_hook=identity, inner_map=inner_map)
    if verb == PATTERN:
        required = (attr for attr in inner_map if attr.is_required)
        return pat.Object.exact(
            (pat.String.exact(attr.name), attr.inner) for attr in required
        )
121 |
122 |
def typed_dicts(verb, typ, ctx):
    """
    Handle the TypedDict product type. This allows you to construct a dict with specific
    (string) keys, which is often how people really use dicts.

    Both the class form and the functional form,
    ``TypedDict('Name', {'field': type, 'field': type})`` are supported.
    """
    if verb not in _SUPPORTED_VERBS:
        return

    inner_map = build_typed_dict_map(verb, typ, ctx)
    if inner_map is None:
        return

    # NOTE(review): the original comment claimed ``dict`` is passed as the
    # constructor, but the code passes ``typ`` itself; calling a TypedDict
    # class produces a plain dict, so the effect is presumably the same —
    # confirm intended.
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_dict_to_dict, inner_map=inner_map, con=typ)
    elif verb in (INSP_JSON, INSP_PY):
        return partial(check_dict, pre_hook=identity, inner_map=inner_map)
    elif verb == PATTERN:
        return pat.Object.exact(
            (pat.String.exact(attr.name), attr.inner)
            for attr in inner_map
            if attr.is_required
        )
149 |
150 |
def tuples(verb, typ, ctx):
    """
    Handle a ``Tuple[type, type, type]`` product type. Use a ``NamedTuple`` if you don't
    want a list. Though, if possible, prefer ``attrs`` or ``dataclass``.
    """
    if verb not in _SUPPORTED_VERBS or not has_origin(typ, tuple):
        return

    args = typ.__args__
    if Ellipsis in args:
        # Homogeneous Tuple[X, ...]; that's the lists rule's job.
        return

    inner = [ctx.lookup(verb=verb, typ=arg) for arg in args]
    if verb == PATTERN:
        return pat.Array.exact(inner)
    # Python-side verbs see a tuple; the JSON representation is a list.
    con = tuple if verb in (JSON2PY, INSP_PY) else list
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_tuple_as_list, inner=inner, con=con)
    return partial(check_tuple_as_list, inner=inner, con=con)
174 |
--------------------------------------------------------------------------------
/json_syntax/cache.py:
--------------------------------------------------------------------------------
1 | from warnings import warn
2 | import threading
3 |
4 |
class UnhashableType(UserWarning):
    """Warning issued when a type can't be hashed for use as a cache key."""
    pass
7 |
8 |
class ForwardAction:
    """
    A mutable callable. Since actions are simply functions, this lets us create a
    promise of a function and replace it when we have the actual function ready. This is
    a simple way to handle cycles in types.

    The ``__call__`` slot on the class fetches the instance's stored callable,
    so replacing ``self.__call__`` swaps the behavior of ``instance()``.
    """

    __slots__ = ("__call__",)

    def __init__(self, call):
        self.__call__ = call

    def __repr__(self):
        # Bug fix: this previously did "".format(...), which always produced
        # an empty string; include the wrapped callable in the repr instead.
        return "<fwd {}>".format(self.__call__)
23 |
24 |
class SimpleCache:
    """
    Default, non-thread-safe cache mapping (verb, type) pairs to actions.

    Also tracks lookups currently in progress so that recursive type
    definitions resolve to a ForwardAction rather than recursing forever.
    """

    def __init__(self):
        self.cache = {}

    def access(self):
        """Requests a context manager to access the cache."""
        return self

    def __enter__(self):
        """Stub implementation; see subclasses."""
        return self

    def __exit__(self, e_typ, e_val, e_tb):
        """Stub implementation; see subclasses."""
        return

    def get(self, verb, typ):
        # Public lookup; an unhashable type reads as "no cached action."
        result = self._lookup(verb, typ)
        return result if result is not NotImplemented else None

    def _lookup(self, verb, typ):
        """
        Handle unhashable types by warning about them.
        """
        try:
            return self.cache.get((verb, typ))
        except TypeError:
            warn(
                "Type {} is unhashable; json_syntax probably can't handle this".format(
                    typ
                ),
                category=UnhashableType,
            )
            return NotImplemented

    def in_flight(self, verb, typ):
        """
        Called when we begin determining the action for a type. We construct a forward
        action that will be fulfilled by the ``complete`` call.
        """
        if self._lookup(verb, typ) is None:

            def unfulfilled(value):
                # This can't be pickled, which is a good thing.
                raise TypeError(
                    "Forward reference was never fulfilled to {} for {}".format(
                        verb, typ
                    )
                )

            forward = ForwardAction(unfulfilled)
            self.cache[verb, typ] = forward
            return forward

    def de_flight(self, verb, typ, forward):
        """
        If a lookup fails, this removes the entry so that further attempts can be made.
        """
        present = self._lookup(verb, typ)
        if present is forward:
            del self.cache[verb, typ]

    def complete(self, verb, typ, action):
        """
        Once a type is complete, we fulfill any ForwardActions and replace the cache
        entry with the actual action.
        """
        present = self._lookup(verb, typ)
        if present is NotImplemented:
            return  # Unhashable.
        elif present is None:
            self.cache[verb, typ] = action
        elif isinstance(present, ForwardAction):
            present.__call__ = action
            # Replace the cache entry, if it's never been used let the ForwardAction be
            # garbage collected.
            self.cache[verb, typ] = action
102 |
103 |
class ThreadLocalCache(SimpleCache):
    """
    Avoids threads conflicting while looking up rules by keeping the cache in thread local
    storage.

    You can also prevent this by looking up rules during module loading.
    """

    def __init__(self):
        self._local = threading.local()

    @property
    def cache(self):
        # Lazily create one dict per thread on first access.
        try:
            return self._local.cache
        except AttributeError:
            self._local.cache = {}
            return self._local.cache
123 |
124 |
class RLockCache(SimpleCache):
    """
    Uses a re-entrant lock to ensure only one thread is touching rules at a time.

    ``timeout`` is passed to ``RLock.acquire``; -1 (the default) waits forever.
    """

    def __init__(self, timeout=-1):
        self._rlock = threading.RLock()
        # Bug fix: the timeout argument was previously ignored and hard-coded
        # to -1; honor the caller's value.
        self._timeout = timeout
        self.cache = {}

    def __enter__(self):
        if not self._rlock.acquire(timeout=self._timeout):
            raise TypeError("acquire failed to acquire a lock")
        return self

    def __exit__(self, e_typ, e_val, e_tb):
        self._rlock.release()
142 |
--------------------------------------------------------------------------------
/json_syntax/errors.py:
--------------------------------------------------------------------------------
class _Context:
    """
    Stash contextual information in an exception. As we don't know exactly when an exception
    is displayed to a user, this class tries to keep it always up to date.

    Instances replace the exception's first argument and render as the original
    message plus the accumulated context trail (innermost fragment last).
    """

    __slots__ = ("original", "context", "lead")

    def __init__(self, original, lead, context):
        # original: the exception's first argument before we touched it.
        # lead: separator text ("; at " or "At ") before the context trail.
        # context: fragments appended innermost-first, rendered in reverse.
        self.original = original
        self.lead = lead
        self.context = [context]

    def __str__(self):
        return "{}{}{}".format(
            self.original, self.lead, "".join(map(str, reversed(self.context)))
        )

    def __repr__(self):
        return repr(self.__str__())

    @classmethod
    def add(cls, exc, context):
        """Attach ``context`` to ``exc``, reusing an existing _Context if present."""
        args = exc.args
        if args and isinstance(args[0], cls):
            args[0].context.append(context)
            return
        args = list(exc.args)
        if args:
            args[0] = cls(args[0], "; at ", context)
        else:
            args.append(cls("", "At ", context))
        exc.args = tuple(args)
36 |
37 |
class ErrorContext:
    """
    Context manager that injects location information into any exception that
    escapes its body.

    >>> with ErrorContext('.foo'):
    ...     with ErrorContext('[0]'):
    ...         with ErrorContext('.qux'):
    ...             1 / 0
    Traceback (most recent call last):
    ZeroDivisionError: division by zero; at .foo[0].qux

    Nested contexts compose: as the exception propagates outward, each
    enclosing ErrorContext prepends its fragment immediately after ``; at ``,
    so the outermost fragment appears first. This works by mutating
    ``exc.args`` and therefore won't help with exceptions (like OSError) that
    ignore such changes; the cost when no exception occurs is negligible.

    For simplicity, no whitespace is injected between fragments; to represent
    names, consider surrounding them with angle brackets.
    """

    def __init__(self, *context):
        self.context = context

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        if exc_value is not None:
            _Context.add(exc_value, "".join(self.context))
79 |
80 |
def err_ctx(context, func):
    """
    Execute a callable, decorating exceptions raised with error context.

    Equivalent to running ``func()`` inside ``with ErrorContext(context):``.
    """
    try:
        return func()
    except Exception as exc:
        _Context.add(exc, context)
        raise
95 |
--------------------------------------------------------------------------------
/json_syntax/extras/README.md:
--------------------------------------------------------------------------------
1 | # The flags rule
2 |
3 | This rule lets you represent specific enums as strings, without converting *every* Enum class the way the `faux_enums` rule does.
4 |
5 | ## Demonstrates
6 |
7 | * How to write a rule
8 | * How to write an action
9 | * How to write a fake type that's compatible with `typing.Union`
10 |
11 | ## Caveats
12 |
13 | * Requires Python 3.7
14 | * A user could mistakenly create a Flag instance
15 | * You'd probably be better off using enums
16 |
--------------------------------------------------------------------------------
/json_syntax/extras/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Examples of additional rules are in this directory.
3 | """
4 |
--------------------------------------------------------------------------------
/json_syntax/extras/dynamodb.py:
--------------------------------------------------------------------------------
1 | """
2 | While the main suite is fairly complex, it's really not hard to construct a small, useful
3 | translation.
4 |
5 | AWS's DynamoDB decorates values to represent them in JSON. The rules for the decorations are
6 | fairly simple, and we'd like to translate to and from Python objects.
7 |
8 | Dynamo values look like this:
9 |
10 | {"BOOL": true}
11 | {"L": [{"N": "1.5"}, {"S": "apple"}]}
12 |
13 | We will generate rules to convert Python primitive types, lists and attrs classes into
14 | Dynamo types.
15 |
16 | This will special case the kinds of sets Dynamo handles. In keeping with the principle of
17 | least astonishment, it won't convert, e.g. ``Set[MyType]`` into a Dynamo list. This will
18 | just fail because Dynamo doesn't actually support that. You could add a rule if that's the
19 | correct semantics.
20 |
21 | For boto3 users: you must use the **client**, not the resource.
22 |
23 | ddb = boto3.client('dynamodb')
24 | ddb.put_item(TableName='chair', Item=...)
25 |
26 | The ``boto3.resource('dynamodb').Table`` is already doing a conversion step we don't want.
27 |
28 | Ref: https://docs.aws.amazon.com/amazondynamodb/latest/APIReference/API_AttributeValue.html#DDB-Type-AttributeValue-NS # noqa
29 | """
30 |
31 | from json_syntax.helpers import (
32 | issub_safe,
33 | NoneType,
34 | has_origin,
35 | get_origin,
36 | STR2PY,
37 | PY2STR,
38 | )
39 | from json_syntax.product import build_attribute_map
40 | from json_syntax.string import stringify_keys
41 | from json_syntax.ruleset import SimpleRuleSet
42 |
43 | import base64 as b64
44 | from decimal import Decimal
45 | from enum import Enum
46 | from functools import partial
47 | from math import isfinite
48 | from numbers import Real
49 | from typing import Union
50 |
# Verb names used by the DynamoDB rule set.
DDB2PY = "dynamodb_to_python"
PY2DDB = "python_to_dynamodb"
# Dynamo map keys must be strings; map each verb to the matching
# string-conversion verb used for dict keys.
_STRING_ACTIONS = {DDB2PY: STR2PY, PY2DDB: PY2STR}
54 |
55 |
def booleans(verb, typ, ctx):
    """
    A rule to represent boolean values as Dynamo booleans.
    """
    if typ == bool:
        return {DDB2PY: decode_boolean, PY2DDB: encode_boolean}.get(verb)
66 |
67 |
def numbers(verb, typ, ctx):
    """
    A rule to represent numeric values as Dynamo numbers. Any number type should work,
    however both Decimal and float support NaN and infinity and I haven't tested these in
    Dynamo.
    """
    # bool subclasses int, so it must be excluded explicitly.
    if typ == bool or not issub_safe(typ, (Decimal, Real)):
        return
    if verb == PY2DDB:
        return encode_number
    if verb == DDB2PY:
        return partial(decode_number, typ=typ)
80 |
81 |
def strings(verb, typ, ctx):
    """
    A rule to represent string values as Dynamo strings.
    """
    if typ != str:
        return
    return {DDB2PY: decode_string, PY2DDB: encode_string}.get(verb)
92 |
93 |
def enums(verb, typ, ctx):
    "Rule to convert between enumerated types and strings."
    if not issub_safe(typ, Enum):
        return
    if verb == PY2DDB:
        return encode_enum
    if verb == DDB2PY:
        return partial(decode_enum, typ=typ)
101 |
102 |
def binary(verb, typ, ctx):
    """
    A rule to represent bytes as Dynamo binary values.
    """
    if typ != bytes:
        return
    return {DDB2PY: decode_binary, PY2DDB: encode_binary}.get(verb)
113 |
114 |
def lists(verb, typ, ctx):
    """
    A rule to represent lists (and homogeneous ``Tuple[X, ...]``) as Dynamo
    list values.
    """
    if has_origin(typ, list, num_args=1):
        (elem,) = typ.__args__
    elif has_origin(typ, tuple, num_args=2):
        elem, tail = typ.__args__
        if tail is not Ellipsis:
            return
    else:
        return
    elem_action = ctx.lookup(verb=verb, typ=elem)
    if verb == DDB2PY:
        return partial(decode_list, inner=elem_action, typ=get_origin(typ))
    if verb == PY2DDB:
        return partial(encode_list, inner=elem_action)
132 |
133 |
def dicts(verb, typ, ctx):
    """
    A rule to represent dicts as Dynamo map values; keys are converted with
    the matching string verb.
    """
    if verb not in _STRING_ACTIONS or not has_origin(typ, dict, num_args=2):
        return
    key_typ, val_typ = typ.__args__
    key_action = ctx.lookup(verb=_STRING_ACTIONS[verb], typ=key_typ)
    val_action = ctx.lookup(verb=verb, typ=val_typ)
    if verb == DDB2PY:
        return partial(
            decode_dict, inner_key=key_action, inner_val=val_action, con=get_origin(typ)
        )
    if verb == PY2DDB:
        return partial(encode_dict, inner_key=key_action, inner_val=val_action)
149 |
150 |
def sets(verb, typ, ctx):
    """
    A rule to represent sets. Will only use specialized Dynamo sets, to abide by
    principle of least astonishment.

    Valid python types include Set[Decimal], Set[str], Set[bytes], or FrozenSet for
    any of these. Also, any number that converts from Decimal and converts to a
    decimal if str is called should work.
    """
    if not has_origin(typ, (set, frozenset), num_args=1):
        return
    (member,) = typ.__args__
    con = get_origin(typ)

    if member == bytes:
        if verb == DDB2PY:
            return partial(decode_binary_set, con=con)
        if verb == PY2DDB:
            return encode_binary_set

    if member != bool and issub_safe(member, (Decimal, Real)):
        if verb == DDB2PY:
            return partial(decode_number_set, elem=member, con=con)
        if verb == PY2DDB:
            return encode_number_set

    if member == str:
        if verb == DDB2PY:
            return partial(decode_string_set, con=con)
        if verb == PY2DDB:
            return encode_string_set
181 |
182 |
def attrs(verb, typ, ctx):
    """
    A rule handling attrs-style classes; no attrs hooks are supported.
    """
    attribute_map = build_attribute_map(verb, typ, ctx)
    if attribute_map is None:
        return
    if verb == PY2DDB:
        return partial(encode_map, inner_map=attribute_map)
    if verb == DDB2PY:
        return partial(decode_map, inner_map=attribute_map, con=typ)
195 |
196 |
def nulls(verb, typ, ctx):
    """
    A rule to represent ``None`` as a Dynamo null value.
    """
    if typ != NoneType:
        return
    if verb == PY2DDB:
        return encode_null
    if verb == DDB2PY:
        return decode_null
207 |
208 |
def optionals(verb, typ, ctx):
    """
    Handle ``Optional[inner]`` fields by passing ``None`` straight through and
    delegating everything else to the inner type's action.
    """
    if not has_origin(typ, Union, num_args=2):
        return
    args = typ.__args__
    if NoneType not in args:
        return
    inner = None
    for candidate in args:
        if candidate is not NoneType:
            inner = candidate
    if inner is None:
        raise TypeError("Could not find inner type for Optional: " + str(typ))
    inner = ctx.lookup(verb=verb, typ=inner)
    if verb == DDB2PY:
        return partial(decode_optional, inner=inner)
    elif verb == PY2DDB:
        return partial(encode_optional, inner=inner)
229 |
230 |
class DynamodbRuleSet(SimpleRuleSet):
    def dynamodb_to_python(self, typ):
        """
        Look up a function converting a DynamoDB attribute value to a Python object.

        Provided for completeness; most callers want ddb_item_to_python.
        """
        return self.lookup(verb=DDB2PY, typ=typ)

    def python_to_dynamodb(self, typ):
        """
        Look up a function converting a Python object to a DynamoDB attribute value.

        Provided for completeness; most callers want python_to_ddb_item or ad_hoc.
        """
        return self.lookup(verb=PY2DDB, typ=typ)

    def ddb_item_to_python(self, typ):
        """
        Look up a function converting a DynamoDB Item (the plain attribute map
        returned by e.g. ``get_item``) to a Python object.

        ``typ`` must be an attrs class; that is not checked here.

        Usage::

            rs = dynamodb_ruleset()
            response = client.get_item(TableName='my_table',
                                       Key=rs.ad_hoc(my_key='some_string'))
            decoder = rs.ddb_item_to_python(MyAttrsType)
            result = decoder(response['Item'])
        """
        decode = self.lookup(verb=DDB2PY, typ=typ)
        return partial(wrap_item, inner=decode)

    def python_to_ddb_item(self, typ):
        """
        Look up a function converting a Python object to a DynamoDB Item.

        ``typ`` must be an attrs class; that is not checked here.

        Usage::

            rs = dynamodb_ruleset()
            encoder = rs.python_to_ddb_item(MyAttrsType)
            client.put_item(TableName='my_table', Item=encoder(my_item))
        """
        encode = self.lookup(verb=PY2DDB, typ=typ)
        return partial(unwrap_item, inner=encode)

    def ad_hoc(self, _key_prefix="", **kw):
        """
        Encode an ad hoc set of keyword arguments for the various DynamoDB APIs.

        A plain value is encoded according to its runtime type. A tuple argument
        is read as ``(value, type)`` and encoded using that type; container types
        must be fully specified (for empty dicts or lists any inner type will do,
        e.g. ``({}, Dict[str, str])``).

        Example::

            rs = dynamodb_ruleset()
            client.update_item(
                TableName='my_table',
                Key=rs.ad_hoc(my_hash_key='some_string', my_int_key=77),
                UpdateExpression="SET counter=:my_int, info=:my_class, num=:my_float",
                ExpressionAttributeValue=rs.ad_hoc(
                    ':',  # indicates that keys are prefixed with :
                    my_int=5,
                    my_class=(instance, MyClass),
                    my_float=3.3,
                )
            )
        """
        encoded = {}
        for name, val in kw.items():
            if isinstance(val, tuple):
                value, typ = val[0], val[1]
            else:
                value, typ = val, type(val)
            encoded[_key_prefix + name] = self.python_to_dynamodb(typ)(value)
        return encoded
312 |
313 |
def dynamodb_ruleset(
    strings=strings,
    numbers=numbers,
    booleans=booleans,
    binary=binary,
    lists=lists,
    attrs=attrs,
    enums=enums,
    sets=sets,
    dicts=dicts,
    optionals=optionals,
    extras=(),
    custom=DynamodbRuleSet,
    cache=None,
):
    """
    Construct a RuleSet that migrates data to and from DynamoDB.

    Each keyword argument overrides one standard rule; ``extras`` appends
    additional rules and ``custom`` substitutes the ruleset class.
    """
    rules = (
        strings,
        numbers,
        booleans,
        binary,
        lists,
        attrs,
        enums,
        sets,
        dicts,
        stringify_keys,
        optionals,
        nulls,
    )
    return custom(*(rules + tuple(extras)), cache=cache)
348 |
349 |
def desigil(value, **sigils):
    """
    Parse a ``{sigil: value}`` expression, returning the ``(sigil, value)``
    pair found inside; raise ValueError for any other shape.
    """
    if isinstance(value, dict) and len(value) == 1:
        for sig, typ in sigils.items():
            if sig not in value:
                continue
            inner = value[sig]
            if not isinstance(inner, typ):
                raise ValueError(
                    "This Dynamo value {} must have a member encoded as type {}".format(
                        sig, typ.__name__
                    )
                )
            return sig, inner
    # Report the first accepted sigil in the error message.
    sig, typ = next(iter(sigils.items()))
    raise ValueError(
        "This Dynamo value must be encoded as a single-item dict {%r: %s}"
        % (sig, typ.__name__)
    )
374 |
375 |
def decode_optional(value, inner):
    """Decode ``value`` via ``inner`` unless it is the Dynamo null marker."""
    try:
        desigil(value, NULL=bool)
    except ValueError:
        pass
    else:
        return None
    return inner(value)
383 |
384 |
def encode_optional(value, inner):
    """Encode ``None`` as the Dynamo null marker, anything else via ``inner``."""
    return {"NULL": True} if value is None else inner(value)
390 |
391 |
def decode_null(value):
    # Validate the Dynamo null marker {"NULL": <bool>}; desigil raises
    # ValueError for anything else. The decoded result is always None.
    desigil(value, NULL=bool)
    return None
395 |
396 |
def encode_null(value):
    """Encode a null value; anything truthy is rejected with ValueError."""
    if value:
        raise ValueError("{} is not None".format(value))
    return {"NULL": True}
402 |
403 |
def decode_boolean(value):
    # Unpack {"BOOL": <bool>}; desigil raises ValueError for anything else.
    _, value = desigil(value, BOOL=bool)
    return value
407 |
408 |
def encode_boolean(value):
    """Encode the truthiness of ``value`` as a Dynamo BOOL."""
    return {"BOOL": True if value else False}
411 |
412 |
# Strict base64 decoder; validate=True rejects characters outside the alphabet.
b64decode = partial(b64.b64decode, validate=True)
414 |
415 |
def b64encode(value):
    """Base64-encode ``value`` and return the result as an ASCII string."""
    encoded = b64.b64encode(value)
    return encoded.decode("ASCII")
418 |
419 |
def decode_binary(value):
    # Accept {"B": ...} carrying either str or bytes, then strictly base64-decode.
    _, value = desigil(value, B=(str, bytes))
    return b64decode(value)
423 |
424 |
def encode_binary(value):
    # Dynamo binary values travel as base64 text under the "B" sigil.
    return {"B": b64encode(value)}
427 |
428 |
def decode_number(value, typ):
    # Accept a numeric string under "N" or, leniently, under "S"; either way the
    # raw string is handed to the target type's constructor.
    _, value = desigil(value, N=str, S=str)
    return typ(value)
432 |
433 |
434 | def _encode_number(value):
435 | if not isfinite(value):
436 | # We could employ a string type here, but this could put us in a corner if we
437 | # try to use number sets...
438 | raise ValueError("Can't encode non-finite values in Dynamodb")
439 | if isinstance(value, (int, float, Decimal)):
440 | return str(value)
441 | else:
442 | # This is all the Real interface guarantees us. It's a stretch using Fraction in
443 | # Dynamo.
444 | return str(float(value))
445 |
446 |
def encode_number(value):
    # Dynamo numbers travel as decimal strings under the "N" sigil.
    return {"N": _encode_number(value)}
449 |
450 |
def decode_string(value):
    # Unpack {"S": <str>} and return the contained string.
    _, value = desigil(value, S=str)
    return value
454 |
455 |
def encode_string(value):
    """Encode ``value`` as a Dynamo string, coercing via ``str``."""
    text = str(value)
    return {"S": text}
458 |
459 |
def decode_enum(value, typ):
    # Enums are stored by member *name*; typ[name] looks the member back up.
    _, value = desigil(value, S=str)
    return typ[value]
463 |
464 |
def encode_enum(value):
    """Encode an enum member by its name as a Dynamo string."""
    name = value.name
    return {"S": name}
467 |
468 |
def decode_list(value, inner, typ):
    # Unpack {"L": [...]}, decode each element with ``inner``, and rebuild the
    # concrete container ``typ``.
    _, value = desigil(value, L=list)
    return typ(map(inner, value))
472 |
473 |
def encode_list(value, inner):
    """Encode an iterable as a Dynamo list, encoding each element via ``inner``."""
    return {"L": [inner(elem) for elem in value]}
476 |
477 |
def decode_dict(value, inner_key, inner_val, con):
    # Unpack {"M": {...}}; decode keys and values independently, then rebuild
    # the target mapping type ``con`` from the resulting pairs.
    _, value = desigil(value, M=dict)
    return con(((inner_key(key), inner_val(val)) for key, val in value.items()))
481 |
482 |
def encode_dict(value, inner_key, inner_val):
    """Encode a mapping as a Dynamo map, encoding keys and values separately."""
    out = {}
    for key, val in value.items():
        out[inner_key(key)] = inner_val(val)
    return {"M": out}
485 |
486 |
def decode_map(value, inner_map, con):
    """
    Decode a Dynamo map into an instance of ``con`` (an attrs-style class).

    ``inner_map`` is a sequence of Attribute descriptors; each key present is
    decoded with the attribute's ``inner`` action. A missing required key
    raises KeyError.
    """
    _, value = desigil(value, M=dict)
    args = {}
    for attr in inner_map:
        try:
            arg = value[attr.name]
        except KeyError:
            if attr.is_required:
                # Name the missing attribute -- a bare "Missing key" was undebuggable.
                raise KeyError("Missing required key {!r}".format(attr.name)) from None
        else:
            args[attr.name] = attr.inner(arg)
    # NOTE(review): kwargs use attr.name, but Attribute.init_name exists to strip
    # leading underscores for attrs constructors -- confirm attr.name is intended.
    return con(**args)
499 |
500 |
def encode_map(value, inner_map):
    """Encode an attrs-style instance as a Dynamo map, omitting default values."""
    encoded = {}
    for attr in inner_map:
        field = getattr(value, attr.name)
        if field != attr.default:
            encoded[attr.name] = attr.inner(field)
    return {"M": encoded}
509 |
510 |
def decode_binary_set(value, con):
    # Unpack {"BS": [...]} and base64-decode each element into the set type ``con``.
    _, value = desigil(value, BS=list)
    return con(map(b64decode, value))
514 |
515 |
def encode_binary_set(value):
    """Encode a set of bytes values as a Dynamo binary set."""
    return {"BS": [b64encode(elem) for elem in value]}
518 |
519 |
def decode_number_set(value, con, elem):
    # Unpack {"NS": [...]}; each numeric string is fed to the element type
    # ``elem`` and collected into the set type ``con``.
    _, value = desigil(value, NS=list)
    return con(map(elem, value))
523 |
524 |
def encode_number_set(value):
    """Encode a set of numbers as a Dynamo number set."""
    return {"NS": [_encode_number(elem) for elem in value]}
527 |
528 |
def decode_string_set(value, con):
    # Unpack {"SS": [...]} and coerce each element via str into ``con``.
    _, value = desigil(value, SS=list)
    return con(map(str, value))
532 |
533 |
def encode_string_set(value):
    """Encode a set of strings as a Dynamo string set, coercing via ``str``."""
    return {"SS": [str(elem) for elem in value]}
536 |
537 |
def wrap_item(item, inner):
    """Treat a bare DynamoDB Item as a map value and decode it with ``inner``."""
    wrapped = {"M": item}
    return inner(wrapped)
540 |
541 |
def unwrap_item(value, inner):
    # Encode via ``inner`` (which yields {"M": ...}) and strip the sigil so the
    # result can be used directly as a DynamoDB Item.
    value = inner(value)
    _, item = desigil(value, M=dict)
    return item
546 |
--------------------------------------------------------------------------------
/json_syntax/extras/flags.py:
--------------------------------------------------------------------------------
1 | """
2 | This module constructs its own fake type and a rule to support it.
3 |
4 | This lets you construct a quick set of enums that are represented as strings.
5 | """
6 |
7 | from ..helpers import JSON2PY, PY2JSON, INSP_JSON, INSP_PY, STR2PY, PY2STR, INSP_STR
8 | from functools import partial
9 |
10 |
class Flag(type):
    """
    A quick way to declare a string-only flag "type".

    Subclassing ``type`` is what makes instances acceptable inside
    ``typing.Union``; each Flag instance is itself a (empty) class.

    Thanks to __class_getitem__, you can invoke this as ``Flag['foo', 'bar', 'etc']``
    but this requires Python 3.7!
    """

    def __new__(cls, *args, **kwds):
        """
        Build an empty class named after ``cls``; required because we subclass
        ``type`` so that instances can live inside a Union.
        """
        return super().__new__(cls, cls.__name__, (), {})

    def __init__(self, *elems):
        """Validate and record the permitted flag strings."""
        if not elems:
            raise TypeError("Flag must be called with at least one string argument.")
        if not all(isinstance(elem, str) for elem in elems):
            raise TypeError("Flag elements must all be strings.")
        self.elems = frozenset(elems)
        if len(self.elems) != len(elems):
            raise TypeError("Duplicate elements are prohibited.")

    def __class_getitem__(cls, elems):
        if isinstance(elems, tuple):
            return cls(*elems)
        return cls(elems)

    def __repr__(self):
        inner = ", ".join(map(repr, self.elems))
        return "{}[{}]".format(self.__class__.__name__, inner)
46 |
47 |
48 | def _check_flag(elems, value):
49 | """
50 | Checks that a value is a member of a set of flags.
51 |
52 | Note that we use a top-level function and `partial`. The trouble with lambdas or local
53 | defs is that they can't be pickled because they're inaccessible to the unpickler.
54 |
55 | If you don't intend to pickle your encoders, though, they're completely fine to use in
56 | rules.
57 | """
58 | return isinstance(value, str) and value in elems
59 |
60 |
61 | def _convert_flag(elems, value):
62 | """
63 | Checks the value is in elems and returns it.
64 | """
65 | if value in elems:
66 | return value
67 | else:
68 | raise ValueError(
69 | "Expect {!r} to be one of {}".format(value, ", ".join(map(repr, elems)))
70 | )
71 |
72 |
def flags(*, verb, typ, ctx):
    """
    A rule accepting a fixed set of strings (the fake ``Flag[...]`` type)
    without converting them to a real Enum.
    """
    if not isinstance(typ, Flag):
        return
    if verb in (INSP_JSON, INSP_PY, INSP_STR):
        return partial(_check_flag, typ.elems)
    if verb in (JSON2PY, PY2JSON, STR2PY, PY2STR):
        return partial(_convert_flag, typ.elems)
86 |
--------------------------------------------------------------------------------
/json_syntax/extras/loose_dates.py:
--------------------------------------------------------------------------------
1 | from json_syntax.helpers import JSON2PY, PY2JSON, INSP_JSON, INSP_PY
2 | from json_syntax.action_v1 import check_parse_error, check_has_type
3 |
4 | from datetime import date, datetime
5 | from functools import partial
6 |
7 | """
8 | This example is of working around common date issues.
9 |
10 | The standard rules use the standard library's fromisoformat and isoformat methods, to abide
11 | by the principle of least surprise.
12 |
13 | But it's pretty common to have to consume a datetime in a date field, and it may also be the
14 | case that you want to discard the timestamp.
15 |
16 | (Note: requires python3.7 or greater.)
17 | """
18 |
19 |
def convert_date_loosely(value):
    """Parse an ISO date or datetime string and keep only the date part."""
    parsed = datetime.fromisoformat(value)
    return parsed.date()
22 |
23 |
def iso_dates_loose(verb, typ, ctx):
    """
    Like the standard ISO date rule, but when decoding a ``date`` it tolerates
    a full datetime string and discards the time portion.
    """
    if typ != date:
        return
    if verb == PY2JSON:
        return date.isoformat
    if verb == JSON2PY:
        return convert_date_loosely
    if verb == INSP_PY:
        return partial(check_has_type, typ=date)
    if verb == INSP_JSON:
        return partial(
            check_parse_error,
            parser=convert_date_loosely,
            error=(TypeError, ValueError),
        )
38 |
--------------------------------------------------------------------------------
/json_syntax/helpers.py:
--------------------------------------------------------------------------------
1 | from .types import ( # noqa
2 | has_origin,
3 | get_origin,
4 | is_generic,
5 | issub_safe,
6 | NoneType,
7 | resolve_fwd_ref,
8 | python_minor,
9 | )
10 | from .errors import ErrorContext, err_ctx # noqa
11 |
12 | JSON2PY = "json_to_python"
13 | PY2JSON = "python_to_json"
14 | INSP_JSON = "inspect_json"
15 | INSP_PY = "inspect_python"
16 | INSP_STR = "inspect_string"
17 | STR2PY = "string_to_python"
18 | PY2STR = "python_to_string"
19 | PATTERN = "show_pattern"
20 | SENTINEL = object()
21 |
22 |
def identity(value):
    """Return ``value`` unchanged."""
    return value
25 |
--------------------------------------------------------------------------------
/json_syntax/pattern.py:
--------------------------------------------------------------------------------
1 | """
2 | Patterns to represent roughly what syntax will look like, and also to investigate whether
3 | unions are potentially ambiguous.
4 | """
5 | from functools import partial, singledispatch
6 | from itertools import cycle, islice, product, zip_longest
7 | from enum import IntEnum
8 |
try:
    import simplejson as json
except ImportError:
    import json

    def _def(obj):
        return obj.for_json()

    # Plain json lacks simplejson's for_json support; route unknown objects
    # through the named function (not a lambda) so dump/dumps stay picklable.
    _args = {"default": _def}
else:
    _args = {"for_json": True}

dump = partial(json.dump, **_args)
dumps = partial(json.dumps, **_args)
23 |
24 |
class Matches(IntEnum):
    """
    This determines the degree to which one pattern can shadow another causing potential
    ambiguity.

    Meaning:

     * always: The pattern always shadows the other pattern.
     * sometimes: The pattern is known to sometimes shadow another pattern.
     * potential: It's not possible to prove the pattern won't shadow the other pattern.
     * never: The pattern will never shadow the other pattern.

    In determining ambiguity, a `sometimes` threshold is often permissible. For example, if
    you have `Union[date, str]` then properly formatted dates will sometimes shadow strings.
    That's probably okay if you want special handling for dates.

    But in `Union[str, date]`, the `str` will always match and thus no dates will ever be
    recognized.
    """

    # Numeric ordering matters: matches_all / matches_any use max() / min() over
    # these values, so lower numbers mean stronger shadowing.
    always = 0
    sometimes = 1
    potential = 2
    never = 3
49 |
50 |
# Combine sub-results: "all" keeps the weakest guarantee (max, since never == 3),
# "any" the strongest (min). The defaults handle empty iterables.
matches_all = partial(max, default=Matches.always)
matches_any = partial(min, default=Matches.never)
53 |
54 |
def matches(left, right, ctx=None):
    """
    Given two `Pattern` objects, determine if the `left` pattern shadows the `right`.

    Returns a `Matches` instance.
    """
    if ctx is None:
        ctx = set()
    else:
        # This pair is already being compared further up the stack; answer
        # `potential` to break the recursion.
        if (left, right) in ctx:
            return Matches.potential
        ctx.add((left, right))
    # NOTE(review): the pair is only recorded on recursive calls (ctx given),
    # not on the outermost call -- confirm that is intentional.
    result = matches_any(
        left._matches(right, ctx)
        for left, right in product(left._unpack(), right._unpack())
    )
    return result
72 |
73 |
class Pattern:
    """Base class for syntax patterns; subclasses implement ``_matches``."""

    def _matches(self, other, ctx):
        raise NotImplementedError()

    def _unpack(self):
        # Most patterns stand for themselves; Alternatives overrides this to
        # expand into its branches.
        return [self]

    def __repr__(self):
        return dumps(self, indent=2)
83 |
84 |
class Atom(Pattern):
    """A pattern standing for a single JSON literal value."""

    def __init__(self, value):
        self.value = value

    def for_json(self):
        return self.value

    def _matches(self, other, ctx):
        if isinstance(other, Atom) and self.value == other.value:
            return Matches.always
        return Matches.never
98 |
99 |
class String(Pattern):
    """
    Rather than try to analyze regular expressions, we just name common string patterns,
    and have a list of known ambiguities.

    We're deliberately not trying to analyze regexes here as we assume you would want to
    use specialize logic to make such fine distinctions.
    """

    def __init__(self, name, arg=None):
        # name labels the pattern (e.g. "str", "exact"); arg carries the literal
        # text for "exact" patterns. _matches below also calls other.arg, so named
        # patterns apparently may carry a predicate callable.
        self.name = name
        self.arg = arg

    def for_json(self):
        if self.name == "exact":
            return "=" + self.arg
        else:
            return self.name

    @classmethod
    def exact(cls, string):
        assert isinstance(string, str)
        return cls("exact", string)

    def _matches(self, other, ctx):
        "Check whether this pattern will match the other."
        # Order of checks matters: the generic "str" cases must be decided first.
        if not isinstance(other, String):
            return Matches.never
        if self.name == "str":
            return Matches.always  # Strings always overshadow
        elif other.name == "str":
            return Matches.sometimes  # Strings are sometimes shadowed
        if self.name == "exact":
            if other.name == "exact":
                return Matches.always if self.arg == other.arg else Matches.never
            elif other.arg is None:
                return Matches.potential
            else:
                # NOTE(review): other.arg is invoked as a predicate over the exact
                # text here -- confirm named patterns supply callables.
                return Matches.always if other.arg(self.arg) else Matches.never
        return Matches.always if self.name == other.name else Matches.potential
140 |
141 |
class _Unknown(Pattern):
    """A placeholder pattern whose match result is fixed in advance."""

    def __init__(self, name, match):
        self._match = match
        self._name = name

    def _matches(self, other, ctx):
        return self._match

    def __repr__(self):
        return self._name
152 |
153 |
# Canonical singleton patterns used throughout the module.
String.any = String("str")
Number = Atom(0)
Null = Atom(None)
Bool = Atom(False)
# Missing never matches (stands for an absent element); Unknown can never be
# ruled out. NOTE(review): both reprs are "" -- possibly meant to be
# _Unknown("Missing", ...) and _Unknown("Unknown", ...); confirm.
Missing = _Unknown("", Matches.never)
Unknown = _Unknown("", Matches.potential)
160 |
161 |
class Alternatives(Pattern):
    """
    Used by the `show_pattern` verb to represent alternative patterns in unions.
    """

    def __init__(self, alts):
        self.alts = tuple(alts)
        assert all(isinstance(alt, Pattern) for alt in self.alts)

    def _unpack(self):
        # matches() consults the expanded branches rather than this node.
        return self.alts

    def _matches(self, other, ctx):
        raise NotImplementedError(
            "Didn't call unpack"
        )  # Should be bypassed by _unpack.

    def for_json(self):
        return ["alts"] + list(self.alts)
183 |
184 |
class Array(Pattern):
    """
    Pattern for JSON arrays, either homogeneous (one element pattern repeated
    any number of times) or exact (a fixed sequence of element patterns).
    """

    def __init__(self, elems, *, homog):
        self.elems = tuple(elems)
        assert all(isinstance(elem, Pattern) for elem in self.elems)
        self._homog = homog

    @classmethod
    def homog(cls, elem):
        """Array of any length whose elements all match ``elem``."""
        return cls((elem,), homog=True)

    @classmethod
    def exact(cls, elems):
        """Array matching exactly the given element patterns."""
        return cls(elems, homog=False)

    def _matches(self, other, ctx):
        if not isinstance(other, Array):
            return Matches.never
        if self._homog and not other.elems:
            return Matches.always
        left = self.elems
        right = other.elems
        # Cycle the homogeneous side so both compare element-for-element.
        if self._homog and not other._homog:
            left = islice(cycle(left), len(right))
        elif not self._homog and other._homog:
            right = islice(cycle(right), len(left))

        possible = matches_all(
            matches(l, r, ctx) for l, r in zip_longest(left, right, fillvalue=Missing)
        )
        if self._homog and other._homog:
            # Zero cases can't be distinguished match.
            possible = matches_any([Matches.sometimes, possible])
        return possible

    def for_json(self):
        out = list(self.elems)
        # Bug fix: this previously tested ``self.homog`` -- the bound classmethod,
        # which is always truthy -- so exact arrays also rendered a spurious "...".
        if self._homog:
            out.append("...")
        return out
224 |
225 |
class Object(Pattern):
    """
    Pattern for JSON objects: (key pattern, value pattern) pairs, either
    homogeneous (one repeated pair) or exact, mirroring Array.
    """

    def __init__(self, items, *, homog):
        self.items = tuple(items)
        valid = all(
            isinstance(key, Pattern) and isinstance(val, Pattern)
            for key, val in self.items
        )
        if not valid:
            raise TypeError("Keys and values must be patterns")
        self._homog = homog

    @classmethod
    def homog(cls, key, val):
        return cls(((key, val),), homog=True)

    @classmethod
    def exact(cls, items):
        return cls(items, homog=False)

    def _matches(self, other, ctx):
        if not isinstance(other, Object):
            return Matches.never
        if self._homog and not other.items:
            return Matches.always

        # NOTE(review): `and` short-circuits on Matches.always (== 0, falsy), so
        # the value comparison is skipped whenever the key comparison is `always`
        # -- confirm whether matches_all over (key, value) was intended.
        possible = matches_all(
            matches_any(
                matches(lk, rk, ctx) and matches(lv, rv, ctx) for rk, rv in other.items
            )
            for lk, lv in self.items
        )
        if self._homog and other._homog:
            # Two homogeneous objects both match the empty object.
            possible = matches_any([Matches.sometimes, possible])
        return possible

    def for_json(self):
        def jsonify(key):
            try:
                for_json = key.for_json
            except AttributeError:
                return key
            else:
                return for_json()

        out = {jsonify(k): v for k, v in self.items}
        if self._homog:
            out["..."] = "..."
        return out
276 |
277 |
@singledispatch
def is_ambiguous(pattern, threshold=Matches.always, _path=()):
    """
    Attempts to determine if alternatives within a pattern create ambiguities given
    a threshold. The `json_syntax.RuleSet.is_ambiguous` constructs the `Pattern` instances
    and calls this for you, though.

    If an ambiguity is found, this attempts to identify the path within the pattern to
    find it. (This feature isn't well tested, though.)
    """
    # Base case for unregistered types; concrete Pattern subclasses register below.
    raise TypeError("pattern must be a recognized subclass of Pattern.")
289 |
290 |
@is_ambiguous.register(Atom)
@is_ambiguous.register(String)
def _(pattern, threshold=Matches.always, _path=()):
    # Leaf patterns contain no alternatives, hence are never ambiguous.
    return ()
295 |
296 |
@is_ambiguous.register(_Unknown)
def _(pattern, threshold=Matches.always, _path=()):
    # An unknown that could match at or below the threshold is itself ambiguous.
    return (str(pattern),) if pattern._match <= threshold else ()
300 |
301 |
302 | def _any(iterable):
303 | for item in iterable:
304 | if bool(item):
305 | return item
306 | return ()
307 |
308 |
@is_ambiguous.register(Array)
def _(pattern, threshold=Matches.always, _path=()):
    # Recurse into the elements, extending the reported path with an array marker.
    _path += ("[]",)
    return _any(is_ambiguous(elem, threshold, _path) for elem in pattern.elems)
313 |
314 |
@is_ambiguous.register(Object)
def _(pattern, threshold=Matches.always, _path=()):
    # Recurse into the values, extending the reported path with each key.
    return _any(
        is_ambiguous(val, threshold, _path + (str(key),)) for key, val in pattern.items
    )
320 |
321 |
@is_ambiguous.register(Alternatives)
def _(pattern, threshold=Matches.always, _path=()):
    # An ambiguous pattern is one where an earlier pattern shadows a later pattern.
    alts = pattern.alts
    for i, early in enumerate(alts[:-1]):
        for late in alts[i + 1 :]:
            # Lower Matches values mean stronger shadowing (always == 0).
            if matches(early, late) <= threshold:
                return _path + ("alternative {}".format(i),)

    return ()
332 |
--------------------------------------------------------------------------------
/json_syntax/product.py:
--------------------------------------------------------------------------------
1 | """
2 | A module to help with product types in Python.
3 | """
4 |
5 | from .helpers import SENTINEL
6 | from .types import issub_safe, resolve_fwd_ref, rewrite_typevars
7 |
8 | _TypedDictMeta = None
9 | try:
10 | from typing import _TypedDictMeta
11 | except ImportError:
12 | try:
13 | from typing_extensions import _TypedDictMeta # noqa
14 | except ImportError:
15 | pass
16 |
17 |
18 | _attrs_missing_values = set()
19 | try:
20 | import attr
21 |
22 | _attrs_missing_values.add(attr.NOTHING)
23 | except ImportError:
24 | pass
25 | try:
26 | import dataclasses
27 |
28 | _attrs_missing_values.add(dataclasses.MISSING)
29 | except ImportError:
30 | pass
31 |
32 |
class Attribute:
    """
    Generic class to describe an attribute for a product type that can be represented as,
    e.g., a JSON map.

    An Attribute is associated with an action, specifically, its "inner" field directs how
    to process the inside type, not necessarily what the inside type is.

    See the various build_* commands to generate attribute maps. (These are really just
    lists of Attribute instances.)

    Fields:
       name: the attribute name
       init_name: the constructor name
       inner: the action to take given the verb and the attribute's type
       default: a static default Python value
       is_required: a boolean indicating if the attribute is required
    """

    __slots__ = ("name", "typ", "inner", "default", "is_required")

    def __init__(self, name, typ, is_required, default=SENTINEL, inner=None):
        self.name = name
        self.typ = typ
        self.inner = inner
        self.default = default
        self.is_required = is_required

    @property
    def init_name(self):
        # The keyword argument name for the constructor; subclasses may differ.
        return self.name

    def __repr__(self):
        # Bug fix: the format string was empty ("".format(...)), so repr()
        # always produced "".
        return "<Attribute {} ({})>".format(
            self.name, "required" if self.is_required else "optional"
        )

    def long_repr(self):
        # Verbose variant for debugging; same empty-format-string fix as __repr__.
        return "<Attribute {} typ={} inner={} default={!r} required={}>".format(
            self.name, self.typ, self.inner, self.default, self.is_required
        )
74 |
75 |
class AttrsAttribute(Attribute):
    """Attribute for attrs classes, whose __init__ strips leading underscores."""

    @property
    def init_name(self):
        return self.name.lstrip("_")
80 |
81 |
def is_attrs_field_required(field):
    """
    Determine whether a field has neither a default value nor a default factory.
    """
    if field.default not in _attrs_missing_values:
        return False
    # attrs fields have no default_factory attribute at all; dataclass fields
    # always carry one (possibly MISSING).
    absent = object()
    factory = getattr(field, "default_factory", absent)
    if factory is absent:
        return True
    return factory in _attrs_missing_values
94 |
95 |
def attr_map(verb, outer, ctx, gen, typ_args=None):
    """
    Complete each Attribute yielded by ``gen`` against the ruleset context.

    Resolves forward references against ``outer``, substitutes type arguments,
    and looks up each attribute's action. Returns a tuple of Attributes, or
    raises TypeError listing every failure at once.
    """
    result = []
    failed = []
    for att in gen:
        if att.typ is not None:
            try:
                att.typ = resolve_fwd_ref(att.typ, outer)
            except TypeError:
                failed.append("resolve fwd ref {} for {}".format(att.typ, att.name))
            else:
                # Substitute concrete types for any type variables.
                att.typ = rewrite_typevars(att.typ, typ_args)
        if att.inner is None:
            att.inner = ctx.lookup(
                verb=verb, typ=resolve_fwd_ref(att.typ, outer), accept_missing=True
            )
        if att.inner is None:
            if att.typ is None:
                failed.append("get fallback for {}".format(att.name))
            else:
                failed.append("get {} for {}".format(att.typ, att.name))
        result.append(att)

    if failed:
        raise TypeError(
            "{}({}) failed while trying to: {}".format(verb, outer, ", ".join(failed))
        )
    return tuple(result)
123 |
124 |
def build_attribute_map(verb, typ, ctx, typ_args=None):
    """
    Examine an attrs or dataclass type and construct a list of attributes.

    Returns a list of Attribute instances, or None if the type is not an attrs or dataclass
    type.
    """
    try:
        fields, con = typ.__attrs_attrs__, AttrsAttribute
    except AttributeError:
        try:
            fields, con = typ.__dataclass_fields__, Attribute
        except AttributeError:
            return
        else:
            # __dataclass_fields__ is a dict keyed by name; attrs gives a tuple.
            fields = fields.values()

    return attr_map(
        verb,
        typ,
        ctx,
        gen=(
            con(
                name=field.name,
                typ=field.type,
                is_required=is_attrs_field_required(field),
                default=field.default,
            )
            for field in fields
            # Fields excluded from __init__ can't be restored, so skip them.
            if field.init
        ),
        typ_args=typ_args,
    )
158 |
159 |
def build_named_tuple_map(verb, typ, ctx):
    """
    Examine a named tuple type and construct a list of attributes.

    Returns a list of Attribute instances, or None if the type is not a named tuple.
    """
    if not issub_safe(typ, tuple):
        return
    try:
        # typing.NamedTuple classes expose per-field types here (older spelling).
        fields = typ._field_types
    except AttributeError:
        try:
            # collections.namedtuple only has _fields, with no type info.
            fields = typ._fields
        except AttributeError:
            return
        fields = [(name, None) for name in fields]
    else:
        fields = fields.items()

    defaults = {}
    try:
        # Both spellings are probed to cover different Python versions.
        defaults.update(typ._fields_defaults)
    except AttributeError:
        pass
    try:
        defaults.update(typ._field_defaults)
    except AttributeError:
        pass

    return attr_map(
        verb,
        typ,
        ctx,
        gen=(
            Attribute(
                name=name,
                typ=inner,
                is_required=name not in defaults,
                default=defaults.get(name, SENTINEL),
            )
            for name, inner in fields
        ),
        typ_args=None,  # A named tuple type can't accept generic arguments.
    )
204 |
205 |
def build_typed_dict_map(verb, typ, ctx):
    """
    Examine a TypedDict class and construct a list of attributes.

    Returns a list of Attribute instances, or None if the type is not a typed dict.
    """
    if (
        _TypedDictMeta is None  # no TypedDict support was importable
        or not issub_safe(typ, dict)
        or typ.__class__ is not _TypedDictMeta
    ):
        return

    return attr_map(
        verb,
        typ,
        ctx,
        gen=(
            # NOTE(review): every key is treated as required; total=False
            # TypedDicts are not distinguished here -- confirm.
            Attribute(name=name, typ=inner, is_required=True, default=SENTINEL)
            for name, inner in typ.__annotations__.items()
        ),
        typ_args=None,  # A typed dict can't accept generic arguments.
    )
229 |
--------------------------------------------------------------------------------
/json_syntax/ruleset.py:
--------------------------------------------------------------------------------
1 | from .cache import SimpleCache
2 | from .helpers import JSON2PY, PY2JSON, INSP_JSON, INSP_PY, PATTERN
3 | from . import pattern
4 |
5 | import logging
6 |
7 | logger = logging.getLogger(__name__)
8 | TRACE = 5
9 |
10 |
def trace(fmt, *args, _logger=logger, _TRACE=TRACE):
    """Log at TRACE level using str.format, sidestepping the logger's `style`."""
    if not _logger.isEnabledFor(_TRACE):
        return
    _logger.log(_TRACE, fmt.format(*args))
15 |
16 |
def set_trace(enabled=True):
    """Enable (or disable) TRACE-level logging for this module."""
    if enabled:
        logger.level = TRACE
    else:
        logger.level = logging.WARNING
19 |
20 |
class SimpleRuleSet:
    """
    This is the base of RuleSet and doesn't know anything about the standard verbs.

    A ruleset contains a series of rules that will be evaluated, in order, against types to
    attempt to construct encoders and decoders.

    It takes a list of rules; functions that accept a verb and type and return actions.

    The keyword argument `cache` can specify a custom rule cache.
    `json_syntax.cache.ThreadLocalCache` may be helpful if you are loading rules in a
    multi-threaded environment.
    """

    def __init__(self, *rules, cache=None):
        self.rules = rules
        self.cache = cache or SimpleCache()

    def lookup(self, verb, typ, accept_missing=False):
        """
        Find an action for ``verb`` and ``typ``: first the cache, then each rule
        in order, finally the ``fallback`` hook.

        Raises TypeError when nothing matches, unless ``accept_missing`` is set,
        in which case None is returned.
        """
        trace("lookup({!s}, {!r}): start", verb, typ)
        if typ is None:
            if accept_missing:
                # Typo fix: this log message previously read "fallabck".
                trace("lookup({!s}, {!r}): attempt fallback", verb, typ)
                typ = self.fallback(verb=verb, typ=typ)
            if typ is None:
                raise TypeError("Attempted to find {} for 'None'".format(verb))

        with self.cache.access() as cache:
            action = cache.get(verb=verb, typ=typ)
            if action is not None:
                trace("lookup({!s}, {!r}): cached", verb, typ)
                return action

            # Mark this (verb, typ) as in flight so recursive lookups get a
            # forward reference instead of recursing forever.
            forward = cache.in_flight(verb=verb, typ=typ)

            try:
                for rule in self.rules:
                    action = rule(verb=verb, typ=typ, ctx=self)
                    if action is not None:
                        cache.complete(verb=verb, typ=typ, action=action)
                        trace("lookup({!s}, {!r}): computed", verb, typ)
                        return action

                trace("lookup({!s}, {!r}): fallback", verb, typ)
                action = self.fallback(verb=verb, typ=typ)
                if action is not None:
                    cache.complete(verb=verb, typ=typ, action=action)
                    trace("lookup({!s}, {!r}): computed by fallback", verb, typ)
                    return action
            finally:
                # Always clear the in-flight marker, even on failure.
                cache.de_flight(verb=verb, typ=typ, forward=forward)

        if action is None and not accept_missing:
            raise TypeError("Failed: lookup({!s}, {!r})".format(verb, typ))

    def fallback(self, verb, typ):
        """Hook for subclasses: return an action when no rule matched, or None."""
        return None
78 |
79 |
class RuleSet(SimpleRuleSet):
    """
    A ruleset contains a series of rules that will be evaluated, in order, against types to
    attempt to construct encoders and decoders.

    It takes a list of rules; functions that accept a verb and type and return actions.

    The keyword argument `cache` can specify a custom rule cache.
    `json_syntax.cache.ThreadLocalCache` may be helpful if you are loading rules in a
    multi-threaded environment.

    The most important methods are generally `json_to_python` and `python_to_json`; these
    take a fully specified type and produce an encoder and decoder respectively.
    """

    def json_to_python(self, typ):
        """
        Constructs a function to decode JSON objects (dict, list, str, float, etc.) into
        a Python class for the given type.

        The function will accept a single argument, an object returned by `json.loads`
        or a similar method, and return the Python native instance.
        """
        return self.lookup(verb=JSON2PY, typ=typ)

    def python_to_json(self, typ):
        """
        Constructs a function to encode a Python class into JSON objects (dict, list,
        str, float, etc.) for the given type.

        The function will accept a single argument, a Python instance, and return an
        object that can be passed to `json.dumps` or a similar method.
        """
        return self.lookup(verb=PY2JSON, typ=typ)

    def inspect_json(self, typ):
        """
        Constructs a function that inspects a JSON object (dict, list, str, float, etc.)
        to determine if they match the pattern expected by the given type.

        The function will accept a single argument, an object returned by `json.loads`
        or a similar method, and return True if a call to the decoder function
        constructed by `json_to_python` is expected to succeed.

        Note that some objects that fail this test could nevertheless be converted by
        `json_to_python`.
        """
        return self.lookup(verb=INSP_JSON, typ=typ)

    def inspect_python(self, typ):
        """
        Constructs a function that inspects a value to determine if it matches a given
        type.

        The function will accept a single argument, a standard instance, and return True
        if a call to the encoder function generated by `python_to_json` is expected to
        succeed.

        Note that some objects that fail this test could nevertheless be converted by
        `python_to_json`.
        """
        return self.lookup(verb=INSP_PY, typ=typ)

    def show_pattern(self, typ):
        """
        Analyze a type to determine the structure it will have in its JSON
        representation if `python_to_json` is called against an instance.

        This method does not consider the impact of the `__json_pre_decode__` or
        `__json_post_decode__` hooks.

        It will return a `json_syntax.pattern.Pattern` instance.
        """
        return self.lookup(verb=PATTERN, typ=typ)

    def is_ambiguous(self, typ, threshold=pattern.Matches.always):
        """
        Determine if a type's representation as JSON is ambiguous according to rules
        specified in this ruleset.

        This should only be an issue if you are using `typing.Union` in your data.

        The `threshold` specifies the level below which a pattern is flagged as
        ambiguous.
        """
        pat = self.show_pattern(typ=typ)
        return pattern.is_ambiguous(pat, threshold=threshold)

    def fallback(self, verb, typ):
        """
        Subclasses may override this method to provide fallback handling when the type
        is not provided, or if no action is available for that type.

        *If the type is known but doesn't fit a standard rule, it's best to provide a
        custom rule.*

        A subclass must check the verb and type (which will be None when missing) and
        return a function that performs the task specified by the verb.
        """
        if verb == PATTERN:
            return pattern.Unknown
        else:
            # Bug fix: propagate the superclass result instead of discarding it.
            return super().fallback(verb, typ)
183 |
--------------------------------------------------------------------------------
/json_syntax/std.py:
--------------------------------------------------------------------------------
1 | from .helpers import (
2 | JSON2PY,
3 | PY2JSON,
4 | INSP_JSON,
5 | INSP_PY,
6 | PATTERN,
7 | STR2PY,
8 | PY2STR,
9 | INSP_STR,
10 | )
11 | from .types import (
12 | has_origin,
13 | get_origin,
14 | issub_safe,
15 | NoneType,
16 | )
17 | from .action_v1 import (
18 | check_collection,
19 | check_float,
20 | check_isinst,
21 | check_has_type,
22 | check_mapping,
23 | check_optional,
24 | check_parse_error,
25 | check_str_enum,
26 | convert_collection,
27 | convert_date,
28 | convert_datetime,
29 | convert_decimal_str,
30 | convert_enum_str,
31 | convert_float,
32 | convert_mapping,
33 | convert_none,
34 | convert_optional,
35 | convert_str_enum,
36 | convert_str_timedelta,
37 | convert_time,
38 | convert_timedelta_str,
39 | pass_faux_enum,
40 | )
41 | from . import pattern as pat
42 |
43 | from collections import OrderedDict
44 | from datetime import datetime, date, time, timedelta
45 | from decimal import Decimal
46 | from enum import Enum
47 | from functools import partial
48 | from typing import Union
49 |
50 | """
51 | These are standard rules to handle various types.
52 |
53 | All rules take a verb, a Python type and a context, which is generally a RuleSet. A rule
54 | returns a conversion function for that verb.
55 | """
56 |
57 |
def atoms(verb, typ, ctx):
    "Rule to handle atoms that translate trivially."
    if not issub_safe(typ, (str, int, NoneType)):
        return
    if verb in (JSON2PY, PY2JSON):
        if typ is NoneType:
            return convert_none
        # n.b. bool is a subclass of int, so it must be tested before int.
        for base in (str, bool, int):
            if typ == base:
                return base
    elif verb in (INSP_PY, INSP_JSON):
        for base in (NoneType, str, bool, int):
            if typ == base:
                return partial(check_isinst, typ=base)
    elif verb == PATTERN:
        for base, node in [
            (NoneType, pat.Null),
            (str, pat.String.any),
            (bool, pat.Bool),
            (int, pat.Number),
        ]:
            if typ == base:
                return node
80 |
81 |
def floats(verb, typ, ctx):
    """
    Rule to handle floats passing NaNs through unaltered.

    JSON technically recognizes integers and floats. Many JSON generators will represent
    floats with integral value as integers. Thus, this rule will convert both integers and
    floats in JSON to floats in Python.

    Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is
    not standard JSON.

    This rule simply treats them as regular float values. If you want to catch them, you can
    set ``allow_nan=False`` in ``json.dump()``.
    """
    if typ != float:
        return
    if verb in (JSON2PY, PY2JSON):
        return float
    if verb == INSP_PY:
        return partial(check_isinst, typ=float)
    if verb == INSP_JSON:
        # JSON integers are acceptable wherever a float is expected.
        return partial(check_isinst, typ=(int, float))
    if verb == PATTERN:
        return pat.Number
105 |
106 |
def floats_nan_str(verb, typ, ctx):
    """
    Rule to handle floats passing NaNs through as strings.

    Python's standard JSON libraries treat `nan` and `inf` as special constants, but this is
    not standard JSON.

    This rule converts special constants to string names.
    """
    if typ != float:
        return
    if verb == JSON2PY:
        return float
    if verb == PY2JSON:
        return convert_float
    if verb == INSP_PY:
        return partial(check_isinst, typ=float)
    if verb == INSP_JSON:
        return check_float
    if verb == PATTERN:
        return pat.Number
127 |
128 |
def decimals(verb, typ, ctx):
    """
    Rule to handle decimals natively.

    This rule requires that your JSON library has decimal support, e.g. simplejson.

    Other JSON processors may convert values to and from floating-point; if that's a
    concern, consider `decimals_as_str`.

    This rule will fail if passed a special constant.
    """
    if typ != Decimal:
        return
    if verb in (JSON2PY, PY2JSON):
        return Decimal
    if verb in (INSP_JSON, INSP_PY):
        return partial(check_isinst, typ=Decimal)
    if verb == PATTERN:
        return pat.Number
147 |
148 |
def decimals_as_str(verb, typ, ctx):
    """
    Rule to handle decimals as strings.

    This rule bypasses JSON library decimal support, e.g. simplejson.

    This rule will fail if passed a special constant.
    """
    if typ != Decimal:
        return
    if verb == JSON2PY:
        return Decimal
    if verb == PY2JSON:
        return convert_decimal_str
    if verb == INSP_PY:
        return partial(check_isinst, typ=Decimal)
    if verb in (INSP_JSON, PATTERN):
        # ArithmeticError covers decimal.InvalidOperation for unparsable strings.
        inspect = partial(check_parse_error, parser=Decimal, error=ArithmeticError)
        return pat.String("number", inspect) if verb == PATTERN else inspect
167 |
168 |
def iso_dates(verb, typ, ctx):
    """
    Rule to handle iso formatted datetimes and dates.

    This simply uses the `fromisoformat` and `isoformat` methods of `date` and `datetime`.

    There is a loose variant in the examples that will accept a datetime in a date. A
    datetime always accepts both dates and datetimes.
    """
    if typ not in (date, datetime, time, timedelta):
        return
    if verb == PY2JSON:
        return convert_timedelta_str if typ == timedelta else typ.isoformat
    if verb == INSP_PY:
        return partial(check_has_type, typ=typ)
    if verb in (JSON2PY, INSP_JSON, PATTERN):
        # Pick the parser matching the exact temporal type.
        parse = {
            date: convert_date,
            datetime: convert_datetime,
            time: convert_time,
            timedelta: convert_str_timedelta,
        }[typ]
        if verb == JSON2PY:
            return parse
        inspect = partial(
            check_parse_error, parser=parse, error=(TypeError, ValueError)
        )
        return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
201 |
202 |
def enums(verb, typ, ctx):
    "Rule to convert between enumerated types and strings."
    if not issub_safe(typ, Enum):
        return
    if verb == PY2JSON:
        return partial(convert_enum_str, typ=typ)
    if verb == JSON2PY:
        return partial(convert_str_enum, typ=typ)
    if verb == INSP_PY:
        return partial(check_isinst, typ=typ)
    if verb in (INSP_JSON, PATTERN):
        inspect = partial(check_str_enum, typ=typ)
        return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
215 |
216 |
def faux_enums(verb, typ, ctx):
    "Rule to fake an Enum by actually using strings."
    if not issub_safe(typ, Enum):
        return
    if verb in (JSON2PY, PY2JSON):
        return partial(pass_faux_enum, typ=typ)
    if verb in (INSP_JSON, INSP_PY, PATTERN):
        inspect = partial(check_str_enum, typ=typ)
        return pat.String(typ.__name__, inspect) if verb == PATTERN else inspect
225 |
226 |
def optional(verb, typ, ctx):
    """
    Handle an ``Optional[inner]`` by passing ``None`` through.
    """
    if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
        return
    if not has_origin(typ, Union, num_args=2):
        return
    args = typ.__args__
    if NoneType not in args:
        return
    inner = None
    for candidate in args:
        if candidate is not NoneType:
            inner = candidate
    if inner is None:
        raise TypeError("Could not find inner type for Optional: " + str(typ))
    inner = ctx.lookup(verb=verb, typ=inner)
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_optional, inner=inner)
    if verb in (INSP_JSON, INSP_PY):
        return partial(check_optional, inner=inner)
    if verb == PATTERN:
        return pat.Alternatives([pat.Null, inner])
251 |
252 |
def lists(verb, typ, ctx):
    """
    Handle a ``List[type]`` or ``Tuple[type, ...]``.

    Trivia: the ellipsis indicates a homogenous tuple; ``Tuple[A, B, C]`` is a product
    type that contains exactly those elements.
    """
    if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
        return
    if has_origin(typ, list, num_args=1):
        (inner,) = typ.__args__
    elif has_origin(typ, tuple, num_args=2):
        inner, tail = typ.__args__
        if tail is not Ellipsis:
            # Tuple[A, B] is a product type, not a homogenous sequence.
            return
    else:
        return
    inner = ctx.lookup(verb=verb, typ=inner)
    # The JSON side is always a plain list; the Python side keeps its own type.
    con = list if verb in (PY2JSON, INSP_JSON, PATTERN) else get_origin(typ)
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_collection, inner=inner, con=con)
    if verb in (INSP_JSON, INSP_PY):
        return partial(check_collection, inner=inner, con=con)
    if verb == PATTERN:
        return pat.Array.homog(inner)
278 |
279 |
def sets(verb, typ, ctx):
    """
    Handle a ``Set[type]`` or ``FrozenSet[type]``.
    """
    if verb not in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON, PATTERN):
        return
    if not has_origin(typ, (set, frozenset), num_args=1):
        return
    (element,) = typ.__args__
    # JSON has no set type; the JSON representation is a list.
    con = list if verb in (PY2JSON, INSP_JSON, PATTERN) else get_origin(typ)
    element = ctx.lookup(verb=verb, typ=element)
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_collection, inner=element, con=con)
    if verb in (INSP_JSON, INSP_PY):
        return partial(check_collection, inner=element, con=con)
    if verb == PATTERN:
        return pat.Array.homog(element)
297 |
298 |
# Maps each standard verb to the verb used for dictionary *keys*, which must be
# strings in JSON.  The inspection-of-Python and pattern verbs are unchanged.
_STRING = {
    JSON2PY: STR2PY,
    PY2JSON: PY2STR,
    INSP_JSON: INSP_STR,
    INSP_PY: INSP_PY,
    PATTERN: PATTERN,
}
306 |
307 |
def dicts(verb, typ, ctx):
    """
    Handle a ``Dict[key, value]`` where key is a string, integer, date or enum type.
    """
    if verb not in _STRING:
        return
    if not has_origin(typ, (dict, OrderedDict), num_args=2):
        return
    key_type, val_type = typ.__args__
    # Keys are looked up under the string verbs; JSON object keys are strings.
    inner_key = ctx.lookup(verb=_STRING[verb], typ=key_type)
    inner_val = ctx.lookup(verb=verb, typ=val_type)
    con = get_origin(typ)
    if verb in (JSON2PY, PY2JSON):
        return partial(convert_mapping, key=inner_key, val=inner_val, con=con)
    if verb in (INSP_JSON, INSP_PY):
        return partial(check_mapping, key=inner_key, val=inner_val, con=con)
    if verb == PATTERN:
        return pat.Object.homog(inner_key, inner_val)
325 |
--------------------------------------------------------------------------------
/json_syntax/string.py:
--------------------------------------------------------------------------------
1 | from .action_v1 import (
2 | check_parse_error,
3 | check_str_enum,
4 | convert_date,
5 | convert_enum_str,
6 | convert_str_enum,
7 | )
8 | from .helpers import STR2PY, PY2STR, INSP_STR, issub_safe
9 |
10 | from datetime import date
11 | from enum import Enum
12 | from functools import partial
13 |
14 |
15 | """
16 | As JSON requires string keys, unless dicts are only allowed to be Dict[str, T], we need to
17 | be able to encode values as strings.
18 |
19 | Recommendations:
20 |
21 | * The string verbs are not intended for direct use.
22 | * Use these verbs for any type that must be represented as a key in a JSON object.
23 | * The standard rules will only handle types that are reliable keys and have obvious string
24 | encodings.
25 |
26 | See std.dicts for an example.
27 | """
28 |
29 |
def stringify_keys(verb, typ, ctx):
    "Convert str, int, date and Enum values to and from strings for use as JSON object keys."
    if verb not in (STR2PY, PY2STR, INSP_STR):
        return
    if typ in (str, int):
        if verb == STR2PY:
            return typ
        if verb == PY2STR:
            return str
        # INSP_STR: valid iff the constructor can parse the string.
        return partial(check_parse_error, parser=typ, error=ValueError)
    if typ == date:
        if verb == PY2STR:
            return typ.isoformat
        if verb == STR2PY:
            return convert_date
        # INSP_STR.
        return partial(
            check_parse_error, parser=convert_date, error=(TypeError, ValueError)
        )
    if issub_safe(typ, Enum):
        if verb == PY2STR:
            return partial(convert_enum_str, typ=typ)
        if verb == STR2PY:
            return partial(convert_str_enum, typ=typ)
        # INSP_STR.
        return partial(check_str_enum, typ=typ)
58 |
--------------------------------------------------------------------------------
/json_syntax/types.py:
--------------------------------------------------------------------------------
1 | import collections as c
2 | from importlib import import_module
3 | import logging
4 | import sys
5 | import typing as t
6 |
7 | logger = logging.getLogger(__name__)
8 | _eval_type = getattr(t, "_eval_type", None)
9 | python_minor = sys.version_info[:2]
10 | NoneType = type(None)
11 |
12 |
def has_origin(typ, origin, num_args=None):
    """
    Determines if a concrete class (a generic class with arguments) matches an origin
    and has a specified number of arguments.

    This does a direct match rather than a subclass check.

    The typing classes use dunder properties such that ``__origin__`` is the generic
    class and ``__args__`` are the type arguments.

    Note: in python3.7, the ``__origin__`` attribute changed to reflect native types.
    This call attempts to work around that so that 3.5 and 3.6 "just work."
    """
    origins = origin if isinstance(origin, tuple) else (origin,)
    if get_origin(typ) not in origins:
        return False
    return num_args is None or len(get_args(typ)) == num_args
30 |
31 |
def get_origin(typ):
    """
    Get the constructor origin of a generic type. For example, List is constructed with
    list.
    """
    # Fall back to the type itself when __origin__ is absent or falsy,
    # then normalize through the provisional-to-stable mapping.
    t_origin = getattr(typ, "__origin__", None) or typ
    return _origin_pts(t_origin)
43 |
44 |
def get_args(typ):
    "Return the type arguments of a generic type, or an empty tuple for plain types."
    try:
        return typ.__args__
    except AttributeError:
        return ()
47 |
48 |
def get_generic_origin(typ):
    """
    Get the generic origin of a fully parametrized generic type.

    E.g. get_generic_origin(typing.List[int]) == typing.List

    Returns None if `typ` is not a fully realized generic.
    """
    if not is_parametrized(typ):
        return None

    origin = typ.__origin__
    # On 3.7+, __origin__ is the runtime class (e.g. list); map it back to the
    # typing generic (e.g. typing.List) when it isn't itself generic.
    if not is_generic(origin) and not hasattr(origin, "__parameters__"):
        origin = _lookup_generic_origin(origin)

    return origin
63 |
64 |
def get_argument_map(typ):
    """
    For a concrete type, e.g. List[int], find the type parameters that map to the arguments.

    This is mostly useful for custom generics, example:

       T = TypeVar('T')
       @attr.s
       class MyGeneric(Generic[T, U]):
           foo = attr.ib(type=T)
           bar = attr.ib(type=List[U])

       get_argument_map(MyGeneric[int, str]) == {T: int, U: str}
    """
    origin = get_generic_origin(typ)
    if origin is None:
        # Not a fully parametrized generic; no parameters to map.
        return {}
    return dict(zip(origin.__parameters__, typ.__args__))
83 |
84 |
def rewrite_typevars(typ, arg_map):
    """
    Rewrites a generic type according to a mapping of arguments.

    Note: behavior is only defined for TypeVar objects.

    From the example under get_argument_map:

       rewrite_typevars(List[U], {T: int, U: str}) == List[str]

    Note that we should immediately apply rewrites.
    """
    if not arg_map:
        # Nothing to rewrite.
        return typ

    try:
        # This is a type variable specified in the arguments.
        return arg_map[typ]
    except (KeyError, TypeError):
        # KeyError: not a mapped TypeVar; TypeError: typ is unhashable.
        pass

    origin = get_generic_origin(typ) or typ
    try:
        args = typ.__args__
    except AttributeError:
        # Not a generic type; nothing to substitute.
        return typ
    else:
        # Recursively rewrite each argument.
        new_args = tuple(rewrite_typevars(arg, arg_map) for arg in args)
        if new_args == args:
            # Don't reconstruct the type when nothing changes.
            return typ
        else:
            # If it passes, construct a new type with the rewritten arguments.
            return origin[new_args]
120 |
121 |
# GenericMeta is the metaclass of typing generics before 3.7; on 3.7+ it was
# removed and generics became instances of typing._GenericAlias instead.
try:
    _Generic = t.GenericMeta
except AttributeError:
    _Generic = t._GenericAlias
126 |
127 |
def is_generic(typ):
    """
    Return true iff the instance (which should be a type value) is a generic type.

    `typing` module notes (versions were swapped in the original docstring):

    3.5, 3.6: typing.List[int] is an instance of typing.GenericMeta
    3.7+: typing.List[int] is an instance of typing._GenericAlias
    """
    return isinstance(typ, _Generic)
138 |
139 |
def _make_map():
    "Yield (typing generic, runtime constructor) pairs, e.g. (typing.List, list)."
    from collections import abc

    seen = set()
    # Explicit pairs whose names don't line up between `typing` and builtins.
    for gen, con in [
        (t.Tuple, tuple),
        (t.List, list),
        (t.Dict, dict),
        (t.Callable, callable),
        (t.Type, type),
        (t.Set, set),
        (t.FrozenSet, frozenset),
    ]:
        seen.add(gen)
        yield gen, con

    # Everything else: pair typing.Foo with collections.abc.Foo, or failing
    # that collections.foo (lowercased name).
    for name, generic in vars(t).items():
        if not is_generic(generic) or generic in seen:
            continue
        for check in getattr(abc, name, None), getattr(c, name.lower(), None):
            if check:
                yield generic, check
                break
163 |
164 |
# Version-dependent helpers: the provisional typing API changed shape in 3.7.
if python_minor < (3, 7):

    def _origin_pts(typ, _pts=dict(_make_map())):
        """
        Convert the __origin__ of a generic type returned by the provisional typing API
        (python3.4+) to the stable version.

        Don't use this, just use get_origin.
        """
        # _pts maps typing generics to runtime constructors, built once at def time.
        return _pts.get(typ, typ)

    def _lookup_generic_origin(typ):
        """
        Find the generic type corresponding to a regular type returned by .__origin__

        Prefer using get_generic_origin to this.
        """
        # Pre-3.7, __origin__ is already the typing generic; nothing to map.
        return None

    def is_parametrized(typ):
        """
        Determine if the type is both generic and fully realized; no free parameters.
        Parameters *may* be specified by type vars.

        This function works around weirdness in pre-3.7 where parameters will be set if
        TypeVars are specified.
        """
        if not is_generic(typ):
            return False
        args = typ.__args__ or ()
        return all(param in args for param in typ.__parameters__)


else:

    def _origin_pts(origin):
        """
        Convert the __origin__ of a generic type returned by the provisional typing API
        (python3.4+) to the stable version.

        Don't use this, just use get_origin.
        """
        # On 3.7+, __origin__ is already the stable runtime type.
        return origin

    def _lookup_generic_origin(
        typ, _stp={stable: prov for prov, stable in _make_map()}
    ):
        """
        Find the generic type corresponding to a regular type returned by .__origin__
        """
        # _stp maps runtime constructors back to typing generics, e.g. list -> List.
        return _stp.get(typ, None)

    def is_parametrized(typ):
        """
        Determine if the type is both generic and fully realized; no free parameters.
        Parameters *may* be specified by type vars.
        """
        return is_generic(typ) and not typ.__parameters__
223 |
224 |
def issub_safe(sub, sup):
    """
    Safe version of issubclass that only compares regular types.

    Tries to be consistent in handling generic types.

    `typing` module notes:

    3.5, 3.6: issubclass(t.List[int], list) returns true
    3.7: issubclass(t.List[int], list) raises a TypeError
    """
    if is_generic(sub):
        # Generic aliases are never treated as subclasses here.
        return False
    try:
        return issubclass(sub, sup)
    except TypeError:
        return False
240 |
241 |
def resolve_fwd_ref(typ, context_class):
    """
    Tries to resolve a forward reference given a containing class. This does nothing for
    regular types.
    """
    resolved = None
    try:
        # Forward refs are evaluated in the namespace of the class's module.
        namespace = vars(import_module(context_class.__module__))
    except AttributeError:
        logger.warning("Couldn't determine module of %r", context_class)
    else:
        resolved = _eval_type(typ, namespace, {})
    if resolved is None:
        # Couldn't resolve; hand back the original unchanged.
        return typ
    else:
        return resolved
258 |
259 |
if _eval_type is None:
    # If typing's internal API changes, we have tests that break.
    # Without typing._eval_type we cannot resolve forward refs at all, so
    # replace the function above with an identity fallback.
    def resolve_fwd_ref(typ, context_class):  # noqa
        return typ


# _make_map is only needed at import time to build the lookup tables above.
del _make_map
267 |
--------------------------------------------------------------------------------
/json_syntax/unions.py:
--------------------------------------------------------------------------------
1 | from .helpers import has_origin, JSON2PY, PY2JSON, INSP_JSON, INSP_PY, PATTERN
2 | from .action_v1 import convert_union, check_union
3 | from . import pattern as pat
4 |
5 | from functools import partial
6 | from typing import Union
7 |
8 |
def unions(verb, typ, ctx):
    """
    Handle undiscriminated unions of the form ``Union[A, B, C, D]`` by inspecting the
    inner types one by one.

    This is the "implicit discriminant" technique, exploiting the fact that Python
    already tags all values with their type.

    A potential problem is that the JSON structure may not retain that information. So
    another rule could attempt to add a discriminant to the JSON data. For example, if
    you had two ``attrs`` style classes, they could add a `type` field with the class
    name. As there are many ways to do that, this rule doesn't attempt to pick one for
    you.

    Note: The optional rule handles the common case of ``Union[T, NoneType]`` more
    efficiently, so it should be before this.
    """
    if not has_origin(typ, Union):
        return
    if verb in (JSON2PY, PY2JSON):
        # The original inner if/elif had an unreachable `else: return` branch;
        # within this arm the verb is always one of the two values below.
        check_verb = INSP_PY if verb == PY2JSON else INSP_JSON
        # Each step pairs the inspection (to pick the alternative) with the
        # conversion, plus a label for error messages.
        steps = [
            (
                ctx.lookup(verb=check_verb, typ=arg),
                ctx.lookup(verb=verb, typ=arg),
                "<{!s}>".format(arg),
            )
            for arg in typ.__args__
        ]
        return partial(convert_union, steps=steps, typename=repr(typ))
    elif verb in (INSP_JSON, INSP_PY):
        steps = [
            (ctx.lookup(verb=verb, typ=arg), "<{!s}>".format(arg))
            for arg in typ.__args__
        ]
        return partial(check_union, steps=steps)
    elif verb == PATTERN:
        alts = [ctx.lookup(verb=verb, typ=arg) for arg in typ.__args__]
        return pat.Alternatives(alts)
52 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "json-syntax"
3 | version = "2.3.1"
4 | description = "Generates functions to convert Python classes to JSON dumpable objects."
5 | authors = ["Ben Samuel "]
6 | license = "MIT"
7 | readme = "README.md"
8 | repository = "https://github.com/UnitedIncome/json-syntax"
9 | classifiers = [
10 | "Development Status :: 5 - Production/Stable",
11 | "Intended Audience :: Developers",
12 | "Topic :: Software Development :: Libraries"
13 | ]
14 |
15 | [tool.poetry.dependencies]
16 | python = "^3.4"
17 | python-dateutil = {version="^2.7", python = "<3.7"}
18 | typing = {version = "^3.7", python = "<3.5"}
19 |
20 | [tool.poetry.dev-dependencies]
21 | pytest = "^4.1"
22 | attrs = "^19.2"
23 | pytest-cov = "^2.6"
24 | hypothesis = "^4"
25 | dataclasses = {version="0.4", python = ">=3.6,<3.7"}
26 | typing-extensions = {version = "^3.7", python = "<3.8"}
27 | # Install these tools via pipx.
28 | # black = {version = "^19.3-beta.0", allow-prereleases = true, python = ">=3.7"}
29 | # dephell = {version = '^0.8.0', python = '>=3.6'}
30 |
31 | [tool.black]
32 | line-length = 88
33 | target-version = ["py34"]
34 |
35 | [build-system]
36 | requires = ["poetry>=1.0"]
37 | build-backend = "poetry.masonry.api"
38 |
39 | [tool.dephell.setup]
40 | # dephell deps convert -e setup; then pip install -e path/to/json_syntax will work
41 | from = {format = "poetry", path = "pyproject.toml"}
42 | to = {format = 'setuppy', path = "setup.py"}
43 |
44 | [tool.dephell.req]
45 | # Use `dephell deps convert -e req` to generate requirements.txt.
46 | from = {format = "poetry", path = "pyproject.toml"}
47 | to = {format = 'pip', path = "requirements.txt"}
48 |
49 | [tool.dephell.test34.docker]
50 | # dephell docker run -e test34 pip install -r requirements.txt
51 | # dephell docker run -e test34 pytest tests/
52 | container = "test34"
53 | repo = "python"
54 | tag = "3.4"
55 |
56 | [tool.dephell.test35.docker]
57 | container = "test35"
58 | repo = "python"
59 | tag = "3.5"
60 |
61 | [tool.dephell.test36.docker]
62 | container = "test36"
63 | repo = "python"
64 | tag = "3.6"
65 |
66 | [tool.dephell.test37.docker]
67 | container = "test37"
68 | repo = "python"
69 | tag = "3.7"
70 |
71 | [tool.dephell.test38.docker]
72 | container = "test38"
73 | repo = "python"
74 | tag = "3.8"
75 |
76 | [tool.dephell.test39.docker]
77 | container = "test39"
78 | repo = "python"
79 | tag = "3.9-rc"
80 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | # D203: one blank line before class docstring, this or D211
3 | # W503: line break before binary operator, have to pick this or W504
4 | # Everything after the first line is fixed with black.
5 | ignore = D203, W503,
6 | E111, E121, E122, E123, E124, E125, E126, E201, E202, E203,
7 | E221, E222, E225, E226, E227, E231, E241, E251, E261, E262,
8 | E265, E271, E272, E302, E303, E306, E502, E701, E702, E703,
9 | E704, W291, W292, W293, W391
10 | exclude =
11 | .tox
12 | __pycache__
13 | .git
14 | htmlcov
15 | max-line-length = 92
16 | disable-noqa = False
17 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/capitalone/json-syntax/9240c94a264c5817ee97feb969a651b3707d1d05/tests/__init__.py
--------------------------------------------------------------------------------
/tests/_strategies.py:
--------------------------------------------------------------------------------
1 | """
2 | Some miscellany to keep the type_strategies module a bit more readable.
3 | """
4 | from hypothesis import strategies as st
5 |
6 | import attr
7 |
8 | try:
9 | import dataclasses as dc
10 | except ImportError:
11 | dc = None
12 | from datetime import date
13 | from enum import IntEnum
14 | from keyword import iskeyword
15 | import os
16 | import typing
17 |
18 |
MAX_FIELDS = 8
# Restrict to ASCII unless UNICODE_NAMES is set in the environment.
_max_cp = None if os.environ.get("UNICODE_NAMES") else 0x7F
_any_char = st.characters(min_codepoint=1, max_codepoint=_max_cp)
# Unicode categories legal at the start of a Python identifier.
_ident_start = st.characters(
    whitelist_categories=["Lu", "Ll", "Lt", "Lm", "Lo", "Nl"], max_codepoint=_max_cp
)
# Categories legal in the rest of an identifier, plus the underscore.
_ident_tail = st.characters(
    whitelist_categories=["Lu", "Ll", "Lt", "Lm", "Lo", "Nl", "Mn", "Mc", "Nd", "Pc"],
    whitelist_characters="_",
    max_codepoint=_max_cp,
)
30 |
31 |
@st.composite
def _idents(draw, lengths=st.integers(min_value=0, max_value=80)):
    """Strategy: draw a syntactically valid Python identifier.

    Draws one identifier-start character plus 0..80 tail characters; if the
    result collides with a keyword, one more tail character is appended.
    """
    ident = draw(_ident_start) + "".join(
        draw(_ident_tail) for _ in range(draw(lengths))
    )
    if iskeyword(ident):
        ident += draw(_ident_tail)
    return ident
40 |
41 |
42 | def _make_enum(name, elems):
43 | # print(f'IntEnum(enum_{name}, {elems!r})')
44 | return IntEnum("enum_" + name, elems)
45 |
46 |
# Shared strategies: arbitrary identifiers, and IntEnums built from 1..MAX_FIELDS
# unique identifier member names.
idents = _idents()
enums = st.builds(
    _make_enum, idents, st.lists(idents, min_size=1, max_size=MAX_FIELDS, unique=True)
)
51 |
52 |
def fields_idents(types):
    # dict_class=list makes the dictionaries strategy emit unique-keyed
    # (name, type) pairs as a list — the shape make_class/make_dataclass
    # and NamedTuple all accept as a field spec.
    return st.dictionaries(
        idents, types, dict_class=list, min_size=0, max_size=MAX_FIELDS
    )
57 |
58 |
class _Faux(attr.validators._InstanceOfValidator):
    # Looks like an instance_of validator (so from_type can read `.type`)
    # but never actually validates — generic types break isinstance checks.
    def __call__(self, inst, attr, value):
        pass
62 |
63 |
def attrs(types, frozen):
    """Strategy: build attrs classes whose field types are drawn from ``types``."""

    def _attrib(typ):
        # Attach a bogus validator because from_type reads the validator,
        # not `type`. The real instance_of validator can't be used here:
        # generic types are not valid isinstance arguments.
        return attr.ib(type=typ, validator=_Faux(typ))

    def _make(name, fields, **kw):
        body = {field: _attrib(typ) for field, typ in fields}
        return attr.make_class("attrs_" + name, body, frozen=frozen, **kw)

    return st.builds(_make, idents, fields_idents(types), slots=st.booleans())
80 |
81 |
if dc is not None:

    def dataclasses(types, frozen):
        "Strategy: build dataclasses whose field types are drawn from ``types``."

        def _make(name, fields, order):
            # print(f'dataclass({name}, {fields}, frozen={frozen}, order={order}')
            return dc.make_dataclass(
                "dc_" + name, fields, frozen=frozen, eq=True, order=order
            )

        return st.builds(_make, idents, fields_idents(types), order=st.booleans())


else:

    def dataclasses(types, frozen):
        # dataclasses module unavailable on this interpreter: no strategy.
        return None
98 |
99 |
try:
    _NamedTuple = typing.NamedTuple
except AttributeError:

    def namedtuples(types):
        # typing.NamedTuple unavailable: no strategy.
        return None


else:

    def namedtuples(types):
        "Strategy: build typing.NamedTuple classes with drawn field types."

        def _make(name, fields):
            # print(f'namedtuple({name}, {fields})')
            return _NamedTuple("nt_" + name, fields)

        return st.builds(_make, idents, fields_idents(types))
116 |
117 |
def lists(types):
    """Strategy: List[T] for an element type drawn from ``types``."""
    return types.map(lambda elem: typing.List[elem])
120 |
121 |
def hmg_tuples(types):
    """Strategy: homogeneous variable-length tuple types, Tuple[T, ...]."""
    return types.map(lambda elem: typing.Tuple[elem, ...])
124 |
125 |
def sets(types):
    """Strategy: Set[T] for an element type drawn from ``types``."""
    return types.map(lambda elem: typing.Set[elem])
128 |
129 |
def frozensets(types):
    """Strategy: FrozenSet[T] for an element type drawn from ``types``."""
    return types.map(lambda elem: typing.FrozenSet[elem])
132 |
133 |
# Hashable "atom" types usable as dict keys: int, str, date, or a random enum.
_dict_keys = atoms = st.one_of([st.sampled_from([int, str, date]), enums])
135 |
136 |
def dicts(val_types):
    """Strategy: Dict[K, V] with a hashable key type and a drawn value type."""
    return st.builds(lambda key, val: typing.Dict[key, val], _dict_keys, val_types)
139 |
140 |
def prod_tuples(types):
    """Strategy: fixed-shape product tuple types, Tuple[T1, ..., Tn]."""
    elem_lists = st.lists(types, min_size=1, max_size=MAX_FIELDS)
    return elem_lists.map(lambda drawn: typing.Tuple[tuple(drawn)])
146 |
147 |
def unions(types, max_size=None):
    """Strategy: Union[...] over one or more drawn types (size capped if given)."""
    alternatives = st.lists(types, min_size=1, max_size=max_size)
    return alternatives.map(lambda drawn: typing.Union[tuple(drawn)])
152 |
--------------------------------------------------------------------------------
/tests/common.py:
--------------------------------------------------------------------------------
1 | from importlib import import_module
2 |
3 |
class Rules:
    """Minimal lookup context for driving json_syntax rules in tests.

    Holds a flat sequence of rule callables and returns the first action
    any of them produces.
    """

    def __init__(self, *rules):
        self.rules = rules

    def lookup(self, verb, typ, accept_missing=False):
        """Return the first non-None result of a rule, or None/raise if none match."""
        for rule in self.rules:
            action = rule(verb=verb, typ=typ, ctx=self)
            if action is not None:
                return action
        if not accept_missing:
            raise RuntimeError("No rule for verb={}, typ={}".format(verb, typ))
        return None
17 |
18 |
class SoftMod:
    """Attribute proxy over modules that may fail to import.

    Any attribute lookup searches the modules that did import, in order,
    and yields None when no module provides the name.
    """

    def __init__(self, *modules, allow_SyntaxError=False):
        self.mods = []
        for name in modules:
            try:
                self.mods.append(import_module(name))
            except ImportError:
                pass  # Missing module: silently fall through to the next.
            except SyntaxError:
                # Some test modules use syntax newer than the interpreter.
                if not allow_SyntaxError:
                    raise

    def __getattr__(self, name):
        for mod in self.mods:
            found = getattr(mod, name, None)
            if found is not None:
                return found
        return None
37 |
38 |
# Soft imports used across the test suite: typing_extensions backfills names
# missing from typing; dataclasses may be absent entirely on old interpreters.
typing = SoftMod("typing", "typing_extensions")
dataclasses = SoftMod("dataclasses")
41 |
--------------------------------------------------------------------------------
/tests/extras/__init__.py:
--------------------------------------------------------------------------------
1 | # noqa
2 |
--------------------------------------------------------------------------------
/tests/extras/test_dynamodb.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from json_syntax.extras.dynamodb import dynamodb_ruleset
4 | from json_syntax.helpers import NoneType
5 |
6 | from fractions import Fraction
7 | from decimal import Decimal
8 | from typing import Dict, List, Optional, Set, Tuple
9 |
# attrs is required unconditionally below (`@attr.s` on Inner/Outer), so the
# previous `attr = None` fallback only deferred the crash to class-decoration
# time. Skip the whole module cleanly when attrs is unavailable instead.
attr = pytest.importorskip("attr")
14 |
15 |
def encode(value, typ):
    """Encode one python value of type ``typ`` to DynamoDB AttributeValue form."""
    ruleset = dynamodb_ruleset()
    return ruleset.python_to_dynamodb(typ)(value)


def decode(value, typ):
    """Decode one DynamoDB AttributeValue back to a python value of ``typ``."""
    ruleset = dynamodb_ruleset()
    return ruleset.dynamodb_to_python(typ)(value)


def encode_item(value, typ):
    """Encode a whole python object as a DynamoDB item (top-level key map)."""
    ruleset = dynamodb_ruleset()
    return ruleset.python_to_ddb_item(typ)(value)


def decode_item(value, typ):
    """Decode a whole DynamoDB item into a python object of ``typ``."""
    ruleset = dynamodb_ruleset()
    return ruleset.ddb_item_to_python(typ)(value)
30 |
31 |
# Round-trip tests for scalar DynamoDB encodings. DynamoDB tags every value
# with a type key: NULL, BOOL, B (base64 binary), N (numeric string),
# S (string), L (list), M (map) — and sets as SS/NS/BS.


def test_optional():
    assert encode(None, Optional[int]) == {"NULL": True}
    assert encode(5, Optional[int]) == {"N": "5"}
    assert decode({"NULL": True}, Optional[str]) is None
    assert decode({"S": "wat"}, Optional[str]) == "wat"


def test_null():
    assert encode(None, NoneType) == {"NULL": True}
    assert decode({"NULL": True}, NoneType) is None


def test_bool():
    assert encode(True, bool) == {"BOOL": True}
    assert decode({"BOOL": False}, bool) is False


def test_binary():
    # Binary is base64 text on the wire; decoding accepts str or bytes input.
    assert encode(b"foobar", bytes) == {"B": "Zm9vYmFy"}
    assert decode({"B": "Zm9vYmFy"}, bytes) == b"foobar"
    assert decode({"B": b"Zm9vYmFy"}, bytes) == b"foobar"


def test_number1():
    assert encode(55.125, float) == {"N": "55.125"}
    assert decode({"N": "-55.125"}, float) == -55.125


def test_number2():
    # NaN has no DynamoDB numeric representation, so encoding must reject it.
    with pytest.raises(ValueError):
        encode(float("nan"), float)


def test_number3():
    assert encode(Fraction(441, 8), Fraction) == {"N": "55.125"}
    assert decode({"N": "55.125"}, Fraction) == Fraction(441, 8)


def test_number4():
    assert encode(Decimal("55.125"), Decimal) == {"N": "55.125"}
    assert decode({"N": "-55.125"}, Decimal) == Decimal("-55.125")


def test_string():
    assert encode("foobar", str) == {"S": "foobar"}
    assert decode({"S": "foobar"}, str) == "foobar"


def test_list():
    assert encode([1, 2, 4, 5], List[int]) == {
        "L": [{"N": str(x)} for x in [1, 2, 4, 5]]
    }
    assert decode({"L": [{"S": "apple"}, {"S": "banana"}]}, List[str]) == [
        "apple",
        "banana",
    ]


def test_dict():
    assert encode({"A": 1, "B": 2}, Dict[str, int]) == {
        "M": {"A": {"N": "1"}, "B": {"N": "2"}}
    }
    assert decode({"M": {"A": {"N": "1"}, "B": {"N": "2"}}}, Dict[str, int]) == {
        "A": 1,
        "B": 2,
    }
98 |
99 |
def cheat(value):
    """Recursively sort DynamoDB set payloads ("SS"/"NS"/"BS") in place.

    DynamoDB sets are unordered, so the tests canonicalize them before
    comparing. Returns ``value`` (mutated) for call-site convenience.
    """
    if isinstance(value, dict):
        for key, inner in value.items():
            if key in ("SS", "NS", "BS"):
                inner.sort()
            else:
                cheat(inner)
    elif isinstance(value, list):
        for inner in value:
            cheat(inner)
    return value
111 |
112 |
# Set round-trips. `cheat` sorts the encoded sets first because DynamoDB sets
# carry no ordering guarantee.


def test_str_set():
    assert cheat(encode({"foo", "bar", "qux"}, Set[str])) == {
        "SS": ["bar", "foo", "qux"]
    }
    assert decode({"SS": ["foo", "bar", "qux"]}, Set[str]) == {"foo", "bar", "qux"}


def test_num_set():
    assert cheat(encode({-33.5, 11.25, 1.75}, Set[float])) == {
        "NS": ["-33.5", "1.75", "11.25"]
    }
    assert decode({"NS": [11.25, 1.75, -33.5]}, Set[float]) == {-33.5, 11.25, 1.75}


def test_bin_set():
    # Binary set members are base64; decoding accepts str or bytes elements.
    assert cheat(encode({b"foo", b"bar", b"qux"}, Set[bytes])) == {
        "BS": ["YmFy", "Zm9v", "cXV4"]
    }
    assert decode({"BS": ["YmFy", "Zm9v", "cXV4"]}, Set[bytes]) == {
        b"foo",
        b"bar",
        b"qux",
    }
    assert decode({"BS": [b"YmFy", b"Zm9v", b"cXV4"]}, Set[bytes]) == {
        b"foo",
        b"bar",
        b"qux",
    }
141 |
142 |
# Fixture classes for map/item round-trips. `count` has a default so the
# encoder can drop it when unchanged.
@attr.s
class Inner:
    name = attr.ib(type=str)


@attr.s
class Outer:
    stuff = attr.ib(type=Inner)
    count = attr.ib(type=int, default=7)
152 |
153 |
# Attrs classes encode as "M" maps when nested; `*_item` strips the outer "M"
# for use as a whole DynamoDB item. Defaulted fields are omitted on encode and
# unknown keys are ignored on decode.


def test_map1():
    subj = Outer(count=3, stuff=Inner(name="bob"))
    expected = {"M": {"count": {"N": "3"}, "stuff": {"M": {"name": {"S": "bob"}}}}}
    assert encode(subj, Outer) == expected

    subj = {"M": {"count": {"N": "3"}, "stuff": {"M": {"name": {"S": "bob"}}}}}
    expected = Outer(count=3, stuff=Inner(name="bob"))
    assert decode(subj, Outer) == expected


def test_map2():
    subj = Outer(stuff=Inner(name="bob"))
    expected = {"M": {"stuff": {"M": {"name": {"S": "bob"}}}}}
    assert encode(subj, Outer) == expected

    subj = {
        "M": {"stuff": {"M": {"name": {"S": "bob"}}}, "other_key": {"S": "ignored"}}
    }
    expected = Outer(stuff=Inner(name="bob"))
    assert decode(subj, Outer) == expected


def test_item1():
    subj = Outer(count=3, stuff=Inner(name="bob"))
    expected = {"count": {"N": "3"}, "stuff": {"M": {"name": {"S": "bob"}}}}
    assert encode_item(subj, Outer) == expected

    subj = {"count": {"N": "3"}, "stuff": {"M": {"name": {"S": "bob"}}}}
    expected = Outer(count=3, stuff=Inner(name="bob"))
    assert decode_item(subj, Outer) == expected


def test_item2():
    subj = Outer(stuff=Inner(name="bob"))
    expected = {"stuff": {"M": {"name": {"S": "bob"}}}}
    assert encode_item(subj, Outer) == expected

    subj = {"stuff": {"M": {"name": {"S": "bob"}}}, "other_key": {"S": "ignored"}}
    expected = Outer(stuff=Inner(name="bob"))
    assert decode_item(subj, Outer) == expected
194 |
195 |
# `ad_hoc` builds a DynamoDB expression-values dict, prefixing each keyword
# name with the given sigil. Values may be bare atoms (type inferred) or
# (value, type) pairs for anything the atoms can't express.


def test_ad_hoc_atoms():
    rs = dynamodb_ruleset()
    actual = rs.ad_hoc(
        ":",
        arg_null=None,
        arg_bool=False,
        arg_int=3,
        arg_float=6.6,
        arg_dec=Decimal("-7.888"),
        arg_str="some_string",
        arg_bytes=b"some_byes",
        arg_class=Outer(stuff=Inner(name="bob")),
    )
    assert actual == {
        ":arg_bool": {"BOOL": False},
        ":arg_bytes": {"B": "c29tZV9ieWVz"},
        ":arg_dec": {"N": "-7.888"},
        ":arg_float": {"N": "6.6"},
        ":arg_int": {"N": "3"},
        ":arg_null": {"NULL": True},
        ":arg_str": {"S": "some_string"},
        ":arg_class": {"M": {"stuff": {"M": {"name": {"S": "bob"}}}}},
    }


def test_ad_hoc_typed():
    rs = dynamodb_ruleset()
    actual = rs.ad_hoc(
        ":",
        arg_opt1=(None, Optional[int]),
        arg_opt2=(5, Optional[int]),
        arg_list=([3, 2.2, 6.0], List[float]),
        arg_tup=((3, 2.2, 6.0), Tuple[float, ...]),
        arg_class=(Outer(stuff=Inner(name="bob")), Outer),
        arg_str_set=({"foo", "bar", "qux"}, Set[str]),
    )
    assert cheat(actual) == {
        ":arg_opt1": {"NULL": True},
        ":arg_opt2": {"N": "5"},
        ":arg_list": {"L": [{"N": "3"}, {"N": "2.2"}, {"N": "6.0"}]},
        ":arg_tup": {"L": [{"N": "3"}, {"N": "2.2"}, {"N": "6.0"}]},
        ":arg_class": {"M": {"stuff": {"M": {"name": {"S": "bob"}}}}},
        ":arg_str_set": {"SS": ["bar", "foo", "qux"]},
    }
240 |
--------------------------------------------------------------------------------
/tests/extras/test_flags.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from tests.common import Rules
3 |
4 | from json_syntax.extras import flags as fl
5 | from json_syntax.helpers import (
6 | JSON2PY,
7 | PY2JSON,
8 | INSP_JSON,
9 | INSP_PY,
10 | STR2PY,
11 | PY2STR,
12 | INSP_STR,
13 | python_minor,
14 | )
15 |
16 |
# The `flags` rule must behave identically whether the Flag type was built by
# subscription (Flag[...], needs __class_getitem__, Python 3.7+) or by call
# (Flag(...)). Conversion verbs pass valid members through and raise on
# unknown values; inspection verbs return truthiness instead.


@pytest.mark.skipif(
    python_minor < (3, 7), reason="__class_getitem__ not supported before 3.7"
)
@pytest.mark.parametrize("verb", [JSON2PY, PY2JSON, STR2PY, PY2STR])
def test_Flag_getitem_convert(verb):
    action = fl.flags(verb=verb, typ=fl.Flag["foo", "bar", "qux"], ctx=Rules())
    assert action("foo") == "foo"
    assert action("qux") == "qux"

    with pytest.raises(ValueError):
        action("nope")


@pytest.mark.parametrize("verb", [JSON2PY, PY2JSON, STR2PY, PY2STR])
def test_Flag_init_convert(verb):
    action = fl.flags(verb=verb, typ=fl.Flag("foo", "bar", "qux"), ctx=Rules())
    assert action("foo") == "foo"
    assert action("qux") == "qux"

    with pytest.raises(ValueError):
        action("nope")


@pytest.mark.skipif(
    python_minor < (3, 7), reason="__class_getitem__ not supported before 3.7"
)
@pytest.mark.parametrize("verb", [INSP_PY, INSP_JSON, INSP_STR])
def test_Flag_getitem_inspect(verb):
    action = fl.flags(verb=verb, typ=fl.Flag["foo", "bar", "qux"], ctx=Rules())
    assert action("foo")
    assert action("qux")
    assert not action("nope")


@pytest.mark.parametrize("verb", [INSP_PY, INSP_JSON, INSP_STR])
def test_Flag_init_inspect(verb):
    action = fl.flags(verb=verb, typ=fl.Flag("foo", "bar", "qux"), ctx=Rules())
    assert action("foo")
    assert action("qux")
    assert not action("nope")
57 |
--------------------------------------------------------------------------------
/tests/extras/test_loose_dates.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from tests.common import Rules
3 |
4 | from json_syntax.extras import loose_dates as exam
5 | from json_syntax.helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON, python_minor
6 |
7 | from datetime import date, datetime
8 |
9 |
@pytest.mark.skipif(
    python_minor < (3, 7), reason="datetime.isoformat not supported before python 3.7"
)
def test_iso_dates_loose():
    "Test the iso_dates_loose handles dates using ISO8601 and accepts datetimes."

    # Decoding is "loose": a full datetime string is truncated to its date.
    decoder = exam.iso_dates_loose(verb=JSON2PY, typ=date, ctx=Rules())
    assert decoder("1776-07-04") == date(1776, 7, 4)
    assert decoder("6543-02-01T09:09:09") == date(6543, 2, 1)

    encoder = exam.iso_dates_loose(verb=PY2JSON, typ=date, ctx=Rules())
    assert encoder(date(1776, 7, 4)) == "1776-07-04"

    # Python-side inspection accepts only true `date` instances — note
    # datetime is a date subclass and is still rejected here.
    inspect = exam.iso_dates_loose(verb=INSP_PY, typ=date, ctx=Rules())
    assert inspect(date(1776, 7, 4))
    assert not inspect(datetime(1776, 7, 4, 3, 3))
    assert not inspect("2000-01-01")
    assert not inspect("2000-01-01T03:03:03")
    assert not inspect("string")

    # JSON-side inspection accepts date *or* datetime strings, nothing else.
    inspect = exam.iso_dates_loose(verb=INSP_JSON, typ=date, ctx=Rules())
    assert not inspect(date(1776, 7, 4))
    assert not inspect(datetime(1776, 7, 4, 3, 3))
    assert inspect("2000-01-01")
    assert inspect("2000-01-01T03:03:03")
    assert not inspect("string")
36 |
--------------------------------------------------------------------------------
/tests/test_attrs.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from .common import SoftMod, typing as t, Rules
4 | from .types_attrs_common import Hooks, T, U
5 |
6 | from json_syntax import attrs as at
7 | from json_syntax import std
8 | from json_syntax.helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON
9 |
10 | import attr
11 | from collections import namedtuple
12 | from typing import Tuple, Generic, List
13 |
14 | ann = SoftMod("tests.types_attrs_ann", allow_SyntaxError=True)
15 |
16 |
# Fixture classes: flat, generic, private-field, hooked, and doubly-generic
# attrs classes. `b` defaults to "default" so encoders can omit it.


@attr.s
class Flat:
    a = attr.ib(type=int)
    b = attr.ib("default", type=str)


@attr.s
class GenFlat(Generic[T]):
    a = attr.ib(type=T)
    b = attr.ib("default", type=str)


@attr.s
class PrivateFields:
    pub = attr.ib(type=str)
    _priv = attr.ib(type=int)


@attr.s
class Hook1(Hooks):
    a = attr.ib(type=int)
    b = attr.ib("default", type=str)


@attr.s
class GenExample(Generic[T, U]):
    body = attr.ib(type=T)
    count = attr.ib(type=int)
    messages = attr.ib(type=t.List[U])
46 |
47 |
try:

    @attr.s(slots=True)
    class GenExampleSlots(Generic[T, U]):
        body = attr.ib(type=T)
        count = attr.ib(type=int)
        messages = attr.ib(type=t.List[U])


# Some attrs/typing combinations can't mix slots=True with Generic;
# tests parametrized on this class skip when it is None.
except TypeError:
    GenExampleSlots = None
59 |
60 |
class Fail:
    """Context whose lookup must never be reached.

    Used to prove a rule rejects a verb/type combination before recursing.
    """

    def lookup(self, verb, typ, accept_missing):
        raise RuntimeError("Should not be called in this test")
64 |
65 |
class Ctx:
    """Context stub: converts by calling the type, inspects via isinstance."""

    def lookup(self, verb, typ, accept_missing):
        if typ is None:
            raise RuntimeError("Should not be called with typ=None")

        if verb in (JSON2PY, PY2JSON):
            return typ
        return lambda val: isinstance(val, typ)
74 | return lambda val: isinstance(val, typ)
75 |
76 |
def test_attrs_classes_disregards():
    "Test that attrs_classes disregards unknown verbs and types."

    # Fail() raises if the rule recurses, proving rejection happens up front.
    assert at.attrs_classes(verb=PY2JSON, typ=int, ctx=Fail()) is None
    assert at.attrs_classes(verb=INSP_PY, typ=int, ctx=Fail()) is None
    assert at.attrs_classes(verb=JSON2PY, typ=object, ctx=Fail()) is None
    assert at.attrs_classes(verb="dummy", typ=Flat, ctx=Fail()) is None
84 |
85 |
@pytest.mark.parametrize(
    "con, FlatCls",
    [
        (Flat, Flat),
        (ann.Flat, ann.Flat),
        (GenFlat, GenFlat[int]),
        (ann.GenFlat, ann.GenFlat[int]) if ann.GenFlat else (None, None),
        (ann.FlatDc, ann.FlatDc),
        (ann.GenFlatDc, ann.GenFlatDc[int]) if ann.GenFlatDc else (None, None),
    ],
)
def test_attrs_encoding(con, FlatCls):
    "Test that attrs_classes encodes and decodes a flat class."
    if FlatCls is None:
        pytest.skip("Annotations unavailable")

    # Fields equal to their default ("default" for b) are dropped on encode.
    encoder = at.attrs_classes(verb=PY2JSON, typ=FlatCls, ctx=Ctx())
    assert encoder(con(33, "foo")) == {"a": 33, "b": "foo"}
    assert encoder(con(33, "default")) == {"a": 33}

    decoder = at.attrs_classes(verb=JSON2PY, typ=FlatCls, ctx=Ctx())
    assert decoder({"a": 33, "b": "foo"}) == FlatCls(33, "foo")
    assert decoder({"a": 33}) == FlatCls(33)

    # Python-side inspection only checks the class, not the field types.
    inspect = at.attrs_classes(verb=INSP_PY, typ=FlatCls, ctx=Ctx())
    assert inspect(con(33, "foo"))
    assert inspect(con("str", "foo"))
    assert not inspect({"a": 33, "b": "foo"})

    # JSON-side inspection requires mandatory keys with valid value types.
    inspect = at.attrs_classes(verb=INSP_JSON, typ=FlatCls, ctx=Ctx())
    assert not inspect(con(33, "foo"))
    assert not inspect({"a": "str", "b": "foo"})
    assert inspect({"a": 33})
    assert inspect({"a": 33, "b": "foo"})
    assert not inspect({"b": "foo"})
121 |
122 |
@pytest.mark.parametrize("PrivateCls", [PrivateFields, ann.PrivateFieldsDc,])
def test_attrs_private(PrivateCls):
    "Test that attrs_classes encode and decode classes with private fields correctly."
    if PrivateCls is None:
        pytest.skip("Annotations unavailable")

    original = PrivateCls("value", 77)

    encoder = at.attrs_classes(verb=PY2JSON, typ=PrivateCls, ctx=Ctx())
    encoded = encoder(original)

    # The JSON key keeps the leading underscore even though attrs strips it
    # from the __init__ argument name.
    assert encoded["pub"] == "value"
    assert encoded["_priv"] == 77

    decoder = at.attrs_classes(verb=JSON2PY, typ=PrivateCls, ctx=Ctx())
    decoded = decoder(encoded)

    assert decoded == original
141 |
142 |
@pytest.mark.parametrize("HookCls", [Hook1, ann.Hook])
def test_attrs_hooks(HookCls):
    "Test that attrs_classes enables hooks."
    if HookCls is None:
        pytest.skip("Annotations unavailable")

    # The Hooks mixin injects a "_type_" discriminator on encode and lets
    # decode accept either a positional list or a keyed dict.
    encoder = at.attrs_classes(verb=PY2JSON, typ=HookCls, ctx=Ctx())
    assert encoder(HookCls(33, "foo")) == {"_type_": "Hook", "a": 33, "b": "foo"}
    assert encoder(HookCls(33, "default")) == {"_type_": "Hook", "a": 33}

    decoder = at.attrs_classes(verb=JSON2PY, typ=HookCls, ctx=Ctx())
    assert decoder([33, "foo"]) == HookCls(33, "foo")
    assert decoder({"a": 33, "b": "foo"}) == HookCls(33, "foo")
    assert decoder({"a": 33}) == HookCls(33)

    inspect = at.attrs_classes(verb=INSP_PY, typ=HookCls, ctx=Ctx())
    assert inspect(HookCls(33, "foo"))
    assert inspect(HookCls("str", "foo"))
    assert not inspect({"a": 33, "b": "foo"})

    # JSON-side inspection keys off the "_type_" discriminator alone.
    inspect = at.attrs_classes(verb=INSP_JSON, typ=HookCls, ctx=Ctx())
    assert inspect({"_type_": "Hook", "a": "str", "b": "foo"})
    assert not inspect({"a": 33, "b": "foo"})
    assert inspect({"_type_": "Hook", "a": 33, "b": "foo"})
    assert inspect({"_type_": "Hook"})
168 |
169 |
@pytest.mark.parametrize(
    "GenClass",
    [
        GenExample,
        GenExampleSlots,
        ann.GenExample,
        ann.GenExampleSlots,
        ann.GenExampleDc,
    ],
)
def test_attrs_generic(GenClass):
    # Round-trips deeply nested generic specializations, including a generic
    # nested inside itself and inside List/Tuple containers.
    if GenClass is None:
        pytest.skip()

    @attr.s
    class Top:
        nested = attr.ib(type=GenClass[GenClass[str, str], str])
        list_of = attr.ib(type=List[GenClass[Tuple[Flat, ...], int]])

    rules = Rules(at.attrs_classes, std.atoms, std.lists)
    py_val = Top(
        nested=GenClass(
            body=GenClass(body="body", count=5, messages=["msg1", "msg2"]),
            count=3,
            messages=["msg3", "msg4"],
        ),
        list_of=[
            GenClass(body=(Flat(a=1), Flat(a=2, b="three")), count=4, messages=[6, 7])
        ],
    )
    j_val = {
        "list_of": [
            {"body": [{"a": 1}, {"a": 2, "b": "three"}], "count": 4, "messages": [6, 7]}
        ],
        "nested": {
            "body": {"body": "body", "count": 5, "messages": ["msg1", "msg2"]},
            "count": 3,
            "messages": ["msg3", "msg4"],
        },
    }

    encoder = at.attrs_classes(verb=PY2JSON, typ=Top, ctx=rules)
    assert encoder(py_val) == j_val

    decoder = at.attrs_classes(verb=JSON2PY, typ=Top, ctx=rules)
    assert decoder(j_val) == py_val

    inspect = at.attrs_classes(verb=INSP_PY, typ=Top, ctx=rules)
    assert inspect(py_val)

    inspect = at.attrs_classes(verb=INSP_JSON, typ=Top, ctx=rules)
    assert inspect(j_val)
222 |
223 |
class Ctx2:
    """Context stub that treats a missing type as `str`.

    Conversion verbs return the type as the converter; inspection verbs
    return an isinstance check.
    """

    def lookup(self, *, verb, typ, accept_missing=False):
        if typ is None:
            # Only legal when the caller explicitly tolerates a missing type.
            assert accept_missing, "typ is None without accept_missing"
            typ = str
        if verb in (JSON2PY, PY2JSON):
            return typ
        return lambda val: isinstance(val, typ)
237 |
238 |
def test_named_tuples_disregards():
    "Test that named_tuples disregards unknown verbs and types."

    # Fail() raises if the rule recurses; plain `tuple` is not a namedtuple.
    assert at.named_tuples(verb=PY2JSON, typ=int, ctx=Fail()) is None
    assert at.named_tuples(verb=INSP_PY, typ=int, ctx=Fail()) is None
    assert at.named_tuples(verb=JSON2PY, typ=tuple, ctx=Fail()) is None
    assert at.named_tuples(verb="dummy", typ=Named1, ctx=Fail()) is None
246 |
247 |
# Namedtuple fixtures: plain, with defaults (the `defaults` argument appeared
# in Python 3.7), and an annotated typing.NamedTuple (None if unavailable).
Named1 = namedtuple("Named1", ["a", "b"])
try:
    Named2 = namedtuple("Named2", ["a", "b"], defaults=["default"])
except TypeError:
    Named2 = None
Named3 = ann.Named
254 |
255 |
# Encoding round-trips. Ctx2 deliberately converts untyped fields via `str`,
# which is why Named1/Named2 values come back stringified while the annotated
# Named3 keeps real ints.


def test_named_tuples_encoding1():
    "Test that named_tuples encodes and decodes a namedtuple."

    encoder = at.named_tuples(verb=PY2JSON, typ=Named1, ctx=Ctx2())
    assert encoder(Named1(42, "foo")) == {"a": "42", "b": "foo"}

    decoder = at.named_tuples(verb=JSON2PY, typ=Named1, ctx=Ctx2())
    assert decoder({"a": 42, "b": "foo"}) == Named1("42", "foo")


@pytest.mark.skipif(Named2 is None, reason="defaults arg to namedtuple unavailable")
def test_named_tuples_encoding2():
    "Test that named_tuples encodes and decodes a namedtuple."

    # Fields equal to their declared default are omitted from the encoding.
    encoder = at.named_tuples(verb=PY2JSON, typ=Named2, ctx=Ctx2())
    assert encoder(Named2(42, "foo")) == {"a": "42", "b": "foo"}
    assert encoder(Named2(42)) == {"a": "42"}

    decoder = at.named_tuples(verb=JSON2PY, typ=Named2, ctx=Ctx2())
    assert decoder({"a": 42, "b": "foo"}) == Named2("42", "foo")
    assert decoder({"a": 42}) == Named2("42")


@pytest.mark.skipif(Named3 is None, reason="annotations unavailable")
def test_named_tuples_encoding3():
    "Test that named_tuples encodes and decodes a namedtuple."

    encoder = at.named_tuples(verb=PY2JSON, typ=Named3, ctx=Ctx2())
    assert encoder(Named3(42, "foo")) == {"a": 42, "b": "foo"}
    assert encoder(Named3(42)) == {"a": 42}

    decoder = at.named_tuples(verb=JSON2PY, typ=Named3, ctx=Ctx2())
    assert decoder({"a": 42, "b": "foo"}) == Named3(42, "foo")
    assert decoder({"a": 42}) == Named3(42)
290 |
291 |
# Inspection: INSP_PY only checks the tuple class; INSP_JSON checks dict
# shape and value types, tolerating missing keys only when defaulted.


def test_named_tuples_checking1():
    "Test that named_tuples verifies a namedtuple."
    inspect = at.named_tuples(verb=INSP_PY, typ=Named1, ctx=Ctx2())
    assert inspect(Named1(42, "foo"))
    assert inspect(Named1("str", "foo"))
    assert not inspect({"a": "42", "b": "foo"})

    inspect = at.named_tuples(verb=INSP_JSON, typ=Named1, ctx=Ctx2())
    assert not inspect(Named1(42, "foo"))
    assert not inspect({"a": "42"})
    assert not inspect({"a": 42, "b": "foo"})
    assert inspect({"a": "42", "b": "foo"})
    assert not inspect({"b": "foo"})


@pytest.mark.skipif(Named2 is None, reason="defaults arg to namedtuple unavailable")
def test_named_tuples_checking2():
    "Test that named_tuples verifies a namedtuple."
    inspect = at.named_tuples(verb=INSP_PY, typ=Named2, ctx=Ctx2())
    assert inspect(Named2(42, "foo"))
    assert inspect(Named2("str", "foo"))
    assert not inspect({"a": "42", "b": "foo"})

    inspect = at.named_tuples(verb=INSP_JSON, typ=Named2, ctx=Ctx2())
    assert not inspect(Named2(42, "foo"))
    assert not inspect({"a": None, "b": "foo"})
    assert inspect({"a": "42"})
    assert inspect({"a": "42", "b": "foo"})
    assert not inspect({"b": "foo"})


@pytest.mark.skipif(Named3 is None, reason="annotations unavailable")
def test_named_tuples_checking3():
    "Test that named_tuples verifies a namedtuple."
    inspect = at.named_tuples(verb=INSP_PY, typ=Named3, ctx=Ctx2())
    assert inspect(Named3(42, "foo"))
    assert inspect(Named3("str", "foo"))
    assert not inspect({"a": 42, "b": "foo"})

    inspect = at.named_tuples(verb=INSP_JSON, typ=Named3, ctx=Ctx2())
    assert not inspect(Named3(42, "foo"))
    assert not inspect({"a": None, "b": "foo"})
    assert inspect({"a": 42})
    assert inspect({"a": 42, "b": "foo"})
    assert not inspect({"b": "foo"})
337 |
338 |
def test_tuples_disregards():
    "Test that tuples disregards unknown verbs and types."

    # The tuples rule handles only fixed-shape Tuple[X, Y]; homogeneous
    # Tuple[int, ...] belongs to another rule.
    assert at.tuples(verb=PY2JSON, typ=Tuple[int, ...], ctx=Fail()) is None
    assert at.tuples(verb=INSP_PY, typ=int, ctx=Fail()) is None
    assert at.tuples(verb="dummy", typ=Tuple[int, str], ctx=Fail()) is None


def test_tuples_encoding():
    "Test that tuples encodes and decodes a flat class."

    encoder = at.tuples(verb=PY2JSON, typ=Tuple[int, str], ctx=Ctx2())
    assert encoder((33, "foo")) == [33, "foo"]

    decoder = at.tuples(verb=JSON2PY, typ=Tuple[int, str], ctx=Ctx2())
    assert decoder([33, "foo"]) == (33, "foo")

    # Inspection checks both element types and the exact arity.
    inspect = at.tuples(verb=INSP_PY, typ=Tuple[int, str], ctx=Ctx2())
    assert inspect((33, "foo"))
    assert not inspect(("str", "foo"))
    assert not inspect((33, "foo", None))

    inspect = at.tuples(verb=INSP_JSON, typ=Tuple[int, str], ctx=Ctx2())
    assert inspect([33, "foo"])
    assert not inspect(["str", "foo"])
    assert not inspect([33, "foo", None])
    assert not inspect({})
366 |
367 |
@pytest.mark.parametrize(
    "dict_type", [t.TypedDict("Dict1", a=int, b=str) if t.TypedDict else None, ann.Dict]
)
def test_typed_dict_encoding(dict_type):
    "Test that typed_dicts encodes and decodes a typed dict."
    if dict_type is None:
        pytest.skip("TypedDict or annotations unavailable")

    # Extra keys are dropped; values are coerced through the field types
    # (Ctx converts by calling the type, so 3.2 -> 3 and 5 -> "5").
    encoder = at.typed_dicts(verb=PY2JSON, typ=dict_type, ctx=Ctx())
    assert encoder({"a": 3, "b": "foo"}) == {"a": 3, "b": "foo"}
    assert encoder({"a": 3, "b": "foo", "c": "extra"}) == {"a": 3, "b": "foo"}
    assert encoder({"a": 3.2, "b": 5}) == {"a": 3, "b": "5"}

    decoder = at.typed_dicts(verb=JSON2PY, typ=dict_type, ctx=Ctx())
    assert decoder({"a": 3, "b": "foo"}) == {"a": 3, "b": "foo"}
    assert decoder({"a": 3, "b": "foo", "c": "extra"}) == {"a": 3, "b": "foo"}
    assert decoder({"a": 3.2, "b": 5}) == {"a": 3, "b": "5"}

    # Inspection requires all declared keys with valid types; extras are fine.
    inspect = at.typed_dicts(verb=INSP_PY, typ=dict_type, ctx=Ctx())
    assert inspect({"a": 3, "b": "foo"})
    assert not inspect({"a": 3.2, "b": False})
    assert not inspect("foo")
    assert not inspect({})
    assert inspect({"a": 3, "b": "foo", "c": True})

    inspect = at.typed_dicts(verb=INSP_JSON, typ=dict_type, ctx=Ctx())
    assert inspect({"a": 3, "b": "foo"})
    assert not inspect({"a": 3.2, "b": False})
    assert not inspect("foo")
    assert not inspect({})
    assert inspect({"a": 3, "b": "foo", "c": True})
399 |
400 |
@attr.s(eq=False)
class Incomparable:
    # Any equality test on instances blows up — used to prove the encoder
    # never compares field *values*, only defaulted fields.
    def __eq__(self, other):
        raise RuntimeError("Can't compare this class")

    def __ne__(self, other):
        raise RuntimeError("Can't compare this class")


@attr.s
class IncomparableContainer:
    field1 = attr.ib(type=Incomparable)
    field2 = attr.ib(type=int, default=3)
414 |
415 |
def test_encode_incomparable():
    "Test that encoding doesn't fail if a field's __eq__ method throws."

    rules = Rules(at.attrs_classes, std.atoms, std.lists)

    # field1 has no default, so its value is never equality-compared; field2's
    # default check (== 3) only touches plain ints.
    encoder = at.attrs_classes(verb=PY2JSON, typ=IncomparableContainer, ctx=rules)
    assert encoder(IncomparableContainer(Incomparable())) == {"field1": {}}
    assert encoder(IncomparableContainer(Incomparable(), 4)) == {"field1": {}, "field2": 4}

    decoder = at.attrs_classes(verb=JSON2PY, typ=IncomparableContainer, ctx=rules)
    actual = decoder({"field1": {}})
    # Compare attributes individually: comparing whole instances would raise.
    assert isinstance(actual.field1, Incomparable)
    assert actual.field2 == 3
429 |
--------------------------------------------------------------------------------
/tests/test_cache.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from json_syntax import cache
4 |
5 |
6 | def test_forward_action():
7 | "Test that ForwardAction can replace a function and be updated."
8 |
9 | def func1(a, b):
10 | return a + b
11 |
12 | def func2(a, b):
13 | "Doc string."
14 | return a * b
15 |
16 | subj = cache.ForwardAction(func1)
17 |
18 | assert subj(3, 7) == 10
19 |
20 | subj.__call__ = func2
21 |
22 | assert subj(3, 7) == 21
23 |
    assert repr(subj).startswith("<fwd ")


def test_simple_cache():
    "Test that the in_flight -> complete mechanism produces a forward action."
39 | subj = cache.SimpleCache()
40 |
41 | # Notify the cache that we're working on the result.
42 | subj.in_flight(verb="verb", typ=int)
43 |
44 | # Another rule needs the result before it's ready.
45 | actual = subj.get(verb="verb", typ=int)
46 |
47 | def action(value):
48 | return value * 10
49 |
50 | # The ForwardAction previously set is loaded with the callable.
51 | subj.complete(verb="verb", typ=int, action=action)
52 |
53 | # The ForwardAction is loaded with the action.
54 | assert actual(5) == 50
55 |
56 | # The cache entry is replaced with the action itself.
57 | assert subj.get(verb="verb", typ=int) is action
58 |
59 |
class NoHashMeta(type):
    "Metaclass whose instances (classes) cannot be hashed."
    __hash__ = None


class NoHash(metaclass=NoHashMeta):
    "An unhashable *type*, used to provoke SimpleCache's warning path."
66 |
67 |
def test_simple_cache_unhashable():
    "Test that SimpleCache warns on unhashable type instances."

    subj = cache.SimpleCache()

    # Every cache entry point must degrade to a warning, not a TypeError.
    operations = (
        lambda: subj.get(verb="verb", typ=NoHash),
        lambda: subj.in_flight(verb="verb", typ=NoHash),
        lambda: subj.complete(verb="verb", typ=NoHash, action=lambda val: val + 1),
    )
    for operation in operations:
        with pytest.warns(cache.UnhashableType):
            operation()
81 |
--------------------------------------------------------------------------------
/tests/test_errors.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from json_syntax import errors as err
4 |
5 | import traceback as tb
6 |
7 |
@pytest.mark.parametrize(
    "args,expect",
    [
        ((), "ValueError: At .alpha.beta\n"),
        (("message",), "ValueError: message; at .alpha.beta\n"),
        (("two", "parts"), "ValueError: ('two; at .alpha.beta', 'parts')\n"),
    ],
)
def test_error_context(args, expect):
    "Test that error contexts add information correctly."

    # pytest.raises also guarantees the exception was actually thrown.
    with pytest.raises(ValueError) as exc_info:
        with err.ErrorContext(".", "alpha"):
            with err.ErrorContext(".", "beta"):
                raise ValueError(*args)

    rendered = "".join(tb.format_exception_only(ValueError, exc_info.value))
    assert rendered == expect
29 |
30 |
def test_error_ctx_inline():
    "Test that err_ctx adds inline context."

    def inside():
        raise ValueError("message")

    with pytest.raises(ValueError) as exc_info:
        err.err_ctx(".alpha", inside)

    rendered = "".join(tb.format_exception_only(ValueError, exc_info.value))
    assert rendered == "ValueError: message; at .alpha\n"
45 |
--------------------------------------------------------------------------------
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | from json_syntax import helpers as hlp
2 |
3 |
def test_identity():
    "Test that the identity function does what it says."

    # A bare object() has no special equality, so ``is`` proves pass-through.
    marker = object()
    assert hlp.identity(marker) is marker
10 |
--------------------------------------------------------------------------------
/tests/test_std.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from tests.common import Rules
3 | from unittest.mock import Mock
4 |
5 | from json_syntax import std
6 | from json_syntax.helpers import JSON2PY, PY2JSON, INSP_PY, INSP_JSON, NoneType
7 | from json_syntax.string import stringify_keys
8 |
9 | from datetime import datetime, date
10 | from decimal import Decimal
11 | from enum import Enum, IntEnum
12 | import math
13 | from typing import Optional, Union, Tuple, List, Set, FrozenSet, Dict
14 | from pickle import dumps
15 |
16 | try:
17 | from typing import OrderedDict # 3.7.2
18 | except ImportError:
19 | OrderedDict = None
20 |
21 |
# A self- and forward-referencing tuple type that no rule recognizes; the
# *_disregard tests use it to prove rules return None for unknown types.
Mystery = Tuple["Mystery", "Thing"]
23 |
24 |
def test_atoms_disregard():
    "Test the atoms rule will disregard unknown types and verbs."

    assert std.atoms(verb="unknown", typ=str, ctx=Rules()) is None
    assert all(
        std.atoms(verb=verb, typ=Mystery, ctx=Rules()) is None
        for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON)
    )


def test_atoms_str():
    "Test the atoms rule will generate encoders and decoders for strings."

    # Encoding and decoding a string are both the identity.
    for verb in (JSON2PY, PY2JSON):
        convert = std.atoms(verb=verb, typ=str, ctx=Rules())
        assert convert("some string") == "some string"

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.atoms(verb=verb, typ=str, ctx=Rules())
        assert inspect("string")
        assert not inspect(5)


def test_atoms_int():
    "Test the atoms rule will generate encoders and decoders for integers."

    for verb in (JSON2PY, PY2JSON):
        convert = std.atoms(verb=verb, typ=int, ctx=Rules())
        assert convert(77) == 77

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.atoms(verb=verb, typ=int, ctx=Rules())
        assert not inspect("string")
        assert inspect(5)
67 |
68 |
def test_atoms_bool():
    "Test the atoms rule will generate encoders and decoders for booleans."

    for verb in (JSON2PY, PY2JSON):
        convert = std.atoms(verb=verb, typ=bool, ctx=Rules())
        assert convert(False) is False
        assert convert(True) is True

    for verb, example in ((INSP_PY, True), (INSP_JSON, False)):
        inspect = std.atoms(verb=verb, typ=bool, ctx=Rules())
        assert not inspect("string")
        assert inspect(example)


def test_atoms_null():
    "Test the atoms rule will generate encoders and decoders for None / null."

    for verb in (JSON2PY, PY2JSON):
        convert = std.atoms(verb=verb, typ=NoneType, ctx=Rules())
        assert convert(None) is None
        # Anything other than None is a hard error, not a coercion.
        with pytest.raises(ValueError):
            convert(5)

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.atoms(verb=verb, typ=NoneType, ctx=Rules())
        assert inspect(None)
        # Falsy non-None values don't count as null.
        assert not inspect(0)


def test_atoms_picklable():
    "Test that actions generated by the atoms rule can be pickled."

    actions = []
    for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
        for typ in (str, int, bool, NoneType):
            actions.append(std.atoms(verb=verb, typ=typ, ctx=Rules()))
    assert all(action is not None for action in actions)
    dumps(actions)
121 |
122 |
def test_floats_disregard():
    "Test the floats rule will disregard unknown types and verbs."

    assert std.floats(verb="unknown", typ=str, ctx=Rules()) is None
    assert all(
        std.floats(verb=verb, typ=Mystery, ctx=Rules()) is None
        for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON)
    )


def test_floats():
    "Test the floats rule generates encoders and decoders that are tolerant of integers."

    decoder = std.floats(verb=JSON2PY, typ=float, ctx=Rules())
    assert decoder(77.7) == 77.7
    assert decoder(77) == 77.0
    # Both the string "nan" and an actual NaN decode to NaN.
    assert math.isnan(decoder("nan"))
    assert math.isnan(decoder(float("nan")))

    encoder = std.floats(verb=PY2JSON, typ=float, ctx=Rules())
    assert encoder(77.7) == 77.7
    assert encoder(float("inf")) == float("inf")

    # The Python-side inspector rejects ints; the JSON-side one accepts them.
    for verb, int_is_float in ((INSP_PY, False), (INSP_JSON, True)):
        inspect = std.floats(verb=verb, typ=float, ctx=Rules())
        assert not inspect("string")
        assert not inspect("-inf")
        assert inspect(float("-inf"))
        if int_is_float:
            assert inspect(44)
        else:
            assert not inspect(44)
        assert inspect(77.7)


def test_floats_nan_str():
    "Test floats_nan_str rule generates encoders and decoders that stringify 'nan'."

    decoder = std.floats_nan_str(verb=JSON2PY, typ=float, ctx=Rules())
    assert decoder(77.7) == 77.7
    assert decoder(77) == 77.0
    assert math.isnan(decoder("nan"))
    assert math.isnan(decoder(float("nan")))

    encoder = std.floats_nan_str(verb=PY2JSON, typ=float, ctx=Rules())
    assert encoder(77.7) == 77.7
    # Unlike plain floats, non-finite values are encoded as strings.
    assert encoder(float("inf")) == "Infinity"

    for verb, int_is_float in ((INSP_PY, False), (INSP_JSON, True)):
        inspect = std.floats_nan_str(verb=verb, typ=float, ctx=Rules())
        assert not inspect("string")
        assert not inspect("-inf")
        assert inspect(float("-inf"))
        if int_is_float:
            assert inspect(44)
        else:
            assert not inspect(44)
        assert inspect(77.7)


def test_floats_picklable():
    "Test that actions generated by the floats rule can be pickled."

    actions = []
    for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
        for rule in (std.floats, std.floats_nan_str):
            actions.append(rule(verb=verb, typ=float, ctx=Rules()))
    assert all(action is not None for action in actions)
    dumps(actions)
197 |
198 |
def test_decimals_disregard():
    "Test the decimals rule will disregard unknown types and verbs."

    assert std.decimals(verb="unknown", typ=date, ctx=Rules()) is None
    for verb in (JSON2PY, PY2JSON, INSP_JSON, INSP_PY):
        assert std.decimals(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_decimals():
    "Test the decimals rule will generate encoders and decoders for decimals."

    for verb in (JSON2PY, PY2JSON):
        convert = std.decimals(verb=verb, typ=Decimal, ctx=Rules())
        assert convert(Decimal("77.7")) == Decimal("77.7")

    # Only genuine Decimal instances pass inspection on either side.
    for verb in (INSP_PY, INSP_JSON):
        inspect = std.decimals(verb=verb, typ=Decimal, ctx=Rules())
        for value in ("string", 44, 77.7, "77.7"):
            assert not inspect(value)
        assert inspect(Decimal("77.7"))


def test_decimals_as_str_disregard():
    "Test the decimals_as_str rule will disregard unknown types and verbs."

    assert std.decimals_as_str(verb="unknown", typ=date, ctx=Rules()) is None
    for verb in (JSON2PY, PY2JSON, INSP_JSON, INSP_PY):
        assert std.decimals_as_str(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_decimals_as_str():
    "Test the decimals_as_str rule will generate encoders and decoders for decimals."

    decoder = std.decimals_as_str(verb=JSON2PY, typ=Decimal, ctx=Rules())
    assert decoder(Decimal("77.7")) == Decimal("77.7")
    assert decoder("77.7") == Decimal("77.7")

    # Encoding emits the string form rather than a JSON number.
    encoder = std.decimals_as_str(verb=PY2JSON, typ=Decimal, ctx=Rules())
    assert encoder(Decimal("77.7")) == "77.7"

    inspect_py = std.decimals_as_str(verb=INSP_PY, typ=Decimal, ctx=Rules())
    for value in ("string", 44, 77.7, "77.7"):
        assert not inspect_py(value)
    assert inspect_py(Decimal("77.7"))

    # The JSON-side inspector additionally accepts numbers and numeric strings.
    inspect_json = std.decimals_as_str(verb=INSP_JSON, typ=Decimal, ctx=Rules())
    assert not inspect_json("string")
    for value in (44, 77.7, "77.7", Decimal("77.7")):
        assert inspect_json(value)
266 |
267 |
def test_iso_dates_disregard():
    "Test the iso_dates rule will disregard unknown types and verbs."

    assert std.iso_dates(verb="unknown", typ=date, ctx=Rules()) is None
    for verb in (JSON2PY, PY2JSON, INSP_JSON, INSP_PY):
        assert std.iso_dates(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_iso_dates():
    "Test the iso_dates rule handles dates using ISO8601 and rejects datetimes."

    decoder = std.iso_dates(verb=JSON2PY, typ=date, ctx=Rules())
    assert decoder("1776-07-04") == date(1776, 7, 4)
    # A timestamp must not be silently truncated to a date.
    with pytest.raises(ValueError):
        decoder("6543-02-01T09:09:09")

    encoder = std.iso_dates(verb=PY2JSON, typ=date, ctx=Rules())
    assert encoder(date(1776, 7, 4)) == "1776-07-04"

    inspect_py = std.iso_dates(verb=INSP_PY, typ=date, ctx=Rules())
    assert inspect_py(date(1776, 7, 4))
    # datetime is a subclass of date but must still be rejected here.
    for value in (
        datetime(1776, 7, 4, 3, 3),
        "2000-01-01",
        "2000-01-01T03:03:03",
        "string",
    ):
        assert not inspect_py(value)

    inspect_json = std.iso_dates(verb=INSP_JSON, typ=date, ctx=Rules())
    assert inspect_json("2000-01-01")
    for value in (
        date(1776, 7, 4),
        datetime(1776, 7, 4, 3, 3),
        "2000-01-01T03:03:03",
        "string",
    ):
        assert not inspect_json(value)


def test_iso_datetimes():
    "Test the iso_dates rule generates encoders and decoders for datetimes using ISO8601."

    decoder = std.iso_dates(verb=JSON2PY, typ=datetime, ctx=Rules())
    assert decoder("6666-06-06T12:12:12.987654") == datetime(
        6666, 6, 6, 12, 12, 12, 987654
    )

    encoder = std.iso_dates(verb=PY2JSON, typ=datetime, ctx=Rules())
    assert (
        encoder(datetime(6666, 6, 6, 12, 12, 12, 987654))
        == "6666-06-06T12:12:12.987654"
    )

    inspect_py = std.iso_dates(verb=INSP_PY, typ=datetime, ctx=Rules())
    assert inspect_py(datetime(1776, 7, 4, 3, 3))
    for value in (date(1776, 7, 4), "2000-01-01", "2000-01-01T03:03:03", "string"):
        assert not inspect_py(value)

    # On the JSON side both a bare date and a full timestamp are accepted.
    inspect_json = std.iso_dates(verb=INSP_JSON, typ=datetime, ctx=Rules())
    assert inspect_json("2000-01-01")
    assert inspect_json("2000-01-01T03:03:03")
    for value in (date(1776, 7, 4), datetime(1776, 7, 4, 3, 3), "string"):
        assert not inspect_json(value)


def test_iso_dates_picklable():
    "Test that actions generated by the iso_dates rule can be pickled."

    actions = []
    for verb in (JSON2PY, PY2JSON):
        for typ in (date, datetime):
            actions.append(std.iso_dates(verb=verb, typ=typ, ctx=Rules()))
    assert all(action is not None for action in actions)
    dumps(actions)
343 |
344 |
class Enum1(Enum):
    "String-valued enum fixture."
    ABLE = "a"
    BAKER = "b"
    CHARLIE = "c"


class Enum2(IntEnum):
    "Int-valued enum fixture, exercising Enum subclasses."
    ALPHA = 1
    BETA = 2
    GAMMA = 3
355 |
356 |
def test_enums_disregard():
    # Fixed copy-paste docstring: this tests the enums rule, not iso_dates.
    "Test the enums rule will disregard unknown types and verbs."

    assert std.enums(verb="unknown", typ=Enum1, ctx=Rules()) is None
    for verb in (JSON2PY, PY2JSON):
        assert std.enums(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_enums():
    "Test the enums rule will generate encoders and decoders for enumerated types."

    decoder = std.enums(verb=JSON2PY, typ=Enum1, ctx=Rules())
    assert decoder("ABLE") == Enum1.ABLE
    assert decoder("CHARLIE") == Enum1.CHARLIE

    # Members are encoded by name, not by value.
    encoder = std.enums(verb=PY2JSON, typ=Enum1, ctx=Rules())
    assert encoder(Enum1.BAKER) == "BAKER"
    assert encoder(Enum1.CHARLIE) == "CHARLIE"

    inspect = std.enums(verb=INSP_PY, typ=Enum1, ctx=Rules())
    assert not inspect("ABLE")
    assert inspect(Enum1.CHARLIE)
    # A member of a different enum must not pass.
    assert not inspect(Enum2.BETA)

    inspect = std.enums(verb=INSP_JSON, typ=Enum1, ctx=Rules())
    assert not inspect(Enum1.BAKER)
    assert not inspect("BETA")
    assert inspect("CHARLIE")
385 |
386 |
def test_enums_int():
    "Test the enums rule generates encoders and decoders for enumerated type subclasses."

    decoder = std.enums(verb=JSON2PY, typ=Enum2, ctx=Rules())
    assert decoder("ALPHA") == Enum2.ALPHA
    assert decoder("GAMMA") == Enum2.GAMMA

    encoder = std.enums(verb=PY2JSON, typ=Enum2, ctx=Rules())
    assert encoder(Enum2.BETA) == "BETA"
    assert encoder(Enum2.GAMMA) == "GAMMA"

    inspect = std.enums(verb=INSP_PY, typ=Enum2, ctx=Rules())
    # Was "ALPA" -- a typo; either way a name string is not an Enum2 member.
    assert not inspect("ALPHA")
    assert not inspect(Enum1.CHARLIE)
    assert inspect(Enum2.BETA)

    inspect = std.enums(verb=INSP_JSON, typ=Enum2, ctx=Rules())
    assert not inspect(Enum2.GAMMA)
    assert inspect("BETA")
    # "ABLE" names an Enum1 member, not an Enum2 member.
    assert not inspect("ABLE")


def test_enums_picklable():
    "Test that actions generated by the enums rule can be pickled."

    actions = []
    for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
        for typ in (Enum1, Enum2):
            actions.append(std.enums(verb=verb, typ=typ, ctx=Rules()))
    assert all(action is not None for action in actions)
    dumps(actions)
419 |
420 |
def test_faux_enums_disregard():
    # Fixed copy-paste docstring: this tests faux_enums, not iso_dates.
    "Test the faux_enums rule will disregard unknown types and verbs."

    assert std.faux_enums(verb="unknown", typ=Enum1, ctx=Rules()) is None
    for verb in (JSON2PY, PY2JSON):
        assert std.faux_enums(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_faux_enums():
    # Fixed copy-paste docstring: this tests faux_enums, not enums.
    "Test the faux_enums rule passes enum names through as plain strings."

    decoder = std.faux_enums(verb=JSON2PY, typ=Enum1, ctx=Rules())
    assert decoder("ABLE") == "ABLE"
    # A name outside the enum is rejected rather than passed through.
    with pytest.raises(KeyError):
        decoder("OTHER")

    encoder = std.faux_enums(verb=PY2JSON, typ=Enum1, ctx=Rules())
    assert encoder("BAKER") == "BAKER"
    with pytest.raises(KeyError):
        encoder("OTHER")

    # Both inspectors accept member *names*, never member instances.
    inspect = std.faux_enums(verb=INSP_PY, typ=Enum1, ctx=Rules())
    assert inspect("ABLE")
    assert not inspect(Enum1.CHARLIE)
    assert not inspect(Enum2.BETA)

    inspect = std.faux_enums(verb=INSP_JSON, typ=Enum1, ctx=Rules())
    assert not inspect(Enum1.BAKER)
    assert not inspect("BETA")
    assert inspect("CHARLIE")


def test_faux_enums_picklable():
    # Fixed copy-paste docstring: this tests faux_enums, not enums.
    "Test that actions generated by the faux_enums rule can be pickled."

    actions = []
    for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
        for typ in (Enum1, Enum2):
            actions.append(std.faux_enums(verb=verb, typ=typ, ctx=Rules()))
    assert all(action is not None for action in actions)
    dumps(actions)
463 |
464 |
def test_optional_disregard():
    "Test that optional will disregard unknown types and verbs."

    assert std.optional(verb="unknown", typ=Optional[int], ctx=Rules()) is None
    # A Union with more than one non-None member is not an Optional.
    assert std.optional(verb=JSON2PY, typ=Union[int, str], ctx=Rules()) is None
    assert (
        std.optional(verb=JSON2PY, typ=Union[int, str, NoneType], ctx=Rules()) is None
    )
    for verb in (JSON2PY, PY2JSON):
        assert std.optional(verb=verb, typ=Mystery, ctx=Rules()) is None


def test_optional():
    # Fixed docstring grammar ("a action that pass").
    "Test that optional returns an action that passes non-null values to an inner action."

    ctx = Rules(std.atoms)

    # None is passed through untouched; everything else hits the int action.
    for verb in (PY2JSON, JSON2PY):
        convert = std.optional(verb=verb, typ=Optional[int], ctx=ctx)
        assert convert("77") == 77
        assert convert(None) is None

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.optional(verb=verb, typ=Optional[int], ctx=ctx)
        assert inspect(77)
        assert inspect(None)
        assert not inspect("77")


def test_optional_nonstandard():
    "Test that optional recognizes Unions that are effectively Optional."

    ctx = Rules(std.atoms)

    for verb in (PY2JSON, JSON2PY):
        convert = std.optional(verb=verb, typ=Union[str, NoneType], ctx=ctx)
        assert convert(77) == "77"
        assert convert(None) is None


def test_optional_invalid():
    "Test that optional raises if no valid inner type is found."

    ctx = Rules(std.atoms)
    # A Union of nothing but NoneType leaves no inner type to wrap.
    fake_type = Mock(__origin__=Union, __args__=(NoneType, NoneType))

    for verb in (JSON2PY, PY2JSON, INSP_PY, INSP_JSON):
        with pytest.raises(TypeError):
            std.optional(verb=verb, typ=fake_type, ctx=ctx)


def test_optional_picklable():
    "Test that actions generated by the optional rule can be pickled."

    ctx = Rules(std.atoms, std.floats)

    actions = []
    for verb in (JSON2PY, PY2JSON):
        for typ in (Optional[str], Optional[float], Optional[int], Optional[bool]):
            actions.append(std.optional(verb=verb, typ=typ, ctx=ctx))
    assert all(action is not None for action in actions)
    dumps(actions)
538 |
539 |
def test_lists_disregards():
    "Test that lists disregards unknown types and verbs."

    for typ in (List[int], Tuple[int, ...]):
        assert std.lists(verb="unknown", typ=typ, ctx=Rules()) is None
    # Non-sequences and fixed-length tuples are not this rule's business.
    assert std.lists(verb=PY2JSON, typ=bool, ctx=Rules()) is None
    assert std.lists(verb=JSON2PY, typ=Tuple[int, str], ctx=Rules()) is None
    assert std.lists(verb=INSP_PY, typ=bool, ctx=Rules()) is None
    assert std.lists(verb=INSP_JSON, typ=Tuple[int, str], ctx=Rules()) is None


def test_lists_lists():
    "Test that lists will generate encoders and decoders for lists."

    ctx = Rules(std.atoms)

    for verb in (PY2JSON, JSON2PY):
        convert = std.lists(verb=verb, typ=List[str], ctx=ctx)
        assert convert([33, 77]) == ["33", "77"]

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.lists(verb=verb, typ=List[str], ctx=ctx)
        assert not inspect(["33", 77])
        assert inspect(["33", "77"])
        # A tuple is not a list on either side.
        assert not inspect(("33", "77"))


def test_lists_tuples():
    "Test that lists will generate encoders and decoders for homogenous tuples."

    ctx = Rules(std.atoms)

    encoder = std.lists(verb=PY2JSON, typ=Tuple[str, ...], ctx=ctx)
    assert encoder((33, 77)) == ["33", "77"]

    decoder = std.lists(verb=JSON2PY, typ=Tuple[str, ...], ctx=ctx)
    assert decoder([33, 77]) == ("33", "77")

    # The Python side wants tuples; the JSON side wants lists.
    for verb, mixed, good, bad in (
        (INSP_PY, ("33", 77), ("33", "77"), ["33", "77"]),
        (INSP_JSON, ["33", 77], ["33", "77"], ("33", "77")),
    ):
        inspect = std.lists(verb=verb, typ=Tuple[str, ...], ctx=ctx)
        assert not inspect(mixed)
        assert inspect(good)
        assert not inspect(bad)

    # Prove these tests don't pass spuriously.
    assert ["1", "2"] != ("1", "2")
596 |
597 |
def test_sets_disregards():
    "Test that sets disregards unknown types and verbs."

    for typ in (Set[int], FrozenSet[set]):
        assert std.sets(verb="unknown", typ=typ, ctx=Rules()) is None
    assert std.sets(verb=PY2JSON, typ=bool, ctx=Rules()) is None
    assert std.sets(verb=JSON2PY, typ=List[str], ctx=Rules()) is None
    assert std.sets(verb=INSP_PY, typ=bool, ctx=Rules()) is None
    assert std.sets(verb=INSP_JSON, typ=List[str], ctx=Rules()) is None


def test_sets_sets():
    "Test that sets will generate encoders and decoders for sets."

    ctx = Rules(std.atoms)

    # The encoder emits a list (order unspecified, hence the sort).
    encoder = std.sets(verb=PY2JSON, typ=Set[str], ctx=ctx)
    encoded = encoder({1, 2, 2, 3})
    encoded.sort()
    assert encoded == ["1", "2", "3"]

    # Decoding collapses duplicates into the set.
    decoder = std.sets(verb=JSON2PY, typ=Set[str], ctx=ctx)
    assert decoder([1, 2, 2, 3]) == {"1", "2", "3"}

    for verb, mixed, good, bad in (
        (INSP_PY, {"33", 77}, {"33", "77"}, ["33", "77"]),
        (INSP_JSON, ["33", 77], ["33", "77"], {"33", "77"}),
    ):
        inspect = std.sets(verb=verb, typ=Set[str], ctx=ctx)
        assert not inspect(mixed)
        assert inspect(good)
        assert not inspect(bad)


def test_sets_frozen():
    "Test that sets will generate encoders and decoders for frozen sets."

    ctx = Rules(std.atoms)

    encoder = std.sets(verb=PY2JSON, typ=FrozenSet[str], ctx=ctx)
    encoded = encoder(frozenset([1, 2, 2, 3]))
    encoded.sort()
    assert encoded == ["1", "2", "3"]

    decoder = std.sets(verb=JSON2PY, typ=FrozenSet[str], ctx=ctx)
    assert decoder([1, 2, 2, 3]) == frozenset(["1", "2", "3"])

    for verb, mixed, good, bad in (
        (INSP_PY, frozenset({"33", 77}), frozenset({"33", "77"}), ["33", "77"]),
        (INSP_JSON, ["33", 77], ["33", "77"], {"33", "77"}),
    ):
        inspect = std.sets(verb=verb, typ=FrozenSet[str], ctx=ctx)
        assert not inspect(mixed)
        assert inspect(good)
        assert not inspect(bad)
655 |
656 |
def test_dicts_disregards():
    "Test that dicts disregards unknown types and verbs."

    ctx = Rules(stringify_keys, std.atoms, std.floats)

    assert std.dicts(verb="unknown", typ=Dict[str, int], ctx=ctx) is None
    assert std.dicts(verb="unknown", typ=Dict[datetime, float], ctx=ctx) is None
    if OrderedDict is not None:
        # typing.OrderedDict only exists on 3.7.2+; see the guarded import.
        assert std.dicts(verb="unknown", typ=OrderedDict[str, int], ctx=ctx) is None
        assert (
            std.dicts(verb="unknown", typ=OrderedDict[datetime, float], ctx=ctx) is None
        )
    assert std.dicts(verb=PY2JSON, typ=bool, ctx=ctx) is None
    # A float key raises rather than being disregarded -- presumably because
    # no rule in ctx can stringify float keys.
    with pytest.raises(RuntimeError):
        std.dicts(verb=JSON2PY, typ=Dict[float, str], ctx=ctx)

    assert std.dicts(verb=INSP_JSON, typ=List[str], ctx=ctx) is None


def test_dicts_string_key():
    "Test that dicts will generate encoders and decoders for dicts."

    ctx = Rules(stringify_keys, std.atoms)

    # Keys are stringified and values converted, in both directions.
    for verb in (PY2JSON, JSON2PY):
        convert = std.dicts(verb=verb, typ=Dict[str, int], ctx=ctx)
        assert convert({22: "11", 44: "33"}) == {"22": 11, "44": 33}

    for verb in (INSP_PY, INSP_JSON):
        inspect = std.dicts(verb=verb, typ=Dict[str, int], ctx=ctx)
        assert not inspect({"foo": 1, "bar": "no"})
        assert inspect({"foo": 1, "bar": 2})
        # An empty dict trivially satisfies the check.
        assert inspect({})


def test_dicts_date_key():
    "Test that dicts will generate encoders and decoders for dicts keyed by simple dates."

    ctx = Rules(std.atoms, std.iso_dates, stringify_keys)

    encoder = std.dicts(verb=PY2JSON, typ=Dict[date, int], ctx=ctx)
    assert encoder({date(2020, 2, 22): "11", date(2040, 4, 4): "33"}) == {
        "2020-02-22": 11,
        "2040-04-04": 33,
    }

    decoder = std.dicts(verb=JSON2PY, typ=Dict[date, int], ctx=ctx)
    assert decoder({"2020-02-22": "11", "2040-04-04": "33"}) == {
        date(2020, 2, 22): 11,
        date(2040, 4, 4): 33,
    }

    inspect_py = std.dicts(verb=INSP_PY, typ=Dict[date, int], ctx=ctx)
    assert not inspect_py({date(2040, 4, 4): 1, date(2020, 2, 22): "no"})
    assert inspect_py({date(2040, 4, 4): 1, date(2020, 2, 22): 2})
    assert inspect_py({})

    inspect_json = std.dicts(verb=INSP_JSON, typ=Dict[date, int], ctx=ctx)
    assert not inspect_json({"2011-11-11": 1, "2022-02-02": "no"})
    assert inspect_json({"2011-11-11": 1, "2022-02-02": 2})
    assert inspect_json({})
724 |
725 |
726 | class AB(Enum):
727 | A = 1
728 | B = 2
729 |
730 |
731 | def test_dicts_enum_key():
732 | "Test that dicts will generate encoders and decoders for dicts."
733 |
734 | ctx = Rules(stringify_keys, std.atoms, std.enums)
735 |
736 | encoder = std.dicts(verb=PY2JSON, typ=Dict[AB, int], ctx=ctx)
737 | assert encoder({AB.A: "11", AB.B: "33"}) == {"A": 11, "B": 33}
738 |
739 | decoder = std.dicts(verb=JSON2PY, typ=Dict[AB, int], ctx=ctx)
740 | assert decoder({"A": "11", "B": "33"}) == {AB.A: 11, AB.B: 33}
741 |
742 | inspect = std.dicts(verb=INSP_PY, typ=Dict[AB, int], ctx=ctx)
743 | assert not inspect({AB.A: 1, AB.B: "no"})
744 | assert inspect({AB.A: 1, AB.B: 2})
745 | assert not inspect({AB.A: 1, "B": 2})
746 |
747 | inspect = std.dicts(verb=INSP_JSON, typ=Dict[AB, int], ctx=ctx)
748 | assert not inspect({"A": 1, "B": "no"})
749 | assert inspect({"A": 1, "B": 2})
750 | assert not inspect({"A": 1, "C": 2})
751 |
--------------------------------------------------------------------------------
/tests/test_std_ruleset.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from datetime import date
4 | from decimal import Decimal
5 | import json_syntax as syn
6 |
7 | try:
8 | from tests.types_std_ann import things, accounts
9 | except SyntaxError:
10 | from tests.types_std_noann import things, accounts
11 |
12 |
@pytest.mark.parametrize("Thing,Other", things)
def test_encoding_of_composite_thing(Thing, Other):
    "Test encoding of a cyclic type."
    rs = syn.std_ruleset()
    encoder = rs.lookup(typ=Thing, verb=syn.PY2JSON)
    decoder = rs.lookup(typ=Thing, verb=syn.JSON2PY)

    # Build fresh values for each comparison rather than sharing instances.
    def make_py():
        return Thing(
            foo=False,
            bar=[
                Other(x=3.3, y=date(1944, 4, 4), z=None),
                Other(x=4.4, y=date(1955, 5, 5), z=None),
            ],
            qux=77,
        )

    def make_js():
        return {
            "foo": False,
            "bar": [
                {"x": 3.3, "y": "1944-04-04", "z": None},
                {"x": 4.4, "y": "1955-05-05", "z": None},
            ],
            "qux": 77,
        }

    assert encoder(make_py()) == make_js()
    assert decoder(make_js()) == make_py()
42 |
43 |
@pytest.mark.parametrize("Account,TransType,Trans", accounts)
def test_readme_example(Account, TransType, Trans):
    "Test encoding the readme example."
    rules = syn.std_ruleset()
    encode_account = rules.lookup(typ=Account, verb=syn.PY2JSON)
    decode_account = rules.lookup(typ=Account, verb=syn.JSON2PY)

    # Build fresh values for each comparison rather than sharing instances.
    def make_py():
        return Account(
            "bob",
            [Trans(TransType.withdraw, Decimal("523.33"), date(2019, 4, 4))],
            Decimal("77.00"),
        )

    def make_js():
        return {
            "user": "bob",
            "transactions": [
                {"type": "withdraw", "amount": Decimal("523.33"), "stamp": "2019-04-04"}
            ],
            "balance": Decimal("77.00"),
        }

    assert encode_account(make_py()) == make_js()
    assert decode_account(make_js()) == make_py()
69 |
--------------------------------------------------------------------------------
/tests/test_types.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from json_syntax import types as tt
4 |
5 | from .common import typing as t, SoftMod
6 | from .types_attrs_common import T, U
7 |
8 | import attr
9 |
# Presumably loads the annotated fixture module lazily, yielding None members
# on interpreters that cannot parse its syntax — TODO confirm in tests.common.
ann = SoftMod("tests.types_attrs_ann", allow_SyntaxError=True)
11 |
12 |
@attr.s
class GenExample(t.Generic[T, U]):
    "Two-parameter generic attrs fixture used by the origin-resolution tests below."
    body = attr.ib(type=T)
    count = attr.ib(type=int)
    messages = attr.ib(type=t.List[U])
18 |
19 |
def test_has_origin_not_typing():
    "has_origin should accept a plain class (not a typing construct) as its own origin."
    outcome = tt.has_origin(list, list)
    assert outcome
24 |
25 |
def test_has_origin_handle_tuple():
    "has_origin should accept a tuple of candidate origins, any of which may match."
    outcome = tt.has_origin(t.List[int], (str, list, tuple))
    assert outcome
30 |
31 |
def test_has_origin_num_args():
    "has_origin should be able to require an exact count of type arguments."
    outcome = tt.has_origin(t.Tuple[int, str, float], tuple, num_args=3)
    assert outcome
36 |
37 |
def test_issub_safe_normal_type1():
    "issub_safe should mirror issubclass for ordinary classes and tuples of them."
    accepted = [(bool, int), (bool, (int, float, str))]
    for sub, sup in accepted:
        assert tt.issub_safe(sub, sup)
    assert not tt.issub_safe(int, str)
44 |
45 |
def test_issub_safe_normal_type2():
    "issub_safe should decline (return False) for generic aliases like List[int]."
    outcome = tt.issub_safe(t.List[int], list)
    assert not outcome
50 |
51 |
def test_eval_type_imports():
    "The private ``typing._eval_type`` hook must be importable for forward refs."
    assert (
        tt._eval_type is not None
    ), "typing._eval_type is not available, investigate an alternative."
58 |
59 |
class SomeClass:
    "Fixture holding a forward reference to ``AnotherClass`` (defined below)."
    some_type = t.List["AnotherClass"]


class AnotherClass:
    "Target of the forward reference held by ``SomeClass``."
    pass
66 |
67 |
def test_resolve_fwd_ref():
    "resolve_fwd_ref should replace the string ref with the class it names."
    resolved = tt.resolve_fwd_ref(SomeClass.some_type, SomeClass)

    assert tt.has_origin(resolved, list)
    assert resolved.__args__ == (AnotherClass,)
75 |
76 |
def test_resolve_fwd_ref_bad_context():
    "resolve_fwd_ref should hand back the original ref when no module is found."
    # ForwardRef is public on newer typing; fall back to the private name.
    Forward = t.ForwardRef or t._ForwardRef
    ref = Forward("AnotherClass")

    assert tt.resolve_fwd_ref(ref, "dummy") is ref
85 |
86 |
@pytest.mark.parametrize(
    "GenClass, origin",
    [
        (GenExample, None),
        (GenExample[str, int], GenExample),
        (t.List[int], t.List),
        (t.List["int"], t.List),
        (t.List, None),
        (t.Union[int, str], None),
        (int, None),
    ],
)
def test_get_generic_origin(GenClass, origin):
    "Test that get_generic_origin finds the origin class, unless the class is not generic."
    assert tt.get_generic_origin(GenClass) == origin
102 |
103 |
@pytest.mark.parametrize(
    "GenClass, origin",
    [
        (GenExample, GenExample),
        (GenExample[str, int], GenExample),
        (t.List[int], list),
        (t.List["int"], list),
        (t.List, list),
        (t.Union[int, str], t.Union),
        (t.Union, t.Union),
        (int, int),
    ],
)
def test_get_origin(GenClass, origin):
    "Test that get_origin unwraps generic aliases to their origin, else returns the type itself."
    assert tt.get_origin(GenClass) == origin
120 |
--------------------------------------------------------------------------------
/tests/test_union.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | import attr
4 | from datetime import date, datetime
5 | from decimal import Decimal
6 | from enum import Enum
7 | from itertools import product
8 | from typing import Union, List, Tuple, Set, FrozenSet, Dict
9 |
10 | from json_syntax import std_ruleset
11 | from json_syntax.helpers import PY2JSON, JSON2PY, INSP_PY, INSP_JSON, NoneType
12 |
13 |
@attr.s(frozen=True)
class Point:
    "Frozen (hashable) attrs fixture so it can appear inside sets and frozensets."
    x = attr.ib(type=float)
    y = attr.ib(0.0, type=float)  # has a default of 0.0
18 |
19 |
class Dir(Enum):
    "Enum fixture; the ``strings`` table below shows it encoding to its member name."
    UP = 1
    DOWN = 2
23 |
24 |
# Every entry is a (type, python_value, json_value) triple; the lists group
# them by category so unions can be formed from visibly different kinds.

atoms = [(NoneType, None, None), (bool, True, True)]

nums = [(int, 5, 5), (float, 3.3, 3.3), (Decimal, Decimal("5.5"), Decimal("5.5"))]

# Types whose JSON representation is a string.
strings = [
    (str, "str", "str"),
    (date, date(2010, 10, 10), "2010-10-10"),
    (datetime, datetime(2011, 11, 11, 11, 11, 11), "2011-11-11T11:11:11"),
    (Dir, Dir.UP, "UP"),
]

# Types whose JSON representation is an array.
arrays = [
    (List[Point], [Point(x=4.5, y=6.6)], [{"x": 4.5, "y": 6.6}]),
    (Tuple[Point, ...], (Point(x=4.5, y=6.6),), [{"x": 4.5, "y": 6.6}]),
    (Set[Point], {Point(x=4.5, y=6.6)}, [{"x": 4.5, "y": 6.6}]),
    (FrozenSet[Point], frozenset([Point(x=4.5, y=6.6)]), [{"x": 4.5, "y": 6.6}]),
]

# Types whose JSON representation is an object.
dicts = [
    (Point, Point(x=4.5, y=6.6), {"x": 4.5, "y": 6.6}),
    (Dict[Dir, Decimal], {Dir.UP: Decimal("7.7")}, {"UP": Decimal("7.7")}),
    (Dict[str, float], {"a": 2.3, "b": 3.4}, {"a": 2.3, "b": 3.4}),
]

# All categories together; _pairs() below draws union operands from these.
cats = [atoms, nums, strings, arrays, dicts]
50 |
51 |
@pytest.mark.parametrize("typ,py,js", [trip for cat in cats for trip in cat])
def test_simple(typ, py, js):
    "Each (type, python, json) triple should convert cleanly in both directions."
    ruleset = std_ruleset()
    encode = ruleset.lookup(verb=PY2JSON, typ=typ)
    assert encode(py) == js
    decode = ruleset.lookup(verb=JSON2PY, typ=typ)
    assert decode(js) == py
59 |
60 |
def _pairs():
    """Yield (left, right) pairs of test triples from categories two apart.

    Offsetting by two presumably keeps the two union operands of visibly
    different kinds so the union rule can discriminate — TODO confirm.
    """
    # enumerate() replaces the manual ``range(0, len(cats))`` index loop.
    total = len(cats)
    for offset, lefts in enumerate(cats):
        rights = cats[(offset + 2) % total]
        yield from product(lefts, rights)
66 |
67 |
def cvt_map():
    "Yield (verb, union_type, subject, expected) conversion cases in both directions."
    for (l_typ, l_py, l_js), (r_typ, r_py, r_js) in _pairs():
        union = Union[l_typ, r_typ]
        yield (PY2JSON, union, l_py, l_js)
        yield (PY2JSON, union, r_py, r_js)
        yield (JSON2PY, union, l_js, l_py)
        yield (JSON2PY, union, r_js, r_py)
78 |
79 |
@pytest.mark.parametrize("verb,typ,subj,expect", cvt_map())
def test_convert_unions(verb, typ, subj, expect):
    "Test that the unions rule is able to convert possible types."
    convert = std_ruleset().lookup(verb=verb, typ=typ)
    result = convert(subj)
    assert result == expect
87 |
88 |
def check_map():
    "Yield (verb, union_type, subject) inspection cases for both representations."
    for (l_typ, l_py, l_js), (r_typ, r_py, r_js) in _pairs():
        union = Union[l_typ, r_typ]
        yield (INSP_PY, union, l_py)
        yield (INSP_PY, union, r_py)
        yield (INSP_JSON, union, l_js)
        yield (INSP_JSON, union, r_js)
99 |
100 |
@pytest.mark.parametrize("verb,typ,subj", check_map())
def test_check_unions(verb, typ, subj):
    "Test that the unions rule is able to verify possible types."
    check = std_ruleset().lookup(verb=verb, typ=typ)
    assert check(subj)
108 |
--------------------------------------------------------------------------------
/tests/test_union_prop.py:
--------------------------------------------------------------------------------
1 | from hypothesis import given, settings, HealthCheck
2 |
3 | from . import type_strategies as ts
4 |
5 | # import attr
6 | # from datetime import date, datetime
7 | # from decimal import Decimal
8 | # from enum import Enum
9 | # from itertools import product
10 | # from typing import Union, List, Tuple, Set, FrozenSet, Dict
11 |
12 | from json_syntax import std_ruleset
13 | from json_syntax.helpers import PY2JSON, JSON2PY # INSP_PY, INSP_JSON, NoneType
14 | from json_syntax.pattern import Matches
15 |
16 |
@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
@given(ts.type_value_pairs(ts.complex_no_unions))
def test_roundtrip(pair):
    "Union-free composite values must survive a JSON round trip unchanged."
    typ, original = pair
    ruleset = std_ruleset()
    to_json = ruleset.lookup(verb=PY2JSON, typ=typ)
    from_json = ruleset.lookup(verb=JSON2PY, typ=typ)
    assert from_json(to_json(original)) == original
27 |
28 |
@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
@given(ts.type_value_pairs(ts.unions_of_simple))
def test_roundtrip_union_simple(pair):
    "Simple unions must round trip unless the union itself is ambiguous."
    typ, original = pair
    ruleset = std_ruleset()
    to_json = ruleset.lookup(verb=PY2JSON, typ=typ)
    from_json = ruleset.lookup(verb=JSON2PY, typ=typ)
    restored = from_json(to_json(original))
    # An ambiguous union may legitimately decode into a different branch.
    if not ruleset.is_ambiguous(typ=typ, threshold=Matches.potential):
        assert original == restored
40 |
41 |
@settings(suppress_health_check=[HealthCheck.too_slow], max_examples=100, deadline=None)
@given(ts.type_value_pairs(ts.complex_anything))
def test_roundtrip_arbitrary_complex(pair):
    "Arbitrary nested types (unions anywhere) must round trip unless ambiguous."
    typ, original = pair
    ruleset = std_ruleset()
    to_json = ruleset.lookup(verb=PY2JSON, typ=typ)
    from_json = ruleset.lookup(verb=JSON2PY, typ=typ)
    restored = from_json(to_json(original))
    # An ambiguous union may legitimately decode into a different branch.
    if not ruleset.is_ambiguous(typ=typ, threshold=Matches.potential):
        assert original == restored
53 |
--------------------------------------------------------------------------------
/tests/type_strategies.py:
--------------------------------------------------------------------------------
1 | from hypothesis import strategies as st
2 |
3 | from decimal import Decimal
4 | import datetime as dt
5 | from enum import Enum
6 |
7 | from . import _strategies as _st
8 |
9 |
# Tests often want to compare for equality, and there's no good way to do this with NaNs
# breaking it. :-(
# Registered globally so every st.from_type() call picks up NaN-free values.
st.register_type_strategy(Decimal, st.decimals(allow_nan=False))
st.register_type_strategy(float, st.floats(allow_nan=False))
14 |
15 |
def type_value_pairs(base):
    "Lift a strategy over types into a strategy over (type, value) pairs."

    @st.composite
    def pair_strategy(draw):
        chosen = draw(base)
        try:
            value = draw(st.from_type(chosen))
        except Exception as exc:
            # Attach the offending type so Hypothesis failures name it.
            exc.args += (chosen,)
            raise
        return (chosen, value)

    return pair_strategy()
28 |
29 |
# Atomic leaf types; all value generation for these goes through
# st.from_type() plus the NaN-free registrations above.
atoms = st.sampled_from(
    [
        type(None),
        bool,
        int,
        float,
        Decimal,
        str,
        dt.date,
        dt.datetime,
        dt.time,
        dt.timedelta,
    ]
)
44 |
45 |
class Head(Enum):
    """Kinds of type constructors ("heads") the generated type trees may use.

    Each member's value is a disposition string from which convenience flags
    are derived in ``__init__``.

    NOTE(review): members sharing a value (e.g. ``atoms``/``enums``) are enum
    aliases of one another; the code below only ever branches on the derived
    flags, so this appears harmless — confirm it is intentional.
    """

    def __init__(self, disposition):
        # Enum calls __init__ with the member's value; derive flags once here.
        self.disposition = disposition
        self.atomic = disposition == "atom"
        self.hashable = disposition in ("atom", "immut")
        self.is_union = disposition == "union"

    atoms = "atom"
    enums = "atom"
    lists = "mut"
    sets = "mut"
    dicts = "mut"
    mut_attrs = "mut"
    mut_dataclasses = "mut"
    hmg_tuples = "immut"
    frozensets = "immut"
    prod_tuples = "immut"
    frz_attrs = "immut"
    frz_dataclasses = "immut"
    namedtuples = "immut"
    unions = "union"

    @classmethod
    def short(cls, elems):
        # Normalize ``elems`` (a Head, a disposition string, or an iterable of
        # either) into a set of Head members.
        if isinstance(elems, (cls, str)):
            elems = [elems]
        out = set()
        for elem in elems:
            if isinstance(elem, cls):
                out.add(elem)
            elif isinstance(elem, str):
                out.update(head for head in cls if head.disposition == elem)
        return out
79 |
80 |
# TODO: extend these strategies to cover:
#   1. default values for all of these types
#   2. typeless (unannotated) variants
#   3. subclasses of our own types?
85 |
86 |
def map_heads(types, frz_types):
    "Yield (Head, strategy) pairs buildable from the inner-level strategies."
    # Leaf strategies are always available.
    yield Head.atoms, atoms
    yield Head.enums, _st.enums
    # Containers that accept any element strategy.
    if types:
        yield Head.lists, _st.lists(types)
        yield Head.unions, _st.unions(types)
        yield Head.mut_attrs, _st.attrs(types, frozen=False)
        yield Head.mut_dataclasses, _st.dataclasses(types, frozen=False)
        yield Head.dicts, _st.dicts(types)
    # Containers whose elements must be hashable ("frozen").
    if frz_types:
        yield Head.hmg_tuples, _st.hmg_tuples(frz_types)
        yield Head.sets, _st.sets(frz_types)
        yield Head.frozensets, _st.frozensets(frz_types)
        yield Head.prod_tuples, _st.prod_tuples(frz_types)
        yield Head.frz_attrs, _st.attrs(frz_types, frozen=True)
        yield Head.frz_dataclasses, _st.dataclasses(frz_types, frozen=True)
        yield Head.namedtuples, _st.namedtuples(frz_types)
105 |
106 |
def type_tree(*levels):
    """
    Constructs a type tree of a fixed maximum height based on the heads provided.
    The last level must be leaves that can be contained by the levels above.
    """
    # Strategies from the level below: all of them, and the hashable subset
    # usable as elements of sets, frozensets, tuples and dict keys.
    types, frz_types = None, None

    # Build bottom-up: each level wraps the strategies of the level beneath.
    for level in map(Head.short, reversed(levels)):
        tt = []
        frz_tt = []
        for head, typ in map_heads(types, frz_types):
            if typ is None:
                continue
            if head in level:
                tt.append(typ)
                # Hashable heads may also feed the frozen-container slots.
                if head.hashable:
                    frz_tt.append(typ)
        types = st.one_of(tt) if tt else None
        frz_types = st.one_of(frz_tt) if frz_tt else None

    if types is None:
        raise ValueError("No types for {}".format(levels))
    return types
130 |
131 |
# Three container levels over atoms, with no unions, so every value has an
# unambiguous round trip.
complex_no_unions = type_tree(
    {"atom", "mut", "immut"},
    {"atom", "mut", "immut"},
    {"atom", "mut", "immut"},
    {"atom"},
)

# A single union layer over simple containers of atoms.
unions_of_simple = type_tree({Head.unions}, {"atom", "mut", "immut"}, {"atom"})

# Fully general trees: unions may appear at any container level.
complex_anything = type_tree(
    {"atom", "mut", "immut", "unions"},
    {"atom", "mut", "immut", "unions"},
    {"atom", "mut", "immut", "unions"},
    {"atom"},
)
147 |
--------------------------------------------------------------------------------
/tests/types_attrs_ann.py:
--------------------------------------------------------------------------------
1 | import attr
2 |
3 | from .common import dataclasses as dc, typing as t
4 | from .types_attrs_common import Hooks, T, U
5 |
6 |
# Annotated (PEP 526) fixture classes. test_types.py imports this module via
# SoftMod with allow_SyntaxError, so older interpreters skip it gracefully.
@attr.s(auto_attribs=True)
class Flat:
    # Plain annotated attrs class with one defaulted field.
    a: int
    b: str = "default"


@attr.s(auto_attribs=True)
class GenFlat(t.Generic[T]):
    # Generic variant of Flat.
    a: T
    b: str = "default"


@attr.s(auto_attribs=True)
class Hook(Hooks):
    # Inherits the __json_*__ hook methods from Hooks.
    a: int
    b: str = "default"


class Named(t.NamedTuple):
    # NamedTuple fixture.
    a: int
    b: str = "default"


class Dict(t.TypedDict):
    # TypedDict fixture.
    a: int
    b: str


@attr.s(auto_attribs=True)
class GenExample(t.Generic[T, U]):
    # Two-parameter generic used by the generic-resolution tests.
    body: T
    count: int
    messages: t.List[U]
40 |
41 |
# Slots plus Generic raises TypeError on some typing versions; fall back to
# None so tests can detect and skip the slots variant.
try:

    @attr.s(auto_attribs=True, slots=True)
    class GenExampleSlots(t.Generic[T, U]):
        body: T
        count: int
        messages: t.List[U]


except TypeError:
    # Slots don't work with Generic on older versions of typing.
    GenExampleSlots = None
54 |
55 |
# Dataclass counterparts of the attrs fixtures; only defined when the
# dataclasses module is available (``dc`` is a soft import).
if dc.dataclass:

    @dc.dataclass
    class FlatDc:
        a: int
        b: str = "default"

    @dc.dataclass
    class GenFlatDc(t.Generic[T]):
        a: T
        b: str = "default"

    @dc.dataclass
    class HookDc(Hooks):
        a: int
        b: str = "default"

    @dc.dataclass
    class GenExampleDc(t.Generic[T, U]):
        body: T
        count: int
        messages: t.List[U]

    @dc.dataclass
    class PrivateFieldsDc:
        pub: str
        _priv: int


else:
    # BUG FIX: this previously bound the misspelled name ``GenericExampleDc``,
    # leaving ``GenExampleDc`` undefined on interpreters without dataclasses.
    FlatDc = GenFlatDc = HookDc = GenExampleDc = PrivateFieldsDc = None
    GenericExampleDc = None  # old misspelling, kept for backward compatibility
87 |
--------------------------------------------------------------------------------
/tests/types_attrs_common.py:
--------------------------------------------------------------------------------
1 | from typing import TypeVar
2 |
3 |
class Hooks:
    """Mixin supplying json-syntax's class-level hook methods for the fixtures."""

    @classmethod
    def __json_pre_decode__(cls, value):
        # Accept a two-element list as shorthand for {"a": ..., "b": ...}.
        if isinstance(value, list):
            value = {"a": value[0], "b": value[1]}
        return value

    @classmethod
    def __json_check__(cls, value):
        # True only for dicts tagged by __json_post_encode__ below; presumably
        # used to discriminate this type inside unions.
        return value.get("_type_") == "Hook"

    def __json_post_encode__(cls, value):
        # NOTE(review): no @classmethod here although the first parameter is
        # named ``cls`` — when looked up on the class this still works as a
        # plain function (``value`` binds to ``cls``); confirm whether the
        # decorator was intended.
        return dict(value, _type_="Hook")


# Type variables shared by the generic fixture classes.
T = TypeVar("T")
U = TypeVar("U")
21 |
--------------------------------------------------------------------------------
/tests/types_std_ann.py:
--------------------------------------------------------------------------------
1 | try:
2 | from dataclasses import dataclass
3 | except ImportError:
4 | from attr import dataclass
5 | from datetime import date
6 | from decimal import Decimal
7 | from enum import Enum
8 | from typing import Optional, List
9 |
10 |
@dataclass
class CompositeThing:
    # ``bar`` forward-references Other, and Other.z refers back here, making
    # the pair mutually recursive.
    foo: bool
    bar: List["Other"]
    qux: Optional[int]


@dataclass
class Other:
    x: float
    y: date
    z: Optional[CompositeThing]


# Parametrization payload consumed by test_std_ruleset.py.
things = [(CompositeThing, Other)]
26 |
27 |
@dataclass
class Account:
    # ``transactions`` forward-references Trans, defined below.
    user: str
    transactions: List["Trans"]
    balance: Decimal = Decimal(0)


class TransType(Enum):
    withdraw = 0
    deposit = 1


@dataclass
class Trans:
    type: TransType
    amount: Decimal
    stamp: date


# Parametrization payload for the README round-trip test.
accounts = [(Account, TransType, Trans)]
48 |
--------------------------------------------------------------------------------
/tests/types_std_noann.py:
--------------------------------------------------------------------------------
1 | import attr
2 | from datetime import date
3 | from decimal import Decimal
4 | from enum import Enum
5 | from typing import Optional, List
6 |
7 |
@attr.s
class CompositeThing:
    # attrs ``type=`` variant of the annotated fixture, for interpreters that
    # cannot parse PEP 526 annotations. ``bar`` forward-references Other while
    # Other.z refers back here, making the pair mutually recursive.
    foo = attr.ib(type=bool)
    bar = attr.ib(type=List["Other"])
    qux = attr.ib(type=Optional[int])


@attr.s
class Other:
    x = attr.ib(type=float)
    y = attr.ib(type=date)
    z = attr.ib(type=Optional[CompositeThing])


# Parametrization payload consumed by test_std_ruleset.py.
things = [(CompositeThing, Other)]
23 |
24 |
@attr.s
class Account:
    # ``transactions`` forward-references Trans, defined below.
    user = attr.ib(type=str)
    transactions = attr.ib(type=List["Trans"])
    balance = attr.ib(Decimal(0), type=Decimal)


class TransType(Enum):
    withdraw = 0
    deposit = 1


@attr.s
class Trans:
    type = attr.ib(type=TransType)
    amount = attr.ib(type=Decimal)
    stamp = attr.ib(type=date)


# Parametrization payload for the README round-trip test.
accounts = [(Account, TransType, Trans)]
45 |
--------------------------------------------------------------------------------