├── .formatter.exs ├── .github ├── FUNDING.yml └── workflows │ ├── main.yml │ └── stdlib.yml ├── .gitignore ├── CHANGELOG.md ├── README.md ├── examples └── examples_test.exs ├── lib ├── ex_unit_properties.ex ├── stream_data.ex └── stream_data │ └── lazy_tree.ex ├── mix.exs ├── mix.lock └── test ├── ex_unit_properties_test.exs ├── stdlib ├── enum_test.exs ├── kernel_test.exs ├── keyword_test.exs └── string_test.exs ├── stream_data └── lazy_tree_test.exs ├── stream_data_test.exs └── test_helper.exs /.formatter.exs: -------------------------------------------------------------------------------- 1 | locals_without_parens = [ 2 | all: :*, 3 | check: 1, 4 | check: 2, 5 | property: 1, 6 | property: 2 7 | ] 8 | 9 | [ 10 | inputs: [ 11 | ".formatter.exs", 12 | "mix.exs", 13 | "lib/**/*.ex", 14 | "{test,examples}/**/*.exs" 15 | ], 16 | locals_without_parens: locals_without_parens, 17 | export: [locals_without_parens: locals_without_parens] 18 | ] 19 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: whatyouhide 2 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | 9 | jobs: 10 | test: 11 | name: Test (Elixir ${{ matrix.elixir }} | Erlang/OTP ${{ matrix.otp }}) 12 | runs-on: ${{ matrix.os }} 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | include: 17 | - otp: "27.2" 18 | elixir: "1.18" 19 | os: ubuntu-latest 20 | lint: true 21 | coverage: true 22 | 23 | - otp: "23.3" 24 | elixir: "1.12" 25 | os: ubuntu-20.04 26 | 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | MIX_ENV: test 30 | 31 | steps: 32 | - name: Clone repository 33 | uses: actions/checkout@v3 34 | 35 | - name: Install OTP and Elixir 
36 | uses: erlef/setup-beam@v1 37 | with: 38 | otp-version: ${{ matrix.otp }} 39 | elixir-version: ${{ matrix.elixir }} 40 | version-type: strict 41 | 42 | - name: Install dependencies 43 | run: mix do deps.get --only test, deps.compile 44 | 45 | - name: Check for formatted code 46 | if: ${{ matrix.lint }} 47 | run: mix format --check-formatted 48 | 49 | - name: Cache/uncache PLTs 50 | uses: actions/cache@v3 51 | with: 52 | path: | 53 | priv/plts 54 | key: "${{ runner.os }}-\ 55 | erlang-${{ matrix.otp }}-\ 56 | elixir-${{ matrix.elixir }}-\ 57 | ${{ hashFiles('mix.lock') }}" 58 | 59 | - name: Run Dialyzer 60 | run: mix dialyzer 61 | 62 | - name: Check for unused dependencies 63 | if: ${{ matrix.lint }} 64 | run: mix do deps.get, deps.unlock --check-unused 65 | 66 | - name: Check for compilation warnings 67 | if: ${{ matrix.lint }} 68 | run: mix compile --warnings-as-errors 69 | 70 | - name: Run tests 71 | run: mix test --trace 72 | if: ${{ !matrix.coverage }} 73 | 74 | - name: Run tests with coverage 75 | run: mix coveralls.github 76 | if: ${{ matrix.coverage }} 77 | -------------------------------------------------------------------------------- /.github/workflows/stdlib.yml: -------------------------------------------------------------------------------- 1 | name: Elixir stdlib tests 2 | 3 | on: 4 | # This is needed to trigger the workflow manually from the "Actions" tab in the repo. 5 | workflow_dispatch: 6 | inputs: {} 7 | # Every day at 9am. 8 | schedule: 9 | - cron: "0 9 * * *" 10 | # Every time a change makes it to the main branch of this library, so we maaaaybe 11 | # catch regressions in the library itself. 
12 | push: 13 | branches: 14 | - main 15 | 16 | jobs: 17 | stdlib-test: 18 | name: Elixir standard library tests (OTP ${{ matrix.otp }}, Elixir ${{ matrix.elixir }}) 19 | runs-on: ubuntu-22.04 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | include: 24 | - otp: "25.3" 25 | 26 | env: 27 | MIX_ENV: test 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | 30 | steps: 31 | - name: Clone repository 32 | uses: actions/checkout@v3 33 | 34 | - name: Install Erlang/OTP 35 | uses: erlef/setup-beam@v1 36 | with: 37 | otp-version: ${{ matrix.otp }} 38 | # TODO: remove this once we'll compile Elixir from source again. 39 | elixir-version: 1.14.4-otp-25 40 | 41 | - name: Get SHA of Elixir's main branch 42 | id: get-elixir-sha 43 | run: | 44 | echo ::set-output name=sha::$( curl -u "u:${{ github.token }}" https://api.github.com/repos/elixir-lang/elixir/git/ref/heads/main | jq .object.sha | tr -d '"' ) 45 | 46 | - name: Check out latest Elixir 47 | uses: actions/checkout@v3 48 | with: 49 | repository: elixir-lang/elixir 50 | ref: ${{ steps.get-elixir-sha.outputs.sha }} 51 | path: elixir_src 52 | 53 | - name: Cache compiled Elixir 54 | id: cache-compiled-elixir 55 | uses: actions/cache@v3 56 | with: 57 | path: elixir_src 58 | key: ${{ runner.os }}-elixir-${{ steps.get-elixir-sha.outputs.sha }} 59 | 60 | # TODO: for now, Elixir main doesn't compile some deps. We need to 61 | # go back and do these steps once Elixir works again against Erlang. 62 | 63 | # - name: Compile Elixir 64 | # if: steps.cache-compiled-elixir.outputs.cache-hit != 'true' 65 | # working-directory: elixir_src 66 | # run: make 67 | 68 | # # Needs to happen even on cache hits. 
69 | # - name: Add Elixir binaries to the path 70 | # run: echo "$PWD/elixir_src/bin" >> $GITHUB_PATH 71 | 72 | - name: Install dependencies 73 | run: | 74 | mix do local.hex --force, local.rebar --force 75 | mix do deps.get --only test, deps.compile 76 | 77 | - name: Run stdlib tests 78 | run: mix test --only stdlib 79 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps 9 | 10 | # Where 3rd-party dependencies like ExDoc output generated docs. 11 | /doc 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore local PLTs 23 | priv/plts 24 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v1.2.0 4 | 5 | * Add `StreamData.shuffle/1`. 6 | 7 | ## v1.1.3 8 | 9 | * Fix compilation warnings with current Elixir version (1.18) and upcoming Elixir version. 10 | 11 | ## v1.1.2 12 | 13 | * Fix one more Elixir 1.17 warning. 14 | 15 | ## v1.1.1 16 | 17 | * Add warnings-free support for Elixir 1.17. 18 | 19 | ## v1.1.0 20 | 21 | * Drop support for Elixir 1.11 and lower, require Elixir 1.12+ now. 22 | 23 | ## v1.0.0 24 | 25 | No changes. This is just the 1.0 release. Happy fuzzying! 26 | 27 | ## v0.6.0 28 | 29 | ### Bug Fixes 30 | 31 | * Consider max chars when generating atoms. 
32 | * Fix some small issues in `StreamData.nonempty_improper_list_of/2`.
33 | 
34 | ### Features
35 | 
36 | * Add `StreamData.non_negative_integer/0`.
37 | * Add `StreamData.repeatedly/1`.
38 | * Add `StreamData.chardata/0`.
39 | * Add `StreamData.codepoint/1`.
40 | * Add support for *not implemented* properties (which are just `property "some name"`, without a `do`/`end` body). This is on par with ExUnit's `test/1`.
41 | * Add support for stepped ranges in `StreamData.integer/1`.
42 | * Add support for required keys in `StreamData.optional_map/2`.
43 | * Add `:utf8` option in `StreamData.string/1`.
44 | 
45 | ## v0.5.0
46 | 
47 | * Slightly improve the shrinking algorithm.
48 | * Add `StreamData.map_of/2`.
49 | * Fix a bug around the `:max_shrinking_steps` option.
50 | * Fix a runtime warning with Elixir 1.10.
51 | 
52 | ## v0.4.3
53 | 
54 | * Improve the frequency of terms in `StreamData.term/0`
55 | * Fix a bug in `StreamData.positive_integer/0` that would crash with a generation size of `0`.
56 | * Support inline `, do:` in `gen all` and `check all`.
57 | * Support `:initial_seed` in `check all`.
58 | * Export formatter configuration for `check all` and `gen all`.
59 | * Add `StreamData.seeded/2`.
60 | 61 | ## v0.4.2 62 | 63 | * Fix a bug when shrinking boolean values generated with `StreamData.boolean/0` 64 | 65 | ## v0.4.1 66 | 67 | * Import all functions/macros from `ExUnitProperties` when `use`d 68 | * Various optimizations 69 | * Add the `:max_run_time` configuration option to go together with `:max_runs` 70 | * Add support for `:do` syntax in `gen all`/`check all` 71 | 72 | ## v0.4.0 73 | 74 | * Add a `StreamData.term/0` generator 75 | * Bump the number of allowed consecutive failures in `StreamData.filter/3` and `StreamData.bind_filter/3` 76 | * Improve error message for `StreamData.filter/3` 77 | * Add `ExUnitProperties.pick/1` 78 | * Add `Enumerable.slice/1` to `StreamData` structs 79 | * Improve the performance of `StreamData.bitstring/1` 80 | 81 | #### Breaking changes 82 | 83 | * Remove `StreamData.unquoted_atom/0` in favour of `StreamData.atom(:unquoted | :alias)` 84 | * Start behaving like filtering when patterns don't match in `check all` or `gen all` 85 | * Remove special casing of `=` clauses in `check all` and `gen all` 86 | * Introduce `StreamData.float/1` replacing `StreamData.uniform_float/0` 87 | 88 | ## v0.3.0 89 | 90 | * Add length-related options to `StreamData.string/2` 91 | * Introduce `StreamData.positive_integer/0` 92 | * Raise a better error message on invalid generators 93 | * Fix the `StreamData.t/0` type 94 | * Add support for `rescue/catch/after` in `ExUnitProperties.property/2,3` 95 | * Introduce `StreamData.optional_map/1` 96 | * Add support for keyword lists as argument to `StreamData.fixed_map/1` 97 | 98 | #### Breaking changes 99 | 100 | * Change the arguments to `StreamData.string/2` so that it can take `:ascii`, `:alphanumeric`, `:printable`, a range, or a list of ranges or single codepoints 101 | * Rename `PropertyTest` to `ExUnitProperties` and introduce `use ExUnitProperties` to use in tests that use property-based testing 102 | 103 | ## v0.2.0 104 | 105 | * Add length-related options to `StreamData.list_of/2`, 
`StreamData.uniq_list_of/1`, `StreamData.binary/1`
106 | * Add a `StreamData.bitstring/1` generator
107 | 
108 | #### Breaking changes
109 | 
110 | * Remove `StreamData.string_from_chars/1`, `StreamData.ascii_string/0`, and `StreamData.alphanumeric_string/0` in favour of `StreamData.string/1`
111 | * Rename `StreamData.non_empty/1` to `StreamData.nonempty/1`
112 | * Rename `StreamData.int/0,1` to `StreamData.integer/0,1`
113 | * Rename `StreamData.no_shrink/1` to `StreamData.unshrinkable/1`
114 | * Remove `StreamData.uniq_list_of/3` in favour of `StreamData.uniq_list_of/2` (which takes options)
115 | 
116 | ## v0.1.1
117 | 
118 | * Fix a bug with `check all` syntax where it wouldn't work with assignments in the clauses.
119 | 
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # StreamData
2 | 
3 | [![hex.pm badge](https://img.shields.io/badge/Package%20on%20hex.pm-informational)](https://hex.pm/packages/stream_data)
4 | [![CI](https://github.com/whatyouhide/stream_data/workflows/CI/badge.svg)](https://github.com/whatyouhide/stream_data/actions/workflows/main.yml)
5 | [![Coverage Status](https://coveralls.io/repos/github/whatyouhide/stream_data/badge.svg?branch=master)](https://coveralls.io/github/whatyouhide/stream_data?branch=master)
6 | 
7 | > StreamData is an Elixir library for **data generation** and **property-based testing**.
8 | 
9 | Read [the announcement on the Elixir website](https://elixir-lang.org/blog/2017/10/31/stream-data-property-based-testing-and-data-generation-for-elixir/).
10 | 
11 | ## Installation
12 | 
13 | Add `stream_data` to your list of dependencies:
14 | 
15 | ```elixir
16 | defp deps() do
17 |   [{:stream_data, "~> 1.0", only: :test}]
18 | end
19 | ```
20 | 
21 | and run `mix deps.get`. StreamData is usually added only to the `:test` environment since it's used in tests and test data generation.
22 | To also import StreamData's formatter configuration, add the `:dev` environment as well as `:test` for `stream_data` and add `:stream_data` to your `.formatter.exs`: 23 | 24 | ```elixir 25 | [ 26 | import_deps: [:stream_data] 27 | ] 28 | ``` 29 | 30 | ## Usage 31 | 32 | [The documentation is available online.](https://hexdocs.pm/stream_data/) 33 | 34 | StreamData is made of two main components: data generation and property-based testing. The `StreamData` module provides tools to work with data generation. The `ExUnitProperties` module takes care of the property-based testing functionality. 35 | 36 | ### Data generation 37 | 38 | All data generation functionality is provided in the `StreamData` module. `StreamData` provides "generators" and functions to combine those generators and create new ones. Since generators implement the `Enumerable` protocol, it's easy to use them as infinite streams of data: 39 | 40 | ```elixir 41 | StreamData.integer() |> Stream.map(&abs/1) |> Enum.take(3) 42 | #=> [1, 0, 2] 43 | ``` 44 | 45 | `StreamData` provides all the necessary tools to create arbitrarily complex custom generators: 46 | 47 | ```elixir 48 | require ExUnitProperties 49 | 50 | domains = [ 51 | "gmail.com", 52 | "hotmail.com", 53 | "yahoo.com", 54 | ] 55 | 56 | email_generator = 57 | ExUnitProperties.gen all name <- StreamData.string(:alphanumeric), 58 | name != "", 59 | domain <- StreamData.member_of(domains) do 60 | name <> "@" <> domain 61 | end 62 | 63 | Enum.take(StreamData.resize(email_generator, 20), 2) 64 | #=> ["efsT6Px@hotmail.com", "swEowmk7mW0VmkJDF@yahoo.com"] 65 | ``` 66 | 67 | ### Property testing 68 | 69 | Property testing aims at randomizing test data in order to make tests more robust. Instead of writing a bunch of inputs and expected outputs by hand, with property-based testing we write a *property* of our code that should hold for a set of data, and then we generate data in this set, in attempt to falsify that property. 
To generate this data, we can use the above-mentioned `StreamData` module. 70 | 71 | ```elixir 72 | use ExUnitProperties 73 | 74 | property "bin1 <> bin2 always starts with bin1" do 75 | check all bin1 <- binary(), 76 | bin2 <- binary() do 77 | assert String.starts_with?(bin1 <> bin2, bin1) 78 | end 79 | end 80 | ``` 81 | 82 | To know more about property-based testing, read the `ExUnitProperties` documentation. Another great resource about property-based testing in Erlang (but with most ideas that apply to Elixir as well) is Fred Hebert's website [propertesting.com](http://propertesting.com). 83 | 84 | The property-based testing side of this library is heavily inspired by the [original QuickCheck paper](http://www.cs.tufts.edu/~nr/cs257/archive/john-hughes/quick.pdf) (which targeted Haskell) as well as Clojure's take on property-based testing, [test.check](https://github.com/clojure/test.check). 85 | 86 | ## Differences from other property-based testing frameworks 87 | 88 | There are a handful of property-based testing frameworks for the BEAM ecosystem (Erlang, Elixir, and so on). For Elixir, the main alternative to StreamData is [PropCheck](https://github.com/alfert/propcheck). PropCheck is a wrapper around [PropEr](https://github.com/proper-testing/proper), which is a property-based testing framework for Erlang. There are a few fundamental differences between StreamData and PropEr. They are listed below to help you choose between the two. 89 | 90 | **PropEr** (via PropCheck): 91 | 92 | * It provides *stateful property-based testing*. If you need to test a system with state by building a model of the system to test against, you'll have to go with PropCheck since StreamData doesn't support this yet. 93 | 94 | * It can store counter-examples: StreamData doesn't support storing counter-examples in a file (you have to reuse the seed that caused the failure in order to reproduce it). 
95 | 96 | **StreamData**: 97 | 98 | * Provides functionality for generating data as the base for property-based testing. StreamData generators can be used outside of property-based testing as normal Elixir streams that produce random data. 99 | 100 | * It is native to Elixir. It's written entirely in Elixir and has an idiomatic Elixir API (for example, all generators are Elixir enumerables). 101 | 102 | ## License 103 | 104 | Copyright 2017 Andrea Leopardi and José Valim 105 | 106 | Licensed under the Apache License, Version 2.0 (the "License"); 107 | you may not use this file except in compliance with the License. 108 | You may obtain a copy of the License at 109 | 110 | http://www.apache.org/licenses/LICENSE-2.0 111 | 112 | Unless required by applicable law or agreed to in writing, software 113 | distributed under the License is distributed on an "AS IS" BASIS, 114 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 115 | See the License for the specific language governing permissions and 116 | limitations under the License. 
117 | -------------------------------------------------------------------------------- /examples/examples_test.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | 3 | defmodule StdlibSamplesTest do 4 | use ExUnit.Case, async: true 5 | 6 | use ExUnitProperties 7 | 8 | property "my_starts_with?/1" do 9 | check all bin1 <- binary(), 10 | bin2 <- binary() do 11 | assert my_starts_with?(bin1 <> bin2, bin1) 12 | end 13 | end 14 | 15 | property "element not in list (with options)" do 16 | check all list <- list_of(integer()), initial_size: 5, max_size: 50 do 17 | assert 22 not in list 18 | end 19 | end 20 | 21 | property "something with filter" do 22 | check all a <- integer(), 23 | b <- integer(), 24 | a + b >= 0, 25 | sum = a + b do 26 | assert sum > 0 27 | end 28 | end 29 | 30 | property "FunctionClauseErrors are blamed" do 31 | check all atom <- atom(:alphanumeric) do 32 | Access.get(atom, :key) 33 | end 34 | end 35 | 36 | test "non-assertion error" do 37 | import StreamData 38 | 39 | check all tuple <- {:ok, integer()} do 40 | failing_tuple_match(tuple) 41 | end 42 | end 43 | 44 | defp failing_tuple_match(tuple) do 45 | {:ok, :not_an_int} = tuple 46 | end 47 | 48 | defp my_starts_with?(a, "") when byte_size(a) > 0, do: false 49 | defp my_starts_with?(_, _), do: true 50 | end 51 | -------------------------------------------------------------------------------- /lib/ex_unit_properties.ex: -------------------------------------------------------------------------------- 1 | defmodule ExUnitProperties do 2 | @moduledoc """ 3 | Provides macros for property-based testing. 4 | 5 | This module provides a few macros that can be used for property-based testing. The core is `check/3`, 6 | which allows executing arbitrary tests on many pieces of generated data. Another one is 7 | `property/3`, which is meant as a utility to replace the `ExUnit.Case.test/3` macro when writing 8 | properties. 
The last one is `gen/3`, which can be used as syntactic sugar to build generators 9 | (see `StreamData` for other ways of building generators and for core generators). 10 | 11 | ## Overview of property-based testing 12 | 13 | One of the most common ways of writing tests (in Elixir and many other 14 | languages) is to write tests by hand. For example, say that we want to write a 15 | `starts_with?/2` function that takes two binaries and returns `true` if the 16 | first starts with the second and `false` otherwise. We would likely test such 17 | function with something like this: 18 | 19 | test "starts_with?/2" do 20 | assert starts_with?("foo", "f") 21 | refute starts_with?("foo", "b") 22 | assert starts_with?("foo", "") 23 | assert starts_with?("", "") 24 | refute starts_with?("", "something") 25 | end 26 | 27 | This test highlights the method used to write such kind of tests: they're 28 | written by hand. The process usually consists of testing an expected output on 29 | a set of expected inputs. This works especially well for edge cases, but the 30 | robustness of this test could be improved. This is what property-based testing aims 31 | to solve. Property testing is based on two ideas: 32 | 33 | * specify a set of **properties** that a piece of code should satisfy 34 | * test those properties on a very large number of randomly generated data 35 | 36 | The point of specifying **properties** instead of testing manual scenarios is 37 | that properties should hold for all the data that the piece of code should be 38 | able to deal with, and in turn, this plays well with generating data at 39 | random. Writing properties has the added benefit of forcing the programmer to 40 | think about their code differently: they have to think about which are 41 | invariant properties that their code satisfies. 42 | 43 | To go back to the `starts_with?/2` example above, let's come up with a 44 | property that this function should hold. 
Since we know that the `Kernel.<>/2` 45 | operator concatenates two binaries, we can say that a property of 46 | `starts_with?/2` is that the concatenation of binaries `a` and `b` always 47 | starts with `a`. This is easy to model as a property using the `check/3` macro 48 | from this module and generators taken from the `StreamData` module: 49 | 50 | test "starts_with?/2" do 51 | check all a <- StreamData.binary(), 52 | b <- StreamData.binary() do 53 | assert starts_with?(a <> b, a) 54 | end 55 | end 56 | 57 | When run, this piece of code will generate a random binary and assign it to 58 | `a`, do the same for `b`, and then run the assertion. This step will be 59 | repeated for a large number of times (`100` by default, but it's 60 | configurable), hence generating many combinations of random `a` and `b`. If 61 | the body passes for all the generated data, then we consider the property to 62 | hold. If a combination of randomly generated terms fails the body of the 63 | property, then `ExUnitProperties` tries to find the smallest set of random 64 | generated terms that still fails the property and reports that; this step is 65 | called shrinking. 66 | 67 | ### Shrinking 68 | 69 | Say that our `starts_with?/2` function blindly returns false when the second 70 | argument is the empty binary (such as `starts_with?("foo", "")`). It's likely 71 | that in 100 runs an empty binary will be generated and bound to `b`. When that 72 | happens, the body of the property fails but `a` is a randomly generated binary 73 | and this might be inconvenient: for example, `a` could be `<<0, 74, 192, 99, 74 | 24, 26>>`. In this case, the `check/3` macro tries to **shrink** `a` to the 75 | smallest term that still fails the property (`b` is not shrunk because `""` is 76 | the smallest binary possible). Doing so will lead to `a = ""` and `b = ""` 77 | which is the "minimal" failing case for our function. 
78 | 79 | The example above is a contrived example but shrinking is a very powerful tool 80 | that aims at taking the noise out of the failing data. 81 | 82 | For detailed information on shrinking, see also the "Shrinking" section in the 83 | documentation for `StreamData`. 84 | 85 | ## Building structs 86 | 87 | We can use the built-in generators to generate other kinds of structs. For 88 | example, imagine we wanted to test the following function. 89 | 90 | def noon?(~T[12:00:00]), do: true 91 | def noon?(_), do: false 92 | 93 | We could generate `%Time{}` structs as follows: 94 | 95 | defp non_noon_generator do 96 | gen all time <- valid_time_generator(), time != ~T[12:00:00] do 97 | time 98 | end 99 | end 100 | 101 | defp valid_time_generator do 102 | gen all hour <- StreamData.integer(0..23), 103 | minute <- StreamData.integer(0..59), 104 | second <- StreamData.integer(0..59) do 105 | Time.new!(hour, minute, second) 106 | end 107 | end 108 | 109 | and use them in properties: 110 | 111 | describe "noon?/1" do 112 | test "returns true for noon" do 113 | assert noon?(~T[12:00:00]) == true 114 | end 115 | 116 | property "returns false for other times" do 117 | check all time <- non_noon_generator() do 118 | assert noon?(time) == false 119 | end 120 | end 121 | end 122 | 123 | ## Resources on property-based testing 124 | 125 | There are many resources available online on property-based testing. An interesting 126 | read is the original paper that introduced QuickCheck, ["QuickCheck: A 127 | Lightweight Tool for Random Testing of Haskell 128 | Programs"](http://www.cs.tufts.edu/~nr/cs257/archive/john-hughes/quick.pdf), a 129 | property-testing tool for the Haskell programming language. Another very 130 | useful resource especially geared towards Erlang and the BEAM is 131 | [propertesting.com](http://propertesting.com), a website created by Fred 132 | Hebert: it's a great explanation of property-based testing that includes many 133 | examples. 
Fred's website uses an Erlang property-based testing tool called
134 | [PropEr](https://github.com/proper-testing/proper) but many of the things he talks
135 | about apply to `ExUnitProperties` as well.
136 | 
137 | ## Options
138 | 
139 | When an error occurs, StreamData will shrink the generated values to find the smallest set of values that still reproduces the error.
140 | It will then print out the generated values using `inspect/2`.
141 | 
142 | You can customize the `inspect/2` options used by setting the `:inspect_opts` option in your test config.
143 | 
144 | # config/test.exs
145 | import Config
146 | 
147 | config :stream_data,
148 | inspect_opts: [limit: :infinity]
149 | 
150 | """
151 | 
152 | alias ExUnit.AssertionError
153 | 
154 | defmodule Error do
155 | @moduledoc false
156 | defexception [:message]
157 | end
158 | 
159 | @doc """
160 | Sets up an `ExUnit.Case` module for property-based testing.
161 | """
162 | defmacro __using__(_opts) do
163 | quote do
164 | import unquote(__MODULE__)
165 | import StreamData
166 | end
167 | end
168 | 
169 | @doc """
170 | Defines a not-implemented property test with a string.
171 | 
172 | Provides a convenient macro that allows a property test to be defined with a
173 | string, but not yet implemented. The resulting property test will always
174 | fail and print a "Not implemented" error message. The resulting test case is
175 | also tagged with `:not_implemented`.
176 | 
177 | This behavior is similar to `ExUnit.Case.test/1`.
178 | 179 | ## Examples 180 | 181 | property "this will be a property test in the future" 182 | 183 | """ 184 | defmacro property(message) do 185 | ExUnit.plural_rule("property", "properties") 186 | 187 | %{module: mod, file: file, line: line} = __CALLER__ 188 | 189 | quote bind_quoted: binding() do 190 | name = ExUnit.Case.register_test(mod, file, line, :property, message, [:not_implemented]) 191 | def unquote(name)(_), do: flunk("Not implemented") 192 | end 193 | end 194 | 195 | @doc """ 196 | Defines a property and imports property-testing facilities in the body. 197 | 198 | This macro is similar to `ExUnit.Case.test/3`, except that it denotes a 199 | **property**. In the given body, all the functions exposed by `StreamData` are 200 | imported, as well as `check/2`. 201 | 202 | When defining a test whose body only consists of one or more `check/2` calls, 203 | it's advised to use `property/3` so as to clearly denote and scope properties. 204 | Doing so will also improve reporting. 205 | 206 | ## Examples 207 | 208 | use ExUnitProperties 209 | 210 | property "reversing a list doesn't change its length" do 211 | check all list <- list_of(integer()) do 212 | assert length(list) == length(:lists.reverse(list)) 213 | end 214 | end 215 | 216 | """ 217 | defmacro property(message, context \\ quote(do: _), contents) do 218 | ExUnit.plural_rule("property", "properties") 219 | 220 | contents = 221 | case contents do 222 | [do: block] -> 223 | quote do 224 | unquote(block) 225 | :ok 226 | end 227 | 228 | _ -> 229 | quote do 230 | try(unquote(contents)) 231 | :ok 232 | end 233 | end 234 | 235 | context = Macro.escape(context) 236 | contents = Macro.escape(contents, unquote: true) 237 | 238 | quote bind_quoted: [context: context, contents: contents, message: message] do 239 | %{module: mod, file: file, line: line} = __ENV__ 240 | name = ExUnit.Case.register_test(mod, file, line, :property, message, [:property]) 241 | def unquote(name)(unquote(context)), do: unquote(contents) 
242 | end 243 | end 244 | 245 | @doc """ 246 | Syntactic sugar to create generators. 247 | 248 | This macro provides ad-hoc syntax to write complex generators. Let's see a 249 | quick example to get a feel of how it works. Say we have a `User` struct: 250 | 251 | defmodule User do 252 | defstruct [:name, :email] 253 | end 254 | 255 | We can create a generator of users like this: 256 | 257 | email_generator = map({binary(), binary()}, fn {left, right} -> left <> "@" <> right end) 258 | 259 | user_generator = 260 | gen all name <- binary(), 261 | email <- email_generator do 262 | %User{name: name, email: email} 263 | end 264 | 265 | Everything between `gen all` and `do` is referred to as **clauses**. You can write 266 | clauses to specify the values to generate. You can then use those values in the `do` body. 267 | The newly-created generator will generate values that are the return value of the 268 | `do` body using the generated values in the clauses. 269 | 270 | ### Clauses 271 | 272 | As seen in the example above, clauses can be of the following types: 273 | 274 | * **value generation** - they have the form `pattern <- generator` where `generator` must be a 275 | generator. These clauses take a value out of `generator` on each run and match it against 276 | `pattern`. Variables bound in `pattern` can be then used throughout subsequent clauses and 277 | in the `do` body. If `pattern` doesn't match a generated value, it's treated like a filter 278 | (see the "filtering" clauses described below). 279 | 280 | * **filtering and binding** - they have the form `expression`. If a filtering clause returns 281 | a truthy value, then the set of generated values that appear before the 282 | filtering clause is considered valid and generation continues. If the 283 | filtering clause returns a falsey value, then the current value is 284 | considered invalid and a new value is generated. 
Note that filtering
285 | clauses should not filter out too many times; in case they do, a
286 | `StreamData.FilterTooNarrowError` error is raised (same as `StreamData.filter/3`).
287 | Filtering clauses can be used also to assign variables: for example, `a = :foo` is a valid
288 | clause.
289 | 
290 | The behaviour of the clauses above is similar to the behaviour of clauses in
291 | `Kernel.SpecialForms.for/1`.
292 | 
293 | ### Body
294 | 
295 | The return value of the body passed in the `do` block is what is ultimately
296 | generated by the generator returned by this macro.
297 | 
298 | ## Shrinking
299 | 
300 | See the module documentation for more information on shrinking. Clauses affect
301 | shrinking in the following way:
302 | 
303 | * filtering clauses affect shrinking like `StreamData.filter/3`
304 | * value generation clauses affect shrinking similarly to `StreamData.bind/2`
305 | 
306 | """
307 | defmacro gen({:all, _meta, clauses_with_body} = _clauses_and_body) do
308 | {clauses, [[do: body]]} = Enum.split(clauses_with_body, -1)
309 | compile(clauses, body)
310 | end
311 | 
312 | # We don't need docs for `check/2`, the docs for `check/1` are enough since
313 | # using `do:` should just work from the perspective of the end user.
314 | @doc false 315 | defmacro gen({:all, _meta, clauses}, do: body) do 316 | compile(clauses, body) 317 | end 318 | 319 | defp compile(clauses, body) do 320 | assert_first_clause_is_generator(clauses) 321 | 322 | quote do 323 | var!(generated_values, unquote(__MODULE__)) = [] 324 | {:cont, data} = unquote(compile_clauses(clauses, body, _line = nil)) 325 | data 326 | end 327 | end 328 | 329 | defp assert_first_clause_is_generator([{:<-, _, [_, _]} | _]) do 330 | :ok 331 | end 332 | 333 | defp assert_first_clause_is_generator([clause | _]) do 334 | raise ArgumentError, 335 | "\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <> 336 | "got: #{Macro.to_string(clause)}" 337 | end 338 | 339 | defp compile_clauses([], body, _line) do 340 | quote do 341 | var!(generated_values, unquote(__MODULE__)) = 342 | Enum.reverse(var!(generated_values, unquote(__MODULE__))) 343 | 344 | {:cont, StreamData.constant(unquote(body))} 345 | end 346 | end 347 | 348 | defp compile_clauses([{:<-, meta, [pattern, generator]} = clause | rest], body, _line) do 349 | line = meta[:line] 350 | 351 | quote generated: true, line: line do 352 | data = 353 | StreamData.bind_filter(unquote(generator), fn 354 | # TODO: support when 355 | unquote(pattern) = generated_value, tries_left -> 356 | var!(generated_values, unquote(__MODULE__)) = [ 357 | {unquote(Macro.to_string(clause)), generated_value} 358 | | var!(generated_values, unquote(__MODULE__)) 359 | ] 360 | 361 | unquote(compile_clauses(rest, body, line)) 362 | 363 | other, _tries_left = 1 -> 364 | raise StreamData.FilterTooNarrowError, last_generated_value: {:value, other} 365 | 366 | _other, _tries_left -> 367 | :skip 368 | end) 369 | 370 | {:cont, data} 371 | end 372 | end 373 | 374 | defp compile_clauses([clause | rest], body, parent_line) do 375 | line = get_clause_line(clause, parent_line) 376 | 377 | quote generated: true, line: line do 378 | cond do 379 | unquote(clause) -> 380 | unquote(compile_clauses(rest, 
body, line)) 381 | 382 | tries_left == 1 -> 383 | raise StreamData.FilterTooNarrowError, last_generated_value: :none 384 | 385 | true -> 386 | :skip 387 | end 388 | end 389 | end 390 | 391 | defp get_clause_line(clause, parent_line) do 392 | with {_, meta, _} when is_list(meta) <- clause, 393 | {:ok, line} when is_integer(line) <- Keyword.fetch(meta, :line) do 394 | line 395 | else 396 | _ -> parent_line 397 | end 398 | end 399 | 400 | @doc """ 401 | Runs tests for a property. 402 | 403 | This macro provides ad hoc syntax to write properties. Let's see a quick 404 | example to get a feel of how it works: 405 | 406 | check all int1 <- integer(), 407 | int2 <- integer(), 408 | int1 > 0 and int2 > 0, 409 | sum = int1 + int2 do 410 | assert sum > int1 411 | assert sum > int2 412 | end 413 | 414 | Everything between `check all` and `do` is referred to as **clauses**. Clauses 415 | are used to specify the values to generate in order to test the properties. 416 | The actual tests that the properties hold live in the `do` block. 417 | 418 | Clauses work exactly like they work in the `gen/1` macro. 419 | 420 | The body passed in the `do` block is where you test that the property holds 421 | for the generated values. The body is just like the body of a test: use 422 | `ExUnit.Assertions.assert/2` (and friends) to assert whatever you want. 423 | 424 | ## Options 425 | 426 | * `:initial_size` - (non-negative integer) the initial generation size used 427 | to start generating values. The generation size is then incremented by `1` 428 | on each iteration. See the "Generation size" section of the `StreamData` 429 | documentation for more information on generation size. Defaults to `1`. 430 | 431 | * `:max_runs` - (non-negative integer) the total number of generations to 432 | run. Defaults to `100`. 433 | 434 | * `:max_run_time` - (non-negative integer) the total amount of time (in milliseconds) 435 | to run a given check for.
This is not used by default, so unless a value 436 | is given then the length of the test will be determined by `:max_runs`. 437 | If both `:max_runs` and `:max_run_time` are given, then the check will finish at 438 | whichever comes first, `:max_runs` or `:max_run_time`. 439 | 440 | * `:max_shrinking_steps` - (non-negative integer) the maximum numbers of 441 | shrinking steps to perform in case a failing case is found. Defaults to 442 | `100`. 443 | 444 | * `:max_generation_size` - (non-negative integer) the maximum generation 445 | size to reach. Note that the size is increased by one on each run. By 446 | default, the generation size is unbounded. 447 | 448 | * `:initial_seed` - (integer) the initial seed used to drive the random generation. 449 | When `check all` is run with the same initial seed more than once, then every time 450 | the terms generated by the generators will be the same as all other runs. This is useful 451 | when you want to deterministically reproduce a result. However, it's usually better 452 | to leave `:initial_seed` to its default value, which is taken from ExUnit's seed: this 453 | way, the random generation will follow options like `--seed` used in ExUnit to 454 | deterministically reproduce tests. 455 | 456 | It is also possible to set the values for `:initial_size`, `:max_runs`, `:max_run_time`, and 457 | `:max_shrinking_steps` through your project's config files. 
This is especially helpful 458 | in combination with `:max_runs` when you want to run more iterations on your continuous 459 | integration platform, but keep your local tests fast: 460 | 461 | # config/test.exs 462 | import Config 463 | 464 | config :stream_data, 465 | max_runs: if System.get_env("CI"), do: 1_000, else: 50 466 | 467 | ## Examples 468 | 469 | Check that all values generated by the `StreamData.integer/0` generator are 470 | integers: 471 | 472 | check all int <- integer() do 473 | assert is_integer(int) 474 | end 475 | 476 | Check that `String.starts_with?/2` and `String.ends_with?/2` always hold for 477 | concatenated strings: 478 | 479 | check all start <- binary(), 480 | finish <- binary(), 481 | concat = start <> finish do 482 | assert String.starts_with?(concat, start) 483 | assert String.ends_with?(concat, finish) 484 | end 485 | 486 | Check that `Kernel.in/2` returns `true` when checking if an element taken out 487 | of a list is in that same list (changing the number of runs): 488 | 489 | check all list <- list_of(integer()), 490 | member <- member_of(list), 491 | max_runs: 50 do 492 | assert member in list 493 | end 494 | 495 | ### Using `check all` in doctests 496 | 497 | `check all` can be used in doctests. Make sure that the module where you call 498 | `doctest(MyModule)` calls `use ExUnitProperties`. Then, you can call `check all` 499 | in your doctests: 500 | 501 | @doc \"\"\" 502 | Tells if a term is an integer. 503 | 504 | iex> check all i <- integer() do 505 | ...> assert int?(i) 506 | ...> end 507 | :ok 508 | 509 | \"\"\" 510 | def int?(i), do: is_integer(i) 511 | 512 | `check all` always returns `:ok`, so you can use that as the return value of 513 | the whole expression. 
514 | """ 515 | defmacro check({:all, _meta, clauses_with_body} = _clauses_and_body) 516 | when is_list(clauses_with_body) do 517 | {clauses, [body_with_options]} = Enum.split(clauses_with_body, -1) 518 | {options, [do: body]} = Enum.split(body_with_options, -1) 519 | compile_check_all(clauses ++ [options], body) 520 | end 521 | 522 | # We don't need docs for `check/2`, the docs for `check/1` are enough since 523 | # using `do:` should just work from the perspective of the end user. 524 | @doc false 525 | defmacro check({:all, _meta, clauses_and_options}, do: body) 526 | when is_list(clauses_and_options) do 527 | compile_check_all(clauses_and_options, body) 528 | end 529 | 530 | defp compile_check_all(clauses_and_options, body) do 531 | {clauses, options} = split_clauses_and_options(clauses_and_options) 532 | 533 | quote do 534 | options = unquote(options) 535 | 536 | # TODO: Use :rand.export_seed in Elixir master. 537 | # The value may be :undefined in a new process 538 | # though, which means we may need to generate one. 
539 | initial_seed = 540 | case Keyword.get(options, :initial_seed, ExUnit.configuration()[:seed]) do 541 | seed when is_integer(seed) -> 542 | {0, 0, seed} 543 | 544 | other -> 545 | raise ArgumentError, "expected :initial_seed to be an integer, got: #{inspect(other)}" 546 | end 547 | 548 | # TODO: Use ExUnit configuration when made part of ExUnit 549 | options = [ 550 | initial_seed: initial_seed, 551 | initial_size: 552 | options[:initial_size] || Application.fetch_env!(:stream_data, :initial_size), 553 | max_runs: options[:max_runs] || Application.fetch_env!(:stream_data, :max_runs), 554 | max_run_time: 555 | options[:max_run_time] || Application.fetch_env!(:stream_data, :max_run_time), 556 | max_shrinking_steps: 557 | options[:max_shrinking_steps] || 558 | Application.fetch_env!(:stream_data, :max_shrinking_steps) 559 | ] 560 | 561 | property = 562 | ExUnitProperties.gen all unquote_splicing(clauses) do 563 | fn -> 564 | try do 565 | unquote(body) 566 | rescue 567 | exception -> 568 | result = %{ 569 | exception: exception, 570 | stacktrace: __STACKTRACE__, 571 | generated_values: var!(generated_values, unquote(__MODULE__)) 572 | } 573 | 574 | {:error, result} 575 | else 576 | _result -> 577 | {:ok, nil} 578 | end 579 | end 580 | end 581 | 582 | property = 583 | if max_size = options[:max_generation_size] do 584 | StreamData.scale(property, &min(max_size, &1)) 585 | else 586 | property 587 | end 588 | 589 | case StreamData.check_all(property, options, & &1.()) do 590 | {:ok, _result} -> :ok 591 | {:error, test_result} -> unquote(__MODULE__).__raise__(test_result) 592 | end 593 | end 594 | end 595 | 596 | @spec __raise__(term()) :: no_return() 597 | def __raise__(test_result) do 598 | %{ 599 | original_failure: original_failure, 600 | shrunk_failure: shrunk_failure, 601 | successful_runs: successful_runs 602 | } = test_result 603 | 604 | choose_error_and_raise(original_failure, shrunk_failure, successful_runs) 605 | end 606 | 607 | defp choose_error_and_raise( 
608 | _, 609 | %{exception: %AssertionError{}} = shrunk_failure, 610 | successful_runs 611 | ) do 612 | reraise enrich_assertion_error(shrunk_failure, successful_runs), shrunk_failure.stacktrace 613 | end 614 | 615 | defp choose_error_and_raise( 616 | %{exception: %AssertionError{}} = original_failure, 617 | _, 618 | successful_runs 619 | ) do 620 | reraise enrich_assertion_error(original_failure, successful_runs), original_failure.stacktrace 621 | end 622 | 623 | defp choose_error_and_raise(_original_failure, shrunk_failure, successful_runs) do 624 | %{exception: exception, stacktrace: stacktrace, generated_values: generated_values} = 625 | shrunk_failure 626 | 627 | {exception, stacktrace} = Exception.blame(:error, exception, stacktrace) 628 | formatted_exception = Exception.format_banner(:error, exception, stacktrace) 629 | 630 | message = 631 | "failed with generated values (after #{successful_runs(successful_runs)}):\n\n" <> 632 | indent(format_generated_values(generated_values), " ") <> 633 | "\n\ngot exception:\n\n" <> indent(formatted_exception, " ") 634 | 635 | reraise Error, [message: message], shrunk_failure.stacktrace 636 | end 637 | 638 | defp enrich_assertion_error( 639 | %{exception: exception, generated_values: generated_values}, 640 | successful_runs 641 | ) do 642 | message = 643 | "Failed with generated values (after #{successful_runs(successful_runs)}):\n\n" <> 644 | indent(format_generated_values(generated_values), " ") <> 645 | if(is_binary(exception.message), do: "\n\n" <> exception.message, else: "") 646 | 647 | %{exception | message: message} 648 | end 649 | 650 | defp format_generated_values(values) do 651 | Enum.map_join(values, "\n\n", fn {gen_string, value} -> 652 | String.trim_trailing(""" 653 | * Clause: #{gen_string} 654 | Generated: #{inspect(value, Application.fetch_env!(:stream_data, :inspect_opts))} 655 | """) 656 | end) 657 | end 658 | 659 | defp indent(string, indentation) do 660 | indentation <> String.replace(string, "\n", 
"\n" <> indentation) 661 | end 662 | 663 | defp successful_runs(1), do: "1 successful run" 664 | defp successful_runs(n), do: "#{n} successful runs" 665 | 666 | defp split_clauses_and_options(clauses_and_options) do 667 | case Enum.split_while(clauses_and_options, &(not Keyword.keyword?(&1))) do 668 | {_clauses, []} = result -> result 669 | {clauses, [options]} -> {clauses, options} 670 | end 671 | end 672 | 673 | @doc """ 674 | Picks a random element generated by the `StreamData` generator `data`. 675 | 676 | This function uses the current ExUnit seed to generate a random term from `data`. The generation 677 | size (see [*Generation size*](StreamData.html#module-generation-size)) is chosen at random between in `1..100`. If you want finer 678 | control over the generation size, you can use functions like `StreamData.resize/2` to resize 679 | `data` or `StreamData.scale/2` to scale the generation size. 680 | 681 | ## Examples 682 | 683 | ExUnitProperties.pick(StreamData.integer()) 684 | #=> -21 685 | 686 | """ 687 | @spec pick(StreamData.t(a)) :: a when a: term() 688 | def pick(data) do 689 | exported_seed = 690 | case :rand.export_seed() do 691 | :undefined -> 692 | raise "the random seed is not set in the current process. Make sure to only call " <> 693 | "pick/1 inside ExUnit tests" 694 | 695 | seed -> 696 | seed 697 | end 698 | 699 | seed = :rand.seed_s(exported_seed) 700 | {size, seed} = :rand.uniform_s(100, seed) 701 | %StreamData.LazyTree{root: root} = StreamData.__call__(data, seed, size) 702 | root 703 | end 704 | end 705 | -------------------------------------------------------------------------------- /lib/stream_data.ex: -------------------------------------------------------------------------------- 1 | defmodule StreamData do 2 | @moduledoc """ 3 | Functions to create and combine generators. 4 | 5 | A generator is a `StreamData` struct. 
Generators can be created through the 6 | functions exposed in this module, like `constant/1`, and by combining other 7 | generators through functions like `bind/2`. 8 | 9 | Similar to the `Stream` module, the functions in this module return a lazy 10 | construct. We can get values out of a generator by enumerating the generator. 11 | Generators always generate an infinite stream of values (which are randomized 12 | most of the time). 13 | 14 | For example, to get an infinite stream of integers that starts with small 15 | integers and progressively grows the boundaries, you can use `integer/0`: 16 | 17 | Enum.take(StreamData.integer(), 10) 18 | #=> [-1, 0, -3, 4, -4, 5, -1, -3, 5, 8] 19 | 20 | As you can see above, values emitted by a generator are not unique. 21 | 22 | In many applications of generators, the longer the generator runs the larger 23 | the generated values will be. For integers, a larger integer means a bigger number. 24 | For lists, it may mean a list with more elements. This is controlled by a parameter 25 | that we call the **generation size** (see the "Generation size" section below). 26 | 27 | StreamData is often used to generate random values. It is also the foundation 28 | for property-based testing. See `ExUnitProperties` for more information. 29 | 30 | ## Enumeration 31 | 32 | Generators implement the `Enumerable` protocol. The enumeration starts with a 33 | small generation size, which increases when the enumeration continues (up to a 34 | fixed maximum size). 35 | 36 | Since generators are proper streams, functions from the `Stream` module can be 37 | used to stream values out of them. 
For example, to build an infinite stream of 38 | positive even integers, you can do: 39 | 40 | StreamData.integer() 41 | |> Stream.filter(& &1 > 0) 42 | |> Stream.map(& &1 * 2) 43 | |> Enum.take(10) 44 | #=> [4, 6, 4, 10, 14, 16, 4, 16, 36, 16] 45 | 46 | Generators that are manipulated via the `Stream` and `Enum` modules are no 47 | longer **shrinkable** (see the section about shrinking below). If you want 48 | generation through the `Enumerable` protocol to be reproducible, see `seeded/2`. 49 | 50 | ## Generation size 51 | 52 | Generators have access to a generation parameter called the **generation 53 | size**, which is a non-negative integer. This parameter is meant to bind the 54 | data generated by each generator in a way that is completely up to the 55 | generator. For example, a generator that generates integer can use the `size` 56 | parameter to generate integers inside the `-size..size` range. In a similar 57 | way, a generator that generates lists could use this parameter to generate a 58 | list with `0` to `size` elements. During composition, it is common for the 59 | "parent generator" to pass the size to the composed generators. 60 | 61 | When creating generators, they can access the generation size using the 62 | `sized/1` function. Generators can be resized to a fixed generation size using 63 | `resize/2`. 64 | 65 | ## Shrinking 66 | 67 | `StreamData` generators are also shrinkable. The idea behind shrinking is 68 | to find the simplest value that respects a certain condition. For example, 69 | during property-based tests, we use shrinking to find the integer closest to 0 70 | or the smallest list that makes a test fail. By reporting the simplest data 71 | structure that triggers an error, the failure becomes easier to understand 72 | and reproduce. 73 | 74 | Each generator has its own logic to shrink values. Those are outlined in each 75 | generator documentation. 
76 | 77 | Note that the generation size is not related in any way to shrinking: while 78 | intuitively one may think that shrinking just means decreasing the generation 79 | size, in reality the shrinking rule is bound to each generated value. One way 80 | to look at it is that shrinking a list is always the same, regardless of its 81 | generated length. 82 | 83 | ## Special generators 84 | 85 | Some Elixir types are implicitly converted to `StreamData` generators when 86 | composed or used in property-based testing. These types are: 87 | 88 | * atoms - they generate themselves. For example, `:foo` is equivalent to 89 | `StreamData.constant(:foo)`. 90 | 91 | * tuples of generators - they generate tuples where each value is a value 92 | generated by the corresponding generator, exactly like described in 93 | `tuple/1`. For example, `{StreamData.integer(), StreamData.boolean()}` 94 | generates entries like `{10, false}`. 95 | 96 | Note that *these terms must be explicitly converted to StreamData generators*. 97 | This means that these terms are not full-fledged generators. For example, atoms 98 | cannot be enumerated directly as they don't implement the `Enumerable` protocol. 99 | However, `StreamData.constant(:foo)` is enumerable as it has been wrapped in 100 | a `StreamData` function. 101 | """ 102 | 103 | alias StreamData.LazyTree 104 | 105 | @typep seed() :: :rand.state() 106 | @typep size() :: non_neg_integer() 107 | @typep generator_fun(a) :: (seed(), size() -> LazyTree.t(a)) 108 | 109 | @typedoc """ 110 | An opaque type that represents a `StreamData` generator that generates values 111 | of type `a`. 
112 | """ 113 | @opaque t(a) :: %__MODULE__{generator: generator_fun(a)} | atom() | tuple() 114 | 115 | @rand_algorithm :exsp 116 | 117 | defstruct [:generator] 118 | 119 | defmodule FilterTooNarrowError do 120 | defexception [:max_consecutive_failures, :last_generated_value] 121 | 122 | def message(exception) do 123 | %{ 124 | max_consecutive_failures: max_consecutive_failures, 125 | last_generated_value: last_generated_value 126 | } = exception 127 | 128 | max_consecutive_failures_part = 129 | if max_consecutive_failures do 130 | " (#{max_consecutive_failures} elements in this case)" 131 | else 132 | "" 133 | end 134 | 135 | last_element_part = 136 | case last_generated_value do 137 | {:value, value} -> " The last element to be filtered out was: #{inspect(value)}." 138 | :none -> "" 139 | end 140 | 141 | """ 142 | too many consecutive elements#{max_consecutive_failures_part} were filtered out. 143 | #{last_element_part} To avoid this: 144 | 145 | * make sure the generation space contains enough values that the chance of a generated 146 | value being filtered out is small. For example, don't generate all integers and filter 147 | out odd ones in order to have a generator of even integers (since you'd be taking out 148 | half the generation space). 149 | 150 | * keep an eye on how the generation size affects the generator being filtered. For 151 | example, you might be filtering out only a handful of values from the generation space, 152 | but small generation sizes might make the generation space much smaller hence increasing 153 | the probability of values that you'd filter out being generated. 154 | 155 | * try to restructure your generator so that instead of generating many values and taking 156 | out the ones you don't want, you instead generate values and turn all of them into 157 | values that are suitable. For example, multiply integers by two to have a generator of 158 | even values instead of filtering out all odd integers. 
159 | 160 | """ 161 | end 162 | end 163 | 164 | defmodule TooManyDuplicatesError do 165 | defexception [:max_tries, :remaining_to_generate, :generated] 166 | 167 | def message(%{max_tries: max_tries, remaining_to_generate: remaining, generated: generated}) do 168 | "too many (#{max_tries}) non-unique elements were generated consecutively. " <> 169 | "Make sure to avoid generating from a small space of data (such as only a " <> 170 | "handful of terms) and make sure a small generation size doesn't affect " <> 171 | "uniqueness too heavily. There were still #{remaining} elements left to " <> 172 | "generate, while the generated elements were:\n\n#{inspect(generated)}" 173 | end 174 | end 175 | 176 | ### Minimal interface 177 | 178 | ## Helpers 179 | 180 | @compile {:inline, new: 1} 181 | 182 | defp new(generator) when is_function(generator, 2) do 183 | %__MODULE__{generator: generator} 184 | end 185 | 186 | # We support multiple types of generators through call/3: this is basically a 187 | # poor implementation of a protocol (which we don't want to add just for 188 | # this). 
189 | @doc false 190 | @spec __call__(StreamData.t(a), seed(), size()) :: a when a: term() 191 | def __call__(data, seed, size) do 192 | call(data, seed, size) 193 | end 194 | 195 | @compile {:inline, call: 3} 196 | 197 | defp call(%__MODULE__{generator: generator}, seed, size) do 198 | %LazyTree{} = generator.(seed, size) 199 | end 200 | 201 | defp call(atom, _seed, _size) when is_atom(atom) do 202 | lazy_tree_constant(atom) 203 | end 204 | 205 | defp call(tuple, seed, size) when is_tuple(tuple) do 206 | case tuple_size(tuple) do 207 | 0 -> 208 | lazy_tree_constant({}) 209 | 210 | tuple_size -> 211 | {trees, _seed} = 212 | Enum.map_reduce(0..(tuple_size - 1), seed, fn index, acc -> 213 | {seed1, seed2} = split_seed(acc) 214 | data = elem(tuple, index) 215 | {call(data, seed1, size), seed2} 216 | end) 217 | 218 | trees 219 | |> LazyTree.zip() 220 | |> LazyTree.map(&List.to_tuple/1) 221 | end 222 | end 223 | 224 | defp call(other, _seed, _size) do 225 | raise ArgumentError, 226 | "expected a generator, which can be a %StreamData{} struct, an atom, " <> 227 | "or a tuple with generators in it, but got:\n\n #{inspect(other)}\n\n" <> 228 | "If you want to use a term as a \"constant\" generator, wrap it in a call to " <> 229 | "StreamData.constant/1 instead." 230 | end 231 | 232 | ## Generators 233 | 234 | @compile {:inline, constant: 1} 235 | 236 | @doc """ 237 | A generator that always generates the given term. 238 | 239 | ## Examples 240 | 241 | iex> Enum.take(StreamData.constant(:some_term), 3) 242 | [:some_term, :some_term, :some_term] 243 | 244 | ## Shrinking 245 | 246 | This generator doesn't shrink. 247 | """ 248 | @spec constant(a) :: t(a) when a: var 249 | def constant(term) do 250 | new(fn _seed, _size -> lazy_tree_constant(term) end) 251 | end 252 | 253 | ## Combinators 254 | 255 | @compile {:inline, map: 2} 256 | 257 | @doc """ 258 | Maps the given function `fun` over the given generator `data`. 
259 | 260 | Returns a new generator that returns elements from `data` after applying `fun` 261 | to them. 262 | 263 | ## Examples 264 | 265 | iex> data = StreamData.map(StreamData.integer(), &Integer.to_string/1) 266 | iex> Enum.take(data, 3) 267 | ["1", "0", "3"] 268 | 269 | ## Shrinking 270 | 271 | This generator shrinks exactly like `data`, but with `fun` mapped over the 272 | shrunk data. 273 | """ 274 | @spec map(t(a), (a -> b)) :: t(b) when a: term(), b: term() 275 | def map(data, fun) when is_function(fun, 1) do 276 | new(fn seed, size -> 277 | data 278 | |> call(seed, size) 279 | |> LazyTree.map(fun) 280 | end) 281 | end 282 | 283 | @doc """ 284 | Binds each element generated by `data` to a new generator returned by 285 | applying `fun`, or filters the generated element. 286 | 287 | Works similarly to `bind/2` but allows filtering out unwanted values. It takes 288 | a generator `data` and invokes `fun` with each element generated by `data`. 289 | `fun` must return one of: 290 | 291 | * `{:cont, generator}` - `generator` is then used to generate the next 292 | element 293 | 294 | * `:skip` - the value generated by `data` is filtered out and a new element 295 | is generated 296 | 297 | Since this function acts as a filter as well, it behaves similarly to 298 | `filter/3`: when more than `max_consecutive_failures` elements are filtered 299 | out (that is, `fun` returns `:skip`), a `StreamData.FilterTooNarrowError` is 300 | raised. See the documentation for `filter/3` for suggestions on how to avoid 301 | such errors. 302 | 303 | The function can accept one or two arguments. If a two-argument function is 304 | passed, the second argument will be the number of tries left before raising 305 | `StreamData.FilterTooNarrowError`. 
306 | 307 | ## Examples 308 | 309 | Say we wanted to create a generator that generates two-element tuples where 310 | the first element is a list of integers with an even number of members and the 311 | second element is a member of that list. We can do that by generating a list 312 | and, if it has even length, taking an element out of it, otherwise filtering 313 | it out. 314 | 315 | require Integer 316 | 317 | list_data = StreamData.list_of(StreamData.integer(), min_length: 1) 318 | 319 | data = 320 | StreamData.bind_filter(list_data, fn 321 | list when Integer.is_even(length(list)) -> 322 | inner_data = StreamData.bind(StreamData.member_of(list), fn member -> 323 | StreamData.constant({list, member}) 324 | end) 325 | {:cont, inner_data} 326 | _odd_list -> 327 | :skip 328 | end) 329 | 330 | Enum.at(data, 0) 331 | #=> {[-6, -7, -4, 5, -9, 8, 7, -9], 5} 332 | 333 | ## Shrinking 334 | 335 | This generator shrinks like `bind/2` but values that are skipped are not used 336 | for shrinking (similarly to how `filter/3` works). 
337 | """ 338 | @spec bind_filter( 339 | t(a), 340 | (a -> {:cont, t(b)} | :skip) | (a, non_neg_integer() -> {:cont, t(b)} | :skip), 341 | non_neg_integer() 342 | ) :: t(b) 343 | when a: term(), 344 | b: term() 345 | def bind_filter(data, fun, max_consecutive_failures \\ 10) 346 | 347 | def bind_filter(data, fun, max_consecutive_failures) when is_function(fun, 1) do 348 | bind_filter(data, fn elem, _tries_left -> fun.(elem) end, max_consecutive_failures) 349 | end 350 | 351 | def bind_filter(data, fun, max_consecutive_failures) 352 | when is_function(fun, 2) and is_integer(max_consecutive_failures) and 353 | max_consecutive_failures >= 0 do 354 | new(fn seed, size -> 355 | case bind_filter(seed, size, data, fun, max_consecutive_failures) do 356 | {:ok, lazy_tree} -> 357 | lazy_tree 358 | 359 | :too_many_failures -> 360 | raise FilterTooNarrowError, 361 | max_consecutive_failures: max_consecutive_failures, 362 | last_generated_value: :none 363 | 364 | {:too_many_failures, last_generated_value} -> 365 | raise FilterTooNarrowError, 366 | max_consecutive_failures: max_consecutive_failures, 367 | last_generated_value: {:value, last_generated_value} 368 | end 369 | end) 370 | end 371 | 372 | defp bind_filter(_seed, _size, _data, _mapper, _tries_left = 0) do 373 | :too_many_failures 374 | end 375 | 376 | defp bind_filter(seed, size, data, mapper, tries_left) do 377 | fun = fn elem -> 378 | mapper.(elem, tries_left) 379 | end 380 | 381 | {seed1, seed2} = split_seed(seed) 382 | lazy_tree = call(data, seed1, size) 383 | 384 | case LazyTree.filter_map(lazy_tree, fun) do 385 | {:ok, filter_mapped_tree} -> 386 | tree = 387 | filter_mapped_tree 388 | |> LazyTree.map(&call(&1, seed2, size)) 389 | |> LazyTree.flatten() 390 | 391 | {:ok, tree} 392 | 393 | :error when tries_left == 1 -> 394 | {:too_many_failures, lazy_tree.root} 395 | 396 | :error -> 397 | bind_filter(seed2, size, data, mapper, tries_left - 1) 398 | end 399 | end 400 | 401 | @compile {:inline, bind: 2} 402 | 403 | 
@doc """ 404 | Binds each element generated by `data` to a new generator returned by applying `fun`. 405 | 406 | This function is the basic mechanism for composing generators. It takes a 407 | generator `data` and invokes `fun` with each element in `data`. `fun` must 408 | return a new *generator* that is effectively used to generate items from 409 | now on. 410 | 411 | ## Examples 412 | 413 | Say we wanted to create a generator that returns two-element tuples where the 414 | first element is a non-empty list, and the second element is a random element from that 415 | list. To do that, we can first generate a list and then bind a function to 416 | that list; this function will return the list and a random element from it. 417 | 418 | StreamData.bind(StreamData.list_of(StreamData.integer(), min_length: 1), fn list -> 419 | StreamData.bind(StreamData.member_of(list), fn elem -> 420 | StreamData.constant({list, elem}) 421 | end) 422 | end) 423 | 424 | ## Shrinking 425 | 426 | The generator returned by `bind/2` shrinks by first shrinking the value 427 | generated by the inner generator and then by shrinking the outer generator 428 | given as `data`. When `data` shrinks, `fun` is once more applied on the 429 | shrunk value and returns a whole new generator, which will most likely 430 | emit new items. 431 | """ 432 | @spec bind(t(a), (a -> t(b))) :: t(b) when a: term(), b: term() 433 | def bind(data, fun) when is_function(fun, 1) do 434 | bind_filter(data, fn generated_term -> {:cont, fun.(generated_term)} end) 435 | end 436 | 437 | @doc """ 438 | Filters the given generator `data` according to the given `predicate` function. 439 | 440 | Only elements generated by `data` that pass the filter are kept in the 441 | resulting generator. 442 | 443 | If the filter is too strict, it can happen that too few values generated by `data` satisfy it. 
444 | In case more than `max_consecutive_failures` consecutive values don't satisfy the filter, a 445 | `StreamData.FilterTooNarrowError` will be raised. There are a few ways you can avoid risking 446 | `StreamData.FilterTooNarrowError` errors. 447 | 448 | * Try to make sure that your filter filters out only a small subset of the elements generated 449 | by `data`. For example, having something like `StreamData.filter(StreamData.integer(), &(&1 != 450 | 0))` is usually fine because only a very tiny part of the generation space (integers) is 451 | being filtered out. 452 | 453 | * Keep an eye on how the generation size affects the generator being filtered. For example, 454 | take something like `StreamData.filter(StreamData.positive_integer(), &(&1 not in 1..5))`. 455 | While it seems like this filter is not that strict (as we're filtering out only a handful of 456 | numbers out of all natural numbers), this filter will fail with small generation sizes. 457 | Since `positive_integer/0` returns an integer between `0..size`, if `size` is small (for 458 | example, less than 10) then the probability of generating many consecutive values in `1..5` 459 | is high. 460 | 461 | * Try to restructure your generator so that instead of generating many values and taking out 462 | the ones you don't want, you instead generate values and turn all of them into values that 463 | are suitable. A good example is a generator for even integers. You could write it as 464 | 465 | def even_integers() do 466 | StreamData.filter(StreamData.integer(), &Integer.is_even/1) 467 | end 468 | 469 | but this would generate many unused values, increasing the likelihood of 470 | `StreamData.FilterTooNarrowError` errors and performing inefficiently. 
Instead, you can use 471 | `map/2` to turn all integers into even integers: 472 | 473 | def even_integers() do 474 | StreamData.map(StreamData.integer(), &(&1 * 2)) 475 | end 476 | 477 | ## Shrinking 478 | 479 | All the values that each generated value shrinks to satisfy `predicate` as 480 | well. 481 | """ 482 | @spec filter(t(a), (a -> as_boolean(term())), non_neg_integer()) :: t(a) when a: term() 483 | def filter(data, predicate, max_consecutive_failures \\ 25) 484 | when is_function(predicate, 1) and is_integer(max_consecutive_failures) and 485 | max_consecutive_failures >= 0 do 486 | bind_filter_fun = fn term -> 487 | if predicate.(term), do: {:cont, constant(term)}, else: :skip 488 | end 489 | 490 | bind_filter(data, bind_filter_fun, max_consecutive_failures) 491 | end 492 | 493 | ### Rich API 494 | 495 | @compile {:inline, integer: 1} 496 | 497 | @doc """ 498 | Generates an integer in the given `range`. 499 | 500 | The generation size is ignored since the integer always lies inside `range`. 501 | 502 | ## Examples 503 | 504 | Enum.take(StreamData.integer(4..8), 3) 505 | #=> [6, 7, 7] 506 | 507 | ## Shrinking 508 | 509 | Shrinks towards the smallest absolute value that still lie in `range`. 
"""
@spec integer(Range.t()) :: t(integer())
# Range step syntax was introduced in Elixir v1.12.0
if Version.compare(System.version(), "1.12.0") == :lt do
  def integer(left..right = _range) do
    {lower, upper} = order(left, right)

    new(fn seed, _size ->
      {init, _next_seed} = uniform_in_range(lower, upper, seed)
      integer_lazy_tree(init, lower, upper)
    end)
  end
else
  # Keep the original, somewhat more efficient implementation
  # for ranges with a step of 1
  def integer(%Range{first: left, last: right, step: 1} = _range) do
    if left > right do
      raise "cannot generate elements from an empty range"
    end

    new(fn seed, _size ->
      {init, _next_seed} = uniform_in_range(left, right, seed)
      integer_lazy_tree(init, left, right)
    end)
  end

  # Stepped ranges: generate in the "stepless" space (the range's elements
  # divided by the step) and multiply by the step afterwards, so generation
  # and shrinking still operate on a contiguous integer interval.
  def integer(%Range{first: left, last: right, step: step} = _range) do
    require Integer
    lower_stepless = Integer.floor_div(left, step)
    upper_stepless = Integer.floor_div(right, step)

    if lower_stepless > upper_stepless do
      raise "cannot generate elements from an empty range"
    end

    fn seed, _size ->
      {init, _next_seed} = uniform_in_range(lower_stepless, upper_stepless, seed)
      integer_lazy_tree(init, lower_stepless, upper_stepless)
    end
    |> new()
    |> map(fn result -> result * step end)
  end
end

# Lazy tree for an integer: the root is `int` and the children (computed on
# demand through the reduce-style 6-arity clauses below) are the shrink
# candidates, each itself a full integer lazy tree.
defp integer_lazy_tree(int, lower, upper) do
  lazy_tree(int, &integer_lazy_tree(int, lower, upper, _current = int, &1, &2))
end

# The three clauses below implement the Enumerable reducer contract
# ({:halt, acc} / {:suspend, acc} / {:cont, acc}) over the shrink
# candidates of `int`.
defp integer_lazy_tree(_int, _lower, _upper, _current, {:halt, acc}, _fun) do
  {:halted, acc}
end

defp integer_lazy_tree(int, lower, upper, current, {:suspend, acc}, fun) do
  {:suspended, acc, &integer_lazy_tree(int, lower, upper, current, &1, fun)}
end

defp integer_lazy_tree(int, lower, upper, current, {:cont, acc}, fun) do
  # Candidates are `int - current` with `current` halving at each step, so
  # emitted values move progressively closer to `int`. When `current`
  # reaches 0, `int - current` matches `^int` and the enumeration is done.
  # Candidates falling outside [lower, upper] are skipped.
  case int - current do
    ^int ->
      {:done, acc}

    to_emit when to_emit >= lower and to_emit <= upper ->
      lazy_tree = integer_lazy_tree(to_emit, lower, upper)
      integer_lazy_tree(int, lower, upper, div(current, 2), fun.(lazy_tree, acc), fun)

    _ ->
      integer_lazy_tree(int, lower, upper, div(current, 2), {:cont, acc}, fun)
  end
end

## Generator modifiers

@compile {:inline, resize: 2, sized: 1, scale: 2}

@doc """
Resize the given generated `data` to have fixed generation size `new_size`.

The new generator will ignore the generation size and always use `new_size`.

See the "Generation size" section in the documentation for `StreamData` for
more information about the generation size.

## Examples

    data = StreamData.resize(StreamData.integer(), 10)
    Enum.take(data, 3)
    #=> [4, -5, -9]

"""
@spec resize(t(a), size()) :: t(a) when a: term()
def resize(data, new_size) when is_integer(new_size) and new_size >= 0 do
  new(fn seed, _size -> call(data, seed, new_size) end)
end

@doc """
Returns the generator returned by calling `fun` with the generation size.

`fun` takes the generation size and has to return a generator, that can use
that size to its advantage.

See the "Generation size" section in the documentation for `StreamData` for
more information about the generation size.
## Examples

Let's build a generator that generates integers in double the range `integer/0`
does:

    data = StreamData.sized(fn size ->
      StreamData.resize(StreamData.integer(), size * 2)
    end)

    Enum.take(data, 3)
    #=> [0, -1, 5]

"""
@spec sized((size() -> t(a))) :: t(a) when a: term()
def sized(fun) when is_function(fun, 1) do
  new(fn seed, size ->
    call(fun.(size), seed, size)
  end)
end

@doc """
Scales the generation size of the given generator `data` according to
`size_changer`.

When generating data from `data`, the generation size will be the result of
calling `size_changer` with the generation size as its argument. This is
useful, for example, when a generator needs to grow faster or slower than
the default.

See the "Generation size" section in the documentation for `StreamData` for
more information about the generation size.

## Examples

Let's create a generator that generates much smaller integers than `integer/0`
when size grows. We can do this by scaling the generation size to the
logarithm of the generation size.

    data = StreamData.scale(StreamData.integer(), fn size ->
      trunc(:math.log(size))
    end)

    Enum.take(data, 3)
    #=> [0, 0, -1]

Another interesting example is creating a generator with a fixed maximum
generation size. For example, say we want to generate binaries but we never
want them to be larger than 64 bytes:

    small_binaries = StreamData.scale(StreamData.binary(), fn size ->
      min(size, 64)
    end)

"""
@spec scale(t(a), (size() -> size())) :: t(a) when a: term()
def scale(data, size_changer) when is_function(size_changer, 1) do
  new(fn seed, size ->
    new_size = size_changer.(size)
    call(data, seed, new_size)
  end)
end

@doc """
Makes the values generated by `data` not shrink.

## Examples

Let's build a generator of bytes (integers in the `0..255` range). We can
build this on top of `integer/1`, but for our purposes, it doesn't make sense for
a byte to shrink towards `0`:

    byte = StreamData.unshrinkable(StreamData.integer(0..255))
    Enum.take(byte, 3)
    #=> [190, 181, 178]

## Shrinking

The generator returned by `unshrinkable/1` generates the same values as `data`,
but such values will not shrink.
"""
@spec unshrinkable(t(a)) :: t(a) when a: term()
def unshrinkable(data) do
  # Dropping the children of the generated lazy tree removes all shrink
  # candidates while keeping the root value intact.
  new(fn seed, size ->
    %LazyTree{call(data, seed, size) | children: []}
  end)
end

@doc """
Calls the provided zero argument function to generate values.

## Examples

Generating a UUID

    uuid = StreamData.repeatedly(&Ecto.UUID.generate/0)
    Enum.take(uuid, 3)
    #=> ["2712ec5b-bc50-4b4a-8a8a-ca85d37a457b", "2092570d-8fb0-4e67-acbe-92db4c8a2bae", "1bef1fb1-8f86-46ac-a49e-3bffaa51e40b"]

Generating a unique integer

    integer = StreamData.repeatedly(&System.unique_integer([:positive, :monotonic]))
    Enum.take(integer, 3)
    #=> [1, 2, 3]

## Shrinking

By nature, this generator is not shrinkable.
"""
@doc since: "0.6.0"
@spec repeatedly((-> returns)) :: t(returns) when returns: term()
def repeatedly(fun) when is_function(fun, 0) do
  # The seed is ignored on purpose: values come straight from `fun`. The
  # resulting tree has no children, so these values never shrink.
  new(fn _seed, _size ->
    %LazyTree{root: fun.()}
  end)
end

@doc """
Makes the given generator `data` always use the same given `seed` when generating.

This function is useful when you want a generator to have a predictable generating
behaviour. It's especially useful when using a generator with the `Enumerable` protocol
since you can't set the seed specifically in that case (while you can with `check_all/3`
for example).

`seed` must be an integer.

## Examples

    int = StreamData.seeded(StreamData.integer(), 10)

    Enum.take(int, 3)
    #=> [-1, -2, 1]
    Enum.take(int, 4)
    #=> [-1, -2, 1, 2]

"""
@spec seeded(t(a), integer()) :: t(a) when a: term()
def seeded(data, seed) when is_integer(seed) do
  seed = new_seed({0, 0, seed})

  new(fn _seed, size ->
    call(data, seed, size)
  end)
end

@doc """
Generates values from different generators with specified probability.

`frequencies` is a list of `{frequency, data}` where `frequency` is an integer
and `data` is a generator. The resulting generator will generate data from one
of the generators in `frequency`, with probability `frequency / sum_of_frequencies`.

## Examples

Let's build a generator that returns a binary around 25% of the time and an
integer around 75% of the time. We'll use `integer/0` first so that generated values
will shrink towards integers.

    ints_and_some_bins = StreamData.frequency([
      {3, StreamData.integer()},
      {1, StreamData.binary()},
    ])
    Enum.take(ints_and_some_bins, 3)
    #=> ["", -2, -1]

## Shrinking

Each generated value is shrunk, and then this generator shrinks towards
values generated by generators earlier in the list of `frequencies`.
"""
# Right now, it shrinks by first shrinking the generated value, and then
# shrinking towards earlier generators in "frequencies". Clojure shrinks
# towards earlier generators *first*, and then shrinks the generated value.
# An implementation that does this can be:
#
#     new(fn seed, size ->
#       {frequency, next_seed} = uniform_in_range(0..sum - 1, seed)
#       index = pick_index(Enum.map(frequencies, &elem(&1, 0)), frequency)
#       {_frequency, data} = Enum.fetch!(frequencies, index)
#
#       tree = call(data, next_seed, size)
#
#       earlier_children =
#         frequencies
#         |> Stream.take(index)
#         |> Stream.map(&call(elem(&1, 1), seed2, size))
#
#       %LazyTree{root: tree.root, children: Stream.concat(earlier_children, tree.children)}
#     end)
#
@spec frequency([{pos_integer(), t(a)}]) :: t(a) when a: term()
def frequency(frequencies) when is_list(frequencies) do
  sum = List.foldl(frequencies, 0, fn {frequency, _data}, acc -> acc + frequency end)
  bind(integer(0..(sum - 1)), &pick_frequency(frequencies, &1))
end

# Walks the {frequency, data} pairs, subtracting each pair's frequency from
# the randomly drawn integer until it falls inside the current pair's slot.
defp pick_frequency([{frequency, data} | rest], int) do
  if int < frequency do
    data
  else
    pick_frequency(rest, int - frequency)
  end
end

@doc """
Generates values out of one of the given `datas`.

`datas` must be a list of generators. The values generated by this generator
are values generated by generators in `datas`, chosen each time at random.
## Examples

    data = StreamData.one_of([StreamData.integer(), StreamData.binary()])
    Enum.take(data, 3)
    #=> [-1, <<28>>, ""]

## Shrinking

The generated value will be shrunk first according to the generator that
generated it, and then this generator will shrink towards earlier generators
in `datas`.
"""
@spec one_of([t(a)]) :: t(a) when a: term()
def one_of([_ | _] = datas) do
  # A tuple gives O(1) access when picking the generator by random index.
  datas = List.to_tuple(datas)
  bind(integer(0..(tuple_size(datas) - 1)), fn index -> elem(datas, index) end)
end

@doc """
Generates elements taken randomly out of `enum`.

`enum` must be a non-empty and **finite** enumerable. If given an empty
enumerable, this function raises an error. If given an infinite enumerable,
this function will not terminate.

## Examples

    Enum.take(StreamData.member_of([:ok, 4, "hello"]), 3)
    #=> [4, 4, "hello"]

## Shrinking

This generator shrinks towards elements that appear earlier in `enum`.
"""
@spec member_of(Enumerable.t()) :: t(term())
def member_of(enum) do
  enum_length = Enum.count(enum)

  if enum_length == 0 do
    raise "cannot generate elements from an empty enumerable"
  end

  # Generate an index and fetch the element, so shrinking the index shrinks
  # towards earlier elements of `enum`.
  map(integer(0..(enum_length - 1)), fn index -> Enum.fetch!(enum, index) end)
end

## Compound data types

@doc """
Generates lists where each value is generated by the given `data`.

Each generated list can contain duplicate elements. The length of the
generated list is bound by the generation size. If the generation size is `0`,
the empty list will always be generated. Note that the accepted options
provide finer control over the size of the generated list. See the "Options"
section below.

## Options

  * `:length` - (integer or range) if an integer, the exact length the
    generated lists should be; if a range, the range in which the length of
    the generated lists should be. If provided, `:min_length` and
    `:max_length` are ignored.

  * `:min_length` - (integer) the minimum length of the generated lists.

  * `:max_length` - (integer) the maximum length of the generated lists.

## Examples

    Enum.take(StreamData.list_of(StreamData.binary()), 3)
    #=> [[""], [], ["", "w"]]

    Enum.take(StreamData.list_of(StreamData.integer(), length: 3), 3)
    #=> [[0, 0, -1], [2, -1, 1], [0, 3, -3]]

    Enum.take(StreamData.list_of(StreamData.integer(), max_length: 1), 3)
    #=> [[1], [], []]

## Shrinking

This generator shrinks by taking elements out of the generated list and also
by shrinking the elements of the generated list. Shrinking still respects any
possible length-related option: for example, if `:min_length` is provided, all
shrunk lists will have at least `:min_length` elements.
"""
# We could have an implementation that relies on fixed_list/1 and List.duplicate/2,
# it would look like this:
#
#     new(fn seed, size ->
#       {length, next_seed} = uniform_in_range(0..size, seed)
#       data
#       |> List.duplicate(length)
#       |> fixed_list()
#       |> call(next_seed, size)
#       |> LazyTree.map(&list_lazy_tree/1)
#       |> LazyTree.flatten()
#     end)
#
@spec list_of(t(a), keyword()) :: t([a]) when a: term()
def list_of(data, options) do
  list_length_range_fun = list_length_range_fun(options)

  new(fn seed, size ->
    {min_length, max_length} = list_length_range_fun.(size)
    {length, next_seed} = uniform_in_range(min_length, max_length, seed)

    data
    |> call_n_times(next_seed, size, length, [])
    |> LazyTree.zip()
    |> LazyTree.map(&list_lazy_tree(&1, min_length))
    |> LazyTree.flatten()
  end)
end

@doc """
Generates lists where each value is generated by the given `data`.

The same as calling `list_of/2` with `[]` as options.
"""
@spec list_of(t(a)) :: t([a]) when a: term()
def list_of(data) do
  # Duplicates the logic of list_of/2 with no length options instead of
  # delegating to it, skipping the option validation on every call.
  new(fn seed, size ->
    {length, next_seed} = uniform_in_range(0, size, seed)

    data
    |> call_n_times(next_seed, size, length, [])
    |> LazyTree.zip()
    |> LazyTree.map(&list_lazy_tree(&1, 0))
    |> LazyTree.flatten()
  end)
end

# Validates the :length/:min_length/:max_length options and returns a
# `size -> {min, max}` function used to pick the length of each generated
# list. :length (integer or range) takes precedence over the other two.
defp list_length_range_fun(options) do
  {min, max} =
    case Keyword.fetch(options, :length) do
      {:ok, length} when is_integer(length) and length >= 0 ->
        {length, length}

      {:ok, min..max//_} when min >= 0 and max >= 0 ->
        order(min, max)

      {:ok, other} ->
        raise ArgumentError,
              ":length must be a positive integer or a range " <>
                "of positive integers, got: #{inspect(other)}"

      :error ->
        min_length = Keyword.get(options, :min_length, 0)
        max_length = Keyword.get(options, :max_length, :infinity)

        unless is_integer(min_length) and min_length >= 0 do
          raise ArgumentError,
                ":min_length must be a positive integer, got: #{inspect(min_length)}"
        end

        unless (is_integer(max_length) and max_length >= 0) or max_length == :infinity do
          raise ArgumentError,
                ":max_length must be a positive integer, got: #{inspect(max_length)}"
        end

        {min_length, max_length}
    end

  # The max is capped by the generation size but never drops below the min.
  fn size -> {min, max |> min(size) |> max(min)} end
end

# Generates `length` lazy trees out of `data`, splitting the seed at each step.
defp call_n_times(_data, _seed, _size, 0, acc) do
  acc
end

defp call_n_times(data, seed, size, length, acc) do
  {seed1, seed2} = split_seed(seed)
  call_n_times(data, seed2, size, length - 1, [call(data, seed1, size) | acc])
end

# Lazy tree whose root is `list` and whose children (computed lazily via
# Stream.map/2) are the lists obtained by deleting one element at a time,
# never going below `min_length` elements.
defp list_lazy_tree(list, min_length) do
  length = length(list)

  if length == min_length do
    lazy_tree_constant(list)
  else
    children =
      Stream.map(0..(length - 1), fn index ->
        list_lazy_tree(List.delete_at(list, index), min_length)
      end)

    lazy_tree(list, children)
  end
end

@doc """
Generates a list of elements generated by `data` without duplicates (possibly
according to a given uniqueness function).

This generator will generate lists where each list is unique according to the
value returned by applying the given uniqueness function to each element
(similarly to how `Enum.uniq_by/2` works). If more than the value of the
`:max_tries` option consecutive elements are generated that are considered
duplicates according to the uniqueness function, a
`StreamData.TooManyDuplicatesError` error is raised. For this reason, try to
make sure to not make the uniqueness function return values out of a small
value space. The uniqueness function and the max number of tries can be
customized via options.

## Options

  * `:uniq_fun` - (a function of arity one) a function that is called with
    each generated element and whose return value is used as the value to
    compare with other values for uniqueness (similarly to `Enum.uniq_by/2`).

  * `:max_tries` - (non-negative integer) the maximum number of times that
    this generator tries to generate the next element of the list before
    giving up and raising a `StreamData.TooManyDuplicatesError` in case it
    can't find a unique element to generate. Note that the generation size
    often affects this: for example, if you have a generator like
    `uniq_list_of(integer(), min_length: 4)` and you start generating elements
    out of it with a generation size of `1`, it will fail by definition
    because `integer/0` generates in `-size..size` so it would only generate
    in a set (`[-1, 0, 1]`) with three elements.
    Use `resize/2` or `scale/2`
    to manipulate the size (for example by setting a minimum generation size
    of `3`) in such cases.

  * `:length` - (non-negative integer) same as in `list_of/2`.

  * `:min_length` - (non-negative integer) same as in `list_of/2`.

  * `:max_length` - (non-negative integer) same as in `list_of/2`.

## Examples

    data = StreamData.uniq_list_of(StreamData.integer())
    Enum.take(data, 3)
    #=> [[1], [], [2, 3, 1]]

## Shrinking

This generator shrinks like `list_of/1`, but the shrunk values are unique
according to the `:uniq_fun` option as well.
"""
@spec uniq_list_of(t(a), keyword()) :: t([a]) when a: term()
def uniq_list_of(data, options \\ []) do
  uniq_fun = Keyword.get(options, :uniq_fun, & &1)
  max_tries = Keyword.get(options, :max_tries, 10)
  list_length_range_fun = list_length_range_fun(options)

  new(fn seed, size ->
    {min_length, max_length} = list_length_range_fun.(size)
    {length, next_seed} = uniform_in_range(min_length, max_length, seed)

    data
    |> uniq_list_of(
      uniq_fun,
      next_seed,
      size,
      _seen = MapSet.new(),
      max_tries,
      max_tries,
      length,
      []
    )
    |> LazyTree.zip()
    |> LazyTree.map(&list_lazy_tree(&1, min_length))
    |> LazyTree.flatten()
    # Shrinking an element can reintroduce a duplicate, so uniqueness and the
    # minimum length are re-enforced on every shrunk list.
    |> LazyTree.map(&Enum.uniq_by(&1, uniq_fun))
    |> LazyTree.filter(&(length(&1) >= min_length))
  end)
end

# Clause hit when we run out of tries: the last `max_tries` candidates were
# all duplicates, so give up with a descriptive error.
defp uniq_list_of(
       _data,
       _uniq_fun,
       _seed,
       _size,
       seen,
       _tries_left = 0,
       max_tries,
       remaining,
       _acc
     ) do
  raise TooManyDuplicatesError,
    max_tries: max_tries,
    remaining_to_generate: remaining,
    generated: seen
end

# Clause hit when all requested elements have been generated.
defp uniq_list_of(
       _data,
       _uniq_fun,
       _seed,
       _size,
       _seen,
       _tries_left,
       _max_tries,
       _remaining = 0,
       acc
     ) do
  acc
end

defp uniq_list_of(data, uniq_fun, seed, size, seen, tries_left, max_tries, remaining, acc) do
  {seed1, seed2} = split_seed(seed)
  tree = call(data, seed1, size)

  key = uniq_fun.(tree.root)

  if MapSet.member?(seen, key) do
    # Duplicate: retry with one less try left; `tries_left` resets on success.
    uniq_list_of(data, uniq_fun, seed2, size, seen, tries_left - 1, max_tries, remaining, acc)
  else
    uniq_list_of(
      data,
      uniq_fun,
      seed2,
      size,
      MapSet.put(seen, key),
      max_tries,
      max_tries,
      remaining - 1,
      [tree | acc]
    )
  end
end

@doc """
Generates *lists* with the same elements as the provided `enum` but in a random order.

## Examples

    StreamData.shuffle([1, 2, 3, 4, 5])
    |> Enum.take(3)
    #=> [[4, 2, 5, 3, 1], [1, 3, 4, 5, 2], [3, 2, 5, 4, 1]]

## Shrinking

Shrinks towards a list with elements in the same order as the original `enum`.
"""
@doc since: "1.2.0"
@spec shuffle(Enumerable.t()) :: t(Enumerable.t())
def shuffle(enum)

# We need this clause because the logic in the non-empty-list clause
# relies on the list having one or more elements.
def shuffle([]) do
  constant([])
end

def shuffle(list) when is_list(list) do
  # Convert to array for faster swapping
  array = :array.from_list(list)
  len = :array.size(array)

  index_generator = integer(0..(len - 1))

  # A shuffle is generated as a list of {i, j} swap instructions applied in
  # order; an empty instruction list leaves the original order, which is what
  # this generator shrinks towards.
  # Inspired by this clojure implementation:
  # https://github.com/clojure/test.check/blob/0ee576eb73d4864c199305c4a0c1e8101d8d1b39/src/main/clojure/clojure/test/check/generators.cljc#L636
  {index_generator, index_generator}
  |> list_of(length: 0..(len * 2))
  |> map(fn swap_instructions ->
    swap_instructions
    |> Enum.reduce(array, fn {i, j}, array -> array_swap(array, i, j) end)
    |> :array.to_list()
  end)
end

def shuffle(enum) do
  enum |> Enum.to_list() |> shuffle()
end

# Swaps the values at indexes `i` and `j` in the given Erlang array.
defp array_swap(array, i, j) do
  v_i = :array.get(i, array)
  v_j = :array.get(j, array)
  array = :array.set(i, v_j, array)
  :array.set(j, v_i, array)
end

@doc ~S"""
Generates non-empty improper lists where elements of the list are generated
out of `first` and the improper ending out of `improper`.

## Examples

    data = StreamData.nonempty_improper_list_of(StreamData.byte(), StreamData.binary())
    Enum.take(data, 3)
    #=> [[42], [56 | <<140, 137>>], [226 | "j"]]

## Shrinking

Shrinks towards smaller lists (that are still non-empty, having the improper
ending) and towards shrunk elements of the list and a shrunk improper
ending.
"""
@spec nonempty_improper_list_of(t(a), t(b)) :: t(nonempty_improper_list(a, b))
      when a: term(),
           b: term()
def nonempty_improper_list_of(first, improper) do
  # `list ++ ending` with a non-list `ending` builds the improper tail; the
  # list is generated with min_length: 1 so the result is never just `ending`.
  map({list_of(first, min_length: 1), improper}, fn
    {list, ending} ->
      list ++ ending
  end)
end

@doc """
Generates lists of elements out of `first` with a chance of them being
improper with the improper ending taken out of `improper`.

Behaves similarly to `nonempty_improper_list_of/2` but can generate empty
lists and proper lists as well.

## Examples

    data = StreamData.maybe_improper_list_of(StreamData.byte(), StreamData.binary())
    Enum.take(data, 3)
    #=> [[60 | "."], [], [<<212>>]]

## Shrinking

Shrinks towards smaller lists and shrunk elements in those lists, and
ultimately towards proper lists.
"""
@spec maybe_improper_list_of(t(a), t(b)) :: t(maybe_improper_list(a, b))
      when a: term(),
           b: term()
def maybe_improper_list_of(first, improper) do
  # Proper lists get twice the weight and come first, so shrinking moves
  # towards proper lists.
  frequency([
    {2, list_of(first)},
    {1, nonempty_improper_list_of(first, improper)}
  ])
end

@doc """
Generates a list of fixed length where each element is generated from the
corresponding generator in `data`.

## Examples

    data = StreamData.fixed_list([StreamData.integer(), StreamData.binary()])
    Enum.take(data, 3)
    #=> [[1, <<164>>], [2, ".T"], [1, ""]]

## Shrinking

Shrinks by shrinking each element in the generated list according to the
corresponding generator. Shrunk lists never lose elements.
"""
@spec fixed_list([t(a)]) :: t([a]) when a: term()
def fixed_list(datas) when is_list(datas) do
  new(fn seed, size ->
    # Thread the seed through the generators, splitting it once per element.
    {trees, _seed} =
      Enum.map_reduce(datas, seed, fn data, acc ->
        {seed1, seed2} = split_seed(acc)
        {call(data, seed1, size), seed2}
      end)

    LazyTree.zip(trees)
  end)
end

@doc """
Generates tuples where each element is taken out of the corresponding
generator in the `tuple_datas` tuple.

## Examples

    data = StreamData.tuple({StreamData.integer(), StreamData.binary()})
    Enum.take(data, 3)
    #=> [{-1, <<170>>}, {1, "<"}, {1, ""}]

## Shrinking

Shrinks by shrinking each element in the generated tuple according to the
corresponding generator.
"""
@spec tuple(tuple()) :: t(tuple())
def tuple(tuple_datas) when is_tuple(tuple_datas) do
  # Tuples of generators are treated as generators throughout this module
  # (see `shuffle/1`, `map_of/3`, `keyword_of/1`), so this only wraps the call.
  new(fn seed, size -> call(tuple_datas, seed, size) end)
end

@doc """
Generates maps with keys from `key_data` and values from `value_data`.

Since maps require keys to be unique, this generator behaves similarly to
`uniq_list_of/2`: if more than `max_tries` duplicate keys are generated
consecutively, it raises a `StreamData.TooManyDuplicatesError` exception.

## Options

  * `:length` - (non-negative integer) same as in `list_of/2`.

  * `:min_length` - (non-negative integer) same as in `list_of/2`.

  * `:max_length` - (non-negative integer) same as in `list_of/2`.

## Examples

    Enum.take(StreamData.map_of(StreamData.integer(), StreamData.boolean()), 3)
    #=> [%{}, %{1 => false}, %{-2 => true, -1 => false}]

## Shrinking

Shrinks towards the smallest maps and towards shrinking keys and values according
to the respective generators.
"""
@spec map_of(t(key), t(value), keyword()) :: t(%{optional(key) => value})
      when key: term(),
           value: term()
def map_of(key_data, value_data, options \\ []) do
  # Key uniqueness is enforced by generating a list of {key, value} tuples
  # that is unique by key, and then turning it into a map.
  options = Keyword.put(options, :uniq_fun, fn {key, _value} -> key end)

  {key_data, value_data}
  |> uniq_list_of(options)
  |> map(&Map.new/1)
end

@doc """
Generates maps with fixed keys and generated values.

`data_map` is a map or keyword list of `fixed_key => data` pairs. Maps generated by this
generator will have the same keys as `data_map` and values corresponding to values generated by
the generator under those keys.

See also `optional_map/1`.

## Examples

    data = StreamData.fixed_map(%{
      integer: StreamData.integer(),
      binary: StreamData.binary(),
    })
    Enum.take(data, 3)
    #=> [%{binary: "", integer: 1}, %{binary: "", integer: -2}, %{binary: "R1^", integer: -3}]

## Shrinking

This generator shrinks by shrinking the values of the generated map.
"""
@spec fixed_map(map() | keyword()) :: t(map())
def fixed_map(data)

def fixed_map(data_map) when is_list(data_map) or is_map(data_map) do
  # Keys are wrapped in constant/1 so the {key, value} tuples shrink only in
  # their values; fixed_list/1 guarantees no pair is ever dropped.
  data_map
  |> Enum.map(fn {key, value_data} -> {constant(key), value_data} end)
  |> fixed_list()
  |> map(&Map.new/1)
end

@doc """
Generates maps with fixed but optional keys and generated values.

`data_map` is a map or keyword list of `fixed_key => data` pairs. Maps generated by this
generator will have a subset of the keys of `data_map` and values corresponding to the values
generated by the generator under those keys.

By default, all keys are considered optional. A list of exactly which keys are optional can be
provided as the second argument, allowing for a map of mixed optional and required keys. The
second argument is available since StreamData 0.6.0.

See also `fixed_map/1`.

## Examples

    data = StreamData.optional_map(%{
      integer: StreamData.integer(),
      binary: StreamData.binary(),
    })
    Enum.take(data, 3)
    #=> [%{binary: "", integer: 1}, %{integer: -2}, %{binary: "R1^"}]

    data = StreamData.optional_map(%{
      integer: StreamData.integer(),
      binary: StreamData.binary(),
    }, [:integer])
    Enum.take(data, 3)
    #=> [%{binary: ""}, %{binary: "R1^", integer: -2}, %{binary: "R2^"}]

## Shrinking

This generator shrinks by first shrinking the map by taking out keys until the map is empty, and
then by shrinking the generated values.
"""
@spec optional_map(map() | keyword(), list(any) | nil) :: t(map())
def optional_map(data, optional_keys \\ nil)

def optional_map(data, optional_keys) when is_list(data) do
  optional_map(Map.new(data), optional_keys)
end

def optional_map(data_map, optional_keys) when is_map(data_map) do
  keys = Map.keys(data_map)
  # A `nil` second argument means every key is optional.
  subkeys_data = sublist(optional_keys || keys)

  constant_keys =
    if optional_keys do
      keys -- optional_keys
    else
      []
    end

  new(fn seed, size ->
    {seed1, seed2} = split_seed(seed)
    subkeys_tree = call(subkeys_data, seed1, size)

    # This map contains the constant keys and the data value for the map we are going to
    # generate. Since we are only going to take keys away, doing this here instead of generating
    # the map first with all the keys and then taking away values saves us from generating keys
    # that we are never going to use.
    base_data_map = Map.take(data_map, constant_keys ++ subkeys_tree.root)

    call(fixed_map(base_data_map), seed2, size)
    |> LazyTree.map(fn fixed_map ->
      LazyTree.map(subkeys_tree, &Map.take(fixed_map, constant_keys ++ &1))
    end)
    |> LazyTree.flatten()
  end)
end

# Generates sublists of `list`: one boolean per element decides whether that
# element is kept.
defp sublist(list) do
  map(list_of(boolean(), length: length(list)), fn indexes_to_keep ->
    for {elem, true} <- Enum.zip(list, indexes_to_keep), do: elem
  end)
end

@doc """
Generates keyword lists where values are generated by `value_data`.

Keys are always atoms.

## Examples

    Enum.take(StreamData.keyword_of(StreamData.integer()), 3)
    #=> [[], [sY: 1], [t: -1]]

## Shrinking

This generator shrinks equivalently to a list of key-value tuples generated by
`list_of/1`, that is, by shrinking the values in each tuple and also reducing
the size of the generated keyword list.
"""
@spec keyword_of(t(a)) :: t(keyword(a)) when a: term()
def keyword_of(value_data) do
  list_of({atom(:alphanumeric), value_data})
end

@doc """
Generates sets where values are generated by `data`.

## Options

  * `:max_tries` - (non-negative integer) the maximum number of times that
    this generator tries to generate the next element of the set before
    giving up and raising a `StreamData.TooManyDuplicatesError` in case it
    can't find a unique element to generate.

## Examples

    Enum.take(StreamData.mapset_of(StreamData.integer()), 3)
    #=> [#MapSet<[-1]>, #MapSet<[1, 2]>, #MapSet<[-3, 2, 3]>]

## Shrinking

This generator shrinks in the same way as `uniq_list_of/2`, by removing
elements and shrinking elements as well.
"""
@spec mapset_of(t(a), keyword()) :: t(MapSet.t(a)) when a: term()
def mapset_of(data, options \\ []) do
  # Only :max_tries is meaningful here; length options are intentionally
  # dropped since the uniq list is converted into a set.
  options = Keyword.take(options, [:max_tries])

  data
  |> uniq_list_of(options)
  |> map(&MapSet.new/1)
end

@doc """
Constrains the given `enum_data` to be non-empty.

`enum_data` must be a generator that emits enumerables, such as lists
and maps. `nonempty/1` will filter out enumerables that are empty
(`Enum.empty?/1` returns `true`).

## Examples

    Enum.take(StreamData.nonempty(StreamData.list_of(StreamData.integer())), 3)
    #=> [[1], [-1, 0], [2, 1, -2]]

"""
@spec nonempty(t(Enumerable.t())) :: t(Enumerable.t())
def nonempty(enum_data) do
  filter(enum_data, &(not Enum.empty?(&1)))
end

@doc ~S"""
Generates trees of values generated by `leaf_data` and `subtree_fun`.

`leaf_data` generates the leaf nodes. `subtree_fun` is a function that is
called by `tree`, if an inner node of the tree shall be generated. It takes a
generator `child_gen` for child nodes and returns a generator for an inner
node using `child_gen` to go "one level deeper" in the tree. The frequency
between leaves and inner nodes is 1:2.

This is best explained with an example. Say that we want to generate binary
trees of integers, and that we represent binary trees as either an integer (a
leaf) or a `%Branch{}` struct:

    defmodule Branch do
      defstruct [:left, :right]
    end

Now, we can generate trees by using the `integer()` generator to generate
the leaf nodes. Then we can use the `subtree_fun` function to generate inner
nodes (that is, `%Branch{}` structs or `integer()`s).

    tree_data =
      StreamData.tree(StreamData.integer(), fn child_data ->
        StreamData.map({child_data, child_data}, fn {left, right} ->
          %Branch{left: left, right: right}
        end)
      end)

    Enum.at(StreamData.resize(tree_data, 10), 0)
    #=> %Branch{left: %Branch{left: 4, right: -1}, right: -2}

## Examples

A common example is a nested list:

    data = StreamData.tree(StreamData.integer(), &StreamData.list_of/1)
    Enum.at(StreamData.resize(data, 10), 0)
    #=> [[], '\t', '\a', [1, 2], -3, [-7, [10]]]

A more complex example is generating data that could represent the Elixir
equivalent of a JSON document. The code below is slightly simplified
compared to the JSON spec.

    scalar_generator =
      StreamData.one_of([
        StreamData.integer(),
        StreamData.boolean(),
        StreamData.string(:ascii),
        nil
      ])

    json_generator =
      StreamData.tree(scalar_generator, fn nested_generator ->
        StreamData.one_of([
          StreamData.list_of(nested_generator),
          StreamData.map_of(StreamData.string(:ascii, min_length: 1), nested_generator)
        ])
      end)

    Enum.at(StreamData.resize(json_generator, 10), 0)
    #=> [%{"#" => "5"}, true, %{"4|B" => nil, "7" => true, "yt(3y" => 4}, [[false]]]

## Shrinking

Shrinks values and shrinks towards less deep trees.
"""
@spec tree(t(a), (child_data :: t(a | b) -> t(b))) :: t(a | b) when a: term(), b: term()
def tree(leaf_data, subtree_fun) do
  new(fn seed, size ->
    leaf_data = resize(leaf_data, size)
    {seed1, seed2} = split_seed(seed)
    # One entry per nesting level: pseudo-random factors of trunc(size^1.1)
    # are used as the generation sizes of the nested subtree generators.
    nodes_on_each_level = random_pseudofactors(trunc(:math.pow(size, 1.1)), seed1)

    # Wrap the leaf generator once per level; the 1:2 frequency at each level
    # biases generation towards deeper nodes while shrinking moves towards
    # the earlier (shallower) alternative.
    data =
      Enum.reduce(nodes_on_each_level, leaf_data, fn nodes_on_this_level, data_acc ->
        frequency([
          {1, data_acc},
          {2, resize(subtree_fun.(data_acc), nodes_on_this_level)}
        ])
      end)

    call(data, seed2, size)
  end)
end

# Splits `n` into a list of pseudo-random factors, each drawn in
# 1..trunc(log2(remainder)); used as the per-level sizes in tree/2.
defp random_pseudofactors(n, _seed) when n < 2 do
  [n]
end

defp random_pseudofactors(n, seed) do
  {seed1, seed2} = split_seed(seed)
  {factor, _seed} = :rand.uniform_s(trunc(:math.log2(n)), seed1)

  if factor == 1 do
    [n]
  else
    [factor | random_pseudofactors(div(n, factor), seed2)]
  end
end

## Data types

@doc """
Generates boolean values.

## Examples

    Enum.take(StreamData.boolean(), 3)
    #=> [true, true, false]

## Shrinking

Shrinks towards `false`.
"""
@spec boolean() :: t(boolean())
def boolean() do
  new(fn seed, _size ->
    # `true` gets `false` as its only shrink candidate; `false` has none.
    case uniform_in_range(0, 1, seed) do
      {1, _} -> lazy_tree(true, [lazy_tree_constant(false)])
      {0, _} -> lazy_tree_constant(false)
    end
  end)
end

@doc """
Generates integers bound by the generation size.

## Examples

    Enum.take(StreamData.integer(), 3)
    #=> [1, -1, -3]

## Shrinking

Generated values shrink towards `0`.
1645 | """ 1646 | @spec integer() :: t(integer()) 1647 | def integer() do 1648 | new(fn seed, size -> 1649 | {init, _next_seed} = uniform_in_range(-size, size, seed) 1650 | integer_lazy_tree(init, -size, size) 1651 | end) 1652 | end 1653 | 1654 | @doc """ 1655 | Generates positive integers bound by the generation size. 1656 | 1657 | ## Examples 1658 | 1659 | Enum.take(StreamData.positive_integer(), 3) 1660 | #=> [1, 1, 3] 1661 | 1662 | ## Shrinking 1663 | 1664 | Generated values shrink towards `1`. 1665 | """ 1666 | @spec positive_integer() :: t(pos_integer()) 1667 | def positive_integer() do 1668 | new(fn seed, size -> 1669 | size = max(size, 1) 1670 | {init, _next_seed} = uniform_in_range(1, size, seed) 1671 | integer_lazy_tree(init, 1, size) 1672 | end) 1673 | end 1674 | 1675 | @doc """ 1676 | Generates non-negative integers bound by the generation size. 1677 | 1678 | ## Examples 1679 | 1680 | Enum.take(StreamData.non_negative_integer(), 3) 1681 | #=> [0, 2, 0] 1682 | 1683 | ## Shrinking 1684 | 1685 | Generated values shrink towards `0`. 1686 | """ 1687 | @doc since: "0.6.0" 1688 | @spec non_negative_integer() :: t(non_neg_integer()) 1689 | def non_negative_integer() do 1690 | new(fn seed, size -> 1691 | size = max(size, 0) 1692 | {init, _next_seed} = uniform_in_range(0, size, seed) 1693 | integer_lazy_tree(init, 0, size) 1694 | end) 1695 | end 1696 | 1697 | @doc """ 1698 | Generates floats according to the given `options`. 1699 | 1700 | The complexity of the generated floats grows proportionally to the generation size. 1701 | 1702 | ## Options 1703 | 1704 | * `:min` - (float) if present, the generated floats will be greater than or equal to this 1705 | value. 1706 | 1707 | * `:max` - (float) if present, the generated floats will be less than or equal to this value. 1708 | 1709 | If neither of `:min` or `:max` is provided, then unbounded floats will be generated. 
1710 | 1711 | ## Shrinking 1712 | 1713 | Values generated by this generator will shrink towards simpler floats. Such values are not 1714 | guaranteed to shrink towards smaller or larger values (but they will never violate the `:min` or 1715 | `:max` options). 1716 | """ 1717 | @spec float(keyword()) :: t(float()) 1718 | def float(options \\ []) do 1719 | case {Keyword.get(options, :min), Keyword.get(options, :max)} do 1720 | {nil, nil} -> 1721 | bind(boolean(), fn negative? -> 1722 | map(positive_float_without_bounds(), &if(negative?, do: -&1, else: &1)) 1723 | end) 1724 | 1725 | {min, nil} -> 1726 | map(positive_float_without_bounds(), &(&1 + min)) 1727 | 1728 | {nil, max} -> 1729 | map(positive_float_without_bounds(), &(-&1 + max)) 1730 | 1731 | {min, max} when min <= max -> 1732 | float_with_bounds(min, max) 1733 | end 1734 | end 1735 | 1736 | defp positive_float_without_bounds() do 1737 | sized(fn size -> 1738 | abs_exp = min(size, 1023) 1739 | decimal_part = float_in_0_to_1(abs_exp) 1740 | 1741 | bind(boolean(), fn negative_exp? -> 1742 | if negative_exp? 
do 1743 | decimal_part 1744 | else 1745 | int_part = power_of_two_with_zero(abs_exp) 1746 | map({decimal_part, int_part}, fn {decimal, int} -> decimal + int end) 1747 | end 1748 | end) 1749 | end) 1750 | end 1751 | 1752 | defp float_with_bounds(min, max) do 1753 | sized(fn size -> 1754 | exponent_data = integer(0..min(size, 1023)) 1755 | 1756 | bind(exponent_data, fn exponent -> 1757 | map(float_in_0_to_1(exponent), fn float -> float * (max - min) + min end) 1758 | end) 1759 | end) 1760 | end 1761 | 1762 | defp float_in_0_to_1(abs_exp) do 1763 | factor = :math.pow(2, -abs_exp) 1764 | map(power_of_two_with_zero(abs_exp), &(&1 * factor)) 1765 | end 1766 | 1767 | defp power_of_two_with_zero(abs_exp) do 1768 | new(fn seed, _size -> 1769 | {integer, _} = uniform_in_range(0, power_of_two(abs_exp), seed) 1770 | powers = Stream.map(abs_exp..0//-1, &lazy_tree_constant(power_of_two(&1))) 1771 | lazy_tree(integer, Enum.concat(powers, [lazy_tree_constant(0)])) 1772 | end) 1773 | end 1774 | 1775 | defp power_of_two(0), do: 1 1776 | defp power_of_two(n), do: 2 * power_of_two(n - 1) 1777 | 1778 | @doc """ 1779 | Generates bytes. 1780 | 1781 | A byte is an integer between `0` and `255`. 1782 | 1783 | ## Examples 1784 | 1785 | Enum.take(StreamData.byte(), 3) 1786 | #=> [102, 161, 13] 1787 | 1788 | ## Shrinking 1789 | 1790 | Values generated by this generator shrink like integers, so towards bytes 1791 | closer to `0`. 1792 | """ 1793 | @spec byte() :: t(byte()) 1794 | def byte() do 1795 | integer(0..255) 1796 | end 1797 | 1798 | @doc """ 1799 | Generates binaries. 1800 | 1801 | The length of the generated binaries is limited by the generation size. 1802 | 1803 | ## Options 1804 | 1805 | * `:length` - (non-negative integer) sets the exact length of the generated 1806 | binaries (same as in `list_of/2`). 1807 | 1808 | * `:min_length` - (non-negative integer) sets the minimum length of the 1809 | generated binaries (same as in `list_of/2`). Ignored if `:length` is 1810 | present. 
1811 | 1812 | * `:max_length` - (non-negative integer) sets the maximum length of the 1813 | generated binaries (same as in `list_of/2`). Ignored if `:length` is 1814 | present. 1815 | 1816 | ## Examples 1817 | 1818 | Enum.take(StreamData.binary(), 3) 1819 | #=> [<<1>>, "", "@Q"] 1820 | 1821 | ## Shrinking 1822 | 1823 | Values generated by this generator shrink by becoming smaller binaries and by 1824 | having individual bytes that shrink towards `0`. 1825 | """ 1826 | @spec binary(keyword()) :: t(binary()) 1827 | def binary(options \\ []) do 1828 | list_options = Keyword.take(options, [:length, :min_length, :max_length]) 1829 | map(list_of(byte(), list_options), &:binary.list_to_bin/1) 1830 | end 1831 | 1832 | @doc """ 1833 | Generates bitstrings. 1834 | 1835 | The length of the generated bitstring is limited by the generation size. 1836 | 1837 | ## Options 1838 | 1839 | * `:length` - (non-negative integer) sets the exact length of the generated 1840 | bitstrings (same as in `list_of/2`). 1841 | 1842 | * `:min_length` - (non-negative integer) sets the minimum length of the 1843 | generated bitstrings (same as in `list_of/2`). Ignored if `:length` is 1844 | present. 1845 | 1846 | * `:max_length` - (non-negative integer) sets the maximum length of the 1847 | generated bitstrings (same as in `list_of/2`). Ignored if `:length` is 1848 | present. 1849 | 1850 | ## Examples 1851 | 1852 | Enum.take(StreamData.bitstring(), 3) 1853 | #=> [<<0::size(1)>>, <<2::size(2)>>, <<5::size(3)>>] 1854 | 1855 | ## Shrinking 1856 | 1857 | Values generated by this generator shrink by becoming smaller bitstrings and 1858 | by having the individual bits go towards `0`. 
1859 | """ 1860 | @spec bitstring(keyword()) :: t(bitstring()) 1861 | def bitstring(options \\ []) do 1862 | list_options = Keyword.take(options, [:length, :min_length, :max_length]) 1863 | 1864 | map(list_of(integer(0..1), list_options), fn bits -> 1865 | Enum.reduce(bits, <<>>, fn bit, acc -> <> end) 1866 | end) 1867 | end 1868 | 1869 | @ascii_chars ?\s..?~ 1870 | 1871 | # "UTF-8 prohibits encoding character numbers between U+D800 and U+DFFF" 1872 | @utf8_chars [0..0xD7FF, 0xE000..0x10FFFF] 1873 | 1874 | @alphanumeric_chars [?a..?z, ?A..?Z, ?0..?9] 1875 | @printable_chars [ 1876 | ?\n, 1877 | ?\r, 1878 | ?\t, 1879 | ?\v, 1880 | ?\b, 1881 | ?\f, 1882 | ?\e, 1883 | ?\d, 1884 | ?\a, 1885 | 0x20..0x7E, 1886 | 0xA0..0xD7FF, 1887 | 0xE000..0xFFFD, 1888 | 0x10000..0x10FFFF 1889 | ] 1890 | 1891 | @doc """ 1892 | Generates an integer corresponding to a valid UTF-8 codepoint of the given kind. 1893 | 1894 | `kind` can be: 1895 | 1896 | * `:ascii` - only ASCII characters are generated. Shrinks towards lower codepoints. 1897 | 1898 | * `:alphanumeric` - only alphanumeric characters (`?a..?z`, `?A..?Z`, `?0..?9`) 1899 | are generated. Shrinks towards `?a` following the order shown previously. 1900 | 1901 | * `:printable` - only printable codepoints 1902 | (`String.printable?(<>)` returns `true`) 1903 | are generated. Shrinks towards lower codepoints. 1904 | 1905 | * `:utf8` - all valid codepoints (`<>)` does not raise) 1906 | are generated. Shrinks towards lower codepoints. 1907 | 1908 | Defaults to `:utf8`. 
1909 | 1910 | ## Examples 1911 | 1912 | Enum.take(StreamData.codepoint(), 3) 1913 | #=> [889941, 349615, 1060099] 1914 | 1915 | Enum.take(StreamData.codepoint(:ascii), 3) 1916 | #=> ~c"Kk:" 1917 | 1918 | """ 1919 | @doc since: "0.6.0" 1920 | @spec codepoint(:ascii | :alphanumeric | :printable | :utf8) :: t(char()) 1921 | def codepoint(kind \\ :utf8) 1922 | 1923 | def codepoint(:ascii), do: integer(@ascii_chars) 1924 | def codepoint(:alphanumeric), do: codepoint_with_frequency(@alphanumeric_chars) 1925 | def codepoint(:printable), do: codepoint_with_frequency(@printable_chars) 1926 | def codepoint(:utf8), do: codepoint_with_frequency(@utf8_chars) 1927 | 1928 | defp codepoint_with_frequency(chars_or_ranges) do 1929 | chars_or_ranges 1930 | |> Enum.map(fn 1931 | %Range{} = range -> 1932 | {Enum.count(range), integer(range)} 1933 | 1934 | codepoint when is_integer(codepoint) -> 1935 | {1, constant(codepoint)} 1936 | end) 1937 | |> frequency() 1938 | end 1939 | 1940 | @doc """ 1941 | Generates a string of the given kind or from the given characters. 1942 | 1943 | `kind_or_codepoints` can be: 1944 | 1945 | * `:ascii` - strings containing only ASCII characters are generated. Such 1946 | strings shrink towards lower codepoints. 1947 | 1948 | * `:alphanumeric` - strings containing only alphanumeric characters 1949 | (`?a..?z`, `?A..?Z`, `?0..?9`) are generated. Such strings shrink towards 1950 | `?a` following the order shown previously. 1951 | 1952 | * `:printable` - printable strings (`String.printable?/1` returns `true`) 1953 | are generated. Such strings shrink towards lower codepoints. 1954 | 1955 | * `:utf8` - valid strings (`String.valid?/1` returns `true`) 1956 | are generated. Such strings shrink towards lower codepoints. *Available 1957 | since 0.6.0.* 1958 | 1959 | * a range - strings with characters from the range are generated. Such 1960 | strings shrink towards characters that appear earlier in the range. 
1961 | 1962 | * a list of ranges or single codepoints - strings with characters from the 1963 | ranges or codepoints are generated. Such strings shrink towards earlier 1964 | elements of the given list and towards the beginning of ranges. 1965 | 1966 | ## Options 1967 | 1968 | See the documentation of `list_of/2` for the possible values of options. 1969 | 1970 | ## Examples 1971 | 1972 | Enum.take(StreamData.string(:ascii), 3) 1973 | #=> ["c", "9A", ""] 1974 | 1975 | Enum.take(StreamData.string(Enum.concat([?a..?c, ?l..?o])), 3) 1976 | #=> ["c", "oa", "lb"] 1977 | 1978 | ## Shrinking 1979 | 1980 | Shrinks towards smaller strings and as described in the description of the 1981 | possible values of `kind_or_codepoints` above. 1982 | """ 1983 | @spec string( 1984 | :ascii 1985 | | :alphanumeric 1986 | | :printable 1987 | | :utf8 1988 | | Range.t() 1989 | | [Range.t() | pos_integer()] 1990 | ) :: 1991 | t(String.t()) 1992 | def string(kind_or_codepoints, options \\ []) 1993 | 1994 | def string(atom, options) when atom in [:ascii, :alphanumeric, :printable, :utf8] do 1995 | atom 1996 | |> codepoint() 1997 | |> string_from_codepoint_data(options) 1998 | end 1999 | 2000 | def string(%Range{} = codepoints_range, options) do 2001 | string_from_codepoint_data(integer(codepoints_range), options) 2002 | end 2003 | 2004 | def string(codepoints, options) when is_list(codepoints) and is_list(options) do 2005 | codepoints 2006 | |> codepoint_with_frequency() 2007 | |> string_from_codepoint_data(options) 2008 | end 2009 | 2010 | def string(other, _options) do 2011 | raise ArgumentError, 2012 | "unsupported string kind, has to be one of :ascii, " <> 2013 | ":alphanumeric, :printable, :utf8, a range, or a list of " <> 2014 | "ranges or single codepoints, got: #{inspect(other)}" 2015 | end 2016 | 2017 | defp string_from_codepoint_data(codepoint_data, options) do 2018 | codepoint_data 2019 | |> list_of(options) 2020 | |> map(&List.to_string/1) 2021 | end 2022 | 2023 | @doc """ 2024 | 
Generates atoms of various `kind`s. 2025 | 2026 | `kind` can be: 2027 | 2028 | * `:alphanumeric` - this generates alphanumeric atoms that don't need to be quoted when 2029 | written as literals. For example, it will generate `:foo` but not `:"foo bar"`. 2030 | 2031 | * `:alias` - generates Elixir aliases like `Foo` or `Foo.Bar.Baz`. 2032 | 2033 | These are some of the most common kinds of atoms usually used in Elixir applications. If you 2034 | need completely arbitrary atoms, you can use a combination of `map/2`, `String.to_atom/1`, 2035 | and string-focused generators to transform arbitrary strings into atoms: 2036 | 2037 | printable_atom = 2038 | StreamData.map( 2039 | StreamData.string(:printable, max_length: 255), 2040 | &String.to_atom/1 2041 | ) 2042 | 2043 | Bear in mind the [system limit](http://erlang.org/doc/efficiency_guide/advanced.html#system-limits) 2044 | of 255 characters in an atom when doing so. 2045 | 2046 | ## Examples 2047 | 2048 | Enum.take(StreamData.atom(:alphanumeric), 3) 2049 | #=> [:xF, :y, :B_] 2050 | 2051 | ## Shrinking 2052 | 2053 | Shrinks towards smaller atoms and towards "simpler" letters (like towards only alphabet 2054 | letters). 2055 | """ 2056 | @spec atom(:alphanumeric | :alias) :: t(atom()) 2057 | def atom(kind) 2058 | 2059 | @unquoted_atom_characters [?a..?z, ?A..?Z, ?0..?9, ?_, ?@] 2060 | def atom(:alphanumeric) do 2061 | starting_char = 2062 | frequency([ 2063 | {4, integer(?a..?z)}, 2064 | {2, integer(?A..?Z)}, 2065 | {1, constant(?_)} 2066 | ]) 2067 | 2068 | # We limit the size to 254 so that adding the first character doesn't 2069 | # break the system limit of 255 chars in an atom. 
2070 | rest = scale(string(@unquoted_atom_characters), &min(&1, 254)) 2071 | 2072 | {starting_char, rest} 2073 | |> map(fn {first, rest} -> String.to_atom(<>) end) 2074 | |> scale_with_exponent(0.75) 2075 | end 2076 | 2077 | @alias_atom_characters [?a..?z, ?A..?Z, ?0..?9, ?_] 2078 | def atom(:alias) do 2079 | sized(fn size -> 2080 | max_list_length = 2081 | size 2082 | |> min(100) 2083 | |> :math.pow(0.75) 2084 | |> Float.ceil() 2085 | |> trunc() 2086 | 2087 | first_letter = integer(?A..?Z) 2088 | other_letters = string(@alias_atom_characters, max_length: trunc(255 / max_list_length)) 2089 | 2090 | {first_letter, other_letters} 2091 | |> map(fn {first, rest} -> <> end) 2092 | |> list_of(length: 1..max_list_length) 2093 | |> map(&Module.concat/1) 2094 | end) 2095 | end 2096 | 2097 | @doc """ 2098 | Generates iolists. 2099 | 2100 | Iolists are values of the `t:iolist/0` type. 2101 | 2102 | ## Examples 2103 | 2104 | Enum.take(StreamData.iolist(), 3) 2105 | #=> [[164 | ""], [225], ["" | ""]] 2106 | 2107 | ## Shrinking 2108 | 2109 | Shrinks towards smaller and less nested lists and towards bytes instead of 2110 | binaries. 2111 | """ 2112 | @spec iolist() :: t(iolist()) 2113 | def iolist() do 2114 | iolist_or_chardata_tree(byte(), binary()) 2115 | end 2116 | 2117 | @doc """ 2118 | Generates iodata. 2119 | 2120 | Iodata are values of the `t:iodata/0` type. 2121 | 2122 | ## Examples 2123 | 2124 | Enum.take(StreamData.iodata(), 3) 2125 | #=> [[""], <<198>>, [115, 172]] 2126 | 2127 | ## Shrinking 2128 | 2129 | Shrinks towards less nested iodata and ultimately towards smaller binaries. 2130 | """ 2131 | @spec iodata() :: t(iodata()) 2132 | def iodata() do 2133 | frequency([ 2134 | {3, binary()}, 2135 | {2, iolist()} 2136 | ]) 2137 | end 2138 | 2139 | @doc """ 2140 | Generates chardata. 2141 | 2142 | Chardata are values of the `t:IO.chardata/0` type. 
2143 | 2144 | ## Examples 2145 | 2146 | Enum.take(StreamData.chardata(), 3) 2147 | #=> ["", [""], [12174]] 2148 | 2149 | ## Shrinking 2150 | 2151 | Shrinks towards less nested chardata and ultimately towards smaller binaries. 2152 | """ 2153 | @doc since: "0.6.0" 2154 | @spec chardata() :: t(IO.chardata()) 2155 | def chardata() do 2156 | frequency([ 2157 | {3, string(:utf8)}, 2158 | {2, iolist_or_chardata_tree(codepoint(:utf8), string(:utf8))} 2159 | ]) 2160 | end 2161 | 2162 | defp iolist_or_chardata_tree(int_type, binary_type) do 2163 | # We try to use binaries that scale slower otherwise we end up with iodata with 2164 | # big binaries at many levels deep. 2165 | scaled_binary = scale_with_exponent(binary_type, 0.6) 2166 | 2167 | improper_ending = one_of([scaled_binary, constant([])]) 2168 | tree = tree(one_of([int_type, scaled_binary]), &maybe_improper_list_of(&1, improper_ending)) 2169 | map(tree, &List.wrap/1) 2170 | end 2171 | 2172 | @doc """ 2173 | Generates any term. 2174 | 2175 | The terms that this generator can generate are simple terms or compound terms. The simple terms 2176 | are: 2177 | 2178 | * integers (through `integer/0`) 2179 | * binaries (through `binary/1`) 2180 | * floats (through `float/0`) 2181 | * booleans (through `boolean/0`) 2182 | * atoms (through `atom/1`) 2183 | * references (which are not shrinkable) 2184 | 2185 | Compound terms are terms that contain other terms (which are generated recursively with 2186 | `term/0`): 2187 | 2188 | * lists (through `list_of/2`) 2189 | * maps (through `map_of/2`) 2190 | * tuples 2191 | 2192 | ## Examples 2193 | 2194 | Enum.take(StreamData.term(), 3) 2195 | #=> [0.5119003572251588, {{true, ""}}, :WJg] 2196 | 2197 | ## Shrinking 2198 | 2199 | The terms generated by this generator shrink based on the generator used to create them (see the 2200 | list of possible generated terms above). 
2201 | """ 2202 | @spec term() :: t(simple | [simple] | %{optional(simple) => simple} | tuple()) 2203 | when simple: boolean() | integer() | binary() | float() | atom() | reference() 2204 | def term() do 2205 | ref = new(fn _seed, _size -> lazy_tree_constant(make_ref()) end) 2206 | simple_term = one_of([boolean(), integer(), binary(), float(), atom(:alphanumeric), ref]) 2207 | 2208 | tree(simple_term, fn leaf -> 2209 | one_of([list_of(leaf), map_of(leaf, leaf), one_to_four_element_tuple(leaf)]) 2210 | end) 2211 | end 2212 | 2213 | defp one_to_four_element_tuple(leaf) do 2214 | bind(integer(0..9), fn 2215 | int when int >= 6 -> {leaf, leaf, leaf} 2216 | int when int >= 3 -> {leaf, leaf} 2217 | int when int >= 1 -> {leaf} 2218 | _ -> {} 2219 | end) 2220 | end 2221 | 2222 | defp scale_with_exponent(data, exponent) do 2223 | scale(data, fn size -> trunc(:math.pow(size, exponent)) end) 2224 | end 2225 | 2226 | @doc """ 2227 | Checks the behaviour of a given function on values generated by `data`. 2228 | 2229 | This function takes a generator and a function `fun` and verifies that that 2230 | function "holds" for all generated data. `fun` is called with each generated 2231 | value and can return one of: 2232 | 2233 | * `{:ok, term}` - means that the function "holds" for the given value. `term` 2234 | can be anything and will be used for internal purposes by `StreamData`. 2235 | 2236 | * `{:error, term}` - means that the function doesn't hold for the given 2237 | value. `term` is the term that will be shrunk to find the minimal value 2238 | for which `fun` doesn't hold. See below for more information on shrinking. 2239 | 2240 | When a value is found for which `fun` doesn't hold (returns `{:error, term}`), 2241 | `check_all/3` tries to shrink that value in order to find a minimal value that 2242 | still doesn't satisfy `fun`. 2243 | 2244 | The return value of this function is one of: 2245 | 2246 | * `{:ok, ok_map}` - if all generated values satisfy `fun`. 
`ok_map` is a map 2247 | of metadata that contains no keys for now. 2248 | 2249 | * `{:error, error_map}` - if a generated value doesn't satisfy `fun`. 2250 | `error_map` is a map of metadata that contains the following keys: 2251 | 2252 | * `:original_failure` - if `fun` returned `{:error, term}` for a generated 2253 | value, this key in the map will be `term`. 2254 | 2255 | * `:shrunk_failure` - the value returned in `{:error, term}` by `fun` 2256 | when invoked with the smallest failing value that was generated. 2257 | 2258 | * `:nodes_visited` - the number of nodes (a positive integer) visited in 2259 | the shrinking tree in order to find the smallest value. See also the 2260 | `:max_shrinking_steps` option. 2261 | 2262 | * `:successful_runs` - the number of successful runs before a failing value was found. 2263 | 2264 | ## Options 2265 | 2266 | This function takes the following options: 2267 | 2268 | * `:initial_seed` - three-element tuple with three integers that is used as 2269 | the initial random seed that drives the random generation. This option is 2270 | required. 2271 | 2272 | * `:initial_size` - (non-negative integer) the initial generation size used 2273 | to start generating values. The generation size is then incremented by `1` 2274 | on each iteration. See the "Generation size" section of the module 2275 | documentation for more information on generation size. Defaults to `1`. 2276 | 2277 | * `:max_runs` - (non-negative integer) the total number of elements to 2278 | generate out of `data` and check through `fun`. Defaults to `100`. 2279 | 2280 | * `:max_run_time` - (non-negative integer) the total number of time (in milliseconds) 2281 | to run a given check for. This is not used by default, so unless a value 2282 | is given, then the length of the check will be determined by `:max_runs`. 2283 | If both `:max_runs` and `:max_run_time` are given, then the check will finish at 2284 | whichever comes first, `:max_runs` or `:max_run_time`. 
2285 | 2286 | * `:max_shrinking_steps` - (non-negative integer) the maximum numbers of 2287 | shrinking steps to perform in case `check_all/3` finds an element that 2288 | doesn't satisfy `fun`. Defaults to `100`. 2289 | 2290 | ## Examples 2291 | 2292 | Let's try out a contrived example: we want to verify that the `integer/0` 2293 | generator generates integers that are not `0` or multiples of `11`. This 2294 | verification is broken by design because `integer/0` is likely to generate 2295 | multiples of `11` at some point, but it will show the capabilities of 2296 | `check_all/3`. For the sake of the example, let's say we want the values that 2297 | fail to be represented as strings instead of the original integers that 2298 | failed. We can implement what we described like this: 2299 | 2300 | options = [initial_seed: :os.timestamp()] 2301 | 2302 | {:error, metadata} = StreamData.check_all(StreamData.integer(), options, fn int -> 2303 | if int == 0 or rem(int, 11) != 0 do 2304 | {:ok, nil} 2305 | else 2306 | {:error, Integer.to_string(int)} 2307 | end 2308 | end) 2309 | 2310 | metadata.nodes_visited 2311 | #=> 7 2312 | metadata.original_failure 2313 | #=> 22 2314 | metadata.shrunk_failure 2315 | #=> 11 2316 | 2317 | As we can see, the function we passed to `check_all/3` "failed" for `int = 2318 | 22`, and `check_all/3` was able to shrink this value to the smallest failing 2319 | value, which in this case is `11`. 
2320 | """ 2321 | @spec check_all(t(a), Keyword.t(), (a -> {:ok, term()} | {:error, b})) :: 2322 | {:ok, map()} | {:error, map()} 2323 | when a: term(), 2324 | b: term() 2325 | def check_all(data, options, fun) when is_list(options) and is_function(fun, 1) do 2326 | seed = new_seed(Keyword.fetch!(options, :initial_seed)) 2327 | size = Keyword.get(options, :initial_size, 1) 2328 | max_shrinking_steps = Keyword.get(options, :max_shrinking_steps, 100) 2329 | start_time = System.system_time(:millisecond) 2330 | config = %{max_shrinking_steps: max_shrinking_steps} 2331 | 2332 | config = 2333 | case {Keyword.get(options, :max_runs), Keyword.get(options, :max_run_time, :infinity)} do 2334 | {:infinity, :infinity} -> 2335 | raise ArgumentError, 2336 | "both the :max_runs and :max_run_time options are set to :infinity. " <> 2337 | "This would result in an infinite loop. Be sure to set at least one of " <> 2338 | "these options to an integer to avoid this error. Note that :max_run_time " <> 2339 | "defaults to :infinity, so if you set \"max_runs: :infinity\" then you need " <> 2340 | "to explicitly set :max_run_time to an integer." 
2341 | 2342 | {max_runs, :infinity} -> 2343 | Map.put(config, :max_runs, max_runs || 100) 2344 | 2345 | {:infinity, max_run_time} -> 2346 | Map.put(config, :max_end_time, start_time + max_run_time) 2347 | 2348 | {max_runs, max_run_time} -> 2349 | Map.merge(config, %{max_end_time: start_time + max_run_time, max_runs: max_runs}) 2350 | end 2351 | 2352 | check_all(data, seed, size, fun, _runs = 0, start_time, config) 2353 | end 2354 | 2355 | defp check_all(_data, _seed, _size, _fun, _runs, current_time, %{max_end_time: end_time}) 2356 | when current_time >= end_time do 2357 | {:ok, %{}} 2358 | end 2359 | 2360 | defp check_all(_data, _seed, _size, _fun, runs, _current_time, %{max_runs: runs}) do 2361 | {:ok, %{}} 2362 | end 2363 | 2364 | defp check_all(data, seed, size, fun, runs, _current_time, config) do 2365 | {seed1, seed2} = split_seed(seed) 2366 | %LazyTree{root: root, children: children} = call(data, seed1, size) 2367 | 2368 | case fun.(root) do 2369 | {:ok, _term} -> 2370 | check_all(data, seed2, size + 1, fun, runs + 1, System.system_time(:millisecond), config) 2371 | 2372 | {:error, reason} -> 2373 | shrinking_result = 2374 | shrink_failure(shrink_initial_cont(children), nil, reason, fun, 0, config) 2375 | |> Map.put(:original_failure, reason) 2376 | |> Map.put(:successful_runs, runs) 2377 | 2378 | {:error, shrinking_result} 2379 | end 2380 | end 2381 | 2382 | defp shrink_initial_cont(nodes) do 2383 | &Enumerable.reduce(nodes, &1, fn elem, acc -> {:suspend, [elem | acc]} end) 2384 | end 2385 | 2386 | defp shrink_failure(_cont, _parent_cont, smallest, _fun, nodes_visited, %{ 2387 | max_shrinking_steps: nodes_visited 2388 | }) do 2389 | %{shrunk_failure: smallest, nodes_visited: nodes_visited} 2390 | end 2391 | 2392 | # We try to get the next element out of the current nodes. 
If the current 2393 | # nodes are finished, we check if this was the first check: if it was, it 2394 | # means we were visiting children of a node and this node has no children, so 2395 | # we recurse on the siblings of that node. Otherwise, we return the smallest 2396 | # failure. If we get the next nodes out of the current nodes, we check if it 2397 | # also fails: if it does, we "go down" and recurse on the children of that 2398 | # node but only if it has children, otherwise we move to the siblings. If it 2399 | # doesn't fail, we move to the siblings. 2400 | 2401 | defp shrink_failure(cont, parent_cont, smallest, fun, nodes_visited, config) do 2402 | case cont.({:cont, []}) do 2403 | # If this list of nodes is over, we backtrack to the parent nodes and 2404 | # keep shrinking. 2405 | {state, _acc} when state in [:halted, :done] and not is_nil(parent_cont) -> 2406 | shrink_failure(parent_cont, nil, smallest, fun, nodes_visited, config) 2407 | 2408 | # If this list of nodes is over and we don't have parent nodes, we 2409 | # return what we have now. 2410 | {state, _acc} when state in [:halted, :done] -> 2411 | %{shrunk_failure: smallest, nodes_visited: nodes_visited} 2412 | 2413 | {:suspended, [child], cont} -> 2414 | case fun.(child.root) do 2415 | # If this child passes the property, we don't go down anymore on this node 2416 | # anymore and move to the siblings (`cont` is the enumerable representing 2417 | # the rest of the children). 2418 | {:ok, _term} -> 2419 | shrink_failure(cont, parent_cont, smallest, fun, nodes_visited + 1, config) 2420 | 2421 | # If this child still fails, we update the smallest failure to this failure. 2422 | # Then, we go down to the children of this node and update the parent continuations 2423 | # so that if we encounter a success in the children then we can resume from the 2424 | # siblings of this child. 
2425 | {:error, reason} -> 2426 | shrink_failure( 2427 | shrink_initial_cont(child.children), 2428 | cont, 2429 | reason, 2430 | fun, 2431 | nodes_visited + 1, 2432 | config 2433 | ) 2434 | end 2435 | end 2436 | end 2437 | 2438 | defp new_seed({int1, int2, int3} = tuple) 2439 | when is_integer(int1) and is_integer(int2) and is_integer(int3) do 2440 | :rand.seed_s(@rand_algorithm, tuple) 2441 | end 2442 | 2443 | defp new_seed({_, _} = exported_seed) do 2444 | :rand.seed_s(exported_seed) 2445 | end 2446 | 2447 | @compile { 2448 | :inline, 2449 | split_seed: 1, order: 2, uniform_in_range: 3, lazy_tree: 2, lazy_tree_constant: 1 2450 | } 2451 | 2452 | defp split_seed(seed) do 2453 | {int, seed} = :rand.uniform_s(1_000_000_000, seed) 2454 | new_seed = :rand.seed_s(@rand_algorithm, {int, 0, 0}) 2455 | {new_seed, seed} 2456 | end 2457 | 2458 | defp order(left, right) when left > right, do: {right, left} 2459 | defp order(left, right), do: {left, right} 2460 | 2461 | defp uniform_in_range(left, right, seed) do 2462 | {random_int, next_seed} = :rand.uniform_s(right - left + 1, seed) 2463 | {random_int - 1 + left, next_seed} 2464 | end 2465 | 2466 | defp lazy_tree(root, children) do 2467 | %LazyTree{root: root, children: children} 2468 | end 2469 | 2470 | defp lazy_tree_constant(term) do 2471 | %LazyTree{root: term} 2472 | end 2473 | 2474 | # This is the implementation of Enumerable.reduce/3. It's here because it 2475 | # needs split_seed/1 and call/3 which are private. 
2476 | @doc false 2477 | def __reduce__(data, acc, fun) do 2478 | reduce(data, acc, fun, new_seed(:os.timestamp()), _initial_size = 1, _max_size = 100) 2479 | end 2480 | 2481 | defp reduce(_data, {:halt, acc}, _fun, _seed, _size, _max_size) do 2482 | {:halted, acc} 2483 | end 2484 | 2485 | defp reduce(data, {:suspend, acc}, fun, seed, size, max_size) do 2486 | {:suspended, acc, &reduce(data, &1, fun, seed, size, max_size)} 2487 | end 2488 | 2489 | defp reduce(data, {:cont, acc}, fun, seed, size, max_size) do 2490 | {seed1, seed2} = split_seed(seed) 2491 | %LazyTree{root: next} = call(data, seed1, size) 2492 | reduce(data, fun.(next, acc), fun, seed2, min(max_size, size + 1), max_size) 2493 | end 2494 | 2495 | ## Enumerable 2496 | 2497 | defimpl Enumerable do 2498 | def reduce(data, acc, fun), do: @for.__reduce__(data, acc, fun) 2499 | def count(_data), do: {:error, __MODULE__} 2500 | def member?(_data, _term), do: {:error, __MODULE__} 2501 | def slice(_data), do: {:error, __MODULE__} 2502 | end 2503 | 2504 | ## Inspect 2505 | 2506 | defimpl Inspect do 2507 | def inspect(%StreamData{generator: generator}, opts) do 2508 | case @protocol.inspect(generator, opts) do 2509 | "#Function<" <> rest -> "#StreamData<" <> rest 2510 | other -> "#StreamData<#{other}>" 2511 | end 2512 | end 2513 | end 2514 | end 2515 | -------------------------------------------------------------------------------- /lib/stream_data/lazy_tree.ex: -------------------------------------------------------------------------------- 1 | defmodule StreamData.LazyTree do 2 | @moduledoc false 3 | 4 | # A lazy tree structure. 5 | # 6 | # A lazy tree has a root (which is always a realized term) and a possibly lazy 7 | # enumerable of children (which are in turn lazy trees). This allows to create 8 | # infinitely deep trees where the children are a lazy stream that can be 9 | # realized on demand. 
10 | 11 | defstruct [:root, children: []] 12 | 13 | @type t(node) :: %__MODULE__{ 14 | root: node, 15 | children: Enumerable.t() 16 | } 17 | 18 | @doc """ 19 | Maps the given `fun` over the given `lazy_tree`. 20 | 21 | The given function `fun` is applied eagerly to the root of the given tree, 22 | and then lazily to the children of such tree. This means that mapping over a tree 23 | is a cheap operation because it only actually calls `fun` once until children 24 | are realized. 25 | 26 | ## Examples 27 | 28 | iex> tree = %StreamData.LazyTree{root: 1, children: []} 29 | iex> mapped_tree = StreamData.LazyTree.map(tree, & -&1) 30 | iex> mapped_tree.root 31 | -1 32 | 33 | """ 34 | @spec map(t(a), (a -> b)) :: t(b) when a: term(), b: term() 35 | def map(%__MODULE__{root: root, children: children}, fun) when is_function(fun, 1) do 36 | %__MODULE__{root: fun.(root), children: Stream.map(children, &map(&1, fun))} 37 | end 38 | 39 | @doc """ 40 | Maps and filters the given `lazy_tree` in one go using the given function `fun`. 41 | 42 | `fun` can return either `{:cont, mapped_term}` or `:skip`. If it returns 43 | `{:cont, mapped_term}`, then `mapped_term` will replace the original item passed 44 | to `fun` in the given tree. If it returns `:skip`, the tree the item passed to 45 | `fun` belongs to is filtered out of the resulting tree (the whole tree is filtered 46 | out, not just the root). 
47 | 48 | ## Examples 49 | 50 | iex> tree = %StreamData.LazyTree{root: 2, children: []} 51 | iex> {:ok, mapped_tree} = 52 | ...> StreamData.LazyTree.filter_map(tree, fn integer -> 53 | ...> if rem(integer, 2) == 0 do 54 | ...> {:cont, -integer} 55 | ...> else 56 | ...> :skip 57 | ...> end 58 | ...> end) 59 | iex> mapped_tree.root 60 | -2 61 | 62 | """ 63 | @spec filter_map(t(a), (a -> {:cont, b} | :skip)) :: {:ok, t(b)} | :error 64 | when a: term(), 65 | b: term() 66 | def filter_map(%__MODULE__{root: root, children: children}, fun) when is_function(fun, 1) do 67 | case fun.(root) do 68 | {:cont, new_root} -> 69 | new_children = 70 | Stream.flat_map(children, fn child -> 71 | case filter_map(child, fun) do 72 | {:ok, new_child} -> [new_child] 73 | :error -> [] 74 | end 75 | end) 76 | 77 | {:ok, %__MODULE__{root: new_root, children: new_children}} 78 | 79 | :skip -> 80 | :error 81 | end 82 | end 83 | 84 | @doc """ 85 | Takes a tree of trees and flattens it to a tree of elements in those trees. 86 | 87 | The tree is flattened so that the root and its children always come "before" 88 | (as in higher or more towards the left in the tree) the children of `tree`. 89 | 90 | ## Examples 91 | 92 | iex> tree = 93 | ...> %StreamData.LazyTree{root: 1, children: []} 94 | ...> |> StreamData.LazyTree.map(&%StreamData.LazyTree{root: &1, children: []}) 95 | ...> |> StreamData.LazyTree.flatten() 96 | iex> tree.root 97 | 1 98 | 99 | """ 100 | @spec flatten(t(t(a))) :: t(a) when a: term() 101 | def flatten(%__MODULE__{root: child, children: children}) do 102 | %__MODULE__{root: child_root, children: child_children} = child 103 | 104 | %__MODULE__{ 105 | root: child_root, 106 | children: Stream.concat(child_children, Stream.map(children, &flatten/1)) 107 | } 108 | end 109 | 110 | @doc """ 111 | Filters element out of `tree` that don't satisfy the given `predicate`. 
112 | 113 | When an element of `tree` doesn't satisfy `predicate`, the whole tree whose 114 | root is that element is filtered out of the original `tree`. 115 | 116 | Note that this function does not apply `predicate` to the root of `tree`, just 117 | to its children (and recursively down). This behaviour exists because if the 118 | root of `tree` did not satisfy `predicate`, the return value couldn't be a 119 | tree at all. 120 | 121 | ## Examples 122 | 123 | iex> children = [%StreamData.LazyTree{root: 3, children: []}] 124 | iex> tree = %StreamData.LazyTree{root: 2, children: children} 125 | iex> filtered_tree = StreamData.LazyTree.filter(tree, &(rem(&1, 2) == 0)) 126 | iex> filtered_tree.root 127 | 2 128 | iex> Enum.to_list(filtered_tree.children) 129 | [] 130 | 131 | """ 132 | @spec filter(t(a), (a -> as_boolean(term()))) :: t(a) when a: term() 133 | def filter(%__MODULE__{children: children} = tree, predicate) when is_function(predicate, 1) do 134 | children = 135 | Stream.flat_map(children, fn %__MODULE__{root: child_root} = child -> 136 | if predicate.(child_root) do 137 | [filter(child, predicate)] 138 | else 139 | [] 140 | end 141 | end) 142 | 143 | %__MODULE__{tree | children: children} 144 | end 145 | 146 | @doc """ 147 | Zips a list of trees into a single tree. 148 | 149 | Each element in the resulting tree is a list of as many elements as there are 150 | trees in `trees`. Each of these elements is going to be a list where each element 151 | comes from the corresponding tree in `tree`. All permutations of children are 152 | generated (lazily). 
153 | 154 | ## Examples 155 | 156 | iex> trees = [%StreamData.LazyTree{root: 1, children: []}, %StreamData.LazyTree{root: 2, children: []}] 157 | iex> StreamData.LazyTree.zip(trees).root 158 | [1, 2] 159 | 160 | """ 161 | @spec zip([t(a)]) :: t([a]) when a: term() 162 | def zip(trees) do 163 | %__MODULE__{ 164 | root: Enum.map(trees, fn %{root: root} -> root end), 165 | children: 166 | trees 167 | |> permutations() 168 | |> Stream.map(&zip/1) 169 | } 170 | end 171 | 172 | defp permutations(trees) when is_list(trees) do 173 | trees 174 | |> Stream.with_index() 175 | |> Stream.flat_map(fn {%__MODULE__{children: children}, index} -> 176 | Enum.map(children, &List.replace_at(trees, index, &1)) 177 | end) 178 | end 179 | 180 | defimpl Inspect do 181 | import Inspect.Algebra 182 | 183 | def inspect(tree, options) do 184 | children = if Enum.empty?(tree.children), do: "[]", else: "[...]" 185 | concat(["#LazyTree<", to_doc(tree.root, options), ", #{children}>"]) 186 | end 187 | end 188 | end 189 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.Mixfile do 2 | use Mix.Project 3 | 4 | @version "1.2.0" 5 | @repo_url "https://github.com/whatyouhide/stream_data" 6 | 7 | def project() do 8 | [ 9 | app: :stream_data, 10 | version: @version, 11 | elixir: "~> 1.12", 12 | start_permanent: Mix.env() == :prod, 13 | deps: deps(), 14 | 15 | # Tests 16 | test_coverage: [tool: ExCoveralls], 17 | 18 | # Docs 19 | name: "StreamData", 20 | docs: [ 21 | source_ref: "v#{@version}", 22 | main: "StreamData", 23 | source_url: @repo_url 24 | ], 25 | 26 | # Hex 27 | description: "Data generation and property-based testing for Elixir", 28 | package: [ 29 | maintainers: ["Andrea Leopardi"], 30 | licenses: ["Apache-2.0"], 31 | links: %{"GitHub" => @repo_url, "Sponsor" => "https://github.com/sponsors/whatyouhide"} 32 | ], 33 | 34 | # Dialyxir 35 | 
dialyzer: [ 36 | plt_add_deps: :apps_direct, 37 | plt_file: {:no_warn, "priv/plts/project.plt"}, 38 | plt_add_apps: [:ex_unit] 39 | ] 40 | ] 41 | end 42 | 43 | def application() do 44 | [ 45 | extra_applications: [], 46 | env: [ 47 | initial_size: 1, 48 | max_runs: 100, 49 | max_run_time: :infinity, 50 | max_shrinking_steps: 100, 51 | inspect_opts: [] 52 | ] 53 | ] 54 | end 55 | 56 | defp deps() do 57 | [ 58 | # Dev/test dependencies. 59 | {:dialyxir, "~> 1.3", only: [:dev, :test], runtime: false}, 60 | {:ex_doc, "~> 0.29", only: :dev}, 61 | {:excoveralls, "~> 0.18.0", only: :test} 62 | ] 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "dialyxir": {:hex, :dialyxir, "1.4.5", "ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, 3 | "earmark_parser": {:hex, :earmark_parser, "1.4.43", "34b2f401fe473080e39ff2b90feb8ddfeef7639f8ee0bbf71bb41911831d77c5", [:mix], [], "hexpm", "970a3cd19503f5e8e527a190662be2cee5d98eed1ff72ed9b3d1a3d466692de8"}, 4 | "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, 5 | "ex_doc": {:hex, :ex_doc, "0.37.0", "970f92b39e62c460aa8a367508e938f5e4da6e2ff3eaed3f8530b25870f45471", [:mix], [{:earmark_parser, "~> 1.4.42", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: 
"hexpm", optional: true]}], "hexpm", "b0ee7f17373948e0cf471e59c3a0ee42f3bd1171c67d91eb3626456ef9c6202c"}, 6 | "excoveralls": {:hex, :excoveralls, "0.18.5", "e229d0a65982613332ec30f07940038fe451a2e5b29bce2a5022165f0c9b157e", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "523fe8a15603f86d64852aab2abe8ddbd78e68579c8525ae765facc5eae01562"}, 7 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 8 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 9 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 10 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 11 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 12 | } 13 | -------------------------------------------------------------------------------- /test/ex_unit_properties_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule ExUnitPropertiesTest do 2 | use ExUnit.Case 3 | use ExUnitProperties 4 | 5 | describe "gen all" do 6 | test "supports generation and filtering clauses" do 7 | filtered_value = 34 8 | 9 | data = 10 | gen all [_ | _] = list <- list_of(integer(), min_length: 5), 11 | elem <- member_of(list), 12 | elem != filtered_value, 13 | elem_not_filtered = elem do 14 | {Integer.to_string(elem_not_filtered), list} 15 | end 16 | 17 | # Let's make sure that the minimum size is high enough that our filtered element 18 | # is not common at all. 19 | data = scale(data, &max(&1, 20)) 20 | 21 | check all {string, list} <- data do 22 | assert is_binary(string) 23 | assert is_list(list) 24 | assert String.to_integer(string) in list 25 | assert String.to_integer(string) != filtered_value 26 | end 27 | end 28 | 29 | test "treats non-matching patterns in <- clauses as filters" do 30 | data = 31 | gen all :non_boolean <- boolean() do 32 | :ok 33 | end 34 | 35 | assert_raise StreamData.FilterTooNarrowError, fn -> 36 | Enum.take(data, 1) 37 | end 38 | end 39 | 40 | test "supports do keyword syntax" do 41 | gen(all _boolean <- boolean(), do: :ok) 42 | 43 | data = 44 | gen( 45 | all string <- binary(), 46 | list <- list_of(integer()), 47 | do: {string, list} 48 | ) 49 | 50 | check all {string, list} <- data do 51 | assert is_binary(string) 52 | assert is_list(list) 53 | end 54 | end 55 | 56 | test "errors out if the first clause is not a generator" do 57 | message = 58 | "\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <> 59 | "got: a = 1" 60 | 61 | assert_raise ArgumentError, message, fn -> 62 | Code.compile_quoted( 63 | quote do 64 | gen(all a = 1, _ <- integer, do: :ok) 65 | end 66 | ) 67 | end 68 | 69 | message = 70 | "\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <> 71 | "got: true" 72 | 73 | assert_raise ArgumentError, message, fn 
-> 74 | Code.compile_quoted( 75 | quote do 76 | gen(all true, _ <- integer, do: :ok) 77 | end 78 | ) 79 | end 80 | end 81 | end 82 | 83 | describe "property" do 84 | property "supports rescue" do 85 | raise "some error" 86 | rescue 87 | exception in [RuntimeError] -> 88 | assert Exception.message(exception) == "some error" 89 | end 90 | 91 | property "supports catch" do 92 | throw(:some_error) 93 | catch 94 | :throw, term -> 95 | assert term == :some_error 96 | end 97 | end 98 | 99 | describe "check all" do 100 | property "can do assignment" do 101 | {:ok, counter} = Agent.start_link(fn -> 0 end) 102 | 103 | check all i <- integer(), string_i = Integer.to_string(i), max_runs: 10 do 104 | Agent.update(counter, &(&1 + 1)) 105 | assert String.to_integer(string_i) == i 106 | end 107 | 108 | assert Agent.get(counter, & &1) == 10 109 | end 110 | 111 | property "runs the number of specified times" do 112 | {:ok, counter} = Agent.start_link(fn -> 0 end) 113 | 114 | check all :ok <- :ok, max_runs: 10 do 115 | Agent.update(counter, &(&1 + 1)) 116 | :ok 117 | end 118 | 119 | assert Agent.get(counter, & &1) == 10 120 | end 121 | 122 | property "runs for the specified number of milliseconds" do 123 | {:ok, counter} = Agent.start_link(fn -> 0 end) 124 | 125 | check all :ok <- :ok, max_runs: :infinity, max_run_time: 100 do 126 | Process.sleep(25) 127 | Agent.update(counter, &(&1 + 1)) 128 | :ok 129 | end 130 | 131 | assert Agent.get(counter, & &1) in 3..5 132 | end 133 | 134 | property "ends at :max_runs if it ends before :max_run_time" do 135 | {:ok, counter} = Agent.start_link(fn -> 0 end) 136 | 137 | check all :ok <- :ok, max_runs: 5, max_run_time: 500 do 138 | Process.sleep(1) 139 | Agent.update(counter, &(&1 + 1)) 140 | :ok 141 | end 142 | 143 | assert Agent.get(counter, & &1) == 5 144 | end 145 | 146 | property "ends at :max_run_time if it ends before :max_runs" do 147 | {:ok, counter} = Agent.start_link(fn -> 0 end) 148 | 149 | check all :ok <- :ok, max_runs: 100_000, 
max_run_time: 100 do 150 | Process.sleep(25) 151 | Agent.update(counter, &(&1 + 1)) 152 | :ok 153 | end 154 | 155 | assert Agent.get(counter, & &1) in 3..5 156 | end 157 | 158 | test "supports an :initial_seed option" do 159 | {:ok, agent1} = Agent.start_link(fn -> nil end) 160 | {:ok, agent2} = Agent.start_link(fn -> nil end) 161 | 162 | check all list <- list_of(integer()), initial_seed: 1, max_runs: 1, initial_size: 10 do 163 | Agent.update(agent1, fn _ -> list end) 164 | end 165 | 166 | check all list <- list_of(integer()), initial_seed: 1, max_runs: 1, initial_size: 10 do 167 | Agent.update(agent2, fn _ -> list end) 168 | end 169 | 170 | assert Agent.get(agent1, & &1) == Agent.get(agent2, & &1) 171 | end 172 | 173 | test "raises an error instead of running an infinite loop" do 174 | message = ~r/both the :max_runs and :max_run_time options are set to :infinity/ 175 | 176 | assert_raise ArgumentError, message, fn -> 177 | check all :ok <- :ok, max_runs: :infinity, max_run_time: :infinity do 178 | :ok 179 | end 180 | end 181 | end 182 | 183 | property "works with errors that are not assertion errors" do 184 | assert_raise ExUnitProperties.Error, fn -> 185 | check all tuple <- {:ok, nil} do 186 | :ok = tuple 187 | end 188 | end 189 | end 190 | 191 | property "shrinking" do 192 | assert_raise ExUnit.AssertionError, fn -> 193 | check all list <- list_of(integer()) do 194 | assert 5 not in list 195 | end 196 | end 197 | end 198 | 199 | test "supports do keyword syntax" do 200 | check all int <- integer(), do: assert(is_integer(int)) 201 | 202 | check all a <- binary(), 203 | b <- binary(), 204 | do: assert(String.starts_with?(a <> b, a)) 205 | 206 | check all int1 <- integer(), 207 | int2 <- integer(), 208 | sum = abs(int1) + abs(int2), 209 | max_runs: 25, 210 | do: assert(sum >= int1) 211 | end 212 | 213 | test "do keyword syntax passes in options" do 214 | {:ok, counter} = Agent.start_link(fn -> 0 end) 215 | 216 | check all int <- integer(), 217 | max_runs: 25, 
218 | do: Agent.update(counter, &(&1 + 1)) && assert(is_integer(int)) 219 | 220 | assert Agent.get(counter, & &1) == 25 221 | end 222 | 223 | test "errors out if the first clause is not a generator" do 224 | message = 225 | "\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <> 226 | "got: a = 1" 227 | 228 | assert_raise ArgumentError, message, fn -> 229 | Code.compile_quoted( 230 | quote do 231 | gen(all a = 1, _ <- integer, do: :ok) 232 | end 233 | ) 234 | end 235 | 236 | message = 237 | "\"gen all\" and \"check all\" clauses must start with a generator (<-) clause, " <> 238 | "got: true" 239 | 240 | assert_raise ArgumentError, message, fn -> 241 | Code.compile_quoted( 242 | quote do 243 | gen(all true, _ <- integer, do: :ok) 244 | end 245 | ) 246 | end 247 | end 248 | end 249 | 250 | if Version.compare(System.version(), "1.6.0") in [:eq, :gt] do 251 | describe "pick/1" do 252 | test "when there's a random seed thanks to ExUnit setting it up" do 253 | integer = ExUnitProperties.pick(integer()) 254 | assert is_integer(integer) 255 | assert integer in -100..100 256 | end 257 | 258 | test "raises when there's no random seed in the process dictionary" do 259 | {_pid, ref} = 260 | spawn_monitor(fn -> 261 | message = ~r/the random seed is not set in the current process/ 262 | 263 | assert_raise RuntimeError, message, fn -> 264 | ExUnitProperties.pick(integer()) 265 | end 266 | end) 267 | 268 | assert_receive {:DOWN, ^ref, _, _, _} 269 | end 270 | end 271 | end 272 | end 273 | -------------------------------------------------------------------------------- /test/stdlib/enum_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.EnumTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | @moduletag :stdlib 6 | 7 | # TODO: Make this unconditional when we depend on Elixir 1.13+. 
8 | if function_exported?(Enum, :slide, 3) do 9 | describe "Enum.slide/3" do 10 | property "handles negative indices" do 11 | check all( 12 | list <- StreamData.list_of(StreamData.integer(), max_length: 100), 13 | {range, insertion_idx} <- slide_spec(list) 14 | ) do 15 | length = length(list) 16 | 17 | negative_range = (range.first - length)..(range.last - length)//1 18 | 19 | assert Enum.slide(list, negative_range, insertion_idx) == 20 | Enum.slide(list, range, insertion_idx) 21 | end 22 | end 23 | 24 | property "matches behavior for lists, ranges, and sets" do 25 | range = 0..31 26 | list = Enum.to_list(range) 27 | set = MapSet.new(list) 28 | 29 | check all({slide_range, insertion_idx} <- slide_spec(list)) do 30 | # As of Elixir 1.13, the map implementation underlying a MapSet 31 | # maintains the pairs in order below 32 elements. 32 | # If this ever stops being true, we can keep the test for 33 | # list vs. range but drop the test for list vs. set. 34 | slide = &Enum.slide(&1, slide_range, insertion_idx) 35 | assert slide.(list) == slide.(range) 36 | assert slide.(list) == slide.(set) 37 | end 38 | end 39 | 40 | property "matches behavior for lists of pairs and maps" do 41 | # As of Elixir 1.13, the map implementation maintains the pairs 42 | # in order below 32 elements. 43 | # If this ever stops being true, we can drop this test. 44 | range = 0..31 45 | zipped_list = Enum.zip(range, range) 46 | map = Map.new(zipped_list) 47 | 48 | check all({slide_range, insertion_idx} <- slide_spec(zipped_list)) do 49 | slide = &Enum.slide(&1, slide_range, insertion_idx) 50 | assert slide.(zipped_list) == slide.(map) 51 | end 52 | end 53 | 54 | # Generator for valid slides on the input list 55 | # Generates values of the form: 56 | # {range_to_slide, insertion_idx} 57 | # ...such that the two arguments are always valid on the given list. 
58 | defp slide_spec(list) do 59 | max_idx = max(0, length(list) - 1) 60 | 61 | StreamData.bind(StreamData.integer(0..max_idx), fn first -> 62 | StreamData.bind(StreamData.integer(first..max_idx), fn last -> 63 | allowable_insertion_idxs_at_end = 64 | if last < max_idx do 65 | [StreamData.integer((last + 1)..max_idx)] 66 | else 67 | [] 68 | end 69 | 70 | allowable_insertion_idxs = 71 | [StreamData.integer(0..first)] ++ allowable_insertion_idxs_at_end 72 | 73 | StreamData.bind(one_of(allowable_insertion_idxs), fn insertion_idx -> 74 | StreamData.constant({first..last, insertion_idx}) 75 | end) 76 | end) 77 | end) 78 | end 79 | end 80 | end 81 | 82 | if Version.match?(System.version(), "~> 1.14") do 83 | # From https://github.com/elixir-lang/elixir/pull/12043. 84 | property "Enum.slice/2 is consistent for list, range and stream inputs" do 85 | check all count <- enum_count(), 86 | first <- integer(), 87 | last <- integer(), 88 | step <- positive_integer() do 89 | range = first..last//step 90 | 91 | enum_consistency_spec(count, fn enumerable -> 92 | Enum.slice(enumerable, range) 93 | end) 94 | end 95 | end 96 | end 97 | 98 | if Version.match?(System.version(), "~> 1.12") do 99 | property "Enum.take/2 is consistent for list, range and stream inputs" do 100 | check all count <- enum_count(), 101 | amount <- integer() do 102 | enum_consistency_spec(count, fn enumerable -> 103 | Enum.take(enumerable, amount) 104 | end) 105 | end 106 | end 107 | 108 | # From https://github.com/elixir-lang/elixir/pull/12040. 109 | property "Enum.drop/2 is consistent for list, range and stream inputs" do 110 | check all count <- enum_count(), 111 | amount <- integer() do 112 | enum_consistency_spec(count, fn enumerable -> 113 | Enum.drop(enumerable, amount) 114 | end) 115 | end 116 | end 117 | 118 | # From https://github.com/elixir-lang/elixir/pull/10886. 
119 | property "Enum.dedup/1 is consistent for list, range and stream inputs" do 120 | check all count <- enum_count() do 121 | enum_consistency_spec(count, &Enum.dedup/1) 122 | end 123 | end 124 | 125 | defp enum_consistency_spec(count, fun) do 126 | range = 1..count//1 127 | list = Enum.to_list(range) 128 | stream = Stream.map(range, & &1) 129 | 130 | result = fun.(range) 131 | assert fun.(list) == result 132 | assert fun.(stream) == result 133 | end 134 | 135 | defp enum_count do 136 | # Creating arbitrary big enums will make tests needlessly slow. 137 | # Finding edge cases doesn't require big sizes. 138 | non_negative_integer() |> resize(100) 139 | end 140 | end 141 | end 142 | -------------------------------------------------------------------------------- /test/stdlib/kernel_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.KernelTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | @moduletag :stdlib 6 | 7 | if Version.match?(System.version(), "~> 1.14") do 8 | # From https://github.com/elixir-lang/elixir/pull/12045. 
9 | property "binary_slice/2 is always consistent with Enum.slice/2" do 10 | check all binary <- binary(), 11 | start <- integer(), 12 | stop <- integer(), 13 | step <- positive_integer() do 14 | expected = 15 | binary 16 | |> :binary.bin_to_list() 17 | |> Enum.slice(start..stop//step) 18 | |> :binary.list_to_bin() 19 | 20 | assert binary_slice(binary, start..stop//step) == expected 21 | end 22 | end 23 | 24 | property "binary_slice/3 is always consistent with Enum.slice/3" do 25 | check all binary <- binary(), start <- integer(), amount <- non_negative_integer() do 26 | expected = 27 | binary 28 | |> :binary.bin_to_list() 29 | |> Enum.slice(start, amount) 30 | |> :binary.list_to_bin() 31 | 32 | assert binary_slice(binary, start, amount) == expected 33 | end 34 | end 35 | end 36 | end 37 | -------------------------------------------------------------------------------- /test/stdlib/keyword_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.KeywordTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | @moduletag :stdlib 6 | 7 | # From https://github.com/elixir-lang/elixir/issues/7420 8 | property "Keyword.merge/2 and Keyword.merge/3 keeps duplicate entries from rhs" do 9 | check all list <- list_of({atom(:alphanumeric), integer()}) do 10 | double = list ++ list 11 | assert Keyword.merge(double, list) == list 12 | assert Keyword.merge(list, double) == double 13 | assert Keyword.merge(double, list, fn _, _, v2 -> v2 end) == list 14 | assert Keyword.merge(list, double, fn _, _, v2 -> v2 end) == double 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /test/stdlib/string_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.StringTest do 2 | use ExUnit.Case, async: true 3 | use ExUnitProperties 4 | 5 | @moduletag :stdlib 6 | 7 | # From 
https://github.com/elixir-lang/elixir/pull/6559 8 | property "String.replace* functions replace the whole string" do 9 | check all string <- string(:printable), 10 | replacement <- string(:printable) do 11 | assert String.replace(string, string, replacement) == replacement 12 | assert String.replace_prefix(string, string, replacement) == replacement 13 | assert String.replace_suffix(string, string, replacement) == replacement 14 | end 15 | end 16 | 17 | if Version.match?(System.version(), "~> 1.6") do 18 | # From https://github.com/elixir-lang/elixir/issues/7023. 19 | property "String.replace/3 is equivalent to String.split/1 + Enum.join/2" do 20 | check all string <- string(:printable), 21 | pattern <- string(:printable), 22 | replacement <- string(:printable) do 23 | assert String.replace(string, pattern, replacement) == 24 | string |> String.split(pattern) |> Enum.join(replacement) 25 | end 26 | end 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /test/stream_data/lazy_tree_test.exs: -------------------------------------------------------------------------------- 1 | defmodule StreamData.LazyTreeTest do 2 | use ExUnit.Case, async: true 3 | 4 | alias StreamData.LazyTree 5 | 6 | doctest LazyTree 7 | 8 | test "map/2" do 9 | tree = new(1, [constant(2), constant(3)]) 10 | mapped_tree = LazyTree.map(tree, &Integer.to_string/1) 11 | expected = new("1", [constant("2"), constant("3")]) 12 | 13 | assert realize_tree(mapped_tree) == realize_tree(expected) 14 | end 15 | 16 | test "filter_map/2" do 17 | require Integer 18 | 19 | tree = new(1, [constant(2), constant(3)]) 20 | 21 | {:ok, mapped_tree} = 22 | LazyTree.filter_map(tree, fn int -> 23 | if Integer.is_odd(int) do 24 | {:cont, Integer.to_string(int)} 25 | else 26 | :skip 27 | end 28 | end) 29 | 30 | expected = new("1", [constant("3")]) 31 | 32 | assert realize_tree(mapped_tree) == realize_tree(expected) 33 | end 34 | 35 | test "flatten/1" do 36 | tree1 = 
new(:root1, [constant(:child1_a), constant(:child1_b)])
    tree2 = new(:root2, [constant(:child2_a), constant(:child2_b)])
    tree = new(tree1, [constant(tree2)])

    assert %LazyTree{} = joined_tree = LazyTree.flatten(tree)

    expected =
      new(:root1, [
        constant(:child1_a),
        constant(:child1_b),
        new(:root2, [constant(:child2_a), constant(:child2_b)])
      ])

    assert realize_tree(joined_tree) == realize_tree(expected)
  end

  test "filter/2" do
    tree =
      new(1, [
        # Here only an inner child is removed since it doesn't pass the filter
        new(1, [constant(-1), constant(2)]),
        # This whole branch is cut since the root doesn't pass the filter
        new(-1, [constant(1), constant(2)])
      ])

    filtered_tree = LazyTree.filter(tree, &(&1 > 0))

    expected = new(1, [new(1, [constant(2)])])

    assert realize_tree(filtered_tree) == realize_tree(expected)
  end

  test "zip/1" do
    tree1 = new(11, [new(13, [constant(14)])])
    tree2 = new(21, [constant(22), constant(23)])

    assert %LazyTree{} = zipped_tree = LazyTree.zip([tree1, tree2])

    assert realize_tree(zipped_tree).root == [11, 21]
  end

  test "implementation of the Inspect protocol" do
    assert inspect(constant(:root)) == "#LazyTree<:root, []>"
    assert inspect(new(:root, [1, 2, 3])) == "#LazyTree<:root, [...]>"
  end

  # Forces every (lazy) child of the tree so trees can be compared with ==.
  defp realize_tree(tree) do
    %{tree | children: Enum.map(tree.children, &realize_tree/1)}
  end

  # Builds a tree node with the given root and list of child trees.
  defp new(root, children) do
    %LazyTree{root: root, children: children}
  end

  # Builds a leaf node (no children) holding `term`.
  defp constant(term) do
    %LazyTree{root: term}
  end
end
--------------------------------------------------------------------------------
/test/stream_data_test.exs:
--------------------------------------------------------------------------------
# TODO: test shrinking

defmodule StreamDataTest do
  use ExUnit.Case, async: true
  use ExUnitProperties

  test "implementation of the Enumerable protocol" do
    values = Enum.take(Stream.zip(integer(), boolean()), 10)

    Enum.each(values, fn {int, boolean} ->
      assert is_integer(int)
      assert is_boolean(boolean)
    end)
  end

  test "implementation of the Inspect protocol" do
    data = constant(:foo)
    assert inspect(data) =~ ~r/\A#StreamData<\d{2}\./
  end

  describe "terms used as generators" do
    property "atoms" do
      check all term <- :foo do
        assert term == :foo
      end
    end

    property "tuples" do
      check all {integer, boolean} <- {integer(), boolean()} do
        assert is_integer(integer)
        assert is_boolean(boolean)
      end
    end

    property "nested generator terms" do
      check all {atom, boolean} <- {:ok, boolean()} do
        assert atom == :ok
        assert is_boolean(boolean)
      end
    end
  end

  test "error message on invalid generators" do
    message = ~r/expected a generator, which can be a %StreamData{} struct/

    assert_raise ArgumentError, message, fn ->
      Enum.take(one_of([1, 2, 3]), 1)
    end
  end

  property "constant/1" do
    check all term <- constant(:term) do
      assert term == :term
    end
  end

  property "map/1" do
    data = map(integer(1..5), &(-&1))

    check all int <- data do
      assert int in -1..-5//-1
    end
  end

  describe "bind_filter/2" do
    property "with a function of arity 1" do
      require Integer

      bind_filter_fun = fn int ->
        if Integer.is_even(int), do: {:cont, constant(int)}, else: :skip
      end

      data = bind_filter(integer(1..5), bind_filter_fun, 1000)

      check all int <- data do
        assert int in 1..5
        assert Integer.is_even(int)
      end
    end

    property "with a function of arity 2" do
      require Integer

      bind_filter_fun = fn _term, tries_left when is_integer(tries_left) ->
        raise "tries_left = #{tries_left}"
      end

      data = bind_filter(boolean(), bind_filter_fun, _tries = 5)

      assert_raise RuntimeError, "tries_left = 5", fn ->
        Enum.take(data, 1)
      end
    end
  end

  property "bind/2" do
    data = bind(integer(1..5), &constant(-&1))

    check all int <- data do
      assert int in -1..-5//-1
    end
  end

  describe "filter/2,3" do
    test "filters out terms that fail the predicate" do
      values =
        integer(0..10000)
        |> filter(&(&1 > 0))
        |> Enum.take(1000)

      assert length(values) <= 1000

      Enum.each(values, fn value ->
        assert value in 0..10000
      end)
    end

    test "raises an error when too many consecutive elements fail the predicate" do
      data = filter(constant(:term), &is_binary/1, 10)

      exception =
        assert_raise StreamData.FilterTooNarrowError, fn ->
          Enum.take(data, 1)
        end

      message = Exception.message(exception)

      assert message =~ "too many consecutive elements (10 elements in this case)"
      assert message =~ "The last element to be filtered out was: :term."
    end
  end

  property "integer/1 for ranges without steps" do
    check all int <- integer(-10..10) do
      assert int in -10..10
    end
  end

  # Range step syntax was introduced in Elixir v1.12.0
  unless Version.compare(System.version(), "1.12.0") == :lt do
    property "integer/1 for a range with an even step only produces even numbers" do
      check all int <- integer(%Range{first: 0, last: 100, step: 2}) do
        require Integer
        assert Integer.is_even(int)
      end
    end

    property "integer/1 for descending ranges with negative steps" do
      check all int <- integer(%Range{first: 100, last: 5, step: -10}) do
        require Integer
        assert int in 10..100
        assert rem(int, 10) == 0
      end
    end

    property "integer/1 raises on empty ranges" do
      check all lower <- positive_integer(),
                offset <- positive_integer() do
        assert_raise(RuntimeError, fn ->
          StreamData.integer(%Range{first: lower + offset, last: lower, step: 1})
        end)
      end
    end
  end

  property "resize/2" do
    generator = fn seed, size ->
      case :rand.uniform_s(2, seed) do
        {1, _seed} -> %StreamData.LazyTree{root: size}
        {2, _seed} -> %StreamData.LazyTree{root: -size}
      end
    end

    check all int <- resize(%StreamData{generator: generator}, 10) do
      assert int in [-10, 10]
    end
  end

  property "sized/1" do
    data =
      sized(fn size ->
        bind(boolean(), fn bool ->
          if bool do
            constant(size)
          else
            constant(-size)
          end
        end)
      end)

    check all int <- data do
      assert is_integer(int)
    end
  end

  test "seeded/2" do
    data = seeded(integer(), _seed = 1)
    assert Enum.take(data, 100) == Enum.take(data, 100)
  end

  property "scale/2" do
    size_data = sized(&constant(&1))
    data = scale(size_data, fn size -> size + 1000 end)

    check all int <- data do
      assert int >= 1000
    end
  end

  test "frequency/1" do
    data =
      frequency([
        {1, constant(:small_chance)},
        {100, constant(:big_chance)}
      ])

    values = Enum.take(data, 1000)

    assert :small_chance in values
    assert :big_chance in values
    assert Enum.count(values, &(&1 == :small_chance)) < Enum.count(values, &(&1 == :big_chance))
  end

  property "one_of/1" do
    check all int <- one_of([integer(1..5), integer(-1..-5//-1)]) do
      assert int in 1..5 or int in -1..-5//-1
    end
  end

  property "member_of/1" do
    check all elem <- member_of([1, 2, 3]) do
      assert elem in [1, 2, 3]
    end

    check all elem <- member_of(MapSet.new([1, 2, 3])) do
      assert elem in [1, 2, 3]
    end

    assert_raise RuntimeError, "cannot generate elements from an empty enumerable", fn ->
      Enum.take(member_of([]), 1)
    end
  end

  property "repeatedly/1" do
    check all value <- repeatedly(&System.unique_integer/0) do
      assert is_integer(value)
    end

    values = Enum.take(repeatedly(&System.unique_integer/0), 20)
    assert Enum.uniq(values) == values
  end

  property "boolean/0" do
    check all bool <- boolean() do
      assert is_boolean(bool)
    end
  end

  property "integer/0" do
    check all int <- integer() do
      assert is_integer(int)
      assert abs(int) < 1000
    end
  end

  describe "positive_integer/0" do
    property "without bounds" do
      check all int <- positive_integer() do
        assert is_integer(int)
        assert int in 1..1000
      end
    end

    property "works when resized to 0" do
      check all int <- resize(positive_integer(), 0), max_runs: 3 do
        assert int == 1
      end
    end
  end

  describe "non_negative_integer/0" do
    property "without bounds" do
      check all int <- non_negative_integer() do
        assert is_integer(int)
        assert int in 0..1000
      end
    end

    property "works when resized to 0" do
      check all int <- resize(non_negative_integer(), 0), max_runs: 3 do
        assert int == 0
      end
    end
  end

  describe "float/1" do
    property "without bounds" do
      check all float <- float() do
        assert is_float(float)
      end
    end

    property "with a :min option" do
      check all float <- float(min: 1.23) do
        assert is_float(float)
        assert float >= 1.23
      end

      check all float <- float(min: -10.0) do
        assert is_float(float)
        assert float >= -10.0
      end
    end

    property "with a :max option" do
      check all float <- float(max: 1.23) do
        assert is_float(float)
        assert float <= 1.23
      end

      check all float <- float(max: -10.0) do
        assert is_float(float)
        assert float <= -10.0
      end
    end

    property "with both a :min and a :max option" do
      check all float <- float(min: -1.12, max: 4.01) do
        assert is_float(float)
        assert float >= -1.12 and float <= 4.01
      end
    end
  end

  property "byte/0" do
    check all value <- byte() do
      assert value in 0..255
    end
  end

  describe "binary/1" do
    property "generates binaries" do
      check all value <- resize(binary(), 10) do
        assert is_binary(value)
        assert byte_size(value) in 0..10
      end
    end

    property "with length-related options" do
      check all value <- binary(length: 3) do
        assert is_binary(value)
        assert byte_size(value) == 3
      end
    end
  end

  describe "bitstring/1" do
    property "generates bitstrings" do
      check all value <- resize(bitstring(), 10) do
        assert is_bitstring(value)
        assert bit_size(value) in 0..10
      end
    end

    property "with length-related options" do
      check all value <- bitstring(length: 3) do
        assert is_bitstring(value)
        assert bit_size(value) == 3
      end
    end
  end

  describe "list_of/2" do
    property "generates lists" do
      check all value <- list_of(constant(:term)) do
        assert is_list(value)
        assert Enum.all?(value, &(&1 == :term))
      end
    end

    property "with the :length option as a integer" do
      check all value <- list_of(constant(:term), length: 10) do
        assert value == List.duplicate(:term, 10)
      end
    end

    property "with the :length option as a min..max range" do
      check all value <- list_of(constant(:term), length: 5..10) do
        assert Enum.all?(value, &(&1 == :term))
        assert length(value) in 5..10
      end

      check all value <- resize(list_of(constant(:term), length: 5..10), 4) do
        assert value == List.duplicate(:term, 5)
      end
    end

    property "with the :min_length option set" do
      check all value <- list_of(constant(:term), min_length: 5) do
        assert Enum.all?(value, &(&1 == :term))
        assert length(value) >= 5
      end
    end

    property "with the :max_length option set" do
      check all value <- list_of(constant(:term), max_length: 5) do
        assert Enum.all?(value, &(&1 == :term))
        assert length(value) <= 5
      end
    end

    test "with invalid options" do
      data = constant(:term)

      message = ":length must be a positive integer or a range of positive integers, got: :oops"
      assert_raise ArgumentError, message, fn -> list_of(data, length: :oops) end

      message = ":min_length must be a positive integer, got: :oops"
      assert_raise ArgumentError, message, fn -> list_of(data, min_length: :oops) end

      message = ":max_length must be a positive integer, got: :oops"
      assert_raise ArgumentError, message, fn -> list_of(data, max_length: :oops) end
    end
  end

  describe "uniq_list_of/1" do
    property "without options" do
      check all list <- uniq_list_of(integer(1..10000)) do
        assert Enum.uniq(list) == list
      end
    end

    property "with the :uniq_fun option" do
      check all list <- uniq_list_of(integer(-10000..10000), uniq_fun: &abs/1) do
        assert Enum.uniq_by(list, &abs/1) == list
      end
    end

    property "with length-related options" do
      check all list <- uniq_list_of(integer(), min_length: 3, max_tries: 1000) do
        assert Enum.uniq(list) == list
        assert length(list) >= 3
      end
    end

    test "raises an error when :max_tries are reached" do
      assert_raise StreamData.TooManyDuplicatesError, fn ->
        integer()
        |> uniq_list_of(max_tries: 0, min_length: 1)
        |> Enum.take(1)
      end
    end
  end

  describe "shuffle/1" do
    property "shuffling retains same elements" do
      input = [1, 2, 3, 4, 5]

      check all list <- shuffle(input) do
        assert Enum.sort(list) == input
      end
    end

    property "shrinks towards not shuffled" do
      check all input <- list_of(integer()) do
        assert shrink(shuffle(input)) == input
      end
    end
  end

  property "nonempty_improper_list_of/2" do
    check all list <- nonempty_improper_list_of(integer(), constant("")) do
      assert list != []
      refute match?([_], list)
      each_improper_list(list, &assert(is_integer(&1)), &assert(&1 == ""))
    end
  end

  property "maybe_improper_list_of/2" do
    check all list <- maybe_improper_list_of(integer(), constant("")) do
      assert list != [""]
      each_improper_list(list, &assert(is_integer(&1)), &assert(&1 == "" or is_integer(&1)))
    end
  end

  property "tuple/1" do
    check all value <- tuple({integer(-1..-10//-1), integer(1..10)}) do
      assert {int1, int2} = value
      assert int1 in -1..-10//-1
      assert int2 in 1..10
    end
  end

  property "map_of/2" do
    check all map <- map_of(integer(), boolean()), max_runs: 50 do
      assert is_map(map)

      Enum.each(map, fn {key, value} ->
        assert is_integer(key)
        assert is_boolean(value)
      end)
    end
  end

  property "map_of/3" do
    check all map <- map_of(integer(), boolean(), max_length: 5), max_runs: 50 do
      assert is_map(map)

      assert map_size(map) <= 5

      Enum.each(map, fn {key, value} ->
        assert is_integer(key)
        assert is_boolean(value)
      end)
    end
  end

  property "fixed_map/1" do
    data_with_map = fixed_map(%{integer: integer(), binary: binary()})
    data_with_keyword = fixed_map(integer: integer(), binary: binary())

    Enum.each([data_with_map, data_with_keyword], fn data ->
      check all map <- data do
        assert map_size(map) == 2
        assert is_integer(Map.fetch!(map, :integer))
        assert is_binary(Map.fetch!(map, :binary))
      end
    end)
  end

  property "optional_map/1" do
    data_with_map = optional_map(%{integer: integer(), binary: binary()})
    data_with_keyword = optional_map(integer: integer(), binary: binary())

    Enum.each([data_with_map, data_with_keyword], fn data ->
      check all map <- data do
        assert map_size(map) <= 2

        assert map
               |> Map.keys()
               |> MapSet.new()
               |> MapSet.subset?(MapSet.new([:integer, :binary]))

        if Map.has_key?(map, :integer) do
          assert is_integer(Map.fetch!(map, :integer))
        end

        if Map.has_key?(map, :binary) do
          assert(is_binary(Map.fetch!(map, :binary)))
        end
      end
    end)
  end

  property "optional_map/2" do
    data_with_map = optional_map(%{integer: integer(), binary: binary()}, [:integer])
    data_with_keyword = optional_map([integer: integer(), binary: binary()], [:integer])

    Enum.each([data_with_map, data_with_keyword], fn data ->
      check all map <- data do
        assert map_size(map) in [1, 2]

        assert map
               |> Map.keys()
               |> MapSet.new()
               |> MapSet.subset?(MapSet.new([:integer, :binary]))

        if Map.has_key?(map, :integer) do
          assert is_integer(Map.fetch!(map, :integer))
        end

        assert(is_binary(Map.fetch!(map, :binary)))
      end
    end)

    assert Enum.any?(Stream.take(data_with_map, 100), fn data ->
             Map.has_key?(data, :integer) && is_integer(data.integer)
           end)
  end

  property "keyword_of/1" do
    check all keyword <- keyword_of(boolean()), max_runs: 50 do
      assert Keyword.keyword?(keyword)

      Enum.each(keyword, fn {_key, value} ->
        assert is_boolean(value)
      end)
    end
  end

  describe "mapset_of/1" do
    property "without options" do
      check all set <- mapset_of(integer(1..10000)) do
        assert %MapSet{} = set

        if MapSet.size(set) > 0 do
          assert Enum.all?(set, &is_integer/1)
        end
      end
    end

    test "raises an error when :max_tries are reached" do
      assert_raise StreamData.TooManyDuplicatesError, fn ->
        integer()
        |> mapset_of(max_tries: 0)
        |> filter(&(MapSet.size(&1) > 0))
        |> Enum.take(1)
      end
    end
  end

  property "nonempty/1" do
    check all list <- nonempty(list_of(:term)) do
      assert length(list) > 0
    end
  end

  property "tree/2" do
    check all tree <- tree(boolean(), &list_of/1), max_runs: 100 do
      if is_list(tree) do
        assert Enum.all?(List.flatten(tree), &is_boolean/1)
      else
        assert is_boolean(tree)
      end
    end
  end

  describe "codepoint/1" do
    property "with :ascii" do
      check all codepoint <- codepoint(:ascii) do
        assert codepoint in ?\s..?~
      end
    end

    property "with :alphanumeric" do
      check all codepoint <- codepoint(:alphanumeric) do
        # NOTE(review): the `<<codepoint::utf8>>` binaries in this describe
        # block were stripped to `<>` in the dump (tag-like text eaten);
        # reconstructed here to match the surrounding codepoint generators.
        assert <<codepoint::utf8>> =~ ~r/^[a-zA-Z0-9]$/
      end
    end

    property "with :printable" do
      check all codepoint <- codepoint(:printable) do
        assert String.printable?(<<codepoint::utf8>>)
      end
    end

    property "with :utf8" do
      check all codepoint <- codepoint(:utf8) do
        assert String.valid?(<<codepoint::utf8>>)
      end
    end
  end

  describe "string/1" do
    property "with a list of ranges and codepoints" do
      check all string <- string([?a..?z, ?A..?K, ?_]) do
        assert is_binary(string)

        Enum.each(String.to_charlist(string), fn char ->
          assert char in ?a..?z or char in ?A..?K or char == ?_
        end)
      end
    end

    property "with a range" do
      check all string <- string(?a..?f, min_length: 1) do
        assert string =~ ~r/\A[a-f]+\z/
      end
    end

    property "with :ascii" do
      check all string <- string(:ascii) do
        assert is_binary(string)

        Enum.each(String.to_charlist(string), fn char ->
          assert char in ?\s..?~
        end)
      end
    end

    property "with :alphanumeric" do
      check all string <- string(:alphanumeric) do
        assert string =~ ~r/\A[a-zA-Z0-9]*\z/
      end
    end

    property "with :printable" do
      check all string <- string(:printable) do
        assert String.printable?(string)
      end
    end

    property "with :utf8" do
      check all string <- string(:utf8) do
        assert String.valid?(string)
      end
    end

    property "with a fixed length" do
      check all string <- string(:alphanumeric, length: 3) do
        assert String.length(string) == 3
      end
    end
  end

  describe "atom/1" do
    property ":alphanumeric" do
      check all atom <- atom(:alphanumeric) do
        assert is_atom(atom)
        refute String.starts_with?(inspect(atom), ":\"")
      end
    end

    property ":alias" do
      check all module <- atom(:alias), max_runs: 50 do
        assert is_atom(module)
        assert String.starts_with?(Atom.to_string(module), "Elixir.")
      end
    end
  end

  property "iolist/0" do
    check all iolist <- iolist(), max_runs: 50 do
      assert :erlang.iolist_size(iolist) >= 0
    end
  end

  property "iodata/0" do
    check all iodata <- iodata(), max_runs: 50 do
      assert IO.iodata_length(iodata) >= 0
    end
  end

  property "chardata/0" do
    check all chardata <- chardata(), max_runs: 50 do
      assert IO.chardata_to_string(chardata) |> String.valid?()
    end
  end

  property "term/0" do
    check all term <- term(), max_runs: 25 do
      assert is_boolean(term) or is_integer(term) or is_float(term) or is_binary(term) or
               is_atom(term) or is_reference(term) or is_list(term) or is_map(term) or
               is_tuple(term)
    end
  end

  test "check_all/3 with :os.timestamp" do
    options = [initial_seed: :os.timestamp()]

    property = fn list ->
      if 5 in list do
        {:error, list}
      else
        {:ok, nil}
      end
    end

    assert {:error, info} = check_all(list_of(integer()), options, property)
    assert is_list(info.original_failure) and 5 in info.original_failure
    assert info.shrunk_failure == [5]
    assert is_integer(info.nodes_visited) and info.nodes_visited >= 0
    assert is_integer(info.successful_runs) and info.successful_runs >= 0

    assert check_all(list_of(boolean()), options, property) == {:ok, %{}}
  end

  test "check_all/3 with :rand.export_seed()" do
    seed = :rand.seed_s(:exs64)
    options = [initial_seed: :rand.export_seed_s(seed)]

    property = fn list ->
      if 5 in list do
        {:error, list}
      else
        {:ok, nil}
      end
    end

    assert check_all(list_of(boolean()), options, property) == {:ok, %{}}
  end

  # Taken from: https://github.com/whatyouhide/stream_data/issues/160
  defp shrink(generator) do
    {:error, %{shrunk_failure: value}} =
      check_all(generator, [initial_seed: :os.timestamp()], &{:error, &1})

    value
  end

  # Walks an improper list, applying head_fun to proper elements and
  # tail_fun to the final (improper or last) element.
  defp each_improper_list([], _head_fun, _tail_fun) do
    :ok
  end

  defp each_improper_list([elem], _head_fun, tail_fun) do
    tail_fun.(elem)
  end

  defp each_improper_list([head | tail], head_fun, tail_fun) do
    head_fun.(head)

    if is_list(tail) do
      each_improper_list(tail, head_fun, tail_fun)
    else
      tail_fun.(tail)
    end
  end
end
--------------------------------------------------------------------------------
/test/test_helper.exs:
--------------------------------------------------------------------------------
ExUnit.start(exclude: [:stdlib])
--------------------------------------------------------------------------------