├── docs ├── Project.toml ├── make.jl ├── mkdocs.yml ├── Manifest.toml └── src │ └── index.md ├── .gitignore ├── test ├── runtests.jl ├── benchmarks.json ├── runbenchmarks.jl ├── mixturemodels.jl └── iterable.jl ├── .github └── workflows │ ├── TagBot.yml │ ├── CompatHelper.yml │ ├── Documentation.yml │ └── CI.yml ├── src ├── Expectations.jl ├── mixturemodels.jl ├── types.jl └── iterable.jl ├── Project.toml ├── LICENSE.md ├── paper.bib ├── README.md └── paper.md /docs/Project.toml: -------------------------------------------------------------------------------- 1 | [deps] 2 | Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | Manifest.toml 2 | *.jl.cov 3 | *.jl.*.cov 4 | *.jl.mem 5 | docs/build/ 6 | docs/site/ 7 | .DS_Store 8 | .vscode -------------------------------------------------------------------------------- /test/runtests.jl: -------------------------------------------------------------------------------- 1 | using Expectations, Distributions 2 | using Test, Random, Statistics, LinearAlgebra 3 | 4 | @testset "Iterable distributions" begin include("iterable.jl") end 5 | @testset "Mixture models" begin include("mixturemodels.jl") end 6 | -------------------------------------------------------------------------------- /.github/workflows/TagBot.yml: -------------------------------------------------------------------------------- 1 | name: TagBot 2 | on: 3 | schedule: 4 | - cron: 0 * * * * 5 | jobs: 6 | TagBot: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: JuliaRegistries/TagBot@v1 10 | with: 11 | token: ${{ secrets.GITHUB_TOKEN }} 12 | -------------------------------------------------------------------------------- /docs/make.jl: -------------------------------------------------------------------------------- 1 | using Documenter, Expectations 2 | 3 | # Compile the raw 
documentation. 4 | makedocs(sitename = "Expectations.jl") 5 | 6 | # Push the documentation to the server. 7 | deploydocs( 8 | deps = Deps.pip("mkdocs", "python-markdown-math"), 9 | repo = "github.com/QuantEcon/Expectations.jl.git", 10 | forcepush = true) 11 | -------------------------------------------------------------------------------- /.github/workflows/CompatHelper.yml: -------------------------------------------------------------------------------- 1 | name: CompatHelper 2 | on: 3 | schedule: 4 | - cron: '00 00 * * *' 5 | workflow_dispatch: 6 | jobs: 7 | CompatHelper: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Pkg.add("CompatHelper") 11 | run: julia -e 'using Pkg; Pkg.add("CompatHelper")' 12 | - name: CompatHelper.main() 13 | env: 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} # optional 16 | run: julia -e 'using CompatHelper; CompatHelper.main()' 17 | -------------------------------------------------------------------------------- /src/Expectations.jl: -------------------------------------------------------------------------------- 1 | # __precompile__(true) 2 | 3 | module Expectations 4 | 5 | # Load external dependencies. 6 | using FastGaussQuadrature 7 | using LinearAlgebra 8 | using SpecialFunctions 9 | using Distributions 10 | 11 | # Load internal files. 
12 | include("types.jl") 13 | include("iterable.jl") 14 | include("mixturemodels.jl") 15 | 16 | # Export 17 | export expectation, Expectation, IterableExpectation, Gaussian, Trapezoidal, FiniteDiscrete, QuadratureAlgorithm, ExplicitQuadratureAlgorithm, nodes, weights, QuantileRange 18 | export MixtureExpectation, expectations 19 | 20 | end # module 21 | -------------------------------------------------------------------------------- /docs/mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Expectations.jl 2 | repo_url: https://github.com/QuantEcon/Expectations.jl 3 | site_description: A package to help with numerical expectations of random variables and their functions. 4 | site_author: QuantEcon, jlperla, arnavs 5 | 6 | theme: readthedocs 7 | 8 | extra_css: 9 | - assets/Documenter.css 10 | 11 | extra_javascript: 12 | - https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js?config=TeX-AMS_HTML 13 | - assets/mathjaxhelper.js 14 | 15 | markdown_extensions: 16 | - extra 17 | - tables 18 | - fenced_code 19 | - mdx_math 20 | 21 | docs_dir: 'build' 22 | 23 | pages: 24 | - Home: index.md 25 | -------------------------------------------------------------------------------- /.github/workflows/Documentation.yml: -------------------------------------------------------------------------------- 1 | name: Documentation 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ${{ matrix.os }} 8 | strategy: 9 | matrix: 10 | julia-version: [1.8.0] 11 | julia-arch: [x86] 12 | os: [ubuntu-latest] 13 | steps: 14 | - uses: actions/checkout@v1.0.0 15 | - uses: julia-actions/setup-julia@latest 16 | with: 17 | version: ${{ matrix.julia-version }} 18 | - name: Install dependencies 19 | run: julia --project=docs/ -e 'using Pkg; Pkg.develop(PackageSpec(path=pwd())); Pkg.instantiate()' 20 | - name: Build and deploy 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # For authentication with GitHub Actions token 23 | run:
julia --project=docs/ docs/make.jl 24 | -------------------------------------------------------------------------------- /Project.toml: -------------------------------------------------------------------------------- 1 | name = "Expectations" 2 | uuid = "2fe49d83-0758-5602-8f54-1f90ad0d522b" 3 | version = "1.9.2" 4 | 5 | [deps] 6 | Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" 7 | FastGaussQuadrature = "442a2c76-b920-505d-bb47-c5924d526838" 8 | LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" 9 | SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" 10 | 11 | [compat] 12 | Distributions = "0.23, 0.24, 0.25" 13 | FastGaussQuadrature = "0.4.3, 0.5, 1" 14 | SpecialFunctions = "0.10, 1.1, 2" 15 | LinearAlgebra = "<0.0.1, 1" 16 | julia = "1.7, 1.8, 1.9" 17 | 18 | [extras] 19 | LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" 20 | Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" 21 | Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" 22 | Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" 23 | 24 | [targets] 25 | test = ["Test", "Statistics", "Random", "LinearAlgebra"] 26 | -------------------------------------------------------------------------------- /test/benchmarks.json: -------------------------------------------------------------------------------- 1 | 
[{"Julia":"1.3.0","BenchmarkTools":"0.4.3"},[["BenchmarkGroup",{"data":{"exponential":["BenchmarkTools.TrialEstimate",{"allocs":5,"time":308.0,"memory":400,"params":["BenchmarkTools.Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"gctime":0.0}],"standardnormal":["BenchmarkTools.TrialEstimate",{"allocs":5,"time":324.0,"memory":400,"params":["BenchmarkTools.Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"gctime":0.0}],"discreteuniform":["BenchmarkTools.TrialEstimate",{"allocs":5,"time":469.0,"memory":272,"params":["BenchmarkTools.Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"gctime":0.0}]},"tags":[]}]]] -------------------------------------------------------------------------------- /test/runbenchmarks.jl: -------------------------------------------------------------------------------- 1 | # Import dependency. 2 | using BenchmarkTools, Expectations, Distributions 3 | 4 | #Create benchmark group and benchmarks 5 | benchmarks = BenchmarkGroup() 6 | 7 | #Put in specific benchmarks 8 | dist1 = DiscreteUniform(1, 10) 9 | E1 = expectation(dist1) 10 | benchmarks["discreteuniform"] = @benchmarkable $E1($(x -> x)) 11 | dist2 = Normal() 12 | E2 = expectation(dist2) 13 | benchmarks["standardnormal"] = @benchmarkable $E2($(x -> x)) 14 | dist3 = Exponential(2) 15 | E3 = expectation(dist3) 16 | benchmarks["exponential"] = @benchmarkable $E3($(x -> x)) 17 | #... 18 | 19 | results = run(benchmarks) # Get results. 20 | results = median(results) # Condense to median. 
21 | 22 | # To save results, manually call in the REPL: BenchmarkTools.save("benchmarks.json", results) 23 | 24 | #Compare to old results 25 | try 26 | oldresults= BenchmarkTools.load("benchmarks.json")[1] 27 | judge(oldresults, results) 28 | catch err 29 | error("Couldn't load file- make sure that you've previously saved results.", err.prefix) 30 | end -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2018, 2019 QuantEcon 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /test/mixturemodels.jl: -------------------------------------------------------------------------------- 1 | # Mixture models 2 | distset = [ 3 | MixtureModel([Uniform(), Normal(), LogNormal()]), 4 | MixtureModel([Uniform(1, 10), Gamma()]), 5 | MixtureModel([Uniform(1, 100), Normal(1, 1000)]), 6 | MixtureModel([DiscreteUniform(1, 10), DiscreteUniform(1, 10)]) 7 | ] 8 | 9 | for dist in distset 10 | println(dist) 11 | μ = mean(dist) 12 | σ = std(dist) 13 | # No convenience call. 14 | E = expectation(dist) 15 | @test E(x -> x) ≈ μ 16 | @test E(x -> x^2) - μ^2 ≈ σ^2 17 | # Convenience call. 18 | @test expectation(x -> x, dist) ≈ μ 19 | @test expectation(x -> x^2, dist) - μ^2 ≈ σ^2 20 | # Stress tests. 21 | # Many nodes. 22 | E2 = expectation(dist, n = 100) 23 | @test E2(x -> x) ≈ μ 24 | @test E2(x -> x^2) - μ^2 ≈ σ^2 25 | end 26 | 27 | # Addition of expectations 28 | e1 = expectation(Normal()) 29 | e2 = expectation(Uniform()) 30 | e3 = expectation(Gamma()) 31 | 32 | @test ((e1 + e2) + e3)(identity) ≈ (e1 + (e2 + e3))(identity) ≈ (e1 + e2 + e3)(identity) 33 | @test (e1 + e2 + e3)(x -> x^2) ≈ 3.333333333333342 34 | 35 | # Linear operator behavior. 36 | 37 | # Error handling. 
38 | -------------------------------------------------------------------------------- /.github/workflows/CI.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | - push 4 | - pull_request 5 | jobs: 6 | test: 7 | name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }} 8 | runs-on: ${{ matrix.os }} 9 | strategy: 10 | fail-fast: false 11 | matrix: 12 | version: 13 | - '1.6' 14 | - '1' 15 | os: 16 | - ubuntu-latest 17 | arch: 18 | - x64 19 | steps: 20 | - uses: actions/checkout@v2 21 | - uses: julia-actions/setup-julia@v1 22 | with: 23 | version: ${{ matrix.version }} 24 | arch: ${{ matrix.arch }} 25 | - uses: actions/cache@v1 26 | env: 27 | cache-name: cache-artifacts 28 | with: 29 | path: ~/.julia/artifacts 30 | key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} 31 | restore-keys: | 32 | ${{ runner.os }}-test-${{ env.cache-name }}- 33 | ${{ runner.os }}-test- 34 | ${{ runner.os }}- 35 | - uses: julia-actions/julia-buildpkg@v1 36 | - uses: julia-actions/julia-runtest@v1 37 | -------------------------------------------------------------------------------- /src/mixturemodels.jl: -------------------------------------------------------------------------------- 1 | # uses the defaults for each dist. 2 | # otherwise, you can manually construct expectations and glob them together 3 | function expectation(m::UnivariateMixture; kwargs...) 4 | expectations = [expectation(d; kwargs...) for d in components(m)] 5 | return MixtureExpectation(expectations, probs(m)) 6 | end 7 | 8 | function (e::MixtureExpectation)(f::Function; kwargs...) 9 | return dot(e.mixtureweights, [E(f; kwargs...) for E in e.expectations]) 10 | end 11 | 12 | weights(e::MixtureExpectation) = e.mixtureweights 13 | expectations(e::MixtureExpectation) = e.expectations 14 | expectation(f::Function, m::UnivariateMixture; kwargs...) = dot(probs(m), [expectation(f, dist; kwargs...) 
for dist in components(m)]) 15 | # Left-multiplying an expectation by a scalar. 16 | """ 17 |     *(r::Real, e::MixtureExpectation) = MixtureExpectation(r * expectations(e), weights(e)) 18 | Implements left-multiplication of a `MixtureExpectation` by a real scalar. 19 | """ 20 | *(r::Real, e::MixtureExpectation) = MixtureExpectation(r * expectations(e), weights(e)) 21 | # *(e::MixtureExpectation, h::AbstractArray) = dot(weights(e), [E*h for E in expectations(e)]) 22 | import Base.+ 23 | +(expectations::Expectation...) = MixtureExpectation(expectations, ones(length(expectations))) 24 | 25 | import Base.* 26 | 27 | # Right-multiplying an expectation by something. 28 | """ 29 |     *(e::MixtureExpectation, h::AbstractArray) = dot(map(x -> x * h, expectations(e)), weights(e)) 30 | Implements the right-application of a `MixtureExpectation` by a vector of values on its nodes. 31 | """ 32 | *(e::MixtureExpectation, h::AbstractArray) = dot(map(x -> x * h, expectations(e)), weights(e)) 33 | 34 | 35 | -------------------------------------------------------------------------------- /paper.bib: -------------------------------------------------------------------------------- 1 | @article{mccall, 2 | title = {Economics of Information and Job Search}, 3 | author = {McCall, J. J.}, 4 | year = {1970}, 5 | journal = {The Quarterly Journal of Economics}, 6 | volume = {84}, 7 | doi = {10.2307/1879403}, 8 | number = {1}, 9 | pages = {113-126}, 10 | abstract = {I. Introduction, 113. — II. A simple model of job search, 115. — III. A more general model of job search, 123. — IV.
An adaptive search model, 125.}, 11 | url = {https://EconPapers.repec.org/RePEc:oup:qjecon:v:84:y:1970:i:1:p:113-126.} 12 | } 13 | 14 | @misc{fastquad, 15 | title={Fast computation of Gauss quadrature nodes and weights on the whole real line}, 16 | author={Alex Townsend and Thomas Trogdon and Sheehan Olver}, 17 | year={2014}, 18 | eprint={1410.5286}, 19 | archivePrefix={arXiv}, 20 | primaryClass={math.NA}, 21 | doi = {10.1093/imanum/drv002} 22 | } 23 | 24 | @misc{distributions, 25 | author = {Dahua Lin and 26 | John Myles White and 27 | Simon Byrne and 28 | Douglas Bates and 29 | Andreas Noack and 30 | John Pearson and 31 | Alex Arslan and 32 | Kevin Squire and 33 | David Anthoff and 34 | Theodore Papamarkou and 35 | Mathieu Besançon and 36 | Jan Drugowitsch and 37 | Moritz Schauer and 38 | other contributors}, 39 | title = {{JuliaStats/Distributions.jl: a Julia package for probability distributions and associated functions}}, 40 | month = jul, 41 | year = 2019, 42 | doi = {10.5281/zenodo.2647458}, 43 | url = {https://doi.org/10.5281/zenodo.2647458} 44 | } -------------------------------------------------------------------------------- /src/types.jl: -------------------------------------------------------------------------------- 1 | # Types for quadrature algorithms. 2 | """ 3 | Abstract type for quadrature algorithms without user-defined nodes (e.g., Gaussian quadrature.) 4 | """ 5 | abstract type QuadratureAlgorithm end 6 | 7 | """ 8 | Abstract type for quadrature algorithms with user-defined nodes (e.g., trapezoidal integration). 9 | """ 10 | abstract type ExplicitQuadratureAlgorithm end 11 | 12 | # Concrete types for quadrature algorithms. 13 | """ 14 | Gaussian quadrature. See specific methods for what precise algorithm is used (e.g., Gauss-Legendre, Gauss-Hermite, etc.) 15 | """ 16 | struct Gaussian <: QuadratureAlgorithm end # Distribution-family specific quadrature.
17 | 18 | """ 19 | A custom quadrature scheme written by Spencer Lyon as part of the QuantEcon.jl library. Used with permission. 20 | 21 | For detailed information, see: https://github.com/QuantEcon/QuantEcon.jl/blob/be0a32ec17d1f5b04ed8f2e52604c70c69f416b2/src/quad.jl#L918. 22 | """ 23 | struct QuantileRange <: QuadratureAlgorithm end 24 | 25 | """ 26 | A dot product of a (finite) PDF vector and a finite set of transformed nodes. 27 | """ 28 | struct FiniteDiscrete <: ExplicitQuadratureAlgorithm end # Dot-product basically. 29 | 30 | """ 31 | Trapezoidal integration. 32 | """ 33 | struct Trapezoidal <: ExplicitQuadratureAlgorithm end # For iterable expectations. 34 | 35 | # Abstract types for expectations. 36 | """ 37 | Abstract type for all expectations. 38 | """ 39 | abstract type Expectation end # Supports E(f) 40 | 41 | # Concrete types for expectations. 42 | 43 | #= For an example of using abstract types named in this way, see: https://github.com/JuliaStats/Distributions.jl/blob/2d98eb6f31e9a92cce416e7391a84cff9bba7292/src/truncate.jl#L1-L10. We define a family of Truncated{blahblahblah} types parametrically, but use the abstract Truncated as a supertype for all Truncated distributions. 44 | =# 45 | """ 46 | Expectations which are parameterized by a vector of nodes (e.g., a discretized support) and corresponding quadrature weights.
47 | """ 48 | struct IterableExpectation{NT, WT} <: Expectation # Supports E(f), nodes, weights, * 49 | nodes::NT 50 | weights::WT 51 | end 52 | 53 | struct MixtureExpectation{ET, WT} <: Expectation 54 | expectations::ET 55 | mixtureweights::WT 56 | end 57 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![CI](https://github.com/QuantEcon/Expectations.jl/workflows/CI/badge.svg)](https://github.com/QuantEcon/Expectations.jl/actions?query=workflow%3ACI) 2 | [![codecov](https://codecov.io/gh/QuantEcon/Expectations.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/QuantEcon/Expectations.jl) 3 | 4 | [![](https://img.shields.io/badge/docs-latest-blue.svg)](https://QuantEcon.github.io/Expectations.jl/dev) 5 | 6 | # Expectations 7 | 8 | Installation (for Julia v1.0 and up): 9 | ```julia 10 | pkg> add Expectations 11 | ``` 12 | See [Pkg docs for more details](https://julialang.github.io/Pkg.jl/v1/managing-packages/#Adding-packages-1) 13 | 14 | 15 | This is a package designed to simplify the process of taking expectations of functions of random variables. 16 | 17 | ### Expectation Operator 18 | 19 | The key object is the `expectation` function, which returns an operator: 20 | 21 | ```julia 22 | dist = Normal() 23 | E = expectation(dist) 24 | E(x -> x) 25 | ``` 26 | For convenience, the operator can be applied directly to a function instead of being cached, 27 | ```julia 28 | expectation(x->x^2, dist) 29 | ``` 30 | 31 | As a linear operator on vectors using the nodes of the distribution 32 | ```julia 33 | dist = Normal() 34 | E = expectation(dist) 35 | x = nodes(E) 36 | f(x) = x^2 37 | E * f.(x) == dot(f.(x), weights(E)) 38 | ``` 39 | 40 | ### Random Variables 41 | 42 | The underlying distributions are objects from `Distributions.jl` (currently `<:UnivariateDistribution`). 
43 | 44 | **Starting with 1.3.0, we also support mixture models.** 45 | 46 | ### Quadrature Algorithms 47 | 48 | We support different types of Gaussian quadrature (Gauss-Hermite, Gauss-Legendre, Gauss-Laguerre, etc.) based on the distribution, as well as some methods with user-defined nodes (e.g., trapezoidal integration). 49 | 50 | We have rules for the following distributions: 51 | 52 | * Normal 53 | * ChiSq 54 | * LogNormal 55 | * Exponential 56 | * Beta 57 | * Gamma/Erlang 58 | * Uniform 59 | * Continuous Univariate (compact; generic fallback) 60 | * Continuous Univariate (no restriction; approximates with quantile grid) 61 | * Discrete 62 | 63 | See docs for more info. 64 | 65 | ### Mixture Models 66 | 67 | We also support mixture models, e.g. 68 | 69 | ```julia 70 | d = MixtureModel([Uniform(), Normal(), Gamma()]); 71 | E = expectation(d); 72 | E(x -> x) # 0.5000000000000016 73 | ``` 74 | 75 | The `MixtureExpectation` objects support most of the same behavior as the individual `IterableExpectation`s. 
76 | 77 | ```julia 78 | 2E(x -> x) # 1.000000000000003 79 | weights(E) # [1/3, 1/3, 1/3] 80 | expectations(E) # component expectations 81 | ``` 82 | -------------------------------------------------------------------------------- /docs/Manifest.toml: -------------------------------------------------------------------------------- 1 | # This file is machine-generated - editing it directly is not advised 2 | 3 | [[Base64]] 4 | uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" 5 | 6 | [[Dates]] 7 | deps = ["Printf"] 8 | uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" 9 | 10 | [[Distributed]] 11 | deps = ["Random", "Serialization", "Sockets"] 12 | uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" 13 | 14 | [[DocStringExtensions]] 15 | deps = ["LibGit2", "Markdown", "Pkg", "Test"] 16 | git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1" 17 | uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" 18 | version = "0.8.3" 19 | 20 | [[Documenter]] 21 | deps = ["Base64", "Dates", "DocStringExtensions", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] 22 | git-tree-sha1 = "c01a7e8bcf7a6693444a52a0c5ac8b4e9528600e" 23 | uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" 24 | version = "0.26.0" 25 | 26 | [[IOCapture]] 27 | deps = ["Logging"] 28 | git-tree-sha1 = "377252859f740c217b936cebcd918a44f9b53b59" 29 | uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89" 30 | version = "0.1.1" 31 | 32 | [[InteractiveUtils]] 33 | deps = ["Markdown"] 34 | uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" 35 | 36 | [[JSON]] 37 | deps = ["Dates", "Mmap", "Parsers", "Unicode"] 38 | git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4" 39 | uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" 40 | version = "0.21.1" 41 | 42 | [[LibGit2]] 43 | deps = ["Printf"] 44 | uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" 45 | 46 | [[Libdl]] 47 | uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" 48 | 49 | [[Logging]] 50 | uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" 51 | 52 | 
[[Markdown]] 53 | deps = ["Base64"] 54 | uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" 55 | 56 | [[Mmap]] 57 | uuid = "a63ad114-7e13-5084-954f-fe012c677804" 58 | 59 | [[Parsers]] 60 | deps = ["Dates"] 61 | git-tree-sha1 = "50c9a9ed8c714945e01cd53a21007ed3865ed714" 62 | uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" 63 | version = "1.0.15" 64 | 65 | [[Pkg]] 66 | deps = ["Dates", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] 67 | uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" 68 | 69 | [[Printf]] 70 | deps = ["Unicode"] 71 | uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" 72 | 73 | [[REPL]] 74 | deps = ["InteractiveUtils", "Markdown", "Sockets"] 75 | uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" 76 | 77 | [[Random]] 78 | deps = ["Serialization"] 79 | uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" 80 | 81 | [[SHA]] 82 | uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" 83 | 84 | [[Serialization]] 85 | uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" 86 | 87 | [[Sockets]] 88 | uuid = "6462fe0b-24de-5631-8697-dd941f90decc" 89 | 90 | [[Test]] 91 | deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] 92 | uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" 93 | 94 | [[UUIDs]] 95 | deps = ["Random", "SHA"] 96 | uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" 97 | 98 | [[Unicode]] 99 | uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" 100 | -------------------------------------------------------------------------------- /test/iterable.jl: -------------------------------------------------------------------------------- 1 | # Common univariate distributions. 
2 | distset = [ 3 | Normal(1.23, 3.45), 4 | Exponential(2.12), 5 | Gamma(4.3), 6 | Gamma(2.2, 3.1,), 7 | Erlang(2, 3), 8 | Beta(2.123), 9 | LogNormal(4.5), 10 | DiscreteUniform(1, 10), 11 | Chisq(3), 12 | Uniform(1, 200), 13 | Binomial(101, 0.45), 14 | Categorical([0.2, 0.35, 0.15, 0.3]) 15 | ] 16 | 17 | for dist in distset 18 | println(dist) 19 | μ = mean(dist) 20 | σ = std(dist) 21 | # No convenience call. 22 | E = expectation(dist) 23 | @test E(x -> x) ≈ μ 24 | @test E(x -> x^2) - μ^2 ≈ σ^2 25 | @test E(x -> ((x - μ)/σ)^3) + 1. ≈ skewness(dist) + 1. # To avoid comparisons to 0.0 exactly. 26 | # Convenience call. 27 | @test expectation(x -> x, dist) ≈ μ 28 | @test expectation(x -> x^2, dist) - μ^2 ≈ σ^2 29 | @test expectation(x -> ((x - μ)/σ)^3, dist) + 1. ≈ skewness(dist) + 1. 30 | # Stress tests. 31 | # Many nodes. 32 | E2 = expectation(dist, n = 100) 33 | @test E2(x -> x) ≈ μ 34 | @test E2(x -> x^2) - μ^2 ≈ σ^2 35 | @test E2(x -> ((x - μ)/σ)^3) + 1. ≈ skewness(dist) + 1. # To avoid comparisons to 0.0 exactly. 36 | end 37 | 38 | # Linear operator behavior. 39 | distset = [ 40 | DiscreteUniform(1., 10.), 41 | Normal(1.45), 42 | Beta(2.1) 43 | ] 44 | 45 | for dist in distset 46 | E = expectation(dist) 47 | h(x) = 2*x 48 | z = nodes(E) 49 | @test E * h.(z) ≈ E(x -> 2*x) # Right-multiplying. 50 | @test weights(2E) ≈ 2*weights(E) # Left-multiplying. 51 | @test 3E*z ≈ (3E) * z ≈ 3*(E * z) # Linearity. 52 | end 53 | 54 | # Error handling. 55 | distset = [ # Noncompact dists 56 | LogNormal(Inf, Inf), 57 | Beta(Inf), 58 | Normal(0, Inf), 59 | Uniform(-Inf, Inf), 60 | Poisson(3) 61 | ] 62 | 63 | for dist in distset 64 | @test_throws ArgumentError expectation(dist) 65 | end 66 | 67 | distset = [ # Compact dist 68 | Uniform(1,2), 69 | Arcsine(1,2), 70 | Beta(1,1), 71 | Truncated(LogNormal(1,1), 0., 10.), 72 | ] 73 | 74 | for dist in distset 75 | E = expectation(dist) 76 | h(x) = 2*x 77 | z = nodes(E) 78 | @test E * h.(z) ≈ E(x -> 2*x) # Right-multiplying. 
79 | @test weights(2E) ≈ 2*weights(E) # Left-multiplying. 80 | @test 3E*z ≈ (3E) * z ≈ 3*(E * z) # Linearity. 81 | 82 | nodeList = nodes(E); 83 | E = expectation(dist, nodeList) 84 | @test E * h.(z) ≈ E(x -> 2*x) # Right-multiplying. 85 | @test weights(2E) ≈ 2*weights(E) # Left-multiplying. 86 | @test 3E*z ≈ (3E) * z ≈ 3*(E * z) # Linearity. 87 | end 88 | 89 | 90 | # Other errors. 91 | E = expectation(DiscreteUniform(1, 10)) 92 | @test_throws Exception E(x -> dot(x, ones(7))) # Non-applicable functions. 93 | @test_throws MethodError (x -> 2*x).(nodes(E)) * E # Non-commutativity. 94 | 95 | # Trapezoidal methods. 96 | distset = [Beta()] 97 | 98 | for dist in distset 99 | # Setup. 100 | x = support(dist) 101 | μ = mean(dist) 102 | σ = std(dist) 103 | # Regular grid. 104 | grid = range(minimum(x), stop = maximum(x), length = 100) 105 | E = expectation(dist, grid) 106 | @test E(x -> x) ≈ μ 107 | @test abs(E(x -> x^2) - μ^2 - σ^2) < 1e-4 108 | # Irregular grid. 109 | grid2 = unique([grid' range(minimum(x), stop = maximum(x), length = 77)']) 110 | E2 = expectation(dist, grid2) 111 | @test E2(x -> x) isa Number # no accuracy guarantees for the irregular grid 112 | @test abs(E2(x -> x^2) - μ^2 - σ^2) isa Number 113 | # Convenience method 114 | @test expectation(identity, dist, grid2) ≈ E2(x -> x) 115 | end 116 | 117 | # Quantile 118 | 119 | distset = [ 120 | Uniform(1., 2.) 121 | ] 122 | 123 | for dist in distset 124 | E = expectation(dist, QuantileRange) 125 | h(x) = 2*x 126 | z = nodes(E) 127 | @test E * h.(z) ≈ E(x -> 2*x) # Right-multiplying. 128 | @test weights(2E) ≈ 2*weights(E) # Left-multiplying. 129 | @test 3E*z ≈ (3E) * z ≈ 3*(E * z) # Linearity. 130 | end 131 | 132 | ## Truncated distributions 133 | @test_throws ArgumentError E = expectation(Pareto()) 134 | # Mean of pareto is (α * θ / (α - 1)).
If we bound a Pareto at a high number we should get (close to) the analytical mean 135 | α = 5.0 136 | θ = 1.0 137 | righttrunc = 10000 138 | E = expectation(truncated(Pareto(α,θ),nothing,righttrunc),n=1000) # Right truncated Pareto at 10000 139 | @test E(x->x) ≈ α*θ/(α-1) 140 | -------------------------------------------------------------------------------- /paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'Expectations.jl: Quick and Accurate Expectation Operators in Julia' 3 | tags: 4 | - Julia 5 | - statistics 6 | - distributions 7 | - quadrature 8 | authors: 9 | - name: Arnav Sood 10 | orcid: 0000-0003-0074-7908 11 | affiliation: 1 12 | - name: Patrick K Mogensen 13 | orcid: 0000-0002-4910-1932 14 | affiliation: 2 15 | affiliations: 16 | - name: Vancouver School of Economics, University of British Columbia 17 | index: 1 18 | - name: Julia Computing, Inc. 19 | index: 2 20 | date: 2 February 2020 21 | bibliography: paper.bib 22 | --- 23 | 24 | # Summary 25 | 26 | Many statistical problems require taking an expectation of some function $f(x)$, where $x$ is drawn from some known distribution. For example, a well-known economic model of job search [@mccall] involves calculating an expected value $\mathbb{E}[f(w)]$, where $w$ is a random wage offer, and $f(\cdot)$ is the lifetime value of that offer. 27 | 28 | Julia's ``Distributions.jl`` [@distributions] package provides many random variable objects, but taking expectations is still a laborious process. Traditional approaches include Monte Carlo simulation (slow, and potentially inaccurate), or custom numerical integration (inaccessible for non-statisticians.) And both of these approaches fail to capitalize on one of Julia's key features: the similarity between math and Julia code. 29 | 30 | The ``Expectations.jl`` package addresses these weaknesses.
By implementing custom Gaussian integration (also known as _quadrature_) schemes around well-known distributions, we provide fast and compact expectation operators. By making these callable objects, we allow these to be used as valid linear operators (acting on vectors, supporting scalar multiplication, etc.) Accuracy is not compromised; in testing, two pairs of 32-node vectors are sufficient to compute expectations to machine precision. For distributions without a custom quadrature rule, we give generic fallbacks. We also support univariate mixture models, by taking a weighted average over the component expectations. 31 | 32 | Current use of the package includes the well-known QuantEcon [Julia course](https://julia.quantecon.org), and the package has already attracted some community input (feature requests, bug reports, etc.) 33 | 34 | # Related Software 35 | 36 | There are analogous packages in other programming languages. For example, in Python the ``sympy.stats`` module provides for both symbolic and numerical representations of random variable expectations. And ``scipy.stats`` also has support for numerical integration with respect to pdfs. 37 | 38 | The advantages of our package are twofold. First, because of Julia's multiple dispatch, we can build distribution and expectation objects using any numeric types that implement basic arithmetic functions. For example, if we import the ``Measurements.jl`` package, the following works without any modification: 39 | 40 | ``` 41 | julia> mu, sigma = 0.5 ± 0.01, 1.0 ± 0.01 42 | (0.5 ± 0.01, 1.0 ± 0.01) 43 | 44 | julia> d = Normal(mu, sigma); 45 | 46 | julia> E = expectation(d); 47 | 48 | julia> E(x -> (x - mu)^2) 49 | 1.0 ± 0.02 50 | 51 | julia> sigma^2 52 | 1.0 ± 0.02 53 | ``` 54 | 55 | Second, because we have designed our expectation operators as bona fide _operators_ (instead of functions), they support mathematical behavior (like scalar multiplication and action on a vector) we have not seen elsewhere. 
56 | 57 | # Additional Julia Details 58 | 59 | There are connections between our package and two emerging trends in Julia; differentiable programming (the ability to take derivatives of arbitrary code, most commonly through autodifferentiation), and probabilistic programming languages (domain-specific languages, such as ``Turing.jl`` and ``Stan.jl``, focused on probabilistic work.) 60 | 61 | Our expectation operators are compatible with the ``ForwardDiff.jl`` autodifferentiation library, which means that we can take derivatives of functions like the following: 62 | 63 | ``` 64 | julia> f = x -> E(y -> y^2 * x) 65 | #59 (generic function with 1 method) 66 | 67 | julia> ForwardDiff.derivative(f, 2) 68 | 0.9999999999999984 69 | ``` 70 | 71 | A corollary is that expectations can be embedded into things like machine learning training loops. 72 | 73 | On the second point, our focus on clean notation and mathematically faithful behavior (scalar multiplication, etc.) is in the spirit of a high-level, user-facing PPL. Our generic fallback method supports any compact, finite-parameter subtype of ``UnivariateDistribution`` which supports parameter extraction (e.g., ``maximum``, ``minimum``, ``params``). We are working on adding support for various transformed distribution objects from (e.g.) Turing, at which point closer integration will be possible. 74 | 75 | # Mathematical and Computational Details 76 | 77 | For a (continuous) univariate random variable $X$, following a cumulative distribution function $G(\cdot)$, the expectation is defined as: 78 | 79 | $$ \mathbb{E}[f(X)] = \int_{S}f(x) dG(x) $$ 80 | 81 | Where $S$ is the _support_ of $X$, or the set of values for which $X$ is nonzero. 82 | 83 | The integral is what makes this quantity challenging to compute. A Monte Carlo method might approximate it by drawing a large sample of points $S = \{x_1, x_2, ..., x_N\}$, and then simply taking the average $\tilde{\mathbb{E}}[f(X)] = \frac{1}{N} \sum_{i = 1}^N f(x_i)$. 
84 | 85 | While the estimator $\tilde{\mathbb{E}}$ has several attractive statistical properties, Monte Carlo methods tend to be resource-intensive (one must draw a large enough sample, store it in memory, and compute the average.) 86 | 87 | We compute the integral via so-called _Gaussian quadrature_ (specifically, via calls to the Julia package [FastGaussQuadrature.jl](https://github.com/JuliaApproximation/FastGaussQuadrature.jl).) There are several flavors (Gauss-Legendre, Gauss-Hermite, Gauss-Laguerre, etc.) which are suitable for various domains $S$ (compact, infinite, semi-infinite, etc.), and therefore for various distributions. 88 | 89 | The core of each, however, is the approximation of an integral by the dot product $\mathbf{n} \cdot \mathbf{w}$, where $\mathbf{n}$ is called the _node vector_, and $\mathbf{w}$ the _weight vector_. To take the expectations of arbitrary functions, we simply apply the transformation to the nodes. That is, if: 90 | 91 | $$ \mathbb{E}[X] \approx \mathbf{n} \cdot \mathbf{w} $$ 92 | 93 | Then: 94 | 95 | $$ \mathbb{E}[f(X)] \approx f(\mathbf{n}) \cdot \mathbf{w} $$ 96 | 97 | Where $f(\mathbf{n})$ is the function $f$ applied to each element of $\mathbf{n}$. 98 | 99 | The computation of these weights and nodes is a literature in its own right. We refer to the introduction of [@fastquad] for an exposition (the authors also maintain the FastGaussQuadrature library mentioned above.) 100 | 101 | # Acknowledgements 102 | 103 | The [QuantEcon](https://quantecon.org) organization, which partially supported this work, is a NumFocus Fiscally Sponsored Project currently funded primarily by the Alfred P. Sloan foundation. 104 | 105 | This paper benefited from the efforts of many people, including JOSS editor Viviane Pons, and volunteer referees. 
106 | 107 | # References 108 | -------------------------------------------------------------------------------- /docs/src/index.md: -------------------------------------------------------------------------------- 1 | ## Overview 2 | 3 | The goal of this package is to provide an intuitive and mathematically sound interface for taking expectations of random variables 4 | and their higher-order functions (i.e., if ``X \sim N(0, 1)``, what is ``\mathbb{E}[\sin(X)]``?). 5 | 6 | The underlying random variables are assumed to be distributions from [`Distributions.jl`](https://github.com/juliastats/distributions.jl). Currently, only univariate distributions are supported. 7 | 8 | ## Installation 9 | 10 | To install, run: 11 | 12 | ```@repl 1 13 | using Pkg 14 | pkg"add Expectations Distributions" 15 | 16 | using Expectations, Distributions 17 | ``` 18 | 19 | ## The Expectation Operator 20 | 21 | The key object in this package is an **expectation operator**, or an object `<: Expectation`. These include all objects capable of being called on a function; e.g. that support a method `function (e::Expectation)(f::Function)`. You can create these as following: 22 | 23 | ```@repl 1 24 | 25 | dist = Normal(); 26 | E = expectation(dist) 27 | ``` 28 | 29 | You can also choose algorithms and default parameters (see below for list): 30 | 31 | ```@repl 1 32 | E = expectation(dist, Gaussian; n = 30) # Could have done expectation(dist) or expectation(dist; n = 30) 33 | ``` 34 | 35 | These objects can then be applied to functions: 36 | 37 | ```@repl 1 38 | E(x -> x) 39 | E(x -> x^2) 40 | ``` 41 | 42 | There is also a convenience function to evaluate expectations directly, without returning the operator: 43 | 44 | ```@repl 1 45 | f = x -> x^2 46 | expectation(f, dist) 47 | ``` 48 | 49 | In general, `expectation(f, dist, ...)` is equivalent to `E(f)`, where `E = expectation(dist, ...)`. 
50 | 51 | ### IterableExpectation 52 | 53 | The only concrete subtype of `Expectation` currently supported is `IterableExpectation{NT, WT}`. These are expectations for which we have a 54 | discrete vector of quadrature nodes and weights, either defined by user fiat, or set algorithmically. These support some additional behavior: 55 | 56 | ```@repl 1 57 | nodeList = nodes(E); 58 | vals = map(x -> x^2, nodeList); 59 | E * vals 60 | (2E) * vals 61 | ``` 62 | 63 | The above behavior, in some sense, puts the "operator" in "expectation operator"; that is, it allows it to move elements of a vector space around, and to be scalar-multiplied. 64 | 65 | ### User-Defined Nodes 66 | 67 | There are some situations where we are forced to use a specific set of nodes. In those situations, `E = expectation(dist, nodes)` will create the relevant object. 68 | 69 | ## Mixture Models 70 | 71 | We also have support for univariate [mixture models](https://juliastats.org/Distributions.jl/latest/mixture). The `MixtureExpectation` type is a struct with two fields: 72 | 73 | * `expectations`, which is a list of `IterableExpectation` objects. 74 | 75 | * `mixtureweights`, which is the mixing probabilities over the various model components. 76 | 77 | The mixture models are constructed using the **default settings** for each component distribution. (It still accepts kwargs, which are applied to each.) 78 | 79 | ```@repl 1 80 | d = MixtureModel([Uniform(), Normal(), Gamma()]); 81 | E = expectation(d; n = 30); # n = 30 nodes for each 82 | @show typeof(E) 83 | E(x -> abs(x)) 84 | ``` 85 | 86 | If you want to change this, you should construct each distribution separately, and then chain them together. 
87 | 88 | ```@repl 1 89 | E1 = expectation(Uniform()) 90 | E2 = expectation(Normal()) 91 | E3 = expectation(Gamma()) 92 | 93 | E = MixtureExpectation([E1, E2, E3], [1/3, 1/3, 1/3]) 94 | E(x -> abs(x)) 95 | ``` 96 | 97 | ## Supported Distributions, Algorithms, Keywords, and Defaults 98 | 99 | Here is a list of currently supported distributions, along with keyword arguments and their defaults. 100 | 101 | | Distribution Name | Algorithm (Julia Type) | Keywords and Defaults | Restrictions | 102 | | ----------------- | -------------- | --------------------- | ------------ | 103 | | Discrete Univariate | FiniteDiscrete <: QuadratureAlgorithm | N/A | Support must be finite. | 104 | | Continuous Univariate | Gauss-Legendre (Gaussian <: QuadratureAlgorithm) | n = 500 | Support must be a compact interval ``[a, b]``. | 105 | | Continuous Univariate | QNWDist[^1] (QuantileRange <: ...) | n = 50, q0 = 0.001, qN = 0.999 | Distribution must be nondegenerate. | 106 | | Normal <: Continuous Univariate | Gauss-Hermite (...) | n = 30 | ... | 107 | | LogNormal <: ... | Gauss-Hermite (...) | n = 30 | ... | 108 | | Beta <: ... | Gauss-Jacobi (...) | n = 32 | ... | 109 | | ChiSq <: ... | Gauss-Laguerre (...) | n = 32 | ... | 110 | | Uniform <: ... | Gauss-Legendre (...) | n = 30 | ... | 111 | | Exponential <: ... | Gauss-Laguerre (...) | n = 32 | ... | 112 | | Gamma <: ... | Gauss-Laguerre (...) | n = 32 | ... | 113 | | Univariate | Trapezoidal <: ExplicitQuadratureAlgorithm | N/A | All nodes must be inside distribution's support. | 114 | 115 | ### 116 | 117 | Some unbounded distributions are currently not supported (e.g., Poisson). Depending on your use case truncating may be a feasible option: 118 | ```@repl 119 | E = expectation(Pareto()) # Throws error 120 | E = expectation(truncated(Pareto(),0.0,1000.0)) # Truncated Pareto on [0,1000] 121 | ``` 122 | See `Distributions.truncated` for more. Of course, truncating the distribution also affects its properties. 
123 | 124 | ## Mathematical Details and References 125 | 126 | The specific quadrature algorithms come from the [`FastGaussQuadrature.jl`](https://github.com/ajt60gaibb/FastGaussQuadrature.jl) library, which is maintained by [Alex Townsend](https://github.com/ajt60gaibb) of Cornell University. Much of the quadrature code came from the [`DistQuads.jl`](https://github.com/pkofod/DistQuads.jl) library, which is maintained by [Patrick K. Mogensen](https://github.com/pkofod) at the University of Copenhagen. In addition, there are some objects contributed by individual users; see docstring for citations. 127 | 128 | > **WARNING**: It is important to be aware of the deficiencies of numerical quadrature schemes. For example, it is recommended to be careful when using these methods for the following classes of functions and situations: 129 | 130 | * Discontinuous or nondifferentiable functions (even if the function is a.e.-differentiable) 131 | * Periodic/oscillatory functions with a high frequency 132 | * Extremely large numbers of quadrature nodes, which may lead to vanishingly small weights. 133 | 134 | ## Contact 135 | 136 | If you would like to get in touch, please do one of the following: 137 | 138 | * Issue requests: Open an issue on the [package repository](https://github.com/QuantEcon/Expectations.jl) with the tag `feature request`. 139 | * Bugs: Same as above, but with the tag `bug`. 140 | * Pull Request: We are always open to new functionality. If you have a feature you'd like to add (say, a new distribution or algorithm), once you prepare a PR with the feature and some tests, open it in the usual way. 141 | * Other: You can reach out to Arnav Sood at [`misc@arnavsood.com`](mailto:misc@arnavsood.com) 142 | * Citation: If this package was helpful in your research work, you may consider citing the package in whatever method is appropriate for your field. 143 | 144 | [^1]: This is a quadrature scheme written by [Spencer Lyon](http://spencerlyon.com/) (PhD. 
NYU) as part of the [`QuantEcon`](https://quantecon.org/) project. Used with permission. 145 | -------------------------------------------------------------------------------- /src/iterable.jl: -------------------------------------------------------------------------------- 1 | #= 2 | All iterable expectations. =# 3 | 4 | # Callable behavior for the object. Parameters because we cannot add methods to an abstract type. 5 | """ 6 | function (e::IterableExpectation{NT, WT})(f::Function; kwargs...) where {NT, WT} 7 | 8 | Implements callable behavior for `IterableExpectation` objects. 9 | """ 10 | function (e::IterableExpectation{NT,WT})(f::Function) where {NT,WT} 11 | nvec, wvec = nodes(e), weights(e) 12 | E = f(nvec[1]) * wvec[1] 13 | @inbounds for i in 2:length(nvec) 14 | n, w = nvec[i], wvec[i] 15 | E += f(n) * w 16 | end 17 | return E 18 | end 19 | 20 | # Getters for the object. 21 | """ 22 | nodes(e::IterableExpectation) 23 | 24 | Returns the nodes of an `IterableExpectation`. 25 | """ 26 | nodes(e::IterableExpectation) = e.nodes 27 | 28 | """ 29 | weights(e::IterableExpectation) 30 | 31 | Returns the weights of an `IterableExpectation`. 32 | """ 33 | weights(e::IterableExpectation) = e.weights 34 | 35 | # Linear operator behavior. 36 | import Base.* 37 | 38 | # Right-multiplying an expectation by something. 39 | """ 40 | *(e::IterableExpectation, h::AbstractArray) = dot(h, weights(e)) 41 | 42 | Implements the right-application of an `IterableExpectation` by a vector of values on its nodes. 43 | """ 44 | *(e::IterableExpectation, h::AbstractArray) = dot(h, weights(e)) 45 | 46 | 47 | # Left-multiplying an expectation by a scalar. 48 | """ 49 | *(r::Real, e::IterableExpectation) = IterableExpectation(nodes(e), r * weights(e)) 50 | 51 | Implements left-multiplication of an `IterableExpectation` by a real scalar. 
52 | """ 53 | *(r::Real, e::IterableExpectation) = IterableExpectation(nodes(e), r * weights(e)) # Necessary because, for example, multiplying UnitRange * 2 = StepRange 54 | 55 | 56 | #= 57 | Discrete iterable expectations. =# 58 | 59 | # Constructors for the object. 60 | """ 61 | expectation(dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete} = FiniteDiscrete; kwargs...) = _expectation(dist, alg; kwargs...) 62 | 63 | Dispatcher for (finite) discrete univariate expectations. 64 | """ 65 | expectation(dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete}=FiniteDiscrete; kwargs...) = _expectation(dist, alg; kwargs...) 66 | 67 | """ 68 | function _expectation(dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete}; kwargs...) 69 | 70 | Auxiliary constructor for an `IterableExpectation` object. 71 | """ 72 | function _expectation(dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete}; kwargs...) 73 | hasfinitesupport(dist) || throw(ArgumentError("Countably infinite distributions are not supported.")) 74 | ourSupport = support(dist) 75 | ourWeights = pdf.(Ref(dist), support(dist)) 76 | sum(ourWeights) ≈ 1.0 || @warn("The distribution supplied is not approximately equal to 1 in mass.") # @warn: the bare `warn` function was removed in Julia 1.0 77 | return IterableExpectation(ourSupport, ourWeights); 78 | end 79 | 80 | #= 81 | Continuous iterable expectations (no nodes supplied.) =# 82 | 83 | # General catchall behavior --> Gauss-Legendre quadrature. 84 | """ 85 | expectation(dist::ContinuousUnivariateDistribution, alg::Type{<:QuadratureAlgorithm} = Gaussian; kwargs...) = _expectation(dist, alg; kwargs...) 86 | 87 | Dispatcher for continuous univariate expectations. 88 | """ 89 | expectation(dist::ContinuousUnivariateDistribution, alg::Type{<:QuadratureAlgorithm}=Gaussian; kwargs...) = _expectation(dist, alg; kwargs...) 90 | 91 | """ 92 | function _expectation(dist::ContinuousUnivariateDistribution, alg::Type{Gaussian}; n = 500, kwargs...)
93 | 94 | Implements Gauss-Legendre quadrature for continuous univariate distributions for which no specialized method exists. 95 | """ 96 | function _expectation(dist::ContinuousUnivariateDistribution, alg::Type{Gaussian}; n=500, kwargs...) 97 | a = minimum(dist) 98 | b = maximum(dist) 99 | (a > -Inf && b < Inf) || throw(ArgumentError("The distribution must be defined on a compact interval. If applicable, bound the distribution by truncating, e.g., expectation(truncated(Pareto(),0.0,1000.0)")) 100 | rawNodes, rawWeights = gausslegendre(n) 101 | # Transform nodes to proper interval. 102 | nodes = map(x -> (0.5(b - a)) * x + (a + b) / 2, rawNodes) 103 | # Add pdf to weights. 104 | compoundWeights = [rawWeights[i] * pdf(dist, nodes[i]) for i in 1:n] 105 | # Add scale factor to weights. 106 | weights = (b - a) / 2 * compoundWeights 107 | return IterableExpectation(nodes, weights); 108 | end 109 | 110 | # QNWDist implementation. 111 | """ 112 | function _expectation(dist::ContinuousUnivariateDistribution, alg::Type{QuantileRange}; n::Int = 50, q0::Real = 0.001, qN::Real = 0.999 kwargs...) 113 | 114 | Implementation of the qnwdist() quadrature scheme written by Spencer Lyon (PhD. NYU), as part of the QuantEcon.jl library. Used with permission. For further details, see: https://github.com/QuantEcon/QuantEcon.jl/blob/be0a32ec17d1f5b04ed8f2e52604c70c69f416b2/src/quad.jl#L892. 115 | """ 116 | function _expectation(dist::ContinuousUnivariateDistribution, alg::Type{QuantileRange}; n::Int=50, q0::Real=0.001, qN::Real=0.999, kwargs...) 117 | # check nondegeneracy. 118 | all(isfinite.(params(dist))) || throw(ArgumentError("Distribution must be nondegenerate.")) 119 | # _quadnodes in the QuantEcon. 120 | quantiles = range(q0, stop=qN, length=n) 121 | nodes = quantile.(Ref(dist), quantiles) 122 | # qnwdist in the QuantEcon. 
123 | weights = zeros(n) 124 | for i in 2:n - 1 125 | weights[i] = cdf(dist, (nodes[i] + nodes[i + 1]) / 2) - cdf(dist, (nodes[i] + nodes[i - 1]) / 2) 126 | end 127 | weights[1] = cdf(dist, (nodes[1] + nodes[2]) / 2) 128 | weights[end] = 1 - cdf(dist, (nodes[end - 1] + nodes[end]) / 2) 129 | return IterableExpectation(nodes, weights) 130 | end 131 | 132 | """ 133 | function _expectation(dist::Uniform, alg::Type{Gaussian}; n = 30, kwargs...) 134 | 135 | Implements Gauss-Legendre quadrature for the uniform distribution. 136 | """ 137 | function _expectation(dist::Uniform, alg::Type{Gaussian}; n=30, kwargs...) 138 | a, b = params(dist) 139 | (isfinite(a) && isfinite(b)) || throw(ArgumentError("Both parameters must be finite.")) 140 | rawNodes, rawWeights = gausslegendre(n) 141 | nodes = map(x -> (0.5(b - a)) * x + (a + b) / 2, rawNodes) 142 | weights = map(x -> x * 1 / 2, rawWeights) # (result of doing 1/(b-a) * (b-a)/2) 143 | return IterableExpectation(nodes, weights) 144 | end 145 | 146 | # Specific method for normal distributions. 147 | # Number of points was calibrated by trial. 148 | """ 149 | function _expectation(dist::Normal, alg::Type{Gaussian}; n = 30, kwargs...) 150 | 151 | Implements Gauss-Hermite quadrature for normal distributions. 152 | """ 153 | function _expectation(dist::Normal, alg::Type{Gaussian}; n=30, kwargs...) 154 | σ = std(dist) 155 | μ = mean(dist) 156 | (isfinite(σ) && isfinite(μ)) || throw(ArgumentError("Parameters σ, μ must be finite.")) 157 | gh = gausshermite(n) 158 | nodes = gh[1] .* (sqrt(2) * (σ)) .+ μ 159 | weights = gh[2] ./ sqrt(pi) 160 | return IterableExpectation(nodes, weights) 161 | end 162 | 163 | # Specific method for lognormal distributions. 164 | """ 165 | function _expectation(dist::LogNormal, alg::Type{Gaussian}; n = 30, kwargs...) 166 | 167 | Implements Gauss-Hermite quadrature for lognormal distributions. 168 | """ 169 | function _expectation(dist::LogNormal, alg::Type{Gaussian}; n=30, kwargs...) 
# Same settings for the normal method. 170 | m = mean(dist) 171 | v = var(dist) 172 | (isfinite(m) && isfinite(v)) || throw(ArgumentError("Infinite μ or σ^2 are not supported.")) 173 | # get normal nodes 174 | gh = gausshermite(n) 175 | μ = log(m^2 / sqrt(v + m^2)) 176 | σ = sqrt(log(v / m^2 + 1)) 177 | nodes = gh[1] .* sqrt(2) .* (σ) .+ μ 178 | weights = gh[2] ./ sqrt(pi) 179 | # get new nodes 180 | map!(x -> exp(x), nodes, nodes) 181 | return IterableExpectation(nodes, weights) # Transform the output. 182 | end 183 | 184 | # Specific method for beta distributions. 185 | """ 186 | function _expectation(dist::Beta, alg::Type{Gaussian}; n = 32, kwargs...) 187 | 188 | Implements Gauss-Jacobi quadrature for beta distributions. 189 | """ 190 | function _expectation(dist::Beta, alg::Type{Gaussian}; n=32, kwargs...) 191 | α, β = params(dist) 192 | (isfinite(α) && isfinite(β)) || throw(ArgumentError("The beta distribution supplied is malformed.")) 193 | gj = FastGaussQuadrature.jacobi_rec(n, α - 1, β - 1) 194 | G = gamma(α) * gamma(β) / gamma(α + β) 195 | nodes = (1 .- gj[1]) / 2 196 | weights = gj[2] / ((2.0^(α + β - 1.0)) * G) 197 | return IterableExpectation(nodes, weights) 198 | end 199 | 200 | # Specific method for exponential distributions. 201 | """ 202 | function _expectation(dist::Exponential, alg::Type{Gaussian}; n = 32, kwargs...) 203 | 204 | Implements Gauss-Laguerre quadrature for Exponential distributions. 205 | """ 206 | function _expectation(dist::Exponential, alg::Type{Gaussian}; n=32, kwargs...) 207 | θ = inv(dist.θ) # To correct for the Distributions parametrization. 208 | isfinite(θ) || throw(ArgumentError("The Exponential distribution supplied is malformed.")) 209 | gl = gausslaguerre(n) 210 | nodes = gl[1] ./ θ 211 | weights = gl[2] 212 | return IterableExpectation(nodes, weights) 213 | end 214 | 215 | # Specific method for gamma distributions. 216 | """ 217 | function _expectation(dist::Union{Gamma,Erlang}, alg::Type{Gaussian}; n = 32, kwargs...) 
218 | 219 | Implements Gauss-Laguerre quadrature for Gamma distributions. 220 | """ 221 | function _expectation(dist::Union{Gamma,Erlang}, alg::Type{Gaussian}; n=32, kwargs...) 222 | α, θ = params(dist) 223 | (isfinite(α) && isfinite(θ)) || throw(ArgumentError("The Gamma distribution supplied is malformed.")) 224 | gl = gausslaguerre(n, α - 1.) 225 | nodes = gl[1] .* θ 226 | weights = gl[2] ./ gamma(α) 227 | return IterableExpectation(nodes, weights) 228 | end 229 | 230 | # Specific method for Chisq. 231 | """ 232 | function _expectation(dist::Chisq, alg::Type{Gaussian}; n = 32, kwargs...) 233 | """ 234 | function _expectation(dist::Chisq, alg::Type{Gaussian}; n=32, kwargs...) 235 | ν = dist.ν # dist takes one integer parameter 236 | isfinite(ν) || throw(ArgumentError("The Chisq distribution supplied is malformed.")) 237 | gl = gausslaguerre(n, ν / 2 - 1) 238 | nodes = gl[1] .* 2 239 | weights = gl[2] ./ gamma(ν / 2) 240 | return IterableExpectation(nodes, weights) 241 | end 242 | 243 | #= 244 | Continuous iterable distributions (nodes supplied.) =# 245 | 246 | # Dispatcher. 247 | """ 248 | expectation(dist::ContinuousUnivariateDistribution, nodes, alg::Type{<:ExplicitQuadratureAlgorithm} = Trapezoidal; kwargs...) = _expectation(dist, nodes, alg; kwargs...) 249 | 250 | Dispatcher for distributions with user-defined nodes. 251 | """ 252 | expectation(dist::ContinuousUnivariateDistribution, nodes, alg::Type{<:ExplicitQuadratureAlgorithm}=Trapezoidal; kwargs...) = _expectation(dist, nodes, alg; kwargs...) 253 | 254 | # Trapezoidal general behavior. 255 | """ 256 | function _expectation(dist, nodes::AbstractArray, alg::Type{Trapezoidal}; kwargs...) 257 | 258 | Implements trapezoidal integration for general distributions with user-defined nodes. 259 | """ 260 | function _expectation(dist, nodes::AbstractArray, alg::Type{Trapezoidal}; kwargs...) 
261 | isfinite(minimum(dist)) && isfinite(maximum(dist)) || throw(ArgumentError("The distribution must be defined on a compact interval.")) 262 | (first(nodes) >= minimum(dist)) && (last(nodes) <= maximum(dist)) || throw(ArgumentError("The nodes exceed the distribution's support.")) 263 | M = length(nodes) 264 | Δ = diff(nodes) 265 | prepend!(Δ, NaN) # To keep the indexing straight. Now, Δ[2] = Δ_2 = z_2 - z_1. And NaN will throw an error if we try to use it. 266 | f_vec = pdf.(Ref(dist), nodes) 267 | interiorWeights = [f_vec[i] / 2 * (Δ[i] + Δ[i + 1]) for i = 2:M - 1] 268 | allWeights = [f_vec[1] / 2 * Δ[2]; interiorWeights; f_vec[M] / 2 * Δ[M]] 269 | return IterableExpectation(nodes, allWeights) 270 | end 271 | 272 | # Trapezoidal for regular. 273 | """ 274 | function _expectation(dist, nodes::AbstractRange, alg::Type{Trapezoidal}; kwargs...) 275 | 276 | Overloads trapezoidal integration for cases where the user-defined grids are regular. 277 | """ 278 | function _expectation(dist, nodes::AbstractRange, alg::Type{Trapezoidal}; kwargs...) 279 | isfinite(minimum(dist)) && isfinite(maximum(dist)) || throw(ArgumentError("The distribution must be defined on a compact interval.")) 280 | (first(nodes) >= minimum(dist)) && (last(nodes) <= maximum(dist)) || throw(ArgumentError("The nodes exceed the distribution's support.")) 281 | M = length(nodes) 282 | Δ = nodes[2] - nodes[1] 283 | f_vec = pdf.(Ref(dist), nodes) 284 | interiorWeights = [f_vec[i] * Δ for i = 2:M - 1] 285 | allWeights = [f_vec[1] / 2 * Δ; interiorWeights; f_vec[M] / 2 * Δ] 286 | return IterableExpectation(nodes, allWeights) 287 | end 288 | 289 | #= 290 | Convenience functions. =# 291 | """ 292 | expectation(f::Function, dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete} = FiniteDiscrete; kwargs...) = expectation(dist, alg; kwargs...)(f) 293 | 294 | Convenience function for (finite) discrete univariate distributions. 
295 | """ 296 | expectation(f::Function, dist::DiscreteUnivariateDistribution, alg::Type{FiniteDiscrete}=FiniteDiscrete; kwargs...) = expectation(dist, alg; kwargs...)(f) 297 | 298 | """ 299 | expectation(f::Function, dist::ContinuousUnivariateDistribution, alg::Type{<:QuadratureAlgorithm} = Gaussian; kwargs...) = expectation(dist, alg; kwargs...)(f) 300 | 301 | Convenience function for continuous univariate distributions. 302 | """ 303 | expectation(f::Function, dist::ContinuousUnivariateDistribution, alg::Type{<:QuadratureAlgorithm}=Gaussian; kwargs...) = expectation(dist, alg; kwargs...)(f) 304 | 305 | """ 306 | expectation(f::Function, dist::ContinuousUnivariateDistribution, nodes::AbstractArray, alg::Type{<:ExplicitQuadratureAlgorithm} = Trapezoidal; kwargs...) = expectation(dist, nodes, alg; kwargs...)(f) 307 | 308 | Convenience function for continuous univariate distributions with user-supplied nodes. 309 | """ 310 | expectation(f::Function, dist::ContinuousUnivariateDistribution, nodes::AbstractArray, alg::Type{<:ExplicitQuadratureAlgorithm}=Trapezoidal; kwargs...) = expectation(dist, nodes, alg; kwargs...)(f) 311 | --------------------------------------------------------------------------------