├── .ackrc
├── .devcontainer
└── devcontainer.json
├── .dockerignore
├── .git_archival.txt
├── .gitattributes
├── .github
├── ISSUE_TEMPLATE
│ ├── bug-report.yml
│ ├── config.yml
│ ├── documentation-report.yml
│ ├── feature-request.yml
│ └── ~release-checklist.md
├── PULL_REQUEST_TEMPLATE.md
├── PULL_REQUEST_TEMPLATE
│ ├── Bug-Fix.md
│ └── Feature.md
├── dependabot.yml
├── release.yml
├── semantic.yml
└── workflows
│ ├── bump-version.yml
│ ├── ci-windows.yml
│ ├── ci.yml
│ ├── codeql-analysis.yml
│ ├── dependencies-head.yml
│ ├── docker.yml
│ ├── docs.yml
│ ├── lint.yml
│ ├── lower-bound-requirements.yml
│ ├── merged.yml
│ ├── notebooks.yml
│ ├── publish-package.yml
│ ├── release_tests.yml
│ ├── scorecard.yml
│ └── semantic-pr-check.yml
├── .gitignore
├── .hadolint.yaml
├── .pre-commit-config.yaml
├── .readthedocs.yaml
├── .zenodo.json
├── AUTHORS
├── CITATION.cff
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.rst
├── binder
├── apt.txt
├── postBuild
├── runtime.txt
└── trigger_binder.sh
├── codecov.yml
├── docker
├── Dockerfile
└── gpu
│ ├── Dockerfile
│ └── install_backend.sh
├── docs
├── JOSS
│ ├── paper.bib
│ └── paper.md
├── Makefile
├── _extras
│ └── schemas
├── _static
│ ├── .gitkeep
│ ├── css
│ │ └── custom.css
│ ├── img
│ │ ├── README_1bin_example.png
│ │ ├── README_2bin_example.png
│ │ ├── hfh_1bin_55_50_7.png
│ │ ├── hfh_2_bin_100.0_145.0_100.0_150.0_15.0_20.0_30.0_45.0.png
│ │ ├── pyhf-logo-small.png
│ │ ├── pyhf-logo.png
│ │ └── pyhf-logo.svg
│ └── js
│ │ └── custom.js
├── _templates
│ └── autosummary
│ │ ├── class.rst
│ │ └── module.rst
├── api.rst
├── babel.rst
├── bib
│ ├── HEPData_likelihoods.bib
│ ├── docs.bib
│ ├── general_citations.bib
│ ├── media.bib
│ ├── posters.bib
│ ├── preferred.bib
│ ├── talks.bib
│ ├── tutorials.bib
│ └── use_citations.bib
├── citations.rst
├── cli.rst
├── conf.py
├── contributors.rst
├── development.rst
├── examples.rst
├── examples
│ ├── experiments
│ │ └── edwardpyhf.ipynb
│ ├── json
│ │ └── 2-bin_1-channel.json
│ └── notebooks
│ │ ├── ImpactPlot.ipynb
│ │ ├── Recast.ipynb
│ │ ├── ShapeFactor.ipynb
│ │ ├── StatError.ipynb
│ │ ├── XML_ImportExport.ipynb
│ │ ├── altair.ipynb
│ │ ├── binderexample
│ │ ├── StatisticalAnalysis.ipynb
│ │ ├── data.root
│ │ ├── meas.xml
│ │ ├── meas_channel1.xml
│ │ └── workflow.gif
│ │ ├── example-tensorflow.ipynb
│ │ ├── hello-world.ipynb
│ │ ├── histogrammar.ipynb
│ │ ├── histosys-pytorch.ipynb
│ │ ├── histosys.ipynb
│ │ ├── img
│ │ └── 1007.1727.fig5.png
│ │ ├── importxml.ipynb
│ │ ├── learn
│ │ ├── InterpolationCodes.ipynb
│ │ ├── TensorizingInterpolations.ipynb
│ │ ├── TestStatistics.ipynb
│ │ └── UsingCalculators.ipynb
│ │ ├── multiBinPois.ipynb
│ │ ├── multichannel-coupled-histo.ipynb
│ │ ├── multichannel-coupled-normsys.ipynb
│ │ ├── multichannel-normsys.ipynb
│ │ ├── normsys.ipynb
│ │ ├── pullplot.ipynb
│ │ ├── pytorch_tests_onoff.ipynb
│ │ ├── tensorflow-limit.ipynb
│ │ └── toys.ipynb
├── exts
│ └── xref.py
├── faq.rst
├── governance
│ └── ROADMAP.rst
├── index.rst
├── installation.rst
├── intro.rst
├── jupyterlite.rst
├── learn.rst
├── likelihood.rst
├── lite
│ ├── jupyter-lite.json
│ ├── jupyterlite.py
│ └── jupytext.toml
├── outreach.rst
├── release-notes.rst
└── release-notes
│ ├── v0.5.3.rst
│ ├── v0.5.4.rst
│ ├── v0.6.0.rst
│ ├── v0.6.1.rst
│ ├── v0.6.2.rst
│ ├── v0.6.3.rst
│ ├── v0.7.0.rst
│ ├── v0.7.1.rst
│ ├── v0.7.2.rst
│ ├── v0.7.3.rst
│ ├── v0.7.4.rst
│ ├── v0.7.5.rst
│ └── v0.7.6.rst
├── noxfile.py
├── pyproject.toml
├── src
├── conftest.py
└── pyhf
│ ├── __init__.py
│ ├── cli
│ ├── __init__.py
│ ├── cli.py
│ ├── complete.py
│ ├── infer.py
│ ├── patchset.py
│ ├── rootio.py
│ └── spec.py
│ ├── compat.py
│ ├── constraints.py
│ ├── contrib
│ ├── __init__.py
│ ├── cli.py
│ ├── utils.py
│ └── viz
│ │ ├── __init__.py
│ │ └── brazil.py
│ ├── data
│ └── citation.bib
│ ├── events.py
│ ├── exceptions
│ └── __init__.py
│ ├── infer
│ ├── __init__.py
│ ├── calculators.py
│ ├── intervals
│ │ ├── __init__.py
│ │ └── upper_limits.py
│ ├── mle.py
│ ├── test_statistics.py
│ └── utils.py
│ ├── interpolators
│ ├── __init__.py
│ ├── code0.py
│ ├── code1.py
│ ├── code2.py
│ ├── code4.py
│ └── code4p.py
│ ├── mixins.py
│ ├── modifiers
│ ├── __init__.py
│ ├── histosys.py
│ ├── lumi.py
│ ├── normfactor.py
│ ├── normsys.py
│ ├── shapefactor.py
│ ├── shapesys.py
│ └── staterror.py
│ ├── optimize
│ ├── __init__.py
│ ├── common.py
│ ├── mixins.py
│ ├── opt_jax.py
│ ├── opt_minuit.py
│ ├── opt_numpy.py
│ ├── opt_pytorch.py
│ ├── opt_scipy.py
│ └── opt_tflow.py
│ ├── parameters
│ ├── __init__.py
│ ├── paramsets.py
│ ├── paramview.py
│ └── utils.py
│ ├── patchset.py
│ ├── pdf.py
│ ├── probability.py
│ ├── readxml.py
│ ├── schema
│ ├── __init__.py
│ ├── loader.py
│ ├── validator.py
│ └── variables.py
│ ├── schemas
│ ├── 1.0.0
│ │ ├── defs.json
│ │ ├── jsonpatch.json
│ │ ├── measurement.json
│ │ ├── model.json
│ │ ├── patchset.json
│ │ └── workspace.json
│ └── HistFactorySchema.dtd
│ ├── simplemodels.py
│ ├── tensor
│ ├── __init__.py
│ ├── common.py
│ ├── jax_backend.py
│ ├── manager.py
│ ├── numpy_backend.py
│ ├── pytorch_backend.py
│ └── tensorflow_backend.py
│ ├── typing.py
│ ├── utils.py
│ ├── workspace.py
│ └── writexml.py
├── tbump.toml
├── tests
├── benchmarks
│ └── test_benchmark.py
├── conftest.py
├── constraints.txt
├── contrib
│ ├── baseline
│ │ ├── test_plot_results.png
│ │ ├── test_plot_results_components.png
│ │ ├── test_plot_results_components_no_clb.png
│ │ ├── test_plot_results_components_no_cls.png
│ │ ├── test_plot_results_components_no_clsb.png
│ │ └── test_plot_results_no_axis.png
│ ├── test_contrib_utils.py
│ ├── test_viz.py
│ └── test_viz
│ │ ├── hypotest_results.json
│ │ └── tail_probs_hypotest_results.json
├── test_backend_consistency.py
├── test_backends.py
├── test_calculator.py
├── test_cli.py
├── test_combined_modifiers.py
├── test_compat.py
├── test_constraints.py
├── test_custom_mods.py
├── test_events.py
├── test_examples.py
├── test_export.py
├── test_export
│ ├── workspace_integer_data.json
│ ├── workspace_no_parameter_bounds.json
│ └── workspace_no_parameter_inits.json
├── test_import.py
├── test_import
│ ├── xmlimport_missingPOI
│ │ └── config
│ │ │ ├── HistFactorySchema.dtd
│ │ │ ├── example.xml
│ │ │ └── example_channel.xml
│ ├── xmlimport_noChannelData
│ │ └── config
│ │ │ ├── HistFactorySchema.dtd
│ │ │ ├── example.xml
│ │ │ └── example_channel.xml
│ └── xmlimport_noChannelDataPaths
│ │ └── config
│ │ ├── HistFactorySchema.dtd
│ │ ├── example.xml
│ │ └── example_channel.xml
├── test_infer.py
├── test_init.py
├── test_interpolate.py
├── test_jit.py
├── test_mixins.py
├── test_modifiers.py
├── test_modifiers
│ ├── bad_histosys_modifier_patch.json
│ ├── bad_shapesys_modifier_patch.json
│ ├── bad_staterror_modifier_patch.json
│ ├── issue1720_greedy_staterror.json
│ └── spec.json
├── test_notebooks.py
├── test_optim.py
├── test_paramsets.py
├── test_paramviewer.py
├── test_patchset.py
├── test_patchset
│ ├── example_bkgonly.json
│ ├── example_patchset.json
│ ├── patchset_bad_duplicate_patch_name.json
│ ├── patchset_bad_duplicate_patch_values.json
│ ├── patchset_bad_empty_patches.json
│ ├── patchset_bad_no_version.json
│ ├── patchset_bad_wrong_values_multiplicity.json
│ ├── patchset_bad_wrong_valuetype.json
│ ├── patchset_good.json
│ ├── patchset_good_2_patches.json
│ └── patchset_good_stringvalues.json
├── test_pdf.py
├── test_probability.py
├── test_public_api.py
├── test_public_api_repr.py
├── test_regression.py
├── test_schema.py
├── test_schema
│ ├── customschema
│ │ ├── 1.1.0
│ │ │ ├── defs.json
│ │ │ ├── jsonpatch.json
│ │ │ ├── measurement.json
│ │ │ ├── model.json
│ │ │ ├── patchset.json
│ │ │ └── workspace.json
│ │ └── custom.json
│ ├── patchset_bad_empty_patches.json
│ ├── patchset_bad_hepdata_reference.json
│ ├── patchset_bad_invalid_digests.json
│ ├── patchset_bad_label_pattern.json
│ ├── patchset_bad_no_description.json
│ ├── patchset_bad_no_digests.json
│ ├── patchset_bad_no_labels.json
│ ├── patchset_bad_no_patch_name.json
│ ├── patchset_bad_no_patch_values.json
│ ├── patchset_bad_no_version.json
│ └── patchset_good.json
├── test_scripts.py
├── test_scripts
│ ├── example_bkgonly.json
│ ├── example_patchset.json
│ └── xmlimport_absolutePaths
│ │ ├── config
│ │ ├── HistFactorySchema.dtd
│ │ ├── example.xml
│ │ └── example_channel.xml
│ │ └── data
│ │ └── example.root
├── test_simplemodels.py
├── test_tensor.py
├── test_tensorviewer.py
├── test_teststats.py
├── test_toys.py
├── test_utils.py
├── test_validation.py
├── test_workspace.py
└── test_workspace
│ └── poiless.json
└── validation
├── data
├── 1bin_example1.json
├── 1bin_example1_q0.json
├── 1bin_histosys.json
├── 1bin_lumi.json
├── 1bin_normsys.json
├── 2bin_2channel_coupledhisto.json
├── 2bin_2channel_couplednorm.json
├── 2bin_2channel_coupledshapefactor.json
├── 2bin_2channel_example1.json
├── 2bin_example1.json
├── 2bin_histosys.json
├── 2bin_histosys_example2.json
└── 2bin_statex.json
├── makedata.py
├── manualonoff_roofit
└── onoff.py
├── multibin_histfactory
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ └── example_channel.xml
└── data
│ └── data.root
├── multibin_histfactory_p0
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ └── example_channel.xml
├── data
│ └── data.root
└── results
│ ├── example_GaussExample.root
│ ├── example_channel1_GaussExample_model.root
│ └── example_combined_GaussExample_model.root
├── multichan_coupledhistosys_histfactory
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_control.xml
│ └── example_signal.xml
├── data
│ └── data.root
└── makedata.py
├── multichan_coupledoverall_histfactory
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_control.xml
│ └── example_signal.xml
├── data
│ └── data.root
├── makedata.py
└── multichannel_data.json
├── multichannel_histfactory
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_control.xml
│ └── example_signal.xml
├── data
│ └── data.root
├── makedata.py
└── multichannel_data.json
├── overallsys_histfactory
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ └── example_channel.xml
└── data
│ └── data.root
├── run_cls.py
├── run_single.py
├── run_single_q0.py
├── run_toys.py
├── shared_nuispar_across_types
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_channel.xml
│ ├── example_notshared.xml
│ └── example_notshared_channel.xml
├── data
│ └── data.root
└── make_data.py
├── standard_hypo_test_demo.py
├── xmlimport_input
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_channel.xml
│ └── examples
│ │ ├── HistFactorySchema.dtd
│ │ ├── example_DataDriven.xml
│ │ ├── example_DataDriven_controlRegion.xml
│ │ ├── example_DataDriven_signalRegion.xml
│ │ ├── example_Expression.xml
│ │ ├── example_Expression_channel.xml
│ │ ├── example_ShapeSys.xml
│ │ ├── example_ShapeSys2D.xml
│ │ ├── example_ShapeSys2D_channel.xml
│ │ └── example_ShapeSys_channel.xml
├── data
│ ├── ShapeSys.root
│ ├── ShapeSys2D.root
│ ├── StatError.root
│ ├── dataDriven.root
│ └── example.root
├── log
├── results
│ ├── example_ConstExample.root
│ ├── example_GammaExample.root
│ ├── example_GaussExample.root
│ ├── example_LogNormExample.root
│ ├── example_channel1_ConstExample_model.root
│ ├── example_channel1_GammaExample_model.root
│ ├── example_channel1_GaussExample_model.root
│ ├── example_channel1_GaussExample_profileLR.eps
│ ├── example_channel1_LogNormExample_model.root
│ ├── example_combined_ConstExample_model.root
│ ├── example_combined_GammaExample_model.root
│ ├── example_combined_GaussExample_model.root
│ ├── example_combined_GaussExample_profileLR.eps
│ ├── example_combined_LogNormExample_model.root
│ └── example_results.table
└── scan.pdf
├── xmlimport_input2
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ ├── example_control.xml
│ └── example_signal.xml
├── data
│ └── data.root
└── makedata.py
├── xmlimport_input3
├── config
│ └── examples
│ │ ├── HistFactorySchema.dtd
│ │ ├── example_ShapeSys.xml
│ │ └── example_ShapeSys_channel.xml
└── data
│ └── ShapeSys.root
├── xmlimport_input4
├── config
│ ├── HistFactorySchema.dtd
│ ├── example.xml
│ └── example_channel.xml
└── data
│ └── example.root
└── xmlimport_input_bkg.json
/.ackrc:
--------------------------------------------------------------------------------
1 | --ignore-directory=docs/_generated
2 | --ignore-directory=htmlcov
3 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "pyhf-devcontainer",
3 | "build": {
4 | "context": "..",
5 | "dockerfile": "../docker/Dockerfile"
6 | },
7 | "features": {
8 | "ghcr.io/devcontainers/features/git:1": {}
9 | },
10 |
11 | "customizations": {
12 | "vscode": {
13 | "settings": {
14 | "python.defaultInterpreterPath": "/opt/venv/bin/python"
15 | },
16 | "extensions": [
17 | "ms-python.python",
18 | "ms-toolsai.jupyter"
19 | ]
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .nox
2 | .*cache
3 |
--------------------------------------------------------------------------------
/.git_archival.txt:
--------------------------------------------------------------------------------
1 | node: 40ebf6dadd73de3830a931eac04a260ef8f62709
2 | node-date: 2025-03-11T01:07:46+01:00
3 | describe-name: v0.7.0-287-g40ebf6dadd
4 | ref-names: HEAD -> main
5 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | .git_archival.txt export-subst
2 | *.py diff=python
3 | *.md diff=markdown
4 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | # Ref: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser
2 | blank_issues_enabled: true
3 | contact_links:
4 | - name: 🙋 Usage Questions
5 | url: https://github.com/scikit-hep/pyhf/discussions
6 | about: |
7 | Use pyhf's GitHub Discussions to ask "How do I do X with pyhf?".
8 | - name: 📖 Tutorial
9 | url: https://pyhf.github.io/pyhf-tutorial/
10 | about: |
11 |         The pyhf tutorial is continually updated and provides an in-depth walkthrough
12 | of how to use the latest release of pyhf.
13 | - name: 📝 pyhf Code of Conduct
14 | url: https://github.com/scikit-hep/pyhf/blob/main/CODE_OF_CONDUCT.md
15 | about: Expectations for how people will interact with each other on pyhf's GitHub.
16 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/documentation-report.yml:
--------------------------------------------------------------------------------
1 | ---
2 | name: 📝 Documentation Report
3 | description: Create a report for problems with the docs
4 | labels: ["docs", "needs-triage"]
5 |
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: >
10 | **Thank you for wanting to report a problem with pyhf's documentation!**
11 |
12 |
13 | ⚠
14 | Verify first that your issue is not [already reported on
15 | GitHub][issue search].
16 |
17 |
18 | [issue search]: https://github.com/scikit-hep/pyhf/issues?q=is%3Aopen+is%3Aissue+label%3Adocs
19 |
20 | - type: textarea
21 | attributes:
22 | label: Summary
23 | description: >-
24 |         Explain the problem briefly below and add suggestions for wording or structure.
25 | If there are external references that are related please link them here
26 | as well.
27 | placeholder: >-
28 | I was reading the pyhf documentation for pyhf version X and I'm having
29 | problems understanding Y.
30 | It would be very helpful if that got rephrased as Z.
31 | validations:
32 | required: true
33 |
34 | - type: input
35 | attributes:
36 | label: Documentation Page Link
37 | description: |
38 | Paste the link to the documentation webpage that you have a question on.
39 | validations:
40 | required: true
41 |
42 | - type: checkboxes
43 | attributes:
44 | label: Code of Conduct
45 | description: |
46 | Read the [`pyhf` Code of Conduct][CoC] first.
47 |
48 | [CoC]: https://github.com/scikit-hep/pyhf/blob/main/CODE_OF_CONDUCT.md
49 | options:
50 | - label: I agree to follow the Code of Conduct
51 | required: true
52 | ...
53 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | # Description
2 |
3 | Please first read [CONTRIBUTING.md](https://github.com/scikit-hep/pyhf/tree/main/CONTRIBUTING.md).
4 |
5 | Please describe the purpose of this pull request in some detail. Reference and link to any relevant issues or pull requests.
6 |
7 | # Checklist Before Requesting Reviewer
8 |
9 | - [ ] Tests are passing
10 | - [ ] "WIP" removed from the title of the pull request
11 | - [ ] Selected an Assignee for the PR to be responsible for the log summary
12 |
13 | # Before Merging
14 |
15 | For the PR Assignees:
16 |
17 | - [ ] Summarize commit messages into a comprehensive review of the PR
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/Bug-Fix.md:
--------------------------------------------------------------------------------
1 | # Pull Request Description
2 |
3 | Please first read [CONTRIBUTING.md](https://github.com/scikit-hep/pyhf/tree/main/CONTRIBUTING.md).
4 |
5 | Please describe the purpose of this pull request in some detail and what bug it fixes. Reference and link to any relevant issues or pull requests (such as the issue in which this bug was first discussed).
6 |
7 | # Checklist Before Requesting Reviewer
8 |
9 | - [ ] Tests are passing
10 | - [ ] "WIP" removed from the title of the pull request
11 | - [ ] Selected an Assignee for the PR to be responsible for the log summary
12 |
13 | # Before Merging
14 |
15 | For the PR Assignees:
16 |
17 | - [ ] Summarize commit messages into a comprehensive review of the PR
18 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE/Feature.md:
--------------------------------------------------------------------------------
1 | # Pull Request Description
2 |
3 | Please first read [CONTRIBUTING.md](https://github.com/scikit-hep/pyhf/tree/main/CONTRIBUTING.md).
4 |
5 | Please describe the purpose of this pull request in some detail and what the specific feature being added will do. Reference and link to any relevant issues or pull requests (such as the issue in which this feature was first suggested).
6 |
7 | # Checklist Before Requesting Reviewer
8 |
9 | - [ ] Tests are passing
10 | - [ ] "WIP" removed from the title of the pull request
11 | - [ ] Selected an Assignee for the PR to be responsible for the log summary
12 |
13 | # Before Merging
14 |
15 | For the PR Assignees:
16 |
17 | - [ ] Summarize commit messages into a comprehensive review of the PR
18 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | # Maintain dependencies for GitHub Actions
4 | - package-ecosystem: "github-actions"
5 | directory: "/"
6 | schedule:
7 | interval: "weekly"
8 | groups:
9 | actions:
10 | patterns:
11 | - "*"
12 | labels:
13 | - "github-actions"
14 | - "dependencies"
15 | reviewers:
16 | - "matthewfeickert"
17 |
18 | # Ignore all pip dependencies to avoid PRs to update tests/constraints.txt
19 | - package-ecosystem: "pip"
20 | directory: "/"
21 | schedule:
22 | interval: "weekly"
23 | ignore:
24 | - dependency-name: "*"
25 |
--------------------------------------------------------------------------------
/.github/release.yml:
--------------------------------------------------------------------------------
1 | changelog:
2 | exclude:
3 | authors:
4 | - dependabot
5 | - pre-commit-ci
6 |
--------------------------------------------------------------------------------
/.github/semantic.yml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/.github/semantic.yml
--------------------------------------------------------------------------------
/.github/workflows/ci-windows.yml:
--------------------------------------------------------------------------------
1 | name: CI on Windows
2 |
3 | on:
4 | # Run daily at 1:23 UTC
5 | schedule:
6 | - cron: '23 1 * * *'
7 | workflow_dispatch:
8 |
9 | concurrency:
10 | group: ${{ github.workflow }}-${{ github.ref }}
11 | cancel-in-progress: true
12 |
13 | permissions:
14 | contents: read
15 |
16 | jobs:
17 | test:
18 |
19 | runs-on: ${{ matrix.os }}
20 | strategy:
21 | matrix:
22 | os: [windows-latest]
23 | python-version: ['3.9', '3.10', '3.11', '3.12']
24 |
25 | steps:
26 | - uses: actions/checkout@v4
27 |
28 | - name: Set up Python ${{ matrix.python-version }}
29 | uses: actions/setup-python@v5
30 | with:
31 | python-version: ${{ matrix.python-version }}
32 |
33 | - name: Install dependencies
34 | run: |
35 | python -m pip install uv
36 | uv pip install --system --upgrade '.[all,test]'
37 |
38 | - name: List installed Python packages
39 | run: python -m pip list
40 |
41 | - name: Test with pytest and coverage
42 | # FIXME: ignore tests/test_scripts.py as runner breaks on Windows currently
43 | run: |
44 | coverage run --module pytest --ignore tests/test_scripts.py --ignore tests/contrib --ignore tests/benchmarks --ignore tests/test_notebooks.py
45 |
46 | - name: Coverage report for core project
47 | run: |
48 | coverage report
49 | coverage xml
50 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: "CodeQL"
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 | # branches must be a subset of push branches
8 | branches: [main]
9 | # Run weekly on Sundays at 0:01 UTC
10 | schedule:
11 | - cron: '1 0 * * 0'
12 | workflow_dispatch:
13 |
14 | concurrency:
15 | group: ${{ github.workflow }}-${{ github.ref }}
16 | cancel-in-progress: true
17 |
18 | permissions:
19 | contents: read
20 |
21 | jobs:
22 | analyze:
23 | permissions:
24 | actions: read # for github/codeql-action/init to get workflow details
25 | contents: read # for actions/checkout to fetch code
26 | security-events: write # for github/codeql-action/analyze to upload SARIF results
27 | name: Analyze
28 | runs-on: ubuntu-latest
29 |
30 | steps:
31 | - name: Checkout repository
32 | uses: actions/checkout@v4
33 |
34 | # Initializes the CodeQL tools for scanning.
35 | - name: Initialize CodeQL
36 | uses: github/codeql-action/init@v3
37 | with:
38 | languages: python
39 | # If you wish to specify custom queries, you can do so here or in a config file.
40 | # By default, queries listed here will override any specified in a config file.
41 | # Prefix the list here with "+" to use these queries and those in the config file.
42 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
43 |
44 | - name: Perform CodeQL Analysis
45 | uses: github/codeql-action/analyze@v3
46 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on:
4 | pull_request:
5 | workflow_dispatch:
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.ref }}
9 | cancel-in-progress: true
10 |
11 | permissions:
12 | contents: read
13 |
14 | jobs:
15 | lint:
16 |
17 | name: Lint Codebase
18 | runs-on: ubuntu-latest
19 |
20 | steps:
21 | - uses: actions/checkout@v4
22 |
23 | - name: Lint Dockerfile
24 | uses: hadolint/hadolint-action@v3.1.0
25 | with:
26 | dockerfile: docker/Dockerfile
27 |
--------------------------------------------------------------------------------
/.github/workflows/lower-bound-requirements.yml:
--------------------------------------------------------------------------------
1 | name: Minimum supported dependencies
2 |
3 | on:
4 | # Run daily at 0:01 UTC
5 | schedule:
6 | - cron: '1 0 * * *'
7 | workflow_dispatch:
8 |
9 | permissions:
10 | contents: read
11 |
12 | jobs:
13 | test:
14 |
15 | runs-on: ${{ matrix.os }}
16 | strategy:
17 | matrix:
18 | os: [ubuntu-latest]
19 | # minimum supported Python
20 | python-version: ['3.8']
21 |
22 | steps:
23 | - uses: actions/checkout@v4
24 |
25 | - name: Set up Python ${{ matrix.python-version }}
26 | uses: actions/setup-python@v5
27 | with:
28 | python-version: ${{ matrix.python-version }}
29 |
30 | - name: Install dependencies and force lowest bound
31 | run: |
32 | python -m pip install uv
33 | uv pip install --system --upgrade 'setuptools<70.0.0'
34 | uv pip --no-cache install --system --constraint tests/constraints.txt ".[all,test]"
35 |
36 | - name: List installed Python packages
37 | run: python -m pip list
38 |
39 | - name: Test with pytest
40 | run: |
41 | # Override the ini option for filterwarnings with an empty list to disable error on filterwarnings
42 |         # as testing for the oldest releases that work with the latest API, not the oldest releases that are warning
43 | # free. Though still show warnings by setting warning control to 'default'.
44 | export PYTHONWARNINGS='default'
45 | # Run on tests/ to skip doctests of src given examples are for latest APIs
46 | pytest --override-ini filterwarnings= --ignore tests/contrib --ignore tests/benchmarks --ignore tests/test_notebooks.py tests/
47 |
--------------------------------------------------------------------------------
/.github/workflows/merged.yml:
--------------------------------------------------------------------------------
1 | name: Merged PR
2 |
3 | on:
4 | pull_request:
5 | types: [closed]
6 | workflow_dispatch:
7 |
8 | permissions:
9 | contents: read
10 |
11 | jobs:
12 | binder:
13 | name: Trigger Binder build
14 | runs-on: ubuntu-latest
15 | if: github.event.pull_request.merged
16 | steps:
17 | - uses: actions/checkout@v4
18 | - name: Trigger Binder build
19 | run: |
20 | # Use Binder build API to trigger repo2docker to build image on Google Cloud and Turing Institute Binder Federation clusters
21 | bash binder/trigger_binder.sh https://turing.mybinder.org/build/gh/scikit-hep/pyhf/main
22 |
--------------------------------------------------------------------------------
/.github/workflows/notebooks.yml:
--------------------------------------------------------------------------------
1 | name: Notebooks
2 |
3 | on:
4 | # Run daily at 0:01 UTC
5 | schedule:
6 | - cron: '1 0 * * *'
7 | workflow_dispatch:
8 |
9 | concurrency:
10 | group: ${{ github.workflow }}-${{ github.ref }}
11 | cancel-in-progress: true
12 |
13 | permissions:
14 | contents: read
15 |
16 | jobs:
17 | test:
18 |
19 | runs-on: ubuntu-latest
20 | strategy:
21 | matrix:
22 | python-version: ['3.12']
23 |
24 | steps:
25 | - uses: actions/checkout@v4
26 |
27 | - name: Set up Python
28 | uses: actions/setup-python@v5
29 | with:
30 | python-version: ${{ matrix.python-version }}
31 |
32 | - name: Install dependencies
33 | run: |
34 | python -m pip install uv
35 | # FIXME: c.f. https://github.com/scikit-hep/pyhf/issues/2104
36 | uv pip install --system --upgrade ".[all,test]" 'jupyter-client<8.0.0'
37 |
38 | - name: List installed Python packages
39 | run: python -m pip list
40 |
41 | - name: Test example notebooks
42 | run: |
43 | # Override the ini option for filterwarnings with an empty list to disable error
44 | # on filterwarnings as testing for notebooks to run with the latest API, not if
45 | # Jupyter infrastructure is warning free.
46 | pytest --verbose --override-ini filterwarnings= tests/test_notebooks.py
47 |
--------------------------------------------------------------------------------
/.github/workflows/release_tests.yml:
--------------------------------------------------------------------------------
1 | name: Current Release
2 |
3 | on:
4 | # Run daily at 0:01 UTC
5 | schedule:
6 | - cron: '1 0 * * *'
7 | workflow_dispatch:
8 |
9 | concurrency:
10 | group: ${{ github.workflow }}-${{ github.ref }}
11 | cancel-in-progress: true
12 |
13 | permissions:
14 | contents: read
15 |
16 | jobs:
17 |
18 | pypi_release:
19 |
20 | runs-on: ${{ matrix.os }}
21 | strategy:
22 | matrix:
23 | os: [ubuntu-latest]
24 | python-version: ['3.8', '3.9', '3.10', '3.11', '3.12']
25 | include:
26 | - os: macos-latest
27 | python-version: '3.12'
28 | # Intel runner
29 | - os: macos-13
30 | python-version: '3.12'
31 | fail-fast: false
32 |
33 | steps:
34 | - uses: actions/checkout@v4
35 |
36 | - name: Set up Python ${{ matrix.python-version }}
37 | uses: actions/setup-python@v5
38 | with:
39 | python-version: ${{ matrix.python-version }}
40 |
41 | - name: Install from PyPI
42 | run: |
43 | python -m pip install uv
44 | uv pip install --system --pre 'pyhf[backends,xmlio]'
45 | uv pip install --system pytest
46 | python -m pip list
47 |
48 | - name: Canary test public API
49 | run: |
50 | # Override the ini option for filterwarnings with an empty list to disable error on filterwarnings
51 |         # as testing that the latest release API still works, not that the release is warning free.
52 | # Though still show warnings by setting warning control to 'default'.
53 | export PYTHONWARNINGS='default'
54 | pytest tests/test_public_api.py
55 |
--------------------------------------------------------------------------------
/.github/workflows/semantic-pr-check.yml:
--------------------------------------------------------------------------------
1 | name: Semantic Pull Request
2 |
3 | on:
4 | pull_request_target:
5 | types:
6 | - opened
7 | - edited
8 | - synchronize
9 |
10 | concurrency:
11 | group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event.number }}
12 | cancel-in-progress: true
13 |
14 | permissions:
15 | contents: read
16 |
17 | jobs:
18 | main:
19 |
20 | permissions:
21 | pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs
22 | statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR
23 | name: Validate PR title
24 | runs-on: ubuntu-latest
25 |
26 | steps:
27 | - name: Check PR title matches Conventional Commits spec
28 | uses: amannn/action-semantic-pull-request@v5
29 | env:
30 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | src/pyhf/_version.py
2 | MANIFEST
3 | build
4 | dist
5 | _build
6 | docs/man/*.gz
7 | docs/api/generated
8 | docs/config.rst
9 | docs/gh-pages
10 | docs/pyhf.*.rst
11 | docs/_*/
12 | *.py[co]
13 | __pycache__
14 | *.egg-info
15 | *.eggs
16 | *~
17 | *.bak
18 | .ipynb_checkpoints
19 | .tox
20 | .DS_Store
21 | \#*#
22 | .#*
23 | .coverage*
24 | !.coveragerc
25 | coverage*
26 | *,cover
27 |
28 | *.swp
29 | *.map
30 | .idea/
31 | Read the Docs
32 | config.rst
33 |
34 | /.project
35 | /.pydevproject
36 |
37 | # pytest
38 | .pytest_cache
39 | htmlcov
40 | .benchmarks
41 |
42 | # nox
43 | .nox/
44 |
45 | # text editors
46 | .vscode/
47 |
--------------------------------------------------------------------------------
/.hadolint.yaml:
--------------------------------------------------------------------------------
1 | ignored:
2 | - DL3008 # Pin versions in apt get install
3 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yaml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Set the version of Python and other tools you might need
9 | build:
10 | os: ubuntu-22.04
11 | tools:
12 | python: "3.12"
13 | apt_packages:
14 | - curl
15 | - jq
16 | jobs:
17 | post_create_environment:
18 | - pip install uv
19 | post_install:
20 | # VIRTUAL_ENV needs to be set manually for now.
21 | # See https://github.com/readthedocs/readthedocs.org/pull/11152/
22 | - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install '.[docs]'
23 |
24 | # Build documentation in the docs/ directory with Sphinx
25 | sphinx:
26 | configuration: docs/conf.py
27 |
28 | # If using Sphinx, optionally build your docs in additional formats such as PDF and ePub
29 | formats: all
30 |
--------------------------------------------------------------------------------
/.zenodo.json:
--------------------------------------------------------------------------------
1 | {
2 | "description": "pure-Python HistFactory implementation with tensors and autodiff",
3 | "license": "Apache-2.0",
4 | "title": "scikit-hep/pyhf: v0.7.6",
5 | "version": "v0.7.6",
6 | "upload_type": "software",
7 | "creators": [
8 | {
9 | "affiliation": "Technical University of Munich",
10 | "name": "Lukas Heinrich",
11 | "orcid": "0000-0002-4048-7584"
12 | },
13 | {
14 | "affiliation": "University of Wisconsin-Madison",
15 | "name": "Matthew Feickert",
16 | "orcid": "0000-0003-4124-7862"
17 | },
18 | {
19 | "affiliation": "SCIPP, University of California, Santa Cruz",
20 | "name": "Giordon Stark",
21 | "orcid": "0000-0001-6616-3433"
22 | }
23 | ],
24 | "access_right": "open",
25 | "keywords": [
26 | "physics",
27 | "statistics",
28 | "fitting",
29 | "scipy",
30 | "numpy",
31 | "tensorflow",
32 | "pytorch",
33 | "jax",
34 | "auto-differentiation"
35 | ],
36 | "related_identifiers": [
37 | {
38 | "scheme": "url",
39 | "identifier": "https://github.com/scikit-hep/pyhf/tree/v0.7.6",
40 | "relation": "isSupplementTo"
41 | }
42 | ]
43 | }
44 |
--------------------------------------------------------------------------------
/AUTHORS:
--------------------------------------------------------------------------------
1 | pyhf is openly developed by the core dev team consisting of:
2 |
3 | Lukas Heinrich
4 | Matthew Feickert
5 | Giordon Stark
6 |
--------------------------------------------------------------------------------
/binder/apt.txt:
--------------------------------------------------------------------------------
1 | jq
2 |
--------------------------------------------------------------------------------
/binder/postBuild:
--------------------------------------------------------------------------------
1 | python -m pip install --upgrade '.[all]'
2 | python -m pip install --upgrade \
3 | ipywidgets \
4 | ipympl \
5 | altair
6 |
--------------------------------------------------------------------------------
/binder/runtime.txt:
--------------------------------------------------------------------------------
1 | python-3.12
2 |
--------------------------------------------------------------------------------
/binder/trigger_binder.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

# Trigger a Binder build by curling its build API URL.
# curl exit code 28 (--max-time reached) is treated as success: the build
# was started and is still running, so the script reports it and exits 0.

function trigger_binder() {
    # 1: the Binder build API URL to curl
    local URL="${1}"

    curl -L --connect-timeout 10 --max-time 30 "${URL}"
    curl_return=$?

    # Return code 28 is when the --max-time is reached
    if [ "${curl_return}" -eq 0 ] || [ "${curl_return}" -eq 28 ]; then
        if [[ "${curl_return}" -eq 28 ]]; then
            printf "\nBinder build started.\nCheck back soon.\n"
        fi
    else
        return "${curl_return}"
    fi

    return 0
}

function main() {
    # 1: the Binder build API URL to curl
    # Quote the positional parameter so URLs containing spaces or glob
    # characters are not word-split or expanded.
    trigger_binder "${1}"
}

main "$@" || exit 1
--------------------------------------------------------------------------------
/codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | status:
3 | project:
4 | default:
5 | threshold: 0.2%
6 |
7 | ignore:
8 | - "**/typing.py"
9 |
--------------------------------------------------------------------------------
/docker/Dockerfile:
--------------------------------------------------------------------------------
ARG BASE_IMAGE=python:3.12-slim-bullseye
# hadolint ignore=DL3006
FROM ${BASE_IMAGE} AS base

# Builder stage: install pyhf and its dependencies into a self-contained
# virtual environment that can be copied wholesale into the final image.
FROM base AS builder
# Set PATH to pickup virtual environment by default
ENV PATH=/usr/local/venv/bin:"${PATH}"
COPY . /code
# hadolint ignore=DL3003,SC2102,SC1091
RUN apt-get -qq -y update && \
    apt-get -qq -y install --no-install-recommends \
    git && \
    apt-get -y autoclean && \
    apt-get -y autoremove && \
    rm -rf /var/lib/apt/lists/* && \
    python -m venv /usr/local/venv && \
    . /usr/local/venv/bin/activate && \
    cd /code && \
    python -m pip --no-cache-dir install --upgrade uv && \
    uv pip install --no-cache '.[xmlio,contrib]' && \
    uv pip list

# Final stage: only the virtual environment is copied from the builder, so
# build tooling (git, the source checkout) stays out of the runtime image.
FROM base

USER root

SHELL [ "/bin/bash", "-c" ]
ENV PATH=/usr/local/venv/bin:"${PATH}"

RUN apt-get -qq -y update && \
    apt-get -qq -y install --no-install-recommends \
    curl && \
    apt-get -y autoclean && \
    apt-get -y autoremove && \
    rm -rf /var/lib/apt/lists/*

# Create non-root user "moby" with uid 1000
RUN adduser \
    --shell /bin/bash \
    --gecos "default user" \
    --uid 1000 \
    --disabled-password \
    moby && \
    chown -R moby /home/moby && \
    mkdir /work && \
    chown -R moby /work && \
    echo -e "\nexport PATH=/usr/local/venv/bin:${PATH}\n" >> /home/moby/.bashrc

COPY --from=builder --chown=moby /usr/local/venv /usr/local/venv/

# Does NOT define USER as ENV
USER moby

ENV USER=moby
ENV HOME=/home/moby
WORKDIR ${HOME}/work

# Use C.UTF-8 locale to avoid issues with ASCII encoding
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8

ENV PATH=${HOME}/.local/bin:${PATH}

# The first ever run of the CLI API incurs some overhead so do that during the
# build instead of making the user wait
RUN pyhf --version

ENTRYPOINT ["/usr/local/venv/bin/pyhf"]
69 |
--------------------------------------------------------------------------------
/docker/gpu/Dockerfile:
--------------------------------------------------------------------------------
FROM nvidia/cuda:10.1-cudnn7-runtime-ubuntu18.04 AS base

# Builder stage: install Python and pyhf with the GPU backend selected via
# the BACKEND build arg (handled by install_backend.sh).
FROM base AS builder
# hadolint ignore=DL3015
RUN apt-get update -y && \
    apt-get install -y \
    git \
    python3 \
    python3-pip && \
    apt-get -y autoclean && \
    apt-get -y autoremove && \
    rm -rf /var/lib/apt/lists/*
COPY . /code
COPY ./docker/gpu/install_backend.sh /code/install_backend.sh
WORKDIR /code
# Backend extra to install: tensorflow (default), torch, or jax
ARG BACKEND=tensorflow
RUN python3 -m pip --no-cache-dir install --upgrade pip wheel && \
    /bin/bash install_backend.sh ${BACKEND} && \
    python3 -m pip list

# Final stage: copy the Python runtime and installed packages from the
# builder onto the bare CUDA base image (note: python3.6 paths are pinned
# to the Ubuntu 18.04 system Python).
FROM base
# Use C.UTF-8 locale to avoid issues with ASCII encoding
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
COPY --from=builder /lib/x86_64-linux-gnu /lib/x86_64-linux-gnu
COPY --from=builder /usr/local /usr/local
COPY --from=builder /usr/bin/python3 /usr/bin/python3
COPY --from=builder /usr/bin/python3.6 /usr/bin/python3.6
COPY --from=builder /usr/bin/pip3 /usr/bin/pip3
COPY --from=builder /usr/lib/python3 /usr/lib/python3
COPY --from=builder /usr/lib/python3.6 /usr/lib/python3.6
COPY --from=builder /usr/lib/x86_64-linux-gnu /usr/lib/x86_64-linux-gnu
ENTRYPOINT ["/usr/local/bin/pyhf"]
34 |
--------------------------------------------------------------------------------
/docker/gpu/install_backend.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -e

function get_JAXLIB_GPU_WHEEL {
    # Construct the download URL of the CUDA-enabled jaxlib wheel matching
    # this container's Python and CUDA versions.
    # c.f. https://github.com/google/jax#pip-installation
    local PYTHON_VERSION # alternatives: cp35, cp36, cp37, cp38
    PYTHON_VERSION="cp"$(python3 --version | awk '{print $NF}' | awk '{split($0, rel, "."); print rel[1]rel[2]}')
    local CUDA_VERSION # alternatives: cuda90, cuda92, cuda100, cuda101
    CUDA_VERSION="cuda"$(< /usr/local/cuda/version.txt awk '{print $NF}' | awk '{split($0, rel, "."); print rel[1]rel[2]}')
    local PLATFORM=linux_x86_64
    local JAXLIB_VERSION=0.1.37
    local BASE_URL="https://storage.googleapis.com/jax-releases"
    local JAXLIB_GPU_WHEEL="${BASE_URL}/${CUDA_VERSION}/jaxlib-${JAXLIB_VERSION}-${PYTHON_VERSION}-none-${PLATFORM}.whl"
    echo "${JAXLIB_GPU_WHEEL}"
}

function install_backend() {
    # 1: the backend option name in setup.py
    # Install pyhf with the extras for the selected backend; unknown backend
    # names install nothing (same as the original if/elif chain).
    local backend="${1}"
    case "${backend}" in
        tensorflow)
            # shellcheck disable=SC2102
            python3 -m pip install --no-cache-dir .[xmlio,tensorflow]
            ;;
        torch)
            # shellcheck disable=SC2102
            python3 -m pip install --no-cache-dir .[xmlio,torch]
            ;;
        jax)
            python3 -m pip install --no-cache-dir .[xmlio]
            python3 -m pip install --no-cache-dir "$(get_JAXLIB_GPU_WHEEL)"
            python3 -m pip install --no-cache-dir jax
            ;;
    esac
}

function main() {
    # 1: the backend option name in setup.py
    local BACKEND="${1}"
    install_backend "${BACKEND}"
}

main "$@" || exit 1
41 |
--------------------------------------------------------------------------------
/docs/_extras/schemas:
--------------------------------------------------------------------------------
1 | ../../src/pyhf/schemas
--------------------------------------------------------------------------------
/docs/_static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/.gitkeep
--------------------------------------------------------------------------------
/docs/_static/css/custom.css:
--------------------------------------------------------------------------------
1 | /* 1200px for slightly wider for most monitors */
2 | .wy-nav-content {
3 | max-width: 1200px !important;
4 | }
5 |
6 | /* version warning badge */
7 | #dev-version {
8 | display: none;
9 | }
10 |
11 | #dev-version.version-warning {
12 | display: block;
13 | }
14 |
15 | p.version-warning {
16 | position: sticky;
17 | top: 10px;
18 |
19 | margin: 10px 0;
20 | padding: 5px 10px;
21 | border-radius: 4px;
22 |
23 | letter-spacing: 1px;
24 | color: #fff;
25 | text-shadow: 0 0 2px #000;
26 | text-align: center;
27 |
28 | background: #900 repeating-linear-gradient(
29 | 135deg,
30 | transparent,
31 | transparent 56px,
32 | rgba(255, 255, 255, 0.35) 56px,
33 | rgba(255, 255, 255, 0.35) 112px
34 | );
35 | }
36 |
37 | p.version-warning a {
38 | color: #fff;
39 | text-decoration: none;
40 | border-bottom: 1px dotted #fff;
41 | }
42 |
43 | p.version-warning a:hover {
44 | border-bottom-style: solid;
45 | }
46 | /* -------------------------- */
47 |
--------------------------------------------------------------------------------
/docs/_static/img/README_1bin_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/README_1bin_example.png
--------------------------------------------------------------------------------
/docs/_static/img/README_2bin_example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/README_2bin_example.png
--------------------------------------------------------------------------------
/docs/_static/img/hfh_1bin_55_50_7.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/hfh_1bin_55_50_7.png
--------------------------------------------------------------------------------
/docs/_static/img/hfh_2_bin_100.0_145.0_100.0_150.0_15.0_20.0_30.0_45.0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/hfh_2_bin_100.0_145.0_100.0_150.0_15.0_20.0_30.0_45.0.png
--------------------------------------------------------------------------------
/docs/_static/img/pyhf-logo-small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/pyhf-logo-small.png
--------------------------------------------------------------------------------
/docs/_static/img/pyhf-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/_static/img/pyhf-logo.png
--------------------------------------------------------------------------------
/docs/_static/js/custom.js:
--------------------------------------------------------------------------------
// Reveal the development-version warning banner when the docs are being
// served from scikit-hep.org/pyhf (i.e. not from Read the Docs).
document.addEventListener("DOMContentLoaded", () => {
    const banner = document.getElementById("dev-version");
    const onScikitHep =
        window.location.href.indexOf("scikit-hep.org/pyhf") > -1;

    if (banner && onScikitHep) {
        // are we not on readthedocs?
        banner.classList.add("version-warning");
    }
});
10 |
--------------------------------------------------------------------------------
/docs/_templates/autosummary/class.rst:
--------------------------------------------------------------------------------
1 | {{ name | escape | underline}}
2 |
3 | .. currentmodule:: {{ module }}
4 |
5 | .. autoclass:: {{ objname }}
6 | :show-inheritance:
7 |
8 | .. automethod:: __init__
9 |
10 | {% block attributes %}
11 | {% if attributes %}
12 | .. rubric:: {{ _('Attributes') }}
13 |
14 | {% for item in attributes %}
15 | .. autoattribute:: {{ name }}.{{ item }}
16 | {%- endfor %}
17 | {% endif %}
18 | {% if name == 'numpy_backend' %}
19 | {% if 'array_type' in members %}
20 | .. autoattribute:: {{ name }}.array_type
21 | {% endif %}
22 | {% if 'array_subtype' in members %}
23 | .. autoattribute:: {{ name }}.array_subtype
24 | {% endif %}
25 | {% endif %}
26 | {% endblock %}
27 |
28 | {% block methods %}
29 |
30 | {% if methods %}
31 | .. rubric:: {{ _('Methods') }}
32 |
33 | {% for item in members %}
34 | {% if item not in attributes and item not in inherited_members and not item.startswith('__') and item not in ['array_type', 'array_subtype'] %}
35 | .. automethod:: {{ name }}.{{ item }}
36 | {% endif %}
37 | {%- endfor %}
38 |
39 | {% endif %}
40 | {% endblock %}
41 |
--------------------------------------------------------------------------------
/docs/_templates/autosummary/module.rst:
--------------------------------------------------------------------------------
1 | {{ fullname | escape | underline }}
2 |
3 | .. rubric:: Description
4 |
5 | .. automodule:: {{ fullname }}
6 |
7 | .. currentmodule:: {{ fullname }}
8 |
9 | {% if classes %}
10 | .. rubric:: Classes
11 |
12 | .. autosummary::
13 | :toctree: .
14 | {% for class in classes %}
15 | {{ class }}
16 | {% endfor %}
17 |
18 | {% endif %}
19 |
20 | {% if functions %}
21 | .. rubric:: Functions
22 |
23 | .. autosummary::
24 | :toctree: .
25 | {% for function in functions %}
26 | {{ function }}
27 | {% endfor %}
28 |
29 | {% endif %}
30 |
--------------------------------------------------------------------------------
/docs/bib/media.bib:
--------------------------------------------------------------------------------
1 | @article{CERN-COURIER-61-3_May_2021,
2 | title = {{LHC reinterpreters think long-term}},
3 | author = {Sabine Kraml},
4 | journal = {CERN Courier Volume 61, Number 3, May/June 2021},
5 | year = {2021},
6 | month = {April},
7 | day = {28},
8 | url = {https://cerncourier.com/a/lhc-reinterpreters-think-long-term/},
9 | note = {https://cds.cern.ch/record/2765233}
10 | }
11 |
12 | @article{Symmetry_magazine_2021,
13 | title = {{ATLAS releases 'full orchestra' of analysis instruments}},
14 | author = {Stephanie Melchor},
15 | journal = {Symmetry Magazine},
16 | year = {2021},
17 | month = {January},
18 | day = {14},
19 | url = {https://www.symmetrymagazine.org/article/atlas-releases-full-orchestra-of-analysis-instruments},
20 | }
21 |
22 | @article{CERN_News_2019,
23 | title = {{New open release allows theorists to explore LHC data in a new way}},
24 | author = {Katarina Anthony},
25 | journal = {CERN News},
26 | year = {2020},
27 | month = {January},
28 | day = {9},
29 | url = {https://home.cern/news/news/knowledge-sharing/new-open-release-allows-theorists-explore-lhc-data-new-way},
30 | }
31 |
32 | @article{ATLAS_News_2019,
33 | title = {{New open release streamlines interactions with theoretical physicists}},
34 | author = {Katarina Anthony},
35 | journal = {ATLAS News},
36 | year = {2019},
37 | month = {December},
38 | day = {12},
39 | url = {https://atlas.cern/updates/atlas-news/new-open-likelihoods},
40 | }
41 |
--------------------------------------------------------------------------------
/docs/bib/posters.bib:
--------------------------------------------------------------------------------
1 | @unpublished{Heinrich_ACAT2019,
2 | title = {{pyhf: auto-differentiable binned statistical models}},
3 | author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer},
4 | year = {2019},
5 | month = {March},
6 | day = {12},
7 | note = {19th International Workshop on Advanced Computing and Analysis Techniques in Physics Research (ACAT 2019)},
8 | organization = {CERN},
9 | url = {https://indico.cern.ch/event/708041/contributions/3272095/},
10 | }
11 |
12 | @misc{Feickert_SciPy2019,
13 | title = {{pyhf: a pure Python statistical fitting library for High Energy Physics with tensors and autograd}},
14 | author = {Matthew Feickert and Lukas Heinrich and Giordon Stark and Kyle Cranmer},
15 | year = {2019},
16 | month = {July},
17 | day = {10},
18 | note = {18th Scientific Computing with Python Conference (SciPy 2019)},
19 | doi = {10.25080/Majora-7ddc1dd1-019},
20 | url = {http://conference.scipy.org/proceedings/scipy2019/slides.html},
21 | }
22 |
23 | @misc{Feickert_CHEP2019,
24 | title = {{pyhf: pure Python implementation of HistFactory}},
25 | author = {Matthew Feickert and Lukas Heinrich and Giordon Stark and Kyle Cranmer},
26 | year = {2019},
27 | month = {November},
28 | day = {5},
29 | note = {24th International Conference on computing in High Energy \& Nuclear Physics (CHEP 2019)},
30 | url = {https://indico.cern.ch/event/773049/contributions/3476180/},
31 | }
32 |
--------------------------------------------------------------------------------
/docs/bib/preferred.bib:
--------------------------------------------------------------------------------
1 | ../../src/pyhf/data/citation.bib
--------------------------------------------------------------------------------
/docs/bib/tutorials.bib:
--------------------------------------------------------------------------------
1 | % NB: entries with same author-title-year are not picked up:
2 | % https://github.com/mcmtroffaes/sphinxcontrib-bibtex/issues/117
3 |
4 | @unpublished{Feickert_20210407,
5 | title = {{Tutorial on pyhf}},
6 | author = {Matthew Feickert},
7 | year = {2021},
8 | month = {Apr},
9 | day = {7},
10 | note = {PyHEP Python Module of the Month (April 2021)},
11 | doi = {10.5281/zenodo.4670322},
12 | url = {https://indico.cern.ch/event/985425/},
13 | }
14 |
15 | @unpublished{GStark20200925,
16 | title = {{ATLAS Exotics + SUSY Workshop 2020 pyhf Tutorial}},
17 | author = {Giordon Stark},
18 | year = {2020},
19 | month = {September},
20 | day = {25},
21 | note = {ATLAS Exotics + SUSY Workshop 2020},
22 | organization = {CERN},
23 | url = {https://pyhf.github.io/tutorial-ATLAS-SUSY-Exotics-2020/introduction.html},
24 | }
25 |
26 | @unpublished{Feickert20200716,
27 | title = {{pyhf: Accelerating analyses and preserving likelihoods}},
28 | author = {Matthew Feickert},
29 | year = {2020},
30 | month = {Jul},
31 | day = {16},
32 | note = {PyHEP 2020 Workshop (pyhf v0.4.4)},
33 | organization = {HEP Software Foundation},
34 | url = {https://indico.cern.ch/event/882824/contributions/3931292/},
35 | }
36 |
37 | @unpublished{Heinrich20200716,
38 | title = {{pyhf tutorial}},
39 | author = {Lukas Heinrich},
40 | year = {2020},
41 | month = {Jul},
42 | day = {16},
43 | note = {(Internal) ATLAS Induction Day + Software Tutorial (pyhf v0.4.4)},
44 | organization = {CERN},
45 | url = {https://indico.cern.ch/event/892952/contributions/3853306/},
46 | }
47 |
48 | @unpublished{Heinrich20191025,
49 | title = {{Introduction to pyhf}},
50 | author = {Lukas Heinrich},
51 | year = {2019},
52 | month = {Oct},
53 | day = {25},
54 | note = {(Internal) ATLAS Induction Day + Software Tutorial (pyhf v0.1.2)},
55 | organization = {CERN},
56 | url = {https://indico.cern.ch/event/831761/contributions/3484275/},
57 | }
58 |
--------------------------------------------------------------------------------
/docs/citations.rst:
--------------------------------------------------------------------------------
1 | Use and Citations
2 | =================
3 |
4 | .. raw:: html
5 |
6 |
Warning: This is a development version and should not be cited. To find the specific version to cite, please go to ReadTheDocs.
7 |
8 | Citation
9 | --------
10 |
The preferred BibTeX entry for citation of ``pyhf`` includes both the `Zenodo <https://doi.org/10.5281/zenodo.1169739>`__
archive and the `JOSS <https://doi.org/10.21105/joss.02823>`__ paper:
13 |
14 | .. literalinclude:: bib/preferred.bib
15 | :language: bibtex
16 |
17 | Use in Publications
18 | -------------------
19 |
20 | The following is an updating list of citations and use cases of :code:`pyhf`.
21 | There is an incomplete but automatically updated `list of citations on INSPIRE
22 | `__ as well.
23 |
24 | Use Citations
25 | ~~~~~~~~~~~~~
26 |
27 | .. bibliography:: bib/use_citations.bib
28 | :list: enumerated
29 | :all:
30 | :style: unsrt
31 |
32 | General Citations
33 | ~~~~~~~~~~~~~~~~~
34 |
35 | .. bibliography:: bib/general_citations.bib
36 | :list: enumerated
37 | :all:
38 | :style: unsrt
39 |
40 | Published Statistical Models
41 | ----------------------------
42 |
43 | The following is an updating list of HEPData entries for publications using ``HistFactory`` JSON statistical models.
44 |
45 | .. note::
46 |
   There is also an automatically generated list of statistical models that is updated
   nightly available at `pyhf.github.io/public-probability-models
   <https://pyhf.github.io/public-probability-models/>`__.
50 |
51 | .. bibliography:: bib/HEPData_likelihoods.bib
52 | :list: enumerated
53 | :all:
54 | :style: unsrt
55 |
56 | .. note::
57 |
   ATLAS maintains a public listing of all published statistical models on the `ATLAS public results
   page <https://twiki.cern.ch/twiki/bin/view/AtlasPublic>`__ which can be found by filtering all
   public results by the "Likelihood available" analysis characteristics keyword.
61 |
--------------------------------------------------------------------------------
/docs/cli.rst:
--------------------------------------------------------------------------------
1 | Command Line API
2 | ================
3 |
4 | .. click:: pyhf.cli.cli:pyhf
5 | :prog: pyhf
6 | :show-nested:
7 |
--------------------------------------------------------------------------------
/docs/contributors.rst:
--------------------------------------------------------------------------------
1 | Contributors
2 | ============
3 |
4 | ``pyhf`` is openly developed and benefits from the contributions and feedback
5 | from its users.
6 | The ``pyhf`` dev team would like to thank all contributors to the project for
7 | their support and help.
8 | Thank you!
9 |
10 | Contributors include:
11 |
12 | - Jessica Forde
13 | - Ruggero Turra
14 | - Tadej Novak
15 | - Frank Sauerburger
16 | - Lars Nielsen
17 | - Kanishk Kalra
18 | - Nikolai Hartmann
19 | - Alexander Held
20 | - Karthikeyan Singaravelan
21 | - Marco Gorelli
22 | - Pradyumna Rahul K
23 | - Eric Schanet
24 | - Henry Schreiner
25 | - Saransh Chopra
26 | - Sviatoslav Sydorenko
27 | - Mason Proffitt
28 | - Lars Henkelmann
29 | - Aryan Roy
30 | - Graeme Watt
31 | - Jerry Ling
32 | - Nathan Simpson
33 | - Beojan Stanislaus
34 | - Daniel Werner
35 | - Jonas Rembser
36 | - Lorenz Gaertner
37 | - Melissa Weber Mendonça
38 | - Matthias Bussonnier
39 |
--------------------------------------------------------------------------------
/docs/examples.rst:
--------------------------------------------------------------------------------
1 | Examples
2 | ========
3 |
4 | Try out in Binder! |Binder|
5 |
6 | .. |Binder| image:: https://mybinder.org/badge_logo.svg
7 | :target: https://mybinder.org/v2/gh/scikit-hep/pyhf/main?labpath=docs%2Fexamples%2Fnotebooks%2Fbinderexample%2FStatisticalAnalysis.ipynb
8 |
9 | Notebooks:
10 |
11 | .. toctree::
12 | :maxdepth: 2
13 | :glob:
14 |
15 | examples/notebooks/*
16 | examples/notebooks/learn/*
17 | examples/notebooks/binderexample/*
18 |
--------------------------------------------------------------------------------
/docs/examples/json/2-bin_1-channel.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | { "name": "singlechannel",
4 | "samples": [
5 | { "name": "signal",
6 | "data": [5.0, 10.0],
7 | "modifiers": [ { "name": "mu", "type": "normfactor", "data": null} ]
8 | },
9 | { "name": "background",
10 | "data": [50.0, 60.0],
11 | "modifiers": [ {"name": "uncorr_bkguncrt", "type": "shapesys", "data": [5.0, 12.0]} ]
12 | }
13 | ]
14 | }
15 | ],
16 | "observations": [
17 | { "name": "singlechannel", "data": [50.0, 60.0] }
18 | ],
19 | "measurements": [
20 | { "name": "Measurement", "config": {"poi": "mu", "parameters": []} }
21 | ],
22 | "version": "1.0.0"
23 | }
24 |
--------------------------------------------------------------------------------
/docs/examples/notebooks/binderexample/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/examples/notebooks/binderexample/data.root
--------------------------------------------------------------------------------
/docs/examples/notebooks/binderexample/meas.xml:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
9 |
10 | ./meas_channel1.xml
11 |
12 |
13 | SigXsecOverSM
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/examples/notebooks/binderexample/meas_channel1.xml:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
--------------------------------------------------------------------------------
/docs/examples/notebooks/binderexample/workflow.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/examples/notebooks/binderexample/workflow.gif
--------------------------------------------------------------------------------
/docs/examples/notebooks/img/1007.1727.fig5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/docs/examples/notebooks/img/1007.1727.fig5.png
--------------------------------------------------------------------------------
/docs/exts/xref.py:
--------------------------------------------------------------------------------
1 | from docutils import nodes
2 |
3 | from sphinx.util import caption_ref_re
4 |
5 |
def xref(typ, rawtext, text, lineno, inliner, options=None, content=None):
    """Sphinx role that resolves a target key to an external link.

    The link text and URL come from ``xref.links`` (populated from the
    ``xref_links`` config value by :func:`get_refs`).  Supports both the
    bare ``:xref:`target``` form, which uses the configured title, and the
    explicit ``:xref:`title <target>``` form.
    """
    # avoid mutable defaults
    options = options if options is not None else {}
    content = content if content is not None else []

    title = target = text
    # look if explicit title and target are given with `foo <bar>` syntax
    angle = text.find('<')
    explicit = angle != -1
    if explicit:
        match = caption_ref_re.match(text)
        if match:
            target = match.group(2)
            title = match.group(1)
        else:
            # fallback: everything after '<' is the target
            target = text[angle + 1 :]
            title = text[:angle]

    # KeyError here means the target is missing from the xref_links config
    link = xref.links[target]

    # Explicit form keeps the author-supplied title; bare form uses the
    # configured link title.
    label = title if explicit else link[0]
    pnode = nodes.reference(target, label, refuri=link[1])

    return [pnode], []
34 |
35 |
def get_refs(app):
    """Cache the ``xref_links`` config mapping on the role function.

    Runs on ``builder-inited`` so the ``xref`` role can look up targets via
    ``xref.links`` without needing a handle on the Sphinx application.
    """
    xref.links = app.config.xref_links
38 |
39 |
def setup(app):
    """Sphinx extension entry point: register the ``xref`` role.

    ``xref_links`` maps a target key to a ``(title, url)`` pair, as read by
    the ``xref`` role function.
    """
    app.add_config_value('xref_links', {}, True)
    app.add_role('xref', xref)
    app.connect("builder-inited", get_refs)
44 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. pyhf documentation master file, created by
2 | sphinx-quickstart on Fri Feb 9 11:58:49 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root ``toctree`` directive.
5 |
6 | .. toctree::
7 | :hidden:
8 |
9 | intro
10 | likelihood
11 | examples
12 | api
13 | cli
14 | installation
15 | development
16 | faq
17 | learn
18 | babel
19 | outreach
20 | citations
21 | governance/ROADMAP
22 | release-notes
23 | contributors
24 |
25 | .. raw:: html
26 |
27 | Warning: This is a development version. The latest stable version is at ReadTheDocs.
28 |
29 | ..
30 | Comment: Splice the JupyterLite example into the README by looking for a particular comment
31 |
32 | .. include:: ../README.rst
33 | :end-before: Comment: JupyterLite segment goes here in docs
34 |
35 | .. include:: jupyterlite.rst
36 |
37 | .. include:: ../README.rst
38 | :start-after: Comment: JupyterLite segment goes here in docs
39 |
40 | Indices and tables
41 | ==================
42 |
43 | * :ref:`genindex`
44 | * :ref:`modindex`
45 | * :ref:`search`
46 |
--------------------------------------------------------------------------------
/docs/jupyterlite.rst:
--------------------------------------------------------------------------------
1 | Try out now with JupyterLite_
2 | -----------------------------
3 |
4 | ..
5 | Comment: Use https://github.com/jupyterlite/jupyterlite-sphinx
6 |
7 | .. retrolite:: lite/jupyterlite.ipynb
8 | :width: 100%
9 | :height: 600px
10 | :prompt: Try pyhf!
11 | :prompt_color: #3a77b0
12 |
13 | ..
14 | Comment: Add an extra blank line as a spacer
15 |
16 | |
17 |
18 | .. _JupyterLite: https://jupyterlite.readthedocs.io/
19 |
--------------------------------------------------------------------------------
/docs/learn.rst:
--------------------------------------------------------------------------------
1 | Fundamentals
2 | ============
3 |
4 | Notebooks:
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 | :glob:
9 |
10 | examples/notebooks/learn/*
11 |
--------------------------------------------------------------------------------
/docs/lite/jupyter-lite.json:
--------------------------------------------------------------------------------
1 | {
2 | "jupyter-lite-schema-version": 0,
3 | "jupyter-config-data": {
4 | "enableMemoryStorage": true,
5 | "settingsStorageDrivers": ["memoryStorageDriver"],
6 | "contentsStorageDrivers": ["memoryStorageDriver"]
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/docs/lite/jupyterlite.py:
--------------------------------------------------------------------------------
1 | # ---
2 | # jupyter:
3 | # kernelspec:
4 | # display_name: Python (Pyodide)
5 | # language: python
6 | # name: python
7 | # ---
8 |
9 | # %% [markdown]
10 | # # `pyhf` in the browser
11 |
12 | # %% [markdown]
13 | # * To run the code, click on the first cell (gray box) and press Shift+Enter or click on the (Run) ▶ button to run each cell.
14 | # * Alternatively, from the `Run` menu select `Run All Cells`.
15 | # * Feel free to experiment, and if you need to restore the original code reload this browser page. Any changes you make will be lost when you reload.
16 | #
17 | # To get going try copying and pasting the "Hello World" example below!
18 |
19 | # %%
20 | import piplite
21 |
22 | # Install pyhf in the browser
23 | await piplite.install(["pyhf==0.7.6", "matplotlib>=3.0.0"])
24 | # %matplotlib inline
25 | import pyhf
26 |
27 | # You can now use pyhf!
28 |
--------------------------------------------------------------------------------
/docs/lite/jupytext.toml:
--------------------------------------------------------------------------------
1 | # Always pair ipynb notebooks in the current directory to py:percent files
2 | formats = ["ipynb", "py:percent"]
3 | notebook_metadata_filter = "-all,kernelspec"
4 |
--------------------------------------------------------------------------------
/docs/release-notes.rst:
--------------------------------------------------------------------------------
1 | =============
2 | Release Notes
3 | =============
4 |
5 | .. include:: release-notes/v0.7.6.rst
6 | .. include:: release-notes/v0.7.5.rst
7 | .. include:: release-notes/v0.7.4.rst
8 | .. include:: release-notes/v0.7.3.rst
9 | .. include:: release-notes/v0.7.2.rst
10 | .. include:: release-notes/v0.7.1.rst
11 | .. include:: release-notes/v0.7.0.rst
12 | .. include:: release-notes/v0.6.3.rst
13 | .. include:: release-notes/v0.6.2.rst
14 | .. include:: release-notes/v0.6.1.rst
15 | .. include:: release-notes/v0.6.0.rst
16 | .. include:: release-notes/v0.5.4.rst
17 | .. include:: release-notes/v0.5.3.rst
18 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.5.3.rst:
--------------------------------------------------------------------------------
1 | |release v0.5.3|_
2 | =================
3 |
4 | This is a patch release from ``v0.5.2`` → ``v0.5.3``.
5 |
6 | Fixes
7 | -----
8 |
9 | * Workspaces are now immutable
10 | * ShapeFactor support added to XML reading and writing
11 | * An error is raised if a fit initialization parameter is outside of its bounds
12 | (preventing hypotest with POI outside of bounds)
13 |
14 | Features
15 | --------
16 |
17 | Python API
18 | ~~~~~~~~~~
19 |
20 | * Inverting hypothesis tests to get upper limits now has an API with
21 | ``pyhf.infer.intervals.upperlimit``
22 | * Building workspaces from a model and data added with ``pyhf.workspace.build``
23 |
24 | CLI API
25 | ~~~~~~~
26 |
27 | * Added CLI API for ``pyhf.infer.fit``: ``pyhf fit``
28 | * pyhf combine now allows for merging channels: ``pyhf combine --merge-channels --join <join option>``
29 | * Added utility to download archived pyhf pallets (workspaces + patchsets) to contrib module: ``pyhf contrib download``
30 |
31 | Contributors
32 | ------------
33 |
34 | ``v0.5.3`` benefited from contributions from:
35 |
36 | * Karthikeyan Singaravelan
37 |
38 | .. |release v0.5.3| replace:: ``v0.5.3``
39 | .. _`release v0.5.3`: https://github.com/scikit-hep/pyhf/releases/tag/v0.5.3
40 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.5.4.rst:
--------------------------------------------------------------------------------
1 | |release v0.5.4|_
2 | =================
3 |
4 | This is a patch release from ``v0.5.3`` → ``v0.5.4``.
5 |
6 | Fixes
7 | -----
8 |
9 | * Require ``uproot3`` instead of ``uproot`` ``v3.X`` releases to avoid conflicts when
10 | ``uproot4`` is installed in an environment with ``uproot`` ``v3.X`` installed and
11 | namespace conflicts with ``uproot-methods``.
12 | Adoption of ``uproot3`` in ``v0.5.4`` will ensure ``v0.5.4`` works far into the future
13 | if XML and ROOT I/O through uproot is required.
14 |
15 | **Example:**
16 |
17 | Without the ``v0.5.4`` patch release there is a regression in using ``uproot`` ``v3.X``
18 | and ``uproot4`` in the same environment (which was swiftly identified and patched by the
19 | fantastic ``uproot`` team)
20 |
21 | .. code-block:: shell
22 |
23 | $ python -m pip install "pyhf[xmlio]<0.5.4"
24 | $ python -m pip list | grep "pyhf\|uproot"
25 | pyhf 0.5.3
26 | uproot 3.13.1
27 | uproot-methods 0.8.0
28 | $ python -m pip install uproot4
29 | $ python -m pip list | grep "pyhf\|uproot"
30 | pyhf 0.5.3
31 | uproot 4.0.0
32 | uproot-methods 0.8.0
33 | uproot4 4.0.0
34 |
35 | this is resolved in ``v0.5.4`` with the requirement of ``uproot3``
36 |
37 | .. code-block:: shell
38 |
39 | $ python -m pip install "pyhf[xmlio]>=0.5.4"
40 | $ python -m pip list | grep "pyhf\|uproot"
41 | pyhf 0.5.4
42 | uproot3 3.14.1
43 | uproot3-methods 0.10.0
44 | $ python -m pip install uproot4 # or uproot
45 | $ python -m pip list | grep "pyhf\|uproot"
46 | pyhf 0.5.4
47 | uproot 4.0.0
48 | uproot3 3.14.1
49 | uproot3-methods 0.10.0
50 | uproot4 4.0.0
51 |
52 | .. |release v0.5.4| replace:: ``v0.5.4``
53 | .. _`release v0.5.4`: https://github.com/scikit-hep/pyhf/releases/tag/v0.5.4
54 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.6.1.rst:
--------------------------------------------------------------------------------
1 | |release v0.6.1|_
2 | =================
3 |
4 | This is a patch release from ``v0.6.0`` → ``v0.6.1``.
5 |
6 | Important Notes
7 | ---------------
8 |
9 | * As a result of changes to the default behavior of ``torch.distributions`` in
10 | PyTorch ``v1.8.0``, accommodating changes have been made in the underlying
11 | implementations for :func:`pyhf.tensor.pytorch_backend.pytorch_backend`.
12 | These changes require a new lower bound of ``torch`` ``v1.8.0`` for use of the
13 | PyTorch backend.
14 |
15 | Fixes
16 | -----
17 |
18 | * In the PyTorch backend the ``validate_args`` kwarg is used with
19 | ``torch.distributions`` to ensure a continuous approximation of the Poisson
20 | distribution in ``torch`` ``v1.8.0+``.
21 |
22 | Features
23 | --------
24 |
25 | Python API
26 | ~~~~~~~~~~
27 |
28 | * The ``solver_options`` kwarg can be passed to the
29 | :func:`pyhf.optimize.opt_scipy.scipy_optimizer` optimizer for additional
30 | configuration of the minimization.
31 | See :func:`scipy.optimize.show_options` for additional options of optimization
32 | solvers.
33 | * The ``torch`` API is now used to provide the implementations of the ``ravel``,
34 | ``tile``, and ``outer`` tensorlib methods for the PyTorch backend.
35 |
36 | .. |release v0.6.1| replace:: ``v0.6.1``
37 | .. _`release v0.6.1`: https://github.com/scikit-hep/pyhf/releases/tag/v0.6.1
38 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.7.1.rst:
--------------------------------------------------------------------------------
1 | |release v0.7.1|_
2 | =================
3 |
4 | This is a patch release from ``v0.7.0`` → ``v0.7.1``.
5 |
6 | Important Notes
7 | ---------------
8 |
9 | * All backends are now fully compatible and tested with
10 |   `Python 3.11 <https://www.python.org/downloads/release/python-3110/>`_.
11 | (PR :pr:`2145`)
12 | * The ``tensorflow`` extra (``'pyhf[tensorflow]'``) now automatically installs
13 | ``tensorflow-macos`` for Apple silicon machines.
14 | (PR :pr:`2119`)
15 |
16 | Fixes
17 | -----
18 |
19 | * Raise :class:`NotImplementedError` when attempting to convert a XML
20 | workspace that contains no data.
21 | (PR :pr:`2109`)
22 |
23 | Contributors
24 | ------------
25 |
26 | ``v0.7.1`` benefited from contributions from:
27 |
28 | * Alexander Held
29 |
30 | .. |release v0.7.1| replace:: ``v0.7.1``
31 | .. _`release v0.7.1`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.1
32 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.7.2.rst:
--------------------------------------------------------------------------------
1 | |release v0.7.2|_
2 | =================
3 |
4 | This is a patch release from ``v0.7.1`` → ``v0.7.2``.
5 |
6 | Important Notes
7 | ---------------
8 |
9 | * ``pyhf`` became a `NumFOCUS Affiliated Project
10 | `__ on
11 | 2022-12-19. |NumFOCUS Affiliated Project|
12 | ``v0.7.1`` is the first release to appear in a NumFOCUS Newsletter and
13 | ``v0.7.2`` is the first release to appear as part of the Affiliated Projects
14 | page.
15 | (PR :pr:`2179`)
16 |
17 | Fixes
18 | -----
19 |
20 | * If a multiple component parameter of interest is used raise
21 | :class:`~pyhf.exceptions.InvalidModel`.
22 | This guards against modifiers like :class:`~pyhf.modifiers.shapefactor`,
23 | :class:`~pyhf.modifiers.shapesys`, and :class:`~pyhf.modifiers.staterror`
24 | from being used as POIs.
25 | (PR :pr:`2197`)
26 | * Use :data:`typing.TYPE_CHECKING` guard to avoid causing a
27 | :class:`ModuleNotFoundError` when the version of NumPy installed is older
28 | than ``v1.21.0``, which is the first NumPy release to include
29 | :mod:`numpy.typing`.
30 | (PR :pr:`2208`)
31 |
32 | Contributors
33 | ------------
34 |
35 | ``v0.7.2`` benefited from contributions from:
36 |
37 | * Alexander Held
38 |
39 | .. |release v0.7.2| replace:: ``v0.7.2``
40 | .. _`release v0.7.2`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.2
41 |
42 | .. |NumFOCUS Affiliated Project| image:: https://img.shields.io/badge/NumFOCUS-Affiliated%20Project-orange.svg?style=flat&colorA=E1523D&colorB=007D8A
43 | :target: https://numfocus.org/sponsored-projects/affiliated-projects
44 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.7.4.rst:
--------------------------------------------------------------------------------
1 | |release v0.7.4|_
2 | =================
3 |
4 | This is a patch release from ``v0.7.3`` → ``v0.7.4``.
5 |
6 | Fixes
7 | -----
8 |
9 | * Skip callbacks with dead weakrefs while iterating over callbacks in ``pyhf``
10 | events, like :func:`pyhf.set_backend`, to avoid the possibility of accessing
11 | dead weakrefs before they could be garbage collected.
12 | (PR :pr:`2310`)
13 |
14 | The fixed bug was subtle and occurred nondeterministically when the
15 | :class:`pyhf.tensorlib` was changed repeatedly causing dead weakrefs
16 | to be accessed before Python's garbage collection could remove them.
17 | Most users should be unaffected.
18 |
19 | Contributors
20 | ------------
21 |
22 | ``v0.7.4`` benefited from contributions from:
23 |
24 | * Daniel Werner
25 | * Jonas Rembser
26 |
27 | .. |release v0.7.4| replace:: ``v0.7.4``
28 | .. _`release v0.7.4`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.4
29 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.7.5.rst:
--------------------------------------------------------------------------------
1 | |release v0.7.5|_
2 | =================
3 |
4 | This is a patch release from ``v0.7.4`` → ``v0.7.5``.
5 |
6 | Fixes
7 | -----
8 |
9 | * Remove operating system dependent components of schema validation to allow for
10 | validation on Windows.
11 | (PR :pr:`2357`)
12 |
13 | .. |release v0.7.5| replace:: ``v0.7.5``
14 | .. _`release v0.7.5`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.5
15 |
--------------------------------------------------------------------------------
/docs/release-notes/v0.7.6.rst:
--------------------------------------------------------------------------------
1 | |release v0.7.6|_
2 | =================
3 |
4 | This is a patch release from ``v0.7.5`` → ``v0.7.6``.
5 |
6 | Fixes
7 | -----
8 |
9 | * For the JAX backend access ``jax.config`` from the ``jax`` top level API to
10 | avoid support issues with ``jax`` and ``jaxlib`` ``v0.4.20+``.
11 | (PR :pr:`2376`)
12 | * Add information in the warnings for :func:`pyhf.infer.test_statistics.qmu` and
13 | :func:`pyhf.infer.test_statistics.qmu_tilde` that provides users with the
14 | higher level ``pyhf.infer`` APIs ``kwarg`` to set the correct test statistic.
15 | (PR :pr:`2390`)
16 | * Correct the variable assignment for the one-sigma and two-sigma limit band
17 | artists in :func:`pyhf.contrib.viz.brazil.plot_brazil_band` to match the
18 | stated return structure.
19 | (PR :pr:`2411`)
20 | * In the ``pyhf.infer`` module, correct the ``fixed_params`` type in the docs
21 |   to be :obj:`tuple` or :obj:`list`.
22 | (PR :pr:`2420`)
23 |
24 | Contributors
25 | ------------
26 |
27 | ``v0.7.6`` benefited from contributions from:
28 |
29 | * Lorenz Gaertner
30 |
31 | .. |release v0.7.6| replace:: ``v0.7.6``
32 | .. _`release v0.7.6`: https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6
33 |
--------------------------------------------------------------------------------
/src/conftest.py:
--------------------------------------------------------------------------------
1 | ../tests/conftest.py
--------------------------------------------------------------------------------
/src/pyhf/__init__.py:
--------------------------------------------------------------------------------
1 | from pyhf.tensor import BackendRetriever as tensor
2 | from pyhf.optimize import OptimizerRetriever as optimize # noqa
3 | from pyhf.tensor.manager import get_backend
4 | from pyhf.tensor.manager import set_backend
5 | from pyhf._version import version as __version__
6 |
7 | from pyhf.pdf import Model
8 | from pyhf.workspace import Workspace
9 | from pyhf import schema
10 | from pyhf import simplemodels
11 | from pyhf import infer
12 | from pyhf import compat
13 | from pyhf.patchset import PatchSet
14 |
15 | __all__ = [
16 | "Model",
17 | "PatchSet",
18 | "Workspace",
19 | "__version__",
20 | "compat",
21 | "default_backend",
22 | "exceptions",
23 | "get_backend",
24 | "infer",
25 | "interpolators",
26 | "modifiers",
27 | "optimizer",
28 | "parameters",
29 | "patchset",
30 | "pdf",
31 | "probability",
32 | "schema",
33 | "set_backend",
34 | "simplemodels",
35 | "tensor",
36 | "tensorlib",
37 | "utils",
38 | "workspace",
39 | ]
40 |
41 |
42 | def __dir__():
43 | return __all__
44 |
45 |
46 | def __getattr__(name):
47 | if name == 'tensorlib':
48 | return get_backend(default=False)[0]
49 | if name == 'optimizer':
50 | return get_backend(default=False)[1]
51 | if name == 'default_backend':
52 | return get_backend(default=True)[0]
53 | raise AttributeError
54 |
--------------------------------------------------------------------------------
/src/pyhf/cli/__init__.py:
--------------------------------------------------------------------------------
1 | """The pyhf command line interface."""
2 |
3 | from pyhf.cli.cli import pyhf as cli
4 | from pyhf.cli.rootio import cli as rootio
5 | from pyhf.cli.spec import cli as spec
6 | from pyhf.cli.infer import cli as infer
7 | from pyhf.cli.complete import cli as complete
8 | from pyhf.contrib import cli as contrib
9 |
10 | __all__ = ['cli', 'complete', 'contrib', 'infer', 'rootio', 'spec']
11 |
12 |
13 | def __dir__():
14 | return __all__
15 |
--------------------------------------------------------------------------------
/src/pyhf/cli/cli.py:
--------------------------------------------------------------------------------
1 | """The pyhf Command Line Interface."""
2 |
3 | import logging
4 |
5 | import click
6 |
7 | from pyhf import __version__
8 | from pyhf.cli import rootio, spec, infer, patchset, complete
9 | from pyhf.contrib import cli as contrib
10 | from pyhf import utils
11 |
12 | logging.basicConfig()
13 | log = logging.getLogger(__name__)
14 |
15 |
def _print_citation(ctx, param, value):
    """Eager ``click`` callback: print the citation bibtex and exit the CLI."""
    if ctx.resilient_parsing or not value:
        # Flag not given, or click is doing completion/resilient parsing.
        return
    click.echo(utils.citation())
    ctx.exit()
21 |
22 |
@click.group(context_settings=dict(help_option_names=['-h', '--help']))
@click.version_option(version=__version__)
@click.option(
    "--cite",
    "--citation",
    help="Print the bibtex citation for this software",
    default=False,
    is_flag=True,
    callback=_print_citation,
    expose_value=False,
    is_eager=True,  # process before other options so `pyhf --cite` short-circuits
)
def pyhf():
    """Top-level CLI entrypoint."""


# Subcommands from the rootio/spec/infer modules are attached individually
# (their group objects stay commented out) so each appears as a flat,
# top-level `pyhf <command>` entry rather than a nested sub-group.
# pyhf.add_command(rootio.cli)
pyhf.add_command(rootio.json2xml)
pyhf.add_command(rootio.xml2json)

# pyhf.add_command(spec.cli)
pyhf.add_command(spec.inspect)
pyhf.add_command(spec.prune)
pyhf.add_command(spec.rename)
pyhf.add_command(spec.combine)
pyhf.add_command(spec.digest)
pyhf.add_command(spec.sort)

# pyhf.add_command(infer.cli)
pyhf.add_command(infer.fit)
pyhf.add_command(infer.cls)

# patchset, completions, and contrib keep their own command groups.
pyhf.add_command(patchset.cli)

pyhf.add_command(complete.cli)

pyhf.add_command(contrib.cli)
60 |
--------------------------------------------------------------------------------
/src/pyhf/cli/complete.py:
--------------------------------------------------------------------------------
'''Shell completions for pyhf.'''

import click

try:
    import click_completion

    click_completion.init()

    @click.command(help='Generate shell completion code.', name='completions')
    @click.argument(
        'shell',
        required=False,
        type=click_completion.DocumentedChoice(click_completion.core.shells),
    )
    def cli(shell):
        '''Generate shell completion code for various shells.'''
        click.echo(click_completion.core.get_code(shell, prog_name='pyhf'))

except ImportError:
    # Fallback with the same command name and argument so `pyhf completions`
    # still exists (and explains the missing extra) when click_completion
    # is not installed.
    @click.command(help='Generate shell completion code.', name='completions')
    @click.argument('shell', default=None)
    def cli(shell):
        """Placeholder for shell completion code generation function if necessary dependency is missing."""
        click.secho(
            "This requires the click_completion module.\n"
            "You can install it with the shellcomplete extra:\n"
            "python -m pip install 'pyhf[shellcomplete]'"
        )
31 |
--------------------------------------------------------------------------------
/src/pyhf/contrib/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Contributions to pyhf.
3 |
4 | Modules in contrib should never be dependencies of pyhf.
5 | """
6 |
--------------------------------------------------------------------------------
/src/pyhf/contrib/cli.py:
--------------------------------------------------------------------------------
1 | """CLI for functionality that will get migrated out eventually."""
2 |
3 | import logging
4 | import click
5 | from pathlib import Path
6 |
7 | logging.basicConfig()
8 | log = logging.getLogger(__name__)
9 |
10 | __all__ = ["download"]
11 |
12 |
13 | def __dir__():
14 | return __all__
15 |
16 |
17 | @click.group(name="contrib")
18 | def cli():
19 | """
20 | Contrib experimental operations.
21 |
22 | .. note::
23 |
24 | Requires installation of the ``contrib`` extra.
25 |
26 | .. code-block:: shell
27 |
28 | $ python -m pip install 'pyhf[contrib]'
29 | """
30 | from pyhf.contrib import utils # Guard CLI from missing extra # noqa: F401
31 |
32 |
@cli.command()
@click.argument("archive-url")
@click.argument("output-directory")
@click.option("-v", "--verbose", is_flag=True, help="Enables verbose mode")
@click.option(
    "-f", "--force", is_flag=True, help="Force download from non-approved host"
)
@click.option(
    "-c",
    "--compress",
    is_flag=True,
    help="Keep the archive in a compressed tar.gz form",
)
def download(archive_url, output_directory, verbose, force, compress):
    """
    Download the patchset archive from the remote URL and extract it in a
    directory at the path given.

    Example:

    .. code-block:: shell

        $ pyhf contrib download --verbose https://doi.org/10.17182/hepdata.90607.v3/r3 1Lbb-likelihoods

    \b
    1Lbb-likelihoods/patchset.json
    1Lbb-likelihoods/README.md
    1Lbb-likelihoods/BkgOnly.json

    Raises:
        :class:`~pyhf.exceptions.InvalidArchiveHost`: if the provided archive host name is not known to be valid
    """
    try:
        # Imported lazily so the command can report a helpful error below
        # instead of failing at module import time.
        from pyhf.contrib import utils

        utils.download(archive_url, output_directory, force, compress)

        if verbose:
            # Print the top-level contents of the extracted archive.
            file_list = [str(file) for file in list(Path(output_directory).glob("*"))]
            print("\n".join(file_list))
    except AttributeError:
        # NOTE(review): presumably utils lacks download() when the contrib
        # extra's dependencies are missing — confirm against pyhf.contrib.utils.
        log.error(
            "\nInstallation of the contrib extra is required to use the contrib CLI API"
            + "\nPlease install with: python -m pip install 'pyhf[contrib]'\n",
            exc_info=True,
        )
79 |
--------------------------------------------------------------------------------
/src/pyhf/contrib/viz/__init__.py:
--------------------------------------------------------------------------------
1 | """Visualizations of pyhf models and results."""
2 |
--------------------------------------------------------------------------------
/src/pyhf/data/citation.bib:
--------------------------------------------------------------------------------
1 | @software{pyhf,
2 | author = {Lukas Heinrich and Matthew Feickert and Giordon Stark},
3 | title = "{pyhf: v0.7.6}",
4 | version = {0.7.6},
5 | doi = {10.5281/zenodo.1169739},
6 | url = {https://doi.org/10.5281/zenodo.1169739},
7 | note = {https://github.com/scikit-hep/pyhf/releases/tag/v0.7.6}
8 | }
9 |
10 | @article{pyhf_joss,
11 | doi = {10.21105/joss.02823},
12 | url = {https://doi.org/10.21105/joss.02823},
13 | year = {2021},
14 | publisher = {The Open Journal},
15 | volume = {6},
16 | number = {58},
17 | pages = {2823},
18 | author = {Lukas Heinrich and Matthew Feickert and Giordon Stark and Kyle Cranmer},
19 | title = {pyhf: pure-Python implementation of HistFactory statistical models},
20 | journal = {Journal of Open Source Software}
21 | }
22 |
--------------------------------------------------------------------------------
/src/pyhf/infer/intervals/__init__.py:
--------------------------------------------------------------------------------
1 | """Interval estimation"""
2 |
3 | import pyhf.infer.intervals.upper_limits
4 |
5 | __all__ = ["upper_limits.upper_limit"]
6 |
7 |
8 | def __dir__():
9 | return __all__
10 |
11 |
def upperlimit(
    data, model, scan=None, level=0.05, return_results=False, **hypotest_kwargs
):
    """
    Deprecated shim that forwards all arguments to
    :func:`~pyhf.infer.intervals.upper_limits.upper_limit`.

    .. deprecated:: 0.7.0
        Use :func:`~pyhf.infer.intervals.upper_limits.upper_limit` instead.
    .. warning:: :func:`~pyhf.infer.intervals.upperlimit` will be removed in
        ``pyhf`` ``v0.9.0``.
    """
    from pyhf.exceptions import _deprecated_api_warning

    # Warn callers that this API is deprecated in favor of the replacement,
    # recording the versions at which it was deprecated and will be removed.
    _deprecated_api_warning(
        "pyhf.infer.intervals.upperlimit",
        "pyhf.infer.intervals.upper_limits.upper_limit",
        "0.7.0",
        "0.9.0",
    )
    return pyhf.infer.intervals.upper_limits.upper_limit(
        data, model, scan, level, return_results, **hypotest_kwargs
    )
32 |
--------------------------------------------------------------------------------
/src/pyhf/interpolators/__init__.py:
--------------------------------------------------------------------------------
1 | """Histogram Interpolation."""
2 |
3 |
4 | def _slow_interpolator_looper(histogramssets, alphasets, func):
5 | all_results = []
6 | for histoset, alphaset in zip(histogramssets, alphasets):
7 | all_results.append([])
8 | set_result = all_results[-1]
9 | for histo in histoset:
10 | set_result.append([])
11 | histo_result = set_result[-1]
12 | for alpha in alphaset:
13 | alpha_result = []
14 | for down, nom, up in zip(histo[0], histo[1], histo[2]):
15 | v = func(down, nom, up, alpha)
16 | alpha_result.append(v)
17 | histo_result.append(alpha_result)
18 | return all_results
19 |
20 |
21 | # interpolation codes come from https://cds.cern.ch/record/1456844/files/CERN-OPEN-2012-016.pdf
22 | from pyhf.interpolators.code0 import code0, _slow_code0
23 | from pyhf.interpolators.code1 import code1, _slow_code1
24 | from pyhf.interpolators.code2 import code2, _slow_code2
25 | from pyhf.interpolators.code4 import code4, _slow_code4
26 | from pyhf.interpolators.code4p import code4p, _slow_code4p
27 | from pyhf import exceptions
28 |
29 |
def get(interpcode, do_tensorized_calc=True):
    """Look up the interpolator implementation for an interpolation code.

    Args:
        interpcode: interpolation code (``0``, ``1``, ``2``, ``4``, or ``'4p'``)
        do_tensorized_calc (:obj:`bool`): return the tensorized implementation
            when ``True``, otherwise the slow reference implementation

    Raises:
        ~pyhf.exceptions.InvalidInterpCode: if ``interpcode`` is not recognized
    """
    implementations = {
        0: (code0, _slow_code0),
        1: (code1, _slow_code1),
        2: (code2, _slow_code2),
        4: (code4, _slow_code4),
        '4p': (code4p, _slow_code4p),
    }

    try:
        tensorized, slow = implementations[interpcode]
    except KeyError:
        raise exceptions.InvalidInterpCode
    return tensorized if do_tensorized_calc else slow
43 |
44 |
45 | __all__ = ['code0', 'code1', 'code2', 'code4', 'code4p']
46 |
47 |
48 | def __dir__():
49 | return __all__
50 |
--------------------------------------------------------------------------------
/src/pyhf/modifiers/__init__.py:
--------------------------------------------------------------------------------
from pyhf.modifiers.histosys import histosys_builder, histosys_combined
from pyhf.modifiers.lumi import lumi_builder, lumi_combined
from pyhf.modifiers.normfactor import normfactor_builder, normfactor_combined
from pyhf.modifiers.normsys import normsys_builder, normsys_combined
from pyhf.modifiers.shapefactor import shapefactor_builder, shapefactor_combined
from pyhf.modifiers.shapesys import shapesys_builder, shapesys_combined
from pyhf.modifiers.staterror import staterror_builder, staterror_combined

# Public API: modifier names plus their builder/combined implementation pairs.
__all__ = [
    "histfactory_set",
    "histosys",
    "histosys_builder",
    "histosys_combined",
    "lumi",
    "lumi_builder",
    "lumi_combined",
    "normfactor",
    "normfactor_builder",
    "normfactor_combined",
    "normsys",
    "normsys_builder",
    "normsys_combined",
    "shapefactor",
    "shapefactor_builder",
    "shapefactor_combined",
    "shapesys",
    "shapesys_builder",
    "shapesys_combined",
    "staterror",
    "staterror_builder",
    "staterror_combined",
]


def __dir__():
    # Restrict dir() and tab completion to the public API above.
    return __all__


# Registry mapping each HistFactory modifier name to its
# (builder, combined-applicator) implementation pair.
histfactory_set = {
    "histosys": (histosys_builder, histosys_combined),
    "lumi": (lumi_builder, lumi_combined),
    "normfactor": (normfactor_builder, normfactor_combined),
    "normsys": (normsys_builder, normsys_combined),
    "shapefactor": (shapefactor_builder, shapefactor_combined),
    "shapesys": (shapesys_builder, shapesys_combined),
    "staterror": (staterror_builder, staterror_combined),
}
48 |
--------------------------------------------------------------------------------
/src/pyhf/optimize/__init__.py:
--------------------------------------------------------------------------------
1 | """Optimizers for Tensor Functions."""
2 |
3 | from pyhf import exceptions
4 |
5 |
class _OptimizerRetriever:
    """Lazy accessor for optimizer classes.

    Attribute access triggers the import of the corresponding optimizer
    module, so an optional dependency (e.g. ``iminuit``) is only imported
    when that optimizer is actually requested.
    """

    def __getattr__(self, name):
        if name == 'scipy_optimizer':
            from pyhf.optimize.opt_scipy import scipy_optimizer

            assert scipy_optimizer
            # hide away one level of the module name
            # pyhf.optimize.scipy_optimizer.scipy_optimizer->pyhf.optimize.scipy_optimizer
            scipy_optimizer.__module__ = __name__
            # for autocomplete and dir() calls
            # (caching on the instance also means __getattr__ only runs once)
            self.scipy_optimizer = scipy_optimizer
            return scipy_optimizer
        elif name == 'minuit_optimizer':
            try:
                from pyhf.optimize.opt_minuit import minuit_optimizer

                assert minuit_optimizer
                # hide away one level of the module name
                # pyhf.optimize.minuit_optimizer.minuit_optimizer->pyhf.optimize.minuit_optimizer
                minuit_optimizer.__module__ = __name__
                # for autocomplete and dir() calls
                self.minuit_optimizer = minuit_optimizer
                return minuit_optimizer
            except ImportError as e:
                # Surface a pyhf-specific error carrying the original cause.
                raise exceptions.ImportBackendError(
                    "There was a problem importing Minuit. The minuit optimizer cannot be used.",
                    e,
                )
        elif name == '__wrapped__':  # doctest
            # NOTE(review): implicitly returns None; presumably answers
            # introspection probes for __wrapped__ without importing — confirm.
            pass


OptimizerRetriever = _OptimizerRetriever()
__all__ = ['OptimizerRetriever']
40 |
--------------------------------------------------------------------------------
/src/pyhf/optimize/opt_numpy.py:
--------------------------------------------------------------------------------
1 | """Numpy Backend Function Shim."""
2 |
3 | from pyhf import get_backend
4 | from pyhf import exceptions
5 |
6 |
def wrap_objective(objective, data, pdf, stitch_pars, do_grad=False, jit_pieces=None):
    """
    Build the minimizer-facing objective callable for the NumPy backend.

    Args:
        objective (:obj:`func`): objective function
        data (:obj:`list`): observed data
        pdf (~pyhf.pdf.Model): The statistical model adhering to the schema model.json
        stitch_pars (:obj:`func`): callable that stitches parameters, see :func:`pyhf.optimize.common.shim`.
        do_grad (:obj:`bool`): enable autodifferentiation mode. Default is off.

    Returns:
        objective_and_grad (:obj:`func`): tensor backend wrapped objective,gradient pair

    Raises:
        ~pyhf.exceptions.Unsupported: if ``do_grad`` is requested, since NumPy
            provides no autodifferentiation.
    """

    tensorlib, _ = get_backend()

    if do_grad:
        raise exceptions.Unsupported("Numpy does not support autodifferentiation.")

    def func(pars):
        tensor_pars = tensorlib.astensor(pars)
        full_pars = stitch_pars(tensor_pars)
        # Unwrap the length-one result to a scalar for the minimizer.
        return objective(full_pars, data, pdf)[0]

    return func
33 |
--------------------------------------------------------------------------------
/src/pyhf/optimize/opt_pytorch.py:
--------------------------------------------------------------------------------
1 | """PyTorch Backend Function Shim."""
2 |
3 | from pyhf import get_backend
4 | import torch
5 |
6 |
def wrap_objective(objective, data, pdf, stitch_pars, do_grad=False, jit_pieces=None):
    """
    Build the minimizer-facing objective callable for the PyTorch backend.

    Args:
        objective (:obj:`func`): objective function
        data (:obj:`list`): observed data
        pdf (~pyhf.pdf.Model): The statistical model adhering to the schema model.json
        stitch_pars (:obj:`func`): callable that stitches parameters, see :func:`pyhf.optimize.common.shim`.
        do_grad (:obj:`bool`): enable autodifferentiation mode. Default is off.

    Returns:
        objective_and_grad (:obj:`func`): tensor backend wrapped objective,gradient pair
    """

    tensorlib, _ = get_backend()

    if not do_grad:

        def func(pars):
            tensor_pars = tensorlib.astensor(pars)
            full_pars = stitch_pars(tensor_pars)
            nll = objective(full_pars, data, pdf)
            return nll[0]

    else:

        def func(pars):
            tensor_pars = tensorlib.astensor(pars)
            # mark the parameters as differentiable so autograd tracks them
            tensor_pars.requires_grad = True
            full_pars = stitch_pars(tensor_pars)
            nll = objective(full_pars, data, pdf)
            (grad,) = torch.autograd.grad(nll, tensor_pars)
            # detach before converting so no graph references leak out
            return nll.detach().numpy()[0], grad

    return func
43 |
--------------------------------------------------------------------------------
/src/pyhf/optimize/opt_tflow.py:
--------------------------------------------------------------------------------
1 | """Tensorflow Backend Function Shim."""
2 |
3 | from pyhf import get_backend
4 | import tensorflow as tf
5 |
6 |
def wrap_objective(objective, data, pdf, stitch_pars, do_grad=False, jit_pieces=None):
    """
    Wrap the objective function for the minimization.

    Args:
        objective (:obj:`func`): objective function
        data (:obj:`list`): observed data
        pdf (~pyhf.pdf.Model): The statistical model adhering to the schema model.json
        stitch_pars (:obj:`func`): callable that stitches parameters, see :func:`pyhf.optimize.common.shim`.
        do_grad (:obj:`bool`): enable autodifferentiation mode. Default is off.

    Returns:
        objective_and_grad (:obj:`func`): tensor backend wrapped objective,gradient pair
    """
    tensorlib, _ = get_backend()

    if do_grad:

        def func(pars):
            pars = tensorlib.astensor(pars)
            with tf.GradientTape() as tape:
                # pars is a plain tensor (not a tf.Variable), so it must be
                # watched explicitly for the tape to record gradients
                tape.watch(pars)
                constrained_pars = stitch_pars(pars)
                constr_nll = objective(constrained_pars, data, pdf)
            # NB: tape.gradient can return a sparse gradient (tf.IndexedSlices)
            # when tf.gather is used and this needs to be converted back to a
            # tensor to be usable as a value
            grad = tape.gradient(constr_nll, pars)
            return constr_nll.numpy()[0], tf.convert_to_tensor(grad)

    else:

        def func(pars):
            pars = tensorlib.astensor(pars)
            constrained_pars = stitch_pars(pars)
            # unwrap the length-one result to a scalar for the minimizer
            return objective(constrained_pars, data, pdf)[0]

    return func
45 |
--------------------------------------------------------------------------------
/src/pyhf/parameters/__init__.py:
--------------------------------------------------------------------------------
"""Public interface of the pyhf.parameters subpackage."""
from pyhf.parameters.paramsets import (
    paramset,
    unconstrained,
    constrained_by_normal,
    constrained_by_poisson,
)
from pyhf.parameters.utils import reduce_paramsets_requirements
from pyhf.parameters.paramview import ParamViewer

# Public API, kept sorted alphabetically.
__all__ = [
    'ParamViewer',
    'constrained_by_normal',
    'constrained_by_poisson',
    'paramset',
    'reduce_paramsets_requirements',
    'unconstrained',
]


def __dir__():
    # Restrict dir(pyhf.parameters) to the public API listed above.
    return __all__
22 |
--------------------------------------------------------------------------------
/src/pyhf/schema/loader.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | import sys
3 | import json
4 | import pyhf.exceptions
5 | from pyhf.schema import variables
6 |
7 | # importlib.resources.as_file wasn't added until Python 3.9
8 | # c.f. https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file
9 | if sys.version_info >= (3, 9):
10 | from importlib import resources
11 | else:
12 | import importlib_resources as resources
13 |
14 |
def load_schema(schema_id: str):
    """
    Get a schema by relative path from cache, or load it into the cache and return.

    Args:
        schema_id (str): Relative path to schema from :attr:`pyhf.schema.path`

    Example:
        >>> import pyhf
        >>> schema = pyhf.schema.load_schema("1.0.0/defs.json")
        >>> type(schema)
        <class 'dict'>
        >>> schema.keys()
        dict_keys(['$schema', '$id', 'definitions'])
        >>> pyhf.schema.load_schema("0.0.0/defs.json") # doctest: +ELLIPSIS
        Traceback (most recent call last):
            ...
        pyhf.exceptions.SchemaNotFound: ...

    Returns:
        schema (dict): The loaded schema.

    Raises:
        ~pyhf.exceptions.SchemaNotFound: if the provided ``schema_id`` cannot be found.
    """
    # Fast path: look up the schema under its canonical URI
    # (SCHEMA_BASE + relative path) in the cache.
    try:
        return variables.SCHEMA_CACHE[
            f'{Path(variables.SCHEMA_BASE).joinpath(schema_id)}'
        ]
    except KeyError:
        pass

    # Cache miss: resolve the schema file packaged with pyhf and load it.
    ref = variables.schemas.joinpath(schema_id)
    with resources.as_file(ref) as path:
        if not path.exists():
            raise pyhf.exceptions.SchemaNotFound(
                f'The schema {schema_id} was not found. Do you have the right version or the right path? {path}'
            )
        with path.open(encoding="utf-8") as json_schema:
            schema = json.load(json_schema)
            # Cache under the schema's own '$id' (its canonical URI).
            variables.SCHEMA_CACHE[schema['$id']] = schema
        return variables.SCHEMA_CACHE[schema['$id']]


# pre-populate the cache to avoid network access
# on first validation in standard usage
# (not in pyhf.schema.variables to avoid circular imports)
load_schema(f'{variables.SCHEMA_VERSION}/defs.json')
63 |
--------------------------------------------------------------------------------
/src/pyhf/schema/variables.py:
--------------------------------------------------------------------------------
"""Module-level state shared by the pyhf.schema subpackage."""
import sys

# importlib.resources.as_file wasn't added until Python 3.9
# c.f. https://docs.python.org/3.9/library/importlib.html#importlib.resources.as_file
if sys.version_info >= (3, 9):
    from importlib import resources
else:
    import importlib_resources as resources
# Traversable handle to the 'schemas/' directory packaged with pyhf
schemas = resources.files('pyhf') / "schemas"

# Cache of loaded schemas, keyed by each schema's '$id' URI
SCHEMA_CACHE = {}
# Base URI under which the pyhf schemas are published
SCHEMA_BASE = "https://scikit-hep.org/pyhf/schemas/"
SCHEMA_VERSION = '1.0.0'
14 |
--------------------------------------------------------------------------------
/src/pyhf/schemas/1.0.0/jsonpatch.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "https://scikit-hep.org/pyhf/schemas/1.0.0/jsonpatch.json",
4 | "$ref": "defs.json#/definitions/jsonpatch"
5 | }
6 |
--------------------------------------------------------------------------------
/src/pyhf/schemas/1.0.0/measurement.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "https://scikit-hep.org/pyhf/schemas/1.0.0/measurement.json",
4 | "$ref": "defs.json#/definitions/measurement"
5 | }
6 |
--------------------------------------------------------------------------------
/src/pyhf/schemas/1.0.0/model.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "https://scikit-hep.org/pyhf/schemas/1.0.0/model.json",
4 | "$ref": "defs.json#/definitions/model"
5 | }
6 |
--------------------------------------------------------------------------------
/src/pyhf/schemas/1.0.0/patchset.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "https://scikit-hep.org/pyhf/schemas/1.0.0/patchset.json",
4 | "$ref": "defs.json#/definitions/patchset"
5 | }
6 |
--------------------------------------------------------------------------------
/src/pyhf/schemas/1.0.0/workspace.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "https://scikit-hep.org/pyhf/schemas/1.0.0/workspace.json",
4 | "$ref": "defs.json#/definitions/workspace"
5 | }
6 |
--------------------------------------------------------------------------------
/tbump.toml:
--------------------------------------------------------------------------------
1 | github_url = "https://github.com/scikit-hep/pyhf/"
2 |
3 | [version]
4 | current = "0.7.6"
5 |
6 | # Example of a semver regexp.
7 | # Make sure this matches current_version before
8 | # using tbump
9 | regex = '''
10 | (?P<major>\d+)
11 | \.
12 | (?P<minor>\d+)
13 | \.
14 | (?P<patch>\d+)
15 | (rc
16 | (?P<candidate>\d+)
17 | )?
18 | '''
19 |
20 | [git]
21 | # The current version will get updated when tbump is run
22 | message_template = "Bump version: 0.7.6 → {new_version}"
23 | tag_template = "v{new_version}"
24 |
25 | # For each file to patch, add a [[file]] config
26 | # section containing the path of the file, relative to the
27 | # tbump.toml location.
28 | [[file]]
29 | src = "tbump.toml"
30 | # Restrict search to make it explicit why tbump.toml
31 | # is even included as a file to bump, as it will get
32 | # its version.current attribute bumped anyway.
33 | search = "Bump version: {current_version} → "
34 |
35 | [[file]]
36 | src = "src/pyhf/utils.py"
37 | # Guard SCHEMA_VERSION
38 | # This search is just identifying the line to restrict the
39 | # regex to, but all matches in the line will get bumped.
40 | search = "pyhf: v{current_version}"
41 |
42 | [[file]]
43 | src = "README.rst"
44 |
45 | [[file]]
46 | src = "src/pyhf/data/citation.bib"
47 |
48 | [[file]]
49 | src = ".zenodo.json"
50 |
51 | [[file]]
52 | src = "CITATION.cff"
53 |
54 | [[file]]
55 | src = "docs/lite/jupyterlite.py"
56 |
57 | [[field]]
58 | # the name of the field
59 | name = "candidate"
60 | # the default value to use, if there is no match
61 | default = ""
62 |
--------------------------------------------------------------------------------
/tests/constraints.txt:
--------------------------------------------------------------------------------
1 | # core
2 | scipy==1.5.2 # c.f. PR #2469
3 | click==8.0.0 # c.f. PR #1958, #1909
4 | tqdm==4.56.0
5 | jsonschema==4.15.0 # c.f. PR #1979
6 | jsonpatch==1.15
7 | pyyaml==5.1
8 | importlib_resources==1.4.0 # c.f. PR #1979
9 | numpy==1.21.0 # constrained by jax v0.4.1
10 | # xmlio
11 | uproot==4.1.1
12 | # minuit
13 | iminuit==2.7.0 # c.f. PR #1895
14 | # tensorflow
15 | tensorflow==2.7.0 # c.f. PR #1962
16 | tensorflow-probability==0.11.0 # c.f. PR #1657
17 | protobuf<4.21.0 # c.f. PR #2117
18 | # torch
19 | torch==1.10.0
20 | # jax
21 | # Use Google Cloud Storage buckets for long term wheel support
22 | # c.f. https://github.com/google/jax/discussions/7608#discussioncomment-1269342
23 | --find-links https://storage.googleapis.com/jax-releases/jax_releases.html
24 | jax==0.4.1 # c.f. PR #2079
25 | jaxlib==0.4.1 # c.f. PR #2079
26 |
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results.png
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results_components.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results_components.png
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results_components_no_clb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results_components_no_clb.png
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results_components_no_cls.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results_components_no_cls.png
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results_components_no_clsb.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results_components_no_clsb.png
--------------------------------------------------------------------------------
/tests/contrib/baseline/test_plot_results_no_axis.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/contrib/baseline/test_plot_results_no_axis.png
--------------------------------------------------------------------------------
/tests/test_backends.py:
--------------------------------------------------------------------------------
1 | import pyhf
2 | import jax
3 | import pytest
4 |
5 |
def test_default_backend():
    # Setting with default=True updates both backends.
    pyhf.set_backend("jax", default=True)

    assert pyhf.tensorlib.name == 'jax'
    assert pyhf.default_backend.name == 'jax'
11 |
12 |
def test_nondefault_backend():
    # Setting only the session backend leaves the default untouched.
    pyhf.set_backend("jax", default=False)

    assert pyhf.tensorlib.name == 'jax'
    assert pyhf.default_backend.name == 'numpy'
18 |
19 |
@pytest.mark.parametrize('jitted', (False, True))
def test_diffable_backend(jitted):
    """Ops using the jax default backend stay differentiable, jitted or not."""
    pyhf.set_backend("jax", default=True)

    def example_op(x):
        y = pyhf.default_backend.astensor(x)
        return 2 * y

    fn = jax.jit(example_op) if jitted else example_op
    assert jax.jacrev(fn)([1.0]) == [2.0]

    def example_op2(x):
        y = pyhf.default_backend.power(x, 2)
        z = pyhf.tensorlib.sum(y)
        return z

    fn2 = jax.jit(example_op2) if jitted else example_op2
    jacobian = jax.jacrev(fn2)(pyhf.tensorlib.astensor([2.0, 3.0]))
    assert jacobian.tolist() == [4.0, 6.0]
52 |
53 |
def test_diffable_backend_failure():
    # With a numpy default backend and a jax session backend, jax tracing
    # through default-backend ops is expected to fail.
    pyhf.set_backend("numpy", default=True)
    pyhf.set_backend("jax")

    def example_op(x):
        # astensor on the numpy default backend forces concretization of
        # the jax tracer
        y = pyhf.default_backend.astensor(x)
        return 2 * y

    # exact error type varies across jax versions, so accept any of these
    with pytest.raises(
        (
            ValueError,
            jax.errors.TracerArrayConversionError,
            jax.errors.ConcretizationTypeError,
        )
    ):
        jax.jacrev(example_op)([1.0])

    def example_op2(x):
        y = pyhf.default_backend.power(x, 2)
        z = pyhf.tensorlib.sum(y)
        return z

    with pytest.raises(jax.errors.TracerArrayConversionError):
        jax.jacrev(example_op2)(pyhf.tensorlib.astensor([2.0, 3.0]))
78 |
79 |
def test_backend_array_type(backend):
    # every backend fixture must expose a non-None array_type
    tensorlib = backend[0]
    assert tensorlib.array_type is not None
82 |
83 |
def test_tensor_array_types():
    # can't really assert the content of them so easily
    assert bool(pyhf.tensor.array_types)
87 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | from click.testing import CliRunner
2 | import sys
3 | import importlib
4 |
5 |
def test_shllcomplete_cli(isolate_modules):
    # the bash completion snippet must be emitted when the extra is present
    from pyhf.cli.complete import cli

    result = CliRunner().invoke(cli, ['bash'])
    assert 'complete -F _pyhf_completion -o default pyhf' in result.output
12 |
13 |
def test_shllcomplete_cli_missing_extra(isolate_modules):
    # Simulate the 'click_completion' extra not being installed by masking
    # the module, then reload so the guarded import path is exercised.
    sys.modules['click_completion'] = None
    importlib.reload(sys.modules['pyhf.cli.complete'])
    from pyhf.cli.complete import cli

    runner = CliRunner()
    result = runner.invoke(cli, ['bash'])
    assert 'You can install it with the shellcomplete extra' in result.output
22 |
--------------------------------------------------------------------------------
/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import shlex
2 |
3 |
def test_2bin_1channel(tmp_path, script_runner):
    # run `pyhf inspect` against the shipped example workspace
    command = "pyhf inspect docs/examples/json/2-bin_1-channel.json"
    ret = script_runner.run(shlex.split(command))
    assert ret.success
8 |
--------------------------------------------------------------------------------
/tests/test_export/workspace_no_parameter_bounds.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "ch",
5 | "samples": [
6 | {
7 | "data": [1000.0],
8 | "modifiers": [
9 | {"data": null, "name": "mu_sig", "type": "normfactor"},
10 | {
11 | "data": {"hi": 1.5, "lo": 0.5},
12 | "name": "unc",
13 | "type": "normsys"
14 | }
15 | ],
16 | "name": "signal"
17 | }
18 | ]
19 | }
20 | ],
21 | "measurements": [
22 | {
23 | "config": {
24 | "parameters": [
25 | {
26 | "name": "mu_sig",
27 | "inits": [5]
28 | }
29 | ],
30 | "poi": "mu_sig"
31 | },
32 | "name": "meas"
33 | }
34 | ],
35 | "observations": [{"data": [1000], "name": "ch"}],
36 | "version": "1.0.0"
37 | }
38 |
--------------------------------------------------------------------------------
/tests/test_export/workspace_no_parameter_inits.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "ch",
5 | "samples": [
6 | {
7 | "data": [1000.0],
8 | "modifiers": [
9 | {"data": null, "name": "mu_sig", "type": "normfactor"},
10 | {
11 | "data": {"hi": 1.5, "lo": 0.5},
12 | "name": "unc",
13 | "type": "normsys"
14 | }
15 | ],
16 | "name": "signal"
17 | }
18 | ]
19 | }
20 | ],
21 | "measurements": [
22 | {
23 | "config": {
24 | "parameters": [
25 | {
26 | "name": "mu_sig",
27 | "bounds": [[-5, 5]]
28 | }
29 | ],
30 | "poi": "mu_sig"
31 | },
32 | "name": "meas"
33 | }
34 | ],
35 | "observations": [{"data": [1000], "name": "ch"}],
36 | "version": "1.0.0"
37 | }
38 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_missingPOI/config/example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | ./config/example_channel.xml
4 |
5 |
6 | Lumi alpha_syst1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_missingPOI/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_noChannelData/config/example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | ./config/example_channel.xml
4 |
5 | SigXsecOverSM
6 | Lumi alpha_syst1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_noChannelData/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_noChannelDataPaths/config/example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | ./config/example_channel.xml
4 |
5 | SigXsecOverSM
6 | Lumi alpha_syst1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/test_import/xmlimport_noChannelDataPaths/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/test_mixins.py:
--------------------------------------------------------------------------------
1 | import pyhf
2 | import pyhf.readxml
3 | import pytest
4 |
5 |
@pytest.fixture(
    scope='session',
    params=[
        ('validation/xmlimport_input/config/example.xml', 'validation/xmlimport_input/')
    ],
    ids=['example-one'],
)
def spec(request):
    # Session-scoped workspace spec parsed from the validation XML inputs.
    return pyhf.readxml.parse(*request.param)
15 |
16 |
def test_channel_summary_mixin(spec):
    # Summary properties of the mixin must match the parsed example spec.
    assert 'channels' in spec
    mixin = pyhf.mixins._ChannelSummaryMixin(channels=spec['channels'])
    expected_modifiers = [
        ('SigXsecOverSM', 'normfactor'),
        ('lumi', 'lumi'),
        ('staterror_channel1', 'staterror'),
        ('syst1', 'normsys'),
        ('syst2', 'normsys'),
        ('syst3', 'normsys'),
    ]
    assert mixin.channels == ['channel1']
    assert mixin.channel_nbins == {'channel1': 2}
    assert mixin.modifiers == expected_modifiers
    assert mixin.samples == ['background1', 'background2', 'signal']
31 |
32 |
def test_channel_summary_mixin_empty():
    # With no channels every summary property is empty.
    mixin = pyhf.mixins._ChannelSummaryMixin(channels=[])
    assert mixin.channel_nbins == {}
    for summary in (mixin.channels, mixin.modifiers, mixin.samples):
        assert summary == []
39 |
40 |
def test_channel_nbins_sorted_as_channels(spec):
    # channel_nbins keys must follow the (sorted) channel order
    assert "channels" in spec
    duplicate = spec["channels"][0].copy()
    duplicate["name"] = "a_make_first_in_sort_channel2"
    spec["channels"].append(duplicate)
    mixin = pyhf.mixins._ChannelSummaryMixin(channels=spec["channels"])
    assert mixin.channels == ["a_make_first_in_sort_channel2", "channel1"]
    assert list(mixin.channel_nbins) == mixin.channels
48 |
--------------------------------------------------------------------------------
/tests/test_modifiers/bad_histosys_modifier_patch.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "op": "add",
4 | "path": "/channels/0/samples/1/modifiers",
5 | "value": [
6 | {
7 | "name": "histosys_bad",
8 | "type": "histosys",
9 | "data": {
10 | "hi_data": [
11 | 3,
12 | 6,
13 | 9
14 | ],
15 | "lo_data": [
16 | 1,
17 | 2,
18 | 3
19 | ]
20 | }
21 | }
22 | ]
23 | }
24 | ]
25 |
--------------------------------------------------------------------------------
/tests/test_modifiers/bad_shapesys_modifier_patch.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "op": "add",
4 | "path": "/channels/0/samples/1/modifiers",
5 | "value": [
6 | {
7 | "name": "shapesys_bad",
8 | "type": "shapesys",
9 | "data": [
10 | 1,
11 | 2,
12 | 3
13 | ]
14 | }
15 | ]
16 | }
17 | ]
18 |
--------------------------------------------------------------------------------
/tests/test_modifiers/bad_staterror_modifier_patch.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "op": "add",
4 | "path": "/channels/0/samples/1/modifiers",
5 | "value": [
6 | {
7 | "name": "staterror_bad",
8 | "type": "staterror",
9 | "data": [
10 | 1,
11 | 2,
12 | 3
13 | ]
14 | }
15 | ]
16 | }
17 | ]
18 |
--------------------------------------------------------------------------------
/tests/test_modifiers/issue1720_greedy_staterror.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "channel",
5 | "samples": [
6 | {
7 | "data": [
8 | 5
9 | ],
10 | "modifiers": [],
11 | "name": "signal"
12 | },
13 | {
14 | "data": [
15 | 1
16 | ],
17 | "modifiers": [
18 | {
19 | "data": [
20 | 1.5
21 | ],
22 | "name": "NP",
23 | "type": "shapesys"
24 | }
25 | ],
26 | "name": "bkg"
27 | }
28 | ]
29 | }
30 | ],
31 | "measurements": [
32 | {
33 | "config": {
34 | "parameters": [],
35 | "poi": "NP"
36 | },
37 | "name": ""
38 | }
39 | ],
40 | "observations": [
41 | {
42 | "data": [
43 | 0
44 | ],
45 | "name": "channel"
46 | }
47 | ],
48 | "version": "1.0.0"
49 | }
50 |
--------------------------------------------------------------------------------
/tests/test_modifiers/spec.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "channel_1",
5 | "samples": [
6 | {
7 | "name": "sample_1",
8 | "data": [
9 | 1,
10 | 2,
11 | 3,
12 | 4
13 | ],
14 | "modifiers": [
15 | {
16 | "name": "mu",
17 | "type": "normfactor",
18 | "data": null
19 | }
20 | ]
21 | },
22 | {
23 | "name": "sample_2",
24 | "data": [
25 | 2,
26 | 4,
27 | 6,
28 | 8
29 | ],
30 | "modifiers": []
31 | }
32 | ]
33 | }
34 | ]
35 | }
36 |
--------------------------------------------------------------------------------
/tests/test_paramviewer.py:
--------------------------------------------------------------------------------
1 | from skhep_testdata import data_path
2 |
3 | import pyhf
4 | from pyhf.parameters import ParamViewer
5 |
6 |
def test_paramviewer_simple_nonbatched(backend):
    pars = pyhf.tensorlib.astensor([1, 2, 3, 4, 5, 6, 7])

    view = ParamViewer(
        pyhf.tensorlib.shape(pars),
        {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(5, 7)}},
        ['world', 'hello'],
    )
    par_slice = view.get(pars)

    # 'world' (pars[5:7]) is selected first, then 'hello' (pars[0:2])
    assert pyhf.tensorlib.tolist(par_slice[slice(0, 2)]) == [6, 7]
    assert pyhf.tensorlib.tolist(par_slice[slice(2, 4)]) == [1, 2]
    assert pyhf.tensorlib.tolist(par_slice) == [6, 7, 1, 2]
23 |
24 |
def test_paramviewer_order(get_json_from_tarfile):
    # A ParamViewer over the full parameter vector must list parameter
    # names in the model's own parameter order.
    sbottom_archive = data_path("pyhf-ins1748602-probability-models.tar.gz")
    lhood = get_json_from_tarfile(sbottom_archive, "RegionA/BkgOnly.json")
    patch = get_json_from_tarfile(
        sbottom_archive, "RegionA/patch.sbottom_1300_205_60.json"
    )
    workspace = pyhf.workspace.Workspace(lhood)
    model = workspace.model(patches=[patch])

    pv = ParamViewer((model.config.npars,), model.config.par_map, [])
    assert list(pv.allpar_viewer.names) == model.config.par_order
36 |
37 |
def test_paramviewer_simple_batched(backend):
    pars = pyhf.tensorlib.astensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]])

    view = ParamViewer(
        pyhf.tensorlib.shape(pars),
        {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(3, 4)}},
        ['world', 'hello'],
    )
    par_slice = view.get(pars)

    assert isinstance(view.index_selection, list)
    # first dimension is batch dim
    assert all(len(selection) == 3 for selection in view.index_selection)

    assert pyhf.tensorlib.shape(par_slice) == (3, 3)
    # 'world' (column 3) comes first, then 'hello' (columns 0-1)
    assert pyhf.tensorlib.tolist(par_slice[slice(0, 1)]) == [[4, 8, 12]]
    assert pyhf.tensorlib.tolist(par_slice[slice(1, 3)]) == [[1, 5, 9], [2, 6, 10]]
    assert pyhf.tensorlib.tolist(par_slice) == [[4, 8, 12], [1, 5, 9], [2, 6, 10]]
60 |
--------------------------------------------------------------------------------
/tests/test_patchset/example_patchset.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "patchset for validation/xmlimport_input/config/example.xml",
5 | "digests": { "sha256": "7c32ca3b8db75cbafcf5cd7ed4672fa2b1fa69e391c9b89068dd947a521866ec" },
6 | "labels": ["x"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "patch_channel1_signal_syst1",
12 | "values": [0]
13 | },
14 | "patch": [
15 | {
16 | "op": "replace",
17 | "path": "/channels/0/samples/0/modifiers/0/data/hi",
18 | "value": 1.2
19 | },
20 | {
21 | "op": "replace",
22 | "path": "/channels/0/samples/0/modifiers/0/data/lo",
23 | "value": 0.8
24 | }
25 | ]
26 | }
27 | ],
28 | "version": "1.0.0"
29 | }
30 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_duplicate_patch_name.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gtt_2100_5000_800",
27 | "values": [2200, 800]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_duplicate_patch_values.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gtt_2200_5000_800",
27 | "values": [2100, 800]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_empty_patches.json:
--------------------------------------------------------------------------------
1 | ../test_schema/patchset_bad_empty_patches.json
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_no_version.json:
--------------------------------------------------------------------------------
1 | ../test_schema/patchset_bad_no_version.json
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_wrong_values_multiplicity.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gtt_2200_5000_800",
27 | "values": [2200, 5000, 800]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_bad_wrong_valuetype.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, {"a": "b"}]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gtt_2200_5000_800",
27 | "values": [2200, 5000]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_good.json:
--------------------------------------------------------------------------------
1 | ../test_schema/patchset_good.json
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_good_2_patches.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gtt_2200_5000_800",
27 | "values": [2200, 800]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_patchset/patchset_good_stringvalues.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino", "decay"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800, "Gtt"]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | },
24 | {
25 | "metadata": {
26 | "name": "Gbb_2200_5000_800",
27 | "values": [2100, 800, "Gbb"]
28 | },
29 | "patch": [
30 | {
31 | "op": "add",
32 | "path": "/foo/0/bar",
33 | "value": {
34 | "foo": [1.0]
35 | }
36 | }
37 | ]
38 | }
39 | ],
40 | "version": "1.0.0"
41 | }
42 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/1.1.0/jsonpatch.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "1.1.0/jsonpatch.json",
4 | "$ref": "defs.json#/definitions/jsonpatch"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/1.1.0/measurement.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "1.1.0/measurement.json",
4 | "$ref": "defs.json#/definitions/measurement"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/1.1.0/model.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "1.1.0/model.json",
4 | "$ref": "defs.json#/definitions/model"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/1.1.0/patchset.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "1.1.0/patchset.json",
4 | "$ref": "defs.json#/definitions/patchset"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/1.1.0/workspace.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "http://json-schema.org/draft-06/schema#",
3 | "$id": "1.1.0/workspace.json",
4 | "$ref": "defs.json#/definitions/workspace"
5 | }
6 |
--------------------------------------------------------------------------------
/tests/test_schema/customschema/custom.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "singlechannel",
5 | "samples": [
6 | {
7 | "name": "signal",
8 | "data": [
9 | 10
10 | ],
11 | "modifiers": [
12 | {
13 | "name": "mu",
14 | "type": "normfactor",
15 | "data": null
16 | }
17 | ]
18 | },
19 | {
20 | "name": "background",
21 | "data": [
22 | 5
23 | ],
24 | "modifiers": [
25 | {
26 | "name": "uncorr_bkguncrt",
27 | "type": "shapesys",
28 | "data": [
29 | 1
30 | ]
31 | }
32 | ]
33 | }
34 | ]
35 | }
36 | ],
37 | "measurements": [
38 | {
39 | "config": {
40 | "parameters": [
41 | {
42 | "auxdata": [
43 | 1
44 | ],
45 | "bounds": [
46 | [
47 | 0.915,
48 | 1.085
49 | ]
50 | ],
51 | "inits": [
52 | 1
53 | ],
54 | "name": "lumi",
55 | "sigmas": [
56 | 0.017
57 | ]
58 | }
59 | ],
60 | "poi": "mu_SIG"
61 | },
62 | "name": "NormalMeasurement"
63 | }
64 | ],
65 | "observations": [
66 | {
67 | "data": [
68 | 5
69 | ],
70 | "name": "singlechannel"
71 | }
72 | ],
73 | "version": "1.1.0"
74 | }
75 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_empty_patches.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [],
9 | "version": "1.0.0"
10 | }
11 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_hepdata_reference.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "wrongid12374" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | }
24 | ],
25 | "version": "1.0.0"
26 | }
27 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_invalid_digests.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "nonexistent": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | }
24 | ],
25 | "version": "1.0.0"
26 | }
27 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_label_pattern.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop-", "^mass_neutralino", "bad label"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | }
24 | ],
25 | "version": "1.0.0"
26 | }
27 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_description.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
5 | "labels": ["mass_stop", "mass_neutralino"]
6 | },
7 | "patches": [
8 | {
9 | "metadata": {
10 | "name": "Gtt_2100_5000_800",
11 | "values": [2100, 800]
12 | },
13 | "patch": [
14 | {
15 | "op": "add",
16 | "path": "/foo/0/bar",
17 | "value": {
18 | "foo": [1.0]
19 | }
20 | }
21 | ]
22 | }
23 | ],
24 | "version": "1.0.0"
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_digests.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "labels": ["mass_stop", "mass_neutralino"]
6 | },
7 | "patches": [
8 | {
9 | "metadata": {
10 | "name": "Gtt_2100_5000_800",
11 | "values": [2100, 800]
12 | },
13 | "patch": [
14 | {
15 | "op": "add",
16 | "path": "/foo/0/bar",
17 | "value": {
18 | "foo": [1.0]
19 | }
20 | }
21 | ]
22 | }
23 | ],
24 | "version": "1.0.0"
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_labels.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" }
6 | },
7 | "patches": [
8 | {
9 | "metadata": {
10 | "name": "Gtt_2100_5000_800",
11 | "values": [2100, 800]
12 | },
13 | "patch": [
14 | {
15 | "op": "add",
16 | "path": "/foo/0/bar",
17 | "value": {
18 | "foo": [1.0]
19 | }
20 | }
21 | ]
22 | }
23 | ],
24 | "version": "1.0.0"
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_patch_name.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "values": [2100, 800]
12 | },
13 | "patch": [
14 | {
15 | "op": "add",
16 | "path": "/foo/0/bar",
17 | "value": {
18 | "foo": [1.0]
19 | }
20 | }
21 | ]
22 | }
23 | ],
24 | "version": "1.0.0"
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_patch_values.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800"
12 | },
13 | "patch": [
14 | {
15 | "op": "add",
16 | "path": "/foo/0/bar",
17 | "value": {
18 | "foo": [1.0]
19 | }
20 | }
21 | ]
22 | }
23 | ],
24 | "version": "1.0.0"
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_bad_no_version.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | }
24 | ]
25 | }
26 |
--------------------------------------------------------------------------------
/tests/test_schema/patchset_good.json:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "references": { "hepdata": "ins1234567" },
4 | "description": "signal patchset for the SUSY Multi-b-jet analysis",
5 | "digests": { "md5": "098f6bcd4621d373cade4e832627b4f6" },
6 | "labels": ["mass_stop", "mass_neutralino"]
7 | },
8 | "patches": [
9 | {
10 | "metadata": {
11 | "name": "Gtt_2100_5000_800",
12 | "values": [2100, 800]
13 | },
14 | "patch": [
15 | {
16 | "op": "add",
17 | "path": "/foo/0/bar",
18 | "value": {
19 | "foo": [1.0]
20 | }
21 | }
22 | ]
23 | }
24 | ],
25 | "version": "1.0.0"
26 | }
27 |
--------------------------------------------------------------------------------
/tests/test_scripts/example_bkgonly.json:
--------------------------------------------------------------------------------
1 | ../test_patchset/example_bkgonly.json
--------------------------------------------------------------------------------
/tests/test_scripts/example_patchset.json:
--------------------------------------------------------------------------------
1 | ../test_patchset/example_patchset.json
--------------------------------------------------------------------------------
/tests/test_scripts/xmlimport_absolutePaths/config/example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | /absolute/path/to/config/example_channel.xml
4 |
5 | SigXsecOverSM
6 | Lumi alpha_syst1
7 |
8 |
9 |
--------------------------------------------------------------------------------
/tests/test_scripts/xmlimport_absolutePaths/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/tests/test_scripts/xmlimport_absolutePaths/data/example.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/tests/test_scripts/xmlimport_absolutePaths/data/example.root
--------------------------------------------------------------------------------
/tests/test_tensorviewer.py:
--------------------------------------------------------------------------------
1 | from pyhf.tensor.common import _TensorViewer
2 |
3 |
4 | def test_tensorviewer(backend):
5 |     tb, _ = backend  # tensorlib from the backend fixture
6 |     tv = _TensorViewer(
7 |         [tb.astensor([0, 4, 5]), tb.astensor([1, 2, 3]), tb.astensor([6])],  # flat-tensor index positions owned by each named field
8 |         names=['zzz', 'aaa', 'x'],
9 |     )
10 | 
11 |     data = tb.astensor(tb.astensor(list(range(7))) * 10, dtype='int')  # elementwise scale: [0, 10, 20, 30, 40, 50, 60]
12 | 
13 |     a = [tb.tolist(x) for x in tv.split(data, selection=['aaa'])]  # select only the 'aaa' field (indices 1, 2, 3)
14 |     assert a == [[10, 20, 30]]
15 | 
16 |     a = [tb.tolist(x) for x in tv.split(data, selection=['aaa', 'zzz'])]  # selection order determines output order
17 |     assert a == [[10, 20, 30], [0, 40, 50]]
18 | 
19 |     a = [tb.tolist(x) for x in tv.split(data, selection=['zzz', 'aaa'])]  # same fields, swapped selection order
20 |     assert a == [[0, 40, 50], [10, 20, 30]]
21 | 
22 |     a = [tb.tolist(x) for x in tv.split(data, selection=['x', 'aaa'])]
23 |     assert a == [[60], [10, 20, 30]]
24 | 
25 |     a = [tb.tolist(x) for x in tv.split(data, selection=[])]  # empty selection yields no sub-tensors
26 |     assert a == []
27 | 
28 |     a = [tb.tolist(x) for x in tv.split(data)]  # no selection: all fields, in construction order (zzz, aaa, x)
29 |     assert a == [[0, 40, 50], [10, 20, 30], [60]]
30 | 
31 |     subviewer = _TensorViewer(
32 |         [tb.astensor([0]), tb.astensor([1, 2, 3])], names=['x', 'aaa']
33 |     )
34 |     assert tb.tolist(subviewer.stitch(tv.split(data, ['x', 'aaa']))) == [60, 10, 20, 30]  # stitch reassembles split pieces into the subviewer's flat layout
35 | 
36 |     subviewer = _TensorViewer(
37 |         [tb.astensor([0, 1, 2]), tb.astensor([3])], names=['aaa', 'x']
38 |     )
39 |     assert tb.tolist(subviewer.stitch(tv.split(data, ['aaa', 'x']))) == [10, 20, 30, 60]
40 |
--------------------------------------------------------------------------------
/tests/test_toys.py:
--------------------------------------------------------------------------------
1 | import pyhf
2 | import numpy as np
3 |
4 |
5 | def test_smoketest_toys(backend):
6 | tb, _ = backend
7 | m = pyhf.simplemodels.uncorrelated_background([6], [9], [3])
8 | s = m.make_pdf(pyhf.tensorlib.astensor(m.config.suggested_init()))
9 | assert np.asarray(tb.tolist(s.log_prob(s.sample((1000,))))).shape == (1000,)
10 |
11 | tb, _ = backend
12 | m = pyhf.simplemodels.uncorrelated_background([6, 6], [9, 9], [3, 3], batch_size=13)
13 | s = m.make_pdf(pyhf.tensorlib.astensor(m.batch_size * [m.config.suggested_init()]))
14 | assert np.asarray(tb.tolist(s.sample((10,)))).shape == (10, 13, 4)
15 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import pyhf
3 |
4 |
5 | @pytest.mark.parametrize(
6 |     'opts,obj',
7 |     [
8 |         (['a=10'], {'a': 10}),  # integer strings coerce to int
9 |         (['b=test'], {'b': 'test'}),  # plain strings stay strings
10 |         (['c=1.0e-8'], {'c': 1.0e-8}),  # scientific notation parses to float
11 |         (['d=3.14'], {'d': 3.14}),
12 |         (['e=True'], {'e': True}),  # booleans accepted regardless of case
13 |         (['f=false'], {'f': False}),
14 |         (['a=b', 'c=d'], {'a': 'b', 'c': 'd'}),  # multiple opts merge into one dict
15 |         (['g=h=i'], {'g': 'h=i'}),  # only the first '=' delimits key from value
16 |     ],
17 | )
18 | def test_options_from_eqdelimstring(opts, obj):
19 |     assert pyhf.utils.options_from_eqdelimstring(opts) == obj
20 |
21 |
22 | @pytest.mark.parametrize(
23 |     'obj',
24 |     [
25 |         {'a': 2.0, 'b': 1.0, 'c': 'a'},  # same mapping in three different
26 |         {'b': 1.0, 'c': 'a', 'a': 2.0},  # insertion orders: the digest must
27 |         {'c': 'a', 'a': 2.0, 'b': 1.0},  # not depend on key order
28 |     ],
29 | )
30 | @pytest.mark.parametrize('algorithm', ['md5', 'sha256'])
31 | def test_digest(obj, algorithm):
32 |     results = {
33 |         'md5': '155e52b05179a1106d71e5e053452517',  # pinned expected digest per algorithm
34 |         'sha256': '03dfbceade79855fc9b4e4d6fbd4f437109de68330dab37c3091a15f4bffe593',
35 |     }
36 |     assert pyhf.utils.digest(obj, algorithm=algorithm) == results[algorithm]
37 |
38 |
39 | def test_digest_bad_obj():
40 |     with pytest.raises(ValueError, match="not JSON-serializable"):  # object() cannot be JSON-encoded
41 |         pyhf.utils.digest(object())
42 |
43 |
44 | def test_digest_bad_alg():
45 |     with pytest.raises(ValueError, match="nonexistent_algorithm"):  # error message should name the unknown algorithm
46 |         pyhf.utils.digest({}, algorithm='nonexistent_algorithm')
47 |
48 |
49 | @pytest.mark.parametrize('oneline', [False, True])
50 | def test_citation(oneline):
51 |     citation = pyhf.utils.citation(oneline)
52 |     assert citation  # citation text must be non-empty
53 |     if oneline:
54 |         assert '\n' not in citation  # oneline form must collapse to a single line
55 |
--------------------------------------------------------------------------------
/tests/test_workspace/poiless.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": [
3 | {
4 | "name": "singlechannel",
5 | "samples": [
6 | {
7 | "name": "signal",
8 | "data": [
9 | 12,
10 | 11
11 | ],
12 | "modifiers": [
13 | {
14 | "name": "mu",
15 | "type": "normfactor",
16 | "data": null
17 | }
18 | ]
19 | },
20 | {
21 | "name": "background",
22 | "data": [
23 | 50,
24 | 52
25 | ],
26 | "modifiers": [
27 | {
28 | "name": "uncorr_bkguncrt",
29 | "type": "shapesys",
30 | "data": [
31 | 3,
32 | 7
33 | ]
34 | }
35 | ]
36 | }
37 | ]
38 | }
39 | ],
40 | "observations": [
41 | {
42 | "name": "singlechannel",
43 | "data": [
44 | 51,
45 | 48
46 | ]
47 | }
48 | ],
49 | "measurements": [
50 | {
51 | "name": "Measurement",
52 | "config": {
53 | "poi": "",
54 | "parameters": []
55 | }
56 | }
57 | ],
58 | "version": "1.0.0"
59 | }
60 |
--------------------------------------------------------------------------------
/validation/data/1bin_example1.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [55.0],
5 | "bkg": [50.0],
6 | "bkgerr": [7.0],
7 | "sig": [10.0]
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/validation/data/1bin_example1_q0.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [80.0],
5 | "bkg": [50.0],
6 | "bkgerr": [7.0],
7 | "sig": [25.0]
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/validation/data/1bin_histosys.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [120.0],
5 | "bkg": [100.0],
6 | "bkgsys_up": [110],
7 | "bkgsys_dn": [90],
8 | "sig": [30.0]
9 | }
10 | }
11 |
12 |
--------------------------------------------------------------------------------
/validation/data/1bin_lumi.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [55.0],
5 | "bkg1": [100.0],
6 | "bkg2": [0.0],
7 | "sig": [20.0]
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/validation/data/1bin_normsys.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [120.0, 180.0],
5 | "bkg": [100.0, 150.0],
6 | "sig": [30.0, 95.0]
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/validation/data/2bin_2channel_coupledhisto.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [170.0, 220.0],
7 | "bkg1": [100.0, 100.0],
8 | "bkg1_up": [110.0, 110.0],
9 | "bkg1_dn": [90.0, 90.0],
10 | "bkg2": [50.0, 120.0],
11 | "bkg2_up": [55.0, 55.0],
12 | "bkg2_dn": [45.0, 45.0],
13 | "sig": [30.0, 35.0]
14 | }
15 | },
16 | "control": {
17 | "binning": [2,-0.5,1.5],
18 | "bindata": {
19 | "data": [110.0, 105.0],
20 | "bkg1": [105.0, 100.0],
21 | "bkg1_up": [110.0, 115.0],
22 | "bkg1_dn": [95.0, 90.0]
23 | }
24 | }
25 | }
26 | }
27 |
28 |
--------------------------------------------------------------------------------
/validation/data/2bin_2channel_couplednorm.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [105.0, 220.0],
7 | "bkg1": [100.0, 100.0],
8 | "bkg2": [50.0, 100.0],
9 | "sig": [10.0, 35.0]
10 | }
11 | },
12 | "control": {
13 | "binning": [2,-0.5,1.5],
14 | "bindata": {
15 | "data": [110.0, 105.0],
16 | "bkg1": [100.0, 100.0]
17 | }
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/validation/data/2bin_2channel_coupledshapefactor.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [220.0, 230.0],
7 | "bkg1": [100.0, 70.0],
8 | "sig": [ 20.0, 20.0]
9 | }
10 | },
11 | "control": {
12 | "binning": [2,-0.5,1.5],
13 | "bindata": {
14 | "data": [200.0, 300.0],
15 | "bkg1": [100.0, 100.0]
16 | }
17 | }
18 | }
19 | }
20 |
21 |
--------------------------------------------------------------------------------
/validation/data/2bin_2channel_example1.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [110.0, 155.0],
7 | "bkgerr": [10.0, 10.0],
8 | "bkg": [100.0, 150.0],
9 | "sig": [10.0, 35.0]
10 | }
11 | },
12 | "control": {
13 | "binning": [2,-0.5,1.5],
14 | "bindata": {
15 | "data": [205.0, 345.0],
16 | "bkg": [200.0, 350.0],
17 | "bkgerr": [5.0, 10.0]
18 | }
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/validation/data/2bin_example1.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [120.0, 180.0],
5 | "bkg": [100.0, 150.0],
6 | "bkgerr": [15.0, 20.0],
7 | "sig": [30.0, 95.0]
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/validation/data/2bin_histosys.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [120.0, 180.0],
5 | "bkg": [100.0, 150.0],
6 | "bkgsys_up": [110, 190],
7 | "bkgsys_dn": [90, 100],
8 | "sig": [30.0, 95.0]
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/validation/data/2bin_histosys_example2.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [120.0, 180.0],
5 | "bkg": [100.0, 150.0],
6 | "bkgsys_up": [102, 190],
7 | "bkgsys_dn": [98, 100],
8 | "sig": [30.0, 95.0]
9 | }
10 | }
11 |
12 |
--------------------------------------------------------------------------------
/validation/data/2bin_statex.json:
--------------------------------------------------------------------------------
1 | {
2 | "binning": [2,-0.5,1.5],
3 | "bindata": {
4 | "data": [100.0, 100.0],
5 | "bkg1": [50.0, 70.0],
6 | "bkg1err": [10.0, 10.0],
7 | "bkg2": [30.0, 20.0],
8 | "bkg2err": [ 5.0, 5.0],
9 | "bkg3": [20.0, 15.0],
10 | "sig": [10.0, 10.0]
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/validation/makedata.py:
--------------------------------------------------------------------------------
1 | import json
2 | import sys
3 |
4 | import ROOT
5 |
6 | if __name__ == "__main__":
7 | with open(sys.argv[1], encoding="utf-8") as source_file:
8 | source_data = json.load(source_file)
9 | root_file = sys.argv[2]
10 |
11 | binning = source_data["binning"]
12 | bin_data = source_data["bin_data"]
13 |
14 | out_file = ROOT.TFile(root_file, "RECREATE")
15 | data = ROOT.TH1F("data", "data", *binning)
16 | for idx, value in enumerate(bin_data["data"]):
17 | data.SetBinContent(idx + 1, value)
18 | data.Sumw2()
19 |
20 | bkg = ROOT.TH1F("bkg", "bkg", *binning)
21 | for idx, value in enumerate(bin_data["bkg"]):
22 | bkg.SetBinContent(idx + 1, value)
23 | bkg.Sumw2()
24 |
25 | if "bkgerr" in bin_data:
26 | bkgerr = ROOT.TH1F("bkgerr", "bkgerr", *binning)
27 |
28 | # shapesys must be as multiplicative factor
29 | for idx, value in enumerate(bin_data["bkgerr"]):
30 | bkgerr.SetBinContent(idx + 1, value / bkg.GetBinContent(idx + 1))
31 | bkgerr.Sumw2()
32 |
33 | sig = ROOT.TH1F("sig", "sig", *binning)
34 | for idx, value in enumerate(bin_data["sig"]):
35 | sig.SetBinContent(idx + 1, value)
36 | sig.Sumw2()
37 |
38 | out_file.Write()
39 |
--------------------------------------------------------------------------------
/validation/multibin_histfactory/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_channel.xml
19 |
20 | SigXsecOverSM
21 | Lumi
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/multibin_histfactory/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/validation/multibin_histfactory/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multibin_histfactory/data/data.root
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_channel.xml
19 |
20 | SigXsecOverSM
21 | Lumi
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multibin_histfactory_p0/data/data.root
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/results/example_GaussExample.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multibin_histfactory_p0/results/example_GaussExample.root
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/results/example_channel1_GaussExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multibin_histfactory_p0/results/example_channel1_GaussExample_model.root
--------------------------------------------------------------------------------
/validation/multibin_histfactory_p0/results/example_combined_GaussExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multibin_histfactory_p0/results/example_combined_GaussExample_model.root
--------------------------------------------------------------------------------
/validation/multichan_coupledhistosys_histfactory/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_signal.xml
19 | ./config/example_control.xml
20 |
21 | SigXsecOverSM
22 | Lumi
23 |
24 |
25 |
--------------------------------------------------------------------------------
/validation/multichan_coupledhistosys_histfactory/config/example_control.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/multichan_coupledhistosys_histfactory/config/example_signal.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/validation/multichan_coupledhistosys_histfactory/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multichan_coupledhistosys_histfactory/data/data.root
--------------------------------------------------------------------------------
/validation/multichan_coupledhistosys_histfactory/makedata.py:
--------------------------------------------------------------------------------
1 | import ROOT
2 |
3 | import json
4 | import sys
5 |
6 | with open(sys.argv[1], encoding="utf-8") as source_file:
7 | source_data = json.load(source_file)
8 | root_file = sys.argv[2]
9 |
10 | f = ROOT.TFile(root_file, 'RECREATE')
11 |
12 |
13 | hists = []
14 | for cname, channel_def in source_data['channels'].iteritems():
15 | print('CH', cname)
16 | binning = channel_def['binning']
17 | bindata = channel_def['bindata']
18 |
19 | for hist, data in bindata.iteritems():
20 | print(f'{cname}_{hist}')
21 | h = ROOT.TH1F(f'{cname}_{hist}', f'{cname}_{hist}', *binning)
22 | hists += [h]
23 | for i, v in enumerate(data):
24 | h.SetBinContent(i + 1, v)
25 | h.Sumw2()
26 |
27 | f.Write()
28 |
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_signal.xml
19 | ./config/example_control.xml
20 |
21 | SigXsecOverSM
22 | Lumi
23 |
24 |
25 |
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/config/example_control.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/config/example_signal.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multichan_coupledoverall_histfactory/data/data.root
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/makedata.py:
--------------------------------------------------------------------------------
import ROOT

import json
import sys


def _fill_hist(name, binning, values):
    """Create a TH1F called *name* with *binning* = [nbins, xlow, xhigh]
    and set bins 1..len(values) to *values* (ROOT bins are 1-indexed)."""
    h = ROOT.TH1F(name, name, *binning)
    for i, v in enumerate(values):
        h.SetBinContent(i + 1, v)
    h.Sumw2()
    return h


# Convert a JSON channel specification (argv[1]) into the data/bkg1/bkg2/sig
# histograms expected by the HistFactory XML configs, written to argv[2].
with open(sys.argv[1], encoding="utf-8") as source_file:
    source_data = json.load(source_file)
root_file = sys.argv[2]

f = ROOT.TFile(root_file, 'RECREATE')

# Keep references alive until f.Write() persists everything.
hists = []
# BUG FIX: dict.iteritems() was removed in Python 3; this file already
# requires Python 3 (f-strings), so use .items().
for cname, channel_def in source_data['channels'].items():
    print('CH', cname)
    binning = channel_def['binning']
    bindata = channel_def['bindata']

    hists.append(_fill_hist(f'{cname}_data', binning, bindata['data']))
    hists.append(_fill_hist(f'{cname}_bkg1', binning, bindata['bkg1']))

    # bkg2 and sig are optional per channel (e.g. the control region
    # in multichannel_data.json has neither).
    if 'bkg2' in bindata:
        hists.append(_fill_hist(f'{cname}_bkg2', binning, bindata['bkg2']))

    if 'sig' in bindata:
        hists.append(_fill_hist(f'{cname}_signal', binning, bindata['sig']))
f.Write()
--------------------------------------------------------------------------------
/validation/multichan_coupledoverall_histfactory/multichannel_data.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [105.0, 220.0],
7 | "bkg1": [100.0, 100.0],
8 | "bkg2": [50.0, 100.0],
9 | "sig": [10.0, 35.0]
10 | }
11 | },
12 | "control": {
13 | "binning": [2,-0.5,1.5],
14 | "bindata": {
15 | "data": [110.0, 105.0],
16 | "bkg1": [100.0, 100.0]
17 | }
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_signal.xml
19 | ./config/example_control.xml
20 |
21 | SigXsecOverSM
22 | Lumi
23 |
24 |
25 |
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/config/example_control.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/config/example_signal.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/multichannel_histfactory/data/data.root
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/makedata.py:
--------------------------------------------------------------------------------
import ROOT

import json
import sys

# Convert a JSON channel specification (argv[1]) into the data/bkg/bkgerr/sig
# histograms expected by the HistFactory XML configs, written to argv[2].
with open(sys.argv[1], encoding="utf-8") as source_file:
    source_data = json.load(source_file)
root_file = sys.argv[2]

f = ROOT.TFile(root_file, 'RECREATE')

# BUG FIX: dict.iteritems() was removed in Python 3; this file already
# requires Python 3 (f-strings), so use .items().
for cname, channel_def in source_data['channels'].items():
    print('CH', cname)
    # binning is a [nbins, xlow, xhigh] triple unpacked into the TH1F ctor.
    binning = channel_def['binning']
    bindata = channel_def['bindata']

    data = ROOT.TH1F(f'{cname}_data', f'{cname}_data', *binning)
    for i, v in enumerate(bindata['data']):
        data.SetBinContent(i + 1, v)
    data.Sumw2()

    print(data.GetName())

    bkg = ROOT.TH1F(f'{cname}_bkg', f'{cname}_bkg', *binning)
    for i, v in enumerate(bindata['bkg']):
        bkg.SetBinContent(i + 1, v)
    bkg.Sumw2()

    if 'bkgerr' in bindata:
        bkgerr = ROOT.TH1F(f'{cname}_bkgerr', f'{cname}_bkgerr', *binning)

        # shapesys must be given as a multiplicative factor relative to the
        # nominal background, so divide the absolute uncertainty by the
        # nominal bin content.
        # NOTE(review): assumes every bkg bin is non-zero — the JSON inputs
        # in this directory satisfy that; a zero bin would raise here.
        for i, v in enumerate(bindata['bkgerr']):
            bkgerr.SetBinContent(i + 1, v / bkg.GetBinContent(i + 1))
        bkgerr.Sumw2()

    if 'sig' in bindata:
        sig = ROOT.TH1F(f'{cname}_signal', f'{cname}_signal', *binning)
        for i, v in enumerate(bindata['sig']):
            sig.SetBinContent(i + 1, v)
        sig.Sumw2()
f.Write()
--------------------------------------------------------------------------------
/validation/multichannel_histfactory/multichannel_data.json:
--------------------------------------------------------------------------------
1 | {
2 | "channels": {
3 | "signal": {
4 | "binning": [2,-0.5,1.5],
5 | "bindata": {
6 | "data": [110.0, 155.0],
7 | "bkgerr": [10.0, 10.0],
8 | "bkg": [100.0, 150.0],
9 | "sig": [10.0, 35.0]
10 | }
11 | },
12 | "control": {
13 | "binning": [2,-0.5,1.5],
14 | "bindata": {
15 | "data": [205.0, 345.0],
16 | "bkg": [200.0, 350.0],
17 | "bkgerr": [5.0, 10.0]
18 | }
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/validation/overallsys_histfactory/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_channel.xml
19 |
20 | SigXsecOverSM
21 | Lumi
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/overallsys_histfactory/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/validation/overallsys_histfactory/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/overallsys_histfactory/data/data.root
--------------------------------------------------------------------------------
/validation/run_cls.py:
--------------------------------------------------------------------------------
import sys

import ROOT

if __name__ == "__main__":
    # Path to a combined HistFactory workspace ROOT file (argv[1]).
    infile = sys.argv[1]

    infile = ROOT.TFile.Open(infile)
    workspace = infile.Get("combined")
    data = workspace.data("obsData")

    # Signal+background model: the ModelConfig stored in the workspace.
    sb_model = workspace.obj("ModelConfig")
    poi = sb_model.GetParametersOfInterest().first()

    # Snapshot the POI at its current value for the S+B hypothesis.
    # NOTE(review): unlike run_single_q0.py, the POI is not explicitly
    # set before this snapshot — it uses the workspace's stored value.
    sb_model.SetSnapshot(ROOT.RooArgSet(poi))

    # Background-only model: clone of S+B with the POI frozen at 0.
    bkg_model = sb_model.Clone()
    bkg_model.SetName("bonly")
    poi.setVal(0)
    bkg_model.SetSnapshot(ROOT.RooArgSet(poi))

    # One-sided q-tilde asymptotic test (CLs upper-limit convention).
    calc = ROOT.RooStats.AsymptoticCalculator(data, bkg_model, sb_model)
    calc.SetPrintLevel(10)
    calc.SetOneSided(True)
    calc.SetQTilde(True)

    # Scan 51 equally spaced POI values in [0, 5] and invert the test
    # at 95% confidence using CLs.
    test_inverter = ROOT.RooStats.HypoTestInverter(calc)
    test_inverter.RunFixedScan(51, 0, 5)
    test_inverter.SetConfidenceLevel(0.95)
    test_inverter.UseCLs(True)

    result = test_inverter.GetInterval()

    # Render the observed/expected CLs scan to scan.pdf for inspection.
    plot = ROOT.RooStats.HypoTestInverterPlot("plot", "plot", result)
    canvas = ROOT.TCanvas()
    canvas.SetLogy(False)
    plot.Draw("OBS EXP CLb 2CL")
    canvas.Draw()
    canvas.SaveAs("scan.pdf")

    print(f"observed: {result.UpperLimit()}")

    # Expected upper limits for the -2..+2 sigma bands.
    for n_sigma in [-2, -1, 0, 1, 2]:
        print(f"expected {n_sigma}: {result.GetExpectedUpperLimit(n_sigma)}")
--------------------------------------------------------------------------------
/validation/run_single.py:
--------------------------------------------------------------------------------
import json
import sys

import ROOT

if __name__ == "__main__":
    # Path to a combined HistFactory workspace ROOT file (argv[1]).
    infile = sys.argv[1]

    infile = ROOT.TFile.Open(infile)
    workspace = infile.Get("combined")
    data = workspace.data("obsData")

    # Signal+background model from the stored ModelConfig.
    sb_model = workspace.obj("ModelConfig")
    poi = sb_model.GetParametersOfInterest().first()

    # Snapshot the POI at its current value for the S+B hypothesis.
    # NOTE(review): the POI is not explicitly set before this snapshot
    # (cf. run_single_q0.py, which does poi.setVal(1) first) — confirm the
    # workspace's stored POI value is the intended test point.
    sb_model.SetSnapshot(ROOT.RooArgSet(poi))

    # Background-only model: clone of S+B with the POI frozen at 0.
    bkg_model = sb_model.Clone()
    bkg_model.SetName("bonly")
    poi.setVal(0)
    bkg_model.SetSnapshot(ROOT.RooArgSet(poi))

    # One-sided q-tilde asymptotic hypothesis test.
    calc = ROOT.RooStats.AsymptoticCalculator(data, bkg_model, sb_model)
    calc.SetPrintLevel(10)
    calc.SetOneSided(True)
    calc.SetQTilde(True)

    # Single scan point (1 point at POI value 1) — this script produces
    # CLs at one test point rather than a full upper-limit scan.
    test_inverter = ROOT.RooStats.HypoTestInverter(calc)
    test_inverter.SetConfidenceLevel(0.95)
    test_inverter.UseCLs(True)
    test_inverter.RunFixedScan(1, 1, 1)

    result = test_inverter.GetInterval()

    # Observed CLs at the single scan point, plus the expected band.
    index = 0
    dist = result.GetExpectedPValueDist(index)
    CLs_obs = result.CLs(index)
    # The sampling distribution includes extreme quantiles at both ends;
    # slicing [3:-3] keeps the -2..+2 sigma expected values.
    CLs_exp = list(dist.GetSamplingDistribution())[3:-3]

    print(
        json.dumps({"CLs_obs": CLs_obs, "CLs_exp": CLs_exp}, sort_keys=True, indent=4)
    )
--------------------------------------------------------------------------------
/validation/run_single_q0.py:
--------------------------------------------------------------------------------
import json
import sys

import ROOT

if __name__ == "__main__":
    # Path to a combined HistFactory workspace ROOT file (argv[1]).
    infile = sys.argv[1]

    infile = ROOT.TFile.Open(infile)
    workspace = infile.Get("combined")
    data = workspace.data("obsData")

    # Signal+background model with the POI snapshotted at 1.
    sb_model = workspace.obj("ModelConfig")
    poi = sb_model.GetParametersOfInterest().first()
    poi.setVal(1)
    sb_model.SetSnapshot(ROOT.RooArgSet(poi))

    # Background-only model: clone of S+B with the POI frozen at 0.
    bkg_model = sb_model.Clone()
    bkg_model.SetName("bonly")
    poi.setVal(0)
    bkg_model.SetSnapshot(ROOT.RooArgSet(poi))

    # Discovery test (q0): null is background-only, alternative is S+B —
    # note the (data, alt, null) argument order swaps the models relative
    # to the CLs scripts in this directory.
    calc = ROOT.RooStats.AsymptoticCalculator(data, sb_model, bkg_model)
    calc.SetPrintLevel(10)
    calc.SetOneSidedDiscovery(True)

    result = calc.GetHypoTest()
    pnull_obs = result.NullPValue()
    palt_obs = result.AlternatePValue()
    # usecls=0: report plain p-values, not CLs, for the expected band.
    usecls = 0
    pnull_exp = [
        calc.GetExpectedPValues(pnull_obs, palt_obs, sigma, usecls)
        for sigma in [-2, -1, 0, 1, 2]
    ]

    print(
        json.dumps({"p0_obs": pnull_obs, "p0_exp": pnull_exp}, sort_keys=True, indent=4)
    )
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/config/example.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | ./config/example_channel.xml
5 |
6 |
7 | SigXsecOverSM
8 | Lumi
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/config/example_notshared.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | ./config/example_notshared_channel.xml
5 |
6 |
7 | SigXsecOverSM
8 | Lumi
9 |
10 |
11 |
12 |
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/config/example_notshared_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/shared_nuispar_across_types/data/data.root
--------------------------------------------------------------------------------
/validation/shared_nuispar_across_types/make_data.py:
--------------------------------------------------------------------------------
import ROOT

# Two-bin template histograms for the shared-nuisance-parameter validation:
# (histogram name, bin contents) pairs written to data.root.
sig = 'sig', [3, 1]
nom = 'nom', [12, 13]

histo_up = 'hup', [14, 15]
histo_dn = 'hdn', [10, 11]

data = 'data', [15, 16]

f = ROOT.TFile.Open('data.root', 'recreate')


# Build each TH1F with 2 bins over [-0.5, 1.5], fill it, and write it out.
for name, contents in [sig, nom, histo_up, histo_dn, data]:
    hist = ROOT.TH1F(name, name, 2, -0.5, 1.5)
    # ROOT bins are 1-indexed, so start enumerating at 1.
    for bin_index, value in enumerate(contents, start=1):
        hist.SetBinContent(bin_index, value)
    hist.Sumw2()
    hist.Write()

f.Close()
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/example_channel.xml
20 |
21 |
22 | SigXsecOverSM
23 | Lumi alpha_syst1
24 |
25 |
26 |
27 |
28 | SigXsecOverSM
29 | Lumi alpha_syst1
30 | syst2
31 |
32 |
33 |
34 | SigXsecOverSM
35 | Lumi alpha_syst1
36 | syst2
37 |
38 |
39 |
40 | SigXsecOverSM
41 | Lumi alpha_syst1
42 |
43 |
44 |
45 |
46 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_DataDriven.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/examples/example_DataDriven_signalRegion.xml
20 | ./config/examples/example_DataDriven_controlRegion.xml
21 |
22 |
23 | SigXsecOverSM
24 | Lumi alpha_syst1
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_DataDriven_controlRegion.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_DataDriven_signalRegion.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_Expression.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 | ./config/examples/example_Expression_channel.xml
23 |
24 |
25 | SigXsecOverSM
26 | Lumi alpha_syst1
27 |
28 |
29 |
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_Expression_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_ShapeSys.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/examples/example_ShapeSys_channel.xml
20 |
21 |
22 |
23 | SigXsecOverSM
24 | Lumi alpha_syst1
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_ShapeSys2D.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/examples/example_ShapeSys2D_channel.xml
20 |
21 |
22 |
23 | SigXsecOverSM
24 | Lumi alpha_syst1
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_ShapeSys2D_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/config/examples/example_ShapeSys_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/data/ShapeSys.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/data/ShapeSys.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/data/ShapeSys2D.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/data/ShapeSys2D.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/data/StatError.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/data/StatError.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/data/dataDriven.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/data/dataDriven.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/data/example.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/data/example.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_ConstExample.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_ConstExample.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_GammaExample.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_GammaExample.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_GaussExample.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_GaussExample.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_LogNormExample.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_LogNormExample.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_channel1_ConstExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_channel1_ConstExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_channel1_GammaExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_channel1_GammaExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_channel1_GaussExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_channel1_GaussExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_channel1_LogNormExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_channel1_LogNormExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_combined_ConstExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_combined_ConstExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_combined_GammaExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_combined_GammaExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_combined_GaussExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_combined_GaussExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_combined_LogNormExample_model.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/results/example_combined_LogNormExample_model.root
--------------------------------------------------------------------------------
/validation/xmlimport_input/results/example_results.table:
--------------------------------------------------------------------------------
1 | GaussExample & -0.5968 / 0.6116 & -0.5968 / 0.6116 \\
2 | GammaExample & & \\
3 | LogNormExample & & \\
4 | ConstExample & & \\
5 | GaussExample & -0.5968 / 0.6116 & -0.5968 / 0.6116 \\
6 | GammaExample & & \\
7 | LogNormExample & & \\
8 | ConstExample & & \\
9 |
--------------------------------------------------------------------------------
/validation/xmlimport_input/scan.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input/scan.pdf
--------------------------------------------------------------------------------
/validation/xmlimport_input2/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 | ./config/example_signal.xml
19 | ./config/example_control.xml
20 |
21 | SigXsecOverSM
22 | Lumi
23 |
24 |
25 |
--------------------------------------------------------------------------------
/validation/xmlimport_input2/config/example_control.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/validation/xmlimport_input2/config/example_signal.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/validation/xmlimport_input2/data/data.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input2/data/data.root
--------------------------------------------------------------------------------
/validation/xmlimport_input2/makedata.py:
--------------------------------------------------------------------------------
import ROOT

import json
import sys

# Convert a JSON channel specification (argv[1]) into TH1F histograms
# stored in a ROOT file (argv[2]): one histogram per (channel, sample).
with open(sys.argv[1], encoding="utf-8") as source_file:
    source_data = json.load(source_file)
root_file = sys.argv[2]

f = ROOT.TFile(root_file, 'RECREATE')


# Keep Python references to every histogram so they stay alive until
# f.Write() persists the file's contents.
hists = []
# BUG FIX: dict.iteritems() was removed in Python 3; this file already
# requires Python 3 (f-strings below), so use .items().
for cname, channel_def in source_data['channels'].items():
    print('CH', cname)
    # binning is a [nbins, xlow, xhigh] triple unpacked into the TH1F ctor.
    binning = channel_def['binning']
    bindata = channel_def['bindata']

    for hist, data in bindata.items():
        print(f'{cname}_{hist}')
        h = ROOT.TH1F(f'{cname}_{hist}', f'{cname}_{hist}', *binning)
        hists += [h]
        # ROOT bins are 1-indexed; bin 0 is the underflow bin.
        for i, v in enumerate(data):
            h.SetBinContent(i + 1, v)
        h.Sumw2()

f.Write()
--------------------------------------------------------------------------------
/validation/xmlimport_input3/config/examples/example_ShapeSys.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/examples/example_ShapeSys_channel.xml
20 |
21 |
22 |
23 | SigXsecOverSM
24 | Lumi alpha_syst1
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/validation/xmlimport_input3/config/examples/example_ShapeSys_channel.xml:
--------------------------------------------------------------------------------
1 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
--------------------------------------------------------------------------------
/validation/xmlimport_input3/data/ShapeSys.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input3/data/ShapeSys.root
--------------------------------------------------------------------------------
/validation/xmlimport_input4/config/example.xml:
--------------------------------------------------------------------------------
1 |
6 |
7 |
14 |
15 |
16 |
17 |
18 |
19 | ./config/example_channel.xml
20 |
21 |
22 | SigXsecOverSM
23 | Lumi alpha_syst1
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/validation/xmlimport_input4/config/example_channel.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/validation/xmlimport_input4/data/example.root:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/scikit-hep/pyhf/40ebf6dadd73de3830a931eac04a260ef8f62709/validation/xmlimport_input4/data/example.root
--------------------------------------------------------------------------------