├── src └── pylhe │ ├── py.typed │ ├── _version.pyi │ └── awkward.py ├── apt.txt ├── examples ├── codecov.yml ├── docs ├── source │ ├── bibliography.rst │ ├── examples │ │ ├── z0-event.pdf │ │ ├── 93_parquet_cache.ipynb │ │ ├── 92_multiple_files.ipynb │ │ └── 03_write_monte_carlo_example.ipynb │ ├── _static │ │ └── img │ │ │ └── pylhe-logo.png │ ├── _templates │ │ ├── as_module.rst │ │ ├── custom-class-template.rst │ │ └── custom-module-template.rst │ ├── index.rst │ ├── references.bib │ ├── conf.py │ └── lhe.rst ├── Makefile └── make.bat ├── .flake8 ├── environment.yml ├── pytest.ini ├── .github ├── dependabot.yml └── workflows │ ├── semantic-pr-check.yml │ ├── lint.yml │ ├── draft-pdf.yml │ ├── ci.yml │ ├── benchmark.yml │ ├── docs.yml │ ├── publish-package.yml │ └── bump-version.yml ├── .gitignore ├── docker └── Dockerfile ├── tests ├── test_api.py ├── test_visualize.py ├── test_awkward.py ├── test_classes.py ├── test_lhe_writer.py ├── test_errors.py ├── test_warnings.py └── test_lhe_reader.py ├── .readthedocs.yaml ├── CITATION.cff ├── tbump.toml ├── .zenodo.json ├── .pre-commit-config.yaml ├── benchmarks ├── test_count_events_bench.py └── test_awkward_bench.py ├── CODE_OF_CONDUCT.md ├── .all-contributorsrc ├── pyproject.toml ├── paper └── paper.md ├── LICENSE └── README.md /src/pylhe/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /apt.txt: -------------------------------------------------------------------------------- 1 | graphviz 2 | -------------------------------------------------------------------------------- /examples: -------------------------------------------------------------------------------- 1 | docs/source/examples -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | codecov: 2 | require_ci_to_pass: true 3 | -------------------------------------------------------------------------------- /docs/source/bibliography.rst: -------------------------------------------------------------------------------- 1 | Bibliography 2 | ============ 3 | 4 | .. 
bibliography:: 5 | -------------------------------------------------------------------------------- /src/pylhe/_version.pyi: -------------------------------------------------------------------------------- 1 | version: str 2 | version_tuple: tuple[int, int, int] | tuple[int, int, int, str, str] 3 | -------------------------------------------------------------------------------- /docs/source/examples/z0-event.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scikit-hep/pylhe/HEAD/docs/source/examples/z0-event.pdf -------------------------------------------------------------------------------- /docs/source/_static/img/pylhe-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scikit-hep/pylhe/HEAD/docs/source/_static/img/pylhe-logo.png -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | # E501: line too long 3 | extend-ignore = E501 4 | max-line-length = 88 5 | max-complexity = 18 6 | count = True 7 | statistics = True 8 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: pylhe 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - hist>=2 6 | # needed by parquet cache 7 | - pyarrow 8 | - scikit-hep-testdata>=0.5.5 9 | - pip: 10 | - . 11 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | docstyle_convention = numpy 3 | junit_family = xunit2 4 | addopts = --ignore=setup.py --ignore=binder/ --ignore=docs/ --cov=pylhe --cov-report=term-missing --cov-config=.coveragerc --cov-report xml --doctest-modules --doctest-glob='*.rst' 5 | -------------------------------------------------------------------------------- /docs/source/_templates/as_module.rst: -------------------------------------------------------------------------------- 1 | :mod:`{{fullname}}` 2 | ====================== 3 | 4 | .. currentmodule:: {{fullname}} 5 | 6 | 7 | .. automodule:: {{fullname}} 8 | :members: 9 | 10 | .. autosummary:: 11 | {% for element in functions %} 12 | {{element}} 13 | {% endfor %} 14 | 15 | .. 
rubric:: Functions 16 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "weekly" 8 | groups: 9 | actions: 10 | patterns: 11 | - "*" 12 | labels: 13 | - "github-actions" 14 | - "dependencies" 15 | reviewers: 16 | - "matthewfeickert" 17 | - "eduardo-rodrigues" 18 | - "APN-Pucky" 19 | -------------------------------------------------------------------------------- /.github/workflows/semantic-pr-check.yml: -------------------------------------------------------------------------------- 1 | name: Semantic Pull Request 2 | 3 | on: 4 | pull_request_target: 5 | types: 6 | - opened 7 | - edited 8 | - synchronize 9 | 10 | concurrency: 11 | group: semantic-pr-${{ github.ref }} 12 | cancel-in-progress: true 13 | 14 | jobs: 15 | main: 16 | 17 | name: Validate PR title 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - name: Check PR title matches Conventional Commits spec 22 | uses: amannn/action-semantic-pull-request@v6 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | src/pylhe/_version.py 2 | MANIFEST 3 | build 4 | dist 5 | _build 6 | *.py[co] 7 | __pycache__ 8 | *.egg-info 9 | *~ 10 | *.bak 11 | .ipynb_checkpoints 12 | .tox 13 | .DS_Store 14 | \#*# 15 | .#* 16 | .coverage* 17 | !.coveragerc 18 | coverage* 19 | *,cover 20 | 21 | *.swp 22 | *.map 23 | .idea/ 24 | config.rst 25 | 26 | /.project 27 | /.pydevproject 28 | 29 | # pytest 30 | .pytest_cache 31 | htmlcov 32 | .benchmarks 33 | 34 | docs/source/_autosummary 35 | docs/source/examples/*.lhe 36 | docs/source/examples/*.lhe.gz 37 | docs/source/examples/*.parquet 38 | 39 | paper/paper.pdf 40 | paper/jats 41 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE=python:3.12-slim 2 | # hadolint ignore=DL3006 3 | FROM ${BASE_IMAGE} as base 4 | 5 | FROM base as builder 6 | COPY . 
/code 7 | RUN apt-get -qq -y update && \ 8 | apt-get -qq -y install --no-install-recommends \ 9 | git && \ 10 | apt-get -y autoclean && \ 11 | apt-get -y autoremove && \ 12 | rm -rf /var/lib/apt-get/lists/* && \ 13 | cd /code && \ 14 | python -m pip install uv && \ 15 | uv pip install --system --upgrade --no-cache pip wheel && \ 16 | uv pip install --system --no-cache . && \ 17 | python -m pip list 18 | 19 | FROM base 20 | COPY --from=builder /usr/local /usr/local 21 | ENTRYPOINT pip list | grep Package && pip list | grep pylhe && /bin/sh 22 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | pull_request: 5 | workflow_dispatch: 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | permissions: 12 | contents: read 13 | 14 | jobs: 15 | lint: 16 | 17 | name: Lint Codebase 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v6 22 | with: 23 | fetch-depth: 0 24 | 25 | - name: Set up Python 26 | uses: actions/setup-python@v6 27 | with: 28 | python-version: '3.x' 29 | 30 | - name: Lint with flake8 31 | run: | 32 | pipx run flake8 . 33 | 34 | - name: Lint with Black 35 | run: | 36 | pipx run black --check --diff --verbose . 37 | -------------------------------------------------------------------------------- /.github/workflows/draft-pdf.yml: -------------------------------------------------------------------------------- 1 | name: Draft PDF 2 | on: [push] 3 | 4 | jobs: 5 | paper: 6 | runs-on: ubuntu-latest 7 | name: Paper Draft 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v6 11 | - name: Build draft PDF 12 | uses: openjournals/openjournals-draft-action@master 13 | with: 14 | journal: joss 15 | # This should be the path to the paper within your repo. 16 | paper-path: paper/paper.md 17 | - name: Upload 18 | uses: actions/upload-artifact@v6 19 | with: 20 | name: paper 21 | # This is the output path where Pandoc will write the compiled 22 | # PDF. Note, this should be the same directory as the input 23 | # paper.md 24 | path: paper/paper.pdf 25 | -------------------------------------------------------------------------------- /docs/source/_templates/custom-class-template.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. currentmodule:: {{ module }} 4 | 5 | .. autoclass:: {{ objname }} 6 | :members: 7 | :show-inheritance: 8 | :inherited-members: 9 | 10 | {% block methods %} 11 | .. automethod:: __init__ 12 | 13 | {% if methods %} 14 | .. rubric:: {{ _('Methods') }} 15 | 16 | .. autosummary:: 17 | {% for item in methods %} 18 | ~{{ name }}.{{ item }} 19 | {%- endfor %} 20 | {% endif %} 21 | {% endblock %} 22 | 23 | {% block attributes %} 24 | {% if attributes %} 25 | .. rubric:: {{ _('Attributes') }} 26 | 27 | .. 
autosummary:: 28 | :nosignatures: 29 | {% for item in attributes %} 30 | ~{{ name }}.{{ item }} 31 | {%- endfor %} 32 | {% endif %} 33 | {% endblock %} 34 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | import pylhe 2 | 3 | 4 | def test_top_level_api(): 5 | assert dir(pylhe) == [ 6 | "LHEEvent", 7 | "LHEEventInfo", 8 | "LHEFile", 9 | "LHEInit", 10 | "LHEInitInfo", 11 | "LHEParticle", 12 | "LHEProcInfo", 13 | "LHEWeightGroup", 14 | "LHEWeightInfo", 15 | "__version__", 16 | "read_lhe", 17 | "read_lhe_file", 18 | "read_lhe_init", 19 | "read_lhe_with_attributes", 20 | "read_num_events", 21 | "to_awkward", 22 | "write_lhe_file", 23 | "write_lhe_file_path", 24 | "write_lhe_file_string", 25 | "write_lhe_string", 26 | ] 27 | 28 | 29 | def test_awkward_api(): 30 | assert dir(pylhe.awkward) == ["to_awkward"] 31 | 32 | 33 | def test_load_version(): 34 | assert pylhe.__version__ 35 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the version of Python and other tools you might need 9 | build: 10 | os: ubuntu-22.04 11 | tools: 12 | python: "3.12" 13 | apt_packages: 14 | - curl 15 | - jq 16 | jobs: 17 | post_create_environment: 18 | - pip install uv 19 | post_install: 20 | # VIRTUAL_ENV needs to be set manually for now. 21 | # See https://github.com/readthedocs/readthedocs.org/pull/11152/ 22 | - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH uv pip install '.[docs]' 23 | 24 | # Build documentation in the docs/ directory with Sphinx 25 | sphinx: 26 | configuration: docs/source/conf.py 27 | 28 | # If using Sphinx, optionally build your docs in additional formats such as PDF and ePub 29 | formats: all 30 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | %SPHINXBUILD% >NUL 2>NUL 14 | if errorlevel 9009 ( 15 | echo. 16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 17 | echo.installed, then set the SPHINXBUILD environment variable to point 18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 19 | echo.may add the Sphinx directory to PATH. 20 | echo. 21 | echo.If you don't have Sphinx installed, grab it from 22 | echo.https://www.sphinx-doc.org/ 23 | exit /b 1 24 | ) 25 | 26 | if "%1" == "" goto help 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. pylhe documentation master file, created by 2 | sphinx-quickstart on Thu Jul 31 09:01:00 2025. 
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root ``toctree`` directive. 5 | 6 | |project| |version| documentation 7 | ================================= 8 | 9 | .. include:: ../../README.md 10 | :parser: myst_parser.sphinx_ 11 | 12 | 13 | .. toctree:: 14 | :glob: 15 | :maxdepth: 3 16 | :caption: Format Reference: 17 | 18 | lhe.rst 19 | bibliography.rst 20 | 21 | .. autosummary:: 22 | :toctree: _autosummary 23 | :template: custom-module-template.rst 24 | :recursive: 25 | :caption: Modules: 26 | 27 | pylhe 28 | pylhe.awkward 29 | 30 | 31 | .. toctree:: 32 | :glob: 33 | :maxdepth: 3 34 | :caption: Examples: 35 | 36 | examples/* 37 | 38 | .. toctree:: 39 | :glob: 40 | :hidden: 41 | :caption: Links: 42 | :maxdepth: 3 43 | 44 | GitHub 45 | 46 | 47 | 48 | Indices and tables 49 | ================== 50 | 51 | * :ref:`genindex` 52 | * :ref:`modindex` 53 | * :ref:`search` 54 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "Please cite the following works when using this software." 3 | type: software 4 | authors: 5 | - family-names: "Heinrich" 6 | given-names: "Lukas" 7 | orcid: "https://orcid.org/0000-0002-4048-7584" 8 | affiliation: "Technical University of Munich" 9 | - family-names: "Feickert" 10 | given-names: "Matthew" 11 | orcid: "https://orcid.org/0000-0003-4124-7862" 12 | affiliation: "University of Wisconsin-Madison" 13 | - family-names: "Rodrigues" 14 | given-names: "Eduardo" 15 | orcid: "https://orcid.org/0000-0003-2846-7625" 16 | affiliation: "University of Liverpool" 17 | - family-names: "Neuwirth" 18 | given-names: "Alexander Puck" 19 | orcid: "https://orcid.org/0000-0002-2484-1328" 20 | affiliation: "Institut für Theoretische Physik, Universität Münster" 21 | title: "pylhe: v1.0.2" 22 | version: 1.0.2 23 | doi: 10.5281/zenodo.1217031 24 | repository-code: "https://github.com/scikit-hep/pylhe/releases/tag/v1.0.2" 25 | keywords: 26 | - lhe 27 | - physics 28 | - python 29 | - scikit-hep 30 | license: "Apache-2.0" 31 | abstract: | 32 | A small package to get structured data out of Les Houches Event files. 33 | -------------------------------------------------------------------------------- /tbump.toml: -------------------------------------------------------------------------------- 1 | github_url = "https://github.com/scikit-hep/pylhe/" 2 | 3 | [version] 4 | current = "1.0.2" 5 | 6 | # Example of a semver regexp. 7 | # Make sure this matches current_version before 8 | # using tbump 9 | regex = ''' 10 | (?P\d+) 11 | \. 12 | (?P\d+) 13 | \. 14 | (?P\d+) 15 | (rc 16 | (?P\d+) 17 | )? 18 | ''' 19 | 20 | [git] 21 | # The current version will get updated when tbump is run 22 | message_template = "Bump version: 1.0.2 → {new_version}" 23 | tag_template = "v{new_version}" 24 | 25 | # For each file to patch, add a [[file]] config 26 | # section containing the path of the file, relative to the 27 | # tbump.toml location. 28 | [[file]] 29 | src = "tbump.toml" 30 | # Restrict search to make it explicit why tbump.toml 31 | # is even included as a file to bump, as it will get 32 | # its version.current attribute bumped anyway. 
33 | search = "Bump version: {current_version} → " 34 | 35 | [[file]] 36 | src = "README.md" 37 | 38 | [[file]] 39 | src = ".zenodo.json" 40 | 41 | [[file]] 42 | src = "CITATION.cff" 43 | 44 | [[field]] 45 | # the name of the field 46 | name = "candidate" 47 | # the default value to use, if there is no match 48 | default = "" 49 | -------------------------------------------------------------------------------- /docs/source/_templates/custom-module-template.rst: -------------------------------------------------------------------------------- 1 | {{ fullname | escape | underline}} 2 | 3 | .. automodule:: {{ fullname }} 4 | 5 | {% block attributes %} 6 | {% if attributes %} 7 | .. rubric:: Module Attributes 8 | 9 | .. autosummary:: 10 | :toctree: 11 | {% for item in attributes %} 12 | {{ item }} 13 | {%- endfor %} 14 | {% endif %} 15 | {% endblock %} 16 | 17 | {% block functions %} 18 | {% if functions %} 19 | .. rubric:: {{ _('Functions') }} 20 | 21 | .. autosummary:: 22 | :toctree: 23 | {% for item in functions %} 24 | {{ item }} 25 | {%- endfor %} 26 | {% endif %} 27 | {% endblock %} 28 | 29 | {% block classes %} 30 | {% if classes %} 31 | .. rubric:: {{ _('Classes') }} 32 | 33 | .. autosummary:: 34 | :toctree: 35 | :template: custom-class-template.rst 36 | {% for item in classes %} 37 | {{ item }} 38 | {%- endfor %} 39 | {% endif %} 40 | {% endblock %} 41 | 42 | {% block exceptions %} 43 | {% if exceptions %} 44 | .. rubric:: {{ _('Exceptions') }} 45 | 46 | .. autosummary:: 47 | :toctree: 48 | {% for item in exceptions %} 49 | {{ item }} 50 | {%- endfor %} 51 | {% endif %} 52 | {% endblock %} 53 | -------------------------------------------------------------------------------- /docs/source/references.bib: -------------------------------------------------------------------------------- 1 | @article{Alwall:2006yp, 2 | author = "Alwall, J. and others", 3 | title = "{A Standard format for Les Houches event files}", 4 | eprint = "hep-ph/0609017", 5 | archivePrefix = "arXiv", 6 | reportNumber = "FERMILAB-PUB-06-337-T, CERN-LCGAPP-2006-03", 7 | doi = "10.1016/j.cpc.2006.11.010", 8 | journal = "Comput. Phys. Commun.", 9 | volume = "176", 10 | pages = "300--304", 11 | year = "2007" 12 | } 13 | 14 | %journal had to be injected below to fix missing field warning in Sphinx bibtex extension 15 | @article{Andersen:2014efa, 16 | author = "Andersen, J. R. and others", 17 | title = "{Les Houches 2013: Physics at TeV Colliders: Standard Model Working Group Report}", 18 | eprint = "1405.1067", 19 | archivePrefix = "arXiv", 20 | primaryClass = "hep-ph", 21 | month = "5", 22 | year = "2014", 23 | journal = "arXiv e-prints" 24 | } 25 | 26 | @inproceedings{Butterworth:2010ym, 27 | author = "Butterworth, J. M. 
and others", 28 | title = "{THE TOOLS AND MONTE CARLO WORKING GROUP Summary Report from the Les Houches 2009 Workshop on TeV Colliders}", 29 | booktitle = "{6th Les Houches Workshop on Physics at TeV Colliders}", 30 | eprint = "1003.1643", 31 | archivePrefix = "arXiv", 32 | primaryClass = "hep-ph", 33 | month = "3", 34 | year = "2010" 35 | } 36 | -------------------------------------------------------------------------------- /.zenodo.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "A small package to get structured data out of Les Houches Event files", 3 | "license": "Apache-2.0", 4 | "title": "scikit-hep/pylhe: v1.0.2", 5 | "version": "v1.0.2", 6 | "upload_type": "software", 7 | "creators": [ 8 | { 9 | "affiliation": "Technical University of Munich", 10 | "name": "Lukas Heinrich", 11 | "orcid": "0000-0002-4048-7584" 12 | }, 13 | { 14 | "affiliation": "University of Wisconsin-Madison", 15 | "name": "Matthew Feickert", 16 | "orcid": "0000-0003-4124-7862" 17 | }, 18 | { 19 | "affiliation": "University of Liverpool", 20 | "name": "Eduardo Rodrigues", 21 | "orcid": "0000-0003-2846-7625" 22 | }, 23 | { 24 | "affiliation": "Institut für Theoretische Physik, Universität Münster", 25 | "name": "Alexander Puck Neuwirth", 26 | "orcid": "0000-0002-2484-1328" 27 | } 28 | ], 29 | "access_right": "open", 30 | "keywords": [ 31 | "lhe", 32 | "physics", 33 | "python", 34 | "scikit-hep" 35 | ], 36 | "related_identifiers": [ 37 | { 38 | "scheme": "url", 39 | "identifier": "https://github.com/scikit-hep/pylhe/tree/v1.0.2", 40 | "relation": "isSupplementTo" 41 | } 42 | ] 43 | } 44 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_commit_msg: "chore: [pre-commit.ci] pre-commit autoupdate" 3 | autoupdate_schedule: "monthly" 4 | 5 | repos: 6 | - repo: https://github.com/pre-commit/pre-commit-hooks 7 | rev: v6.0.0 8 | hooks: 9 | - id: check-added-large-files 10 | - id: check-case-conflict 11 | - id: check-merge-conflict 12 | - id: check-symlinks 13 | - id: check-json 14 | - id: check-yaml 15 | - id: check-toml 16 | - id: check-xml 17 | - id: debug-statements 18 | - id: end-of-file-fixer 19 | - id: mixed-line-ending 20 | - id: trailing-whitespace 21 | 22 | - repo: https://github.com/codespell-project/codespell 23 | rev: v2.4.1 24 | hooks: 25 | - id: codespell 26 | additional_dependencies: 27 | - tomli 28 | 29 | - repo: https://github.com/astral-sh/ruff-pre-commit 30 | rev: "v0.14.7" 31 | hooks: 32 | - id: ruff-check 33 | args: ["--fix", "--show-fixes"] 34 | - id: ruff-format 35 | 36 | - repo: https://github.com/pre-commit/mirrors-mypy 37 | rev: v1.19.0 38 | hooks: 39 | - id: mypy 40 | files: src 41 | args: [--show-error-codes] 42 | additional_dependencies: [particle, awkward, vector, graphviz] 43 | 44 | - repo: https://github.com/pre-commit/pygrep-hooks 45 | rev: v1.10.0 46 | hooks: 47 | - id: rst-backticks 48 | - id: rst-directive-colons 49 | - id: rst-inline-touching-normal 50 | 51 | - repo: https://github.com/adamchainz/blacken-docs 52 | rev: "1.20.0" 53 | hooks: 54 | - id: blacken-docs 55 | additional_dependencies: [black==23.*] 56 | -------------------------------------------------------------------------------- /tests/test_visualize.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | 3 | import skhep_testdata 4 | 5 | import 
pylhe 6 | 7 | 8 | def test_LHEEvent_graph_source(): 9 | lhe_file = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 10 | events = pylhe.read_lhe_with_attributes(lhe_file) 11 | 12 | # Get the first event 13 | event = next(events) 14 | # ... it contains 8 pions and a proton 15 | # pi unicode character is 'π' 16 | assert event.graph.source.count("π") == 8 17 | assert "p" in event.graph.source 18 | 19 | 20 | def test_LHEEvent_graph_source_nonstandard_pdg(): 21 | lhe_file = skhep_testdata.data_path("pylhe-testfile-pr180.lhe") 22 | events = pylhe.read_lhe_with_attributes(lhe_file) 23 | 24 | # Get the first event 25 | event = next(events) 26 | # building the graph should succeed even though there is 27 | # a non-standard PDG ID 1023 and the name in the graph 28 | # source should just be the ID number itself 29 | assert event.graph.source.count("1023") == 1 30 | 31 | 32 | def test_LHEEvent_graph_render(): 33 | lhe_file = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 34 | events = pylhe.read_lhe_with_attributes(lhe_file) 35 | 36 | event = next(itertools.islice(events, 1, 2)) 37 | event.graph.render(filename="test_event1", format="pdf", cleanup=True) 38 | 39 | 40 | def test_mime(): 41 | lhe_file = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 42 | events = pylhe.read_lhe_with_attributes(lhe_file) 43 | 44 | event = next(itertools.islice(events, 1, 2)) 45 | assert event._repr_mimebundle_() == event.graph._repr_mimebundle_() 46 | -------------------------------------------------------------------------------- /benchmarks/test_count_events_bench.py: -------------------------------------------------------------------------------- 1 | """ 2 | Benchmark tests for pylhe event counting performance. 3 | """ 4 | 5 | import skhep_testdata 6 | 7 | import pylhe 8 | 9 | # Test data files from skhep_testdata - all LHE and LHE.gz files 10 | TEST_FILES_LHE_ALL = [ 11 | skhep_testdata.data_path("pylhe-testfile-pr29.lhe"), 12 | skhep_testdata.data_path("pylhe-testlhef3.lhe"), 13 | *[ 14 | skhep_testdata.data_path(f"pylhe-testfile-powheg-box-v2-{proc}.lhe") 15 | for proc in ["Z", "W", "Zj", "trijet", "directphoton", "hvq"] 16 | ], 17 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.0.0-wbj.lhe"), 18 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-ckkwl.lhe.gz"), 19 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-fxfx.lhe.gz"), 20 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-mlm.lhe.gz"), 21 | skhep_testdata.data_path("pylhe-testfile-madgraph5-3.5.8-pp_to_jj.lhe.gz"), 22 | skhep_testdata.data_path("pylhe-testfile-pythia-6.413-ttbar.lhe"), 23 | skhep_testdata.data_path("pylhe-testfile-pythia-8.3.14-weakbosons.lhe"), 24 | skhep_testdata.data_path("pylhe-testfile-sherpa-3.0.1-eejjj.lhe"), 25 | skhep_testdata.data_path("pylhe-testfile-whizard-3.1.4-eeWW.lhe"), 26 | ] 27 | 28 | 29 | def test_count_events_benchmark(benchmark): 30 | """Benchmark using the count_events function across all test files.""" 31 | 32 | def count_events_all_files(filepaths): 33 | total_events = 0 34 | for filepath in filepaths: 35 | num_events = pylhe.LHEFile.count_events(filepath) 36 | total_events += num_events 37 | return total_events 38 | 39 | result = benchmark(count_events_all_files, TEST_FILES_LHE_ALL) 40 | print(f"Total events across all files: {result}") 41 | -------------------------------------------------------------------------------- /benchmarks/test_awkward_bench.py: -------------------------------------------------------------------------------- 1 | """ 2 | Benchmark tests for 
pylhe awkward array conversion performance. 3 | """ 4 | 5 | import skhep_testdata 6 | 7 | import pylhe 8 | 9 | # Test data files from skhep_testdata - all LHE and LHE.gz files 10 | TEST_FILES_LHE_ALL = [ 11 | skhep_testdata.data_path("pylhe-testfile-pr29.lhe"), 12 | skhep_testdata.data_path("pylhe-testlhef3.lhe"), 13 | *[ 14 | skhep_testdata.data_path(f"pylhe-testfile-powheg-box-v2-{proc}.lhe") 15 | for proc in ["Z", "W", "Zj", "trijet", "directphoton", "hvq"] 16 | ], 17 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.0.0-wbj.lhe"), 18 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-ckkwl.lhe.gz"), 19 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-fxfx.lhe.gz"), 20 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-mlm.lhe.gz"), 21 | skhep_testdata.data_path("pylhe-testfile-madgraph5-3.5.8-pp_to_jj.lhe.gz"), 22 | skhep_testdata.data_path("pylhe-testfile-pythia-6.413-ttbar.lhe"), 23 | skhep_testdata.data_path("pylhe-testfile-pythia-8.3.14-weakbosons.lhe"), 24 | skhep_testdata.data_path("pylhe-testfile-sherpa-3.0.1-eejjj.lhe"), 25 | skhep_testdata.data_path("pylhe-testfile-whizard-3.1.4-eeWW.lhe"), 26 | ] 27 | 28 | 29 | def test_fromfile_and_to_awkward(benchmark): 30 | """Benchmark LHEFile.fromfile and to_awkward conversion across all test files.""" 31 | 32 | def fromfile_and_to_awkward_all_files(filepaths): 33 | for filepath in filepaths: 34 | # Load LHE file using fromfile 35 | lhe_file = pylhe.LHEFile.fromfile(filepath) 36 | 37 | # Convert events to awkward array 38 | pylhe.to_awkward(lhe_file.events) 39 | 40 | benchmark(fromfile_and_to_awkward_all_files, TEST_FILES_LHE_ALL) 41 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | # Run daily at 0:01 UTC 9 | schedule: 10 | - cron: '1 0 * * *' 11 | workflow_dispatch: 12 | 13 | concurrency: 14 | group: ${{ github.workflow }}-${{ github.ref }} 15 | cancel-in-progress: true 16 | 17 | permissions: 18 | contents: read 19 | 20 | jobs: 21 | test: 22 | 23 | runs-on: ${{ matrix.os }} 24 | strategy: 25 | matrix: 26 | os: [ubuntu-latest] 27 | python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] 28 | 29 | steps: 30 | - uses: actions/checkout@v6 31 | with: 32 | fetch-depth: 0 33 | 34 | - name: Set up Python ${{ matrix.python-version }} 35 | uses: actions/setup-python@v6 36 | with: 37 | python-version: ${{ matrix.python-version }} 38 | allow-prereleases: true 39 | 40 | - uses: astral-sh/setup-uv@v7 41 | 42 | - name: Install dependencies 43 | run: | 44 | uv pip install --system '.[test]' 45 | uv pip list 46 | 47 | - name: Install external packages 48 | run: >- 49 | sudo apt-get update -y && 50 | sudo apt-get install -y 51 | graphviz 52 | texlive-latex-base 53 | texlive-fonts-extra 54 | texlive-extra-utils 55 | 56 | - name: Test with pytest 57 | run: | 58 | pytest -r sx tests/ 59 | 60 | - name: Report core project coverage with Codecov 61 | if: >- 62 | github.event_name != 'schedule' && 63 | matrix.os == 'ubuntu-latest' 64 | uses: codecov/codecov-action@v5 65 | with: 66 | fail_ci_if_error: true 67 | files: ./coverage.xml 68 | flags: unittests-${{ matrix.python-version }} 69 | name: pylhe 70 | token: ${{ secrets.CODECOV_TOKEN }} 71 | 72 | docker: 73 | 74 | runs-on: ubuntu-latest 75 | 76 | steps: 77 | - uses: actions/checkout@v6 78 | with: 79 | fetch-depth: 0 80 | - name: Build Docker 
image 81 | run: | 82 | docker build . --file docker/Dockerfile --tag pylhe/pylhe:$GITHUB_SHA 83 | docker images 84 | -------------------------------------------------------------------------------- /.github/workflows/benchmark.yml: -------------------------------------------------------------------------------- 1 | name: Benchmark Comparison 2 | 3 | permissions: 4 | pull-requests: write # Forks need this to post comments 5 | 6 | on: 7 | pull_request: 8 | 9 | jobs: 10 | benchmark: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout PR branch 15 | uses: actions/checkout@v6 16 | with: 17 | fetch-depth: 0 # important to fetch full history for git compare 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v6 21 | with: 22 | python-version: "3.13" 23 | 24 | 25 | - uses: astral-sh/setup-uv@v7 26 | 27 | - name: Install base 28 | run: | 29 | git checkout ${{ github.event.pull_request.base.sha }} 30 | uv pip install --system '.[test]' 31 | 32 | # Run benchmarks on base 33 | - name: Run benchmarks on base 34 | run: | 35 | pytest benchmarks --benchmark-only --benchmark-warmup='on' --benchmark-warmup-iterations=1 --benchmark-min-rounds=10 --benchmark-save=base --no-cov --disable-warnings 36 | 37 | - name: Install PR 38 | run: | 39 | git checkout - 40 | uv pip install --system '.[test]' 41 | 42 | # Run benchmarks on MR 43 | - name: Run benchmarks on MR 44 | run: | 45 | pytest benchmarks --benchmark-only --benchmark-warmup='on' --benchmark-warmup-iterations=1 --benchmark-min-rounds=10 --benchmark-save=mr --benchmark-compare=0001_base --benchmark-compare-fail=min:5% --no-cov --disable-warnings 46 | 47 | # Compare results 48 | - name: Run benchmarks on PR and compare 49 | id: compare 50 | run: | 51 | set +e 52 | result=$(pytest-benchmark compare) 53 | echo "$result" 54 | echo "result<> $GITHUB_OUTPUT 55 | echo "$result" >> $GITHUB_OUTPUT 56 | echo "EOF" >> $GITHUB_OUTPUT 57 | 58 | # Post a comment to the PR with results 59 | - name: Comment on PR 60 | uses: marocchino/sticky-pull-request-comment@v2 61 | continue-on-error: true # Forks might not have the permission to post comments 62 | with: 63 | header: benchmark 64 | message: | 65 | ## 🏎️ Benchmark Comparison 66 | ``` 67 | ${{ steps.compare.outputs.result }} 68 | ``` 69 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 
2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | import os 6 | import re 7 | from datetime import datetime 8 | 9 | # we use toml to read pyproject.toml 10 | # the python provided toml parser does not support older python versions 11 | import toml 12 | 13 | # -- Project information ----------------------------------------------------- 14 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 15 | try: 16 | info = toml.load("../../pyproject.toml") 17 | except FileNotFoundError: 18 | info = toml.load("pyproject.toml") 19 | project = info["project"]["name"] 20 | current_year = datetime.now().year 21 | copyright = f"{current_year}, The Scikit-HEP admins" 22 | # Handle multiple authors 23 | authors_list = info.get("authors", []) 24 | author_names = [a.get("name", "") for a in authors_list if "name" in a] 25 | author = ", ".join(author_names) 26 | version = re.sub("^", "", os.popen("git describe --tags").read().strip()) 27 | rst_epilog = f""".. |project| replace:: {project} \n\n""" 28 | 29 | # -- General configuration --------------------------------------------------- 30 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 31 | 32 | extensions = [ 33 | "myst_nb", 34 | "sphinx.ext.autodoc", 35 | "sphinx.ext.githubpages", 36 | "sphinx.ext.viewcode", 37 | "sphinx.ext.mathjax", 38 | "sphinx.ext.todo", 39 | "sphinx.ext.doctest", 40 | "sphinx_math_dollar", 41 | "sphinx.ext.autosummary", 42 | "sphinx.ext.coverage", 43 | "sphinxcontrib.bibtex", 44 | ] 45 | nb_execution_mode = "off" 46 | templates_path = ["_templates"] 47 | exclude_patterns = [] 48 | bibtex_bibfiles = ["references.bib"] 49 | bibtex_default_style = "unsrt" 50 | # Configure bibtex to ignore missing fields 51 | bibtex_reference_style = "label" 52 | 53 | 54 | # -- Options for HTML output ------------------------------------------------- 55 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 56 | 57 | html_theme = "sphinx_rtd_theme" 58 | html_static_path = ["_static"] 59 | 60 | # Patterns of URLs to ignore 61 | linkcheck_ignore = [ 62 | # Currently, down or blocking 63 | r"https?://allcontributors\.org/.*", 64 | # Often interrupted service 65 | r"https?://.*\.hepforge\.org/.*", 66 | ] 67 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Docs 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | workflow_dispatch: 9 | 10 | concurrency: 11 | group: ${{ github.workflow }}-${{ github.ref }} 12 | cancel-in-progress: true 13 | 14 | permissions: 15 | contents: read 16 | 17 | jobs: 18 | build: 19 | name: Build docs 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v6 24 | with: 25 | fetch-depth: 0 26 | 27 | - name: Set up Python 28 | uses: actions/setup-python@v6 29 | with: 30 | python-version: '3.12' 31 | 32 | - name: Install Python dependencies 33 | run: | 34 | python -m pip install uv 35 | uv pip --quiet install --system --upgrade ".[docs,test]" 36 | uv pip install --system yq 37 | python -m pip list 38 | 39 | - name: Install apt-get dependencies 40 | run: | 41 | sudo apt-get update 42 | # Ubuntu 22.04's pandoc is too old (2.9.2.x), so install manually 43 | # until the ubuntu-latest updates. 
44 | curl --silent --location --remote-name https://github.com/jgm/pandoc/releases/download/3.1.6.2/pandoc-3.1.6.2-1-amd64.deb 45 | sudo apt-get install ./pandoc-*amd64.deb 46 | 47 | - name: Check docstrings 48 | run: | 49 | # ignore D107 missing comment for __init__, since starting from 1.0.0 there are no more __init__ functions 50 | pydocstyle --select=D100,D101,D102,D103,D104,D105,D106 src/pylhe/ 51 | 52 | - name: Verify CITATION.cff schema 53 | run: | 54 | jsonschema <(curl -sL "https://citation-file-format.github.io/1.2.0/schema.json") --instance <(cat CITATION.cff | yq) 55 | 56 | # Disabled linkcheck due to too many false positives with external links. 57 | # Also github might get blocked for sending too many requests during the development of a PR. 58 | # 59 | # - name: Check for broken links 60 | # run: | 61 | # pushd docs 62 | # make linkcheck 63 | # # Don't ship the linkcheck 64 | # rm -r build/linkcheck 65 | # popd 66 | 67 | - name: Build docs 68 | run: | 69 | pushd docs 70 | make html 71 | 72 | - name: Fix permissions if needed 73 | run: | 74 | chmod -c -R +rX "docs/build/html/" | while read line; do 75 | echo "::warning title=Invalid file permissions automatically fixed::$line" 76 | done 77 | 78 | - name: Upload artifact 79 | uses: actions/upload-pages-artifact@v4 80 | with: 81 | path: 'docs/build/html' 82 | 83 | deploy: 84 | name: Deploy docs to GitHub Pages 85 | if: github.event_name == 'push' && github.ref == 'refs/heads/main' 86 | needs: build 87 | # Set permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages 88 | permissions: 89 | contents: read 90 | pages: write 91 | id-token: write 92 | 93 | environment: 94 | name: github-pages 95 | url: ${{ steps.deployment.outputs.page_url }} 96 | 97 | runs-on: ubuntu-latest 98 | 99 | steps: 100 | - name: Setup Pages 101 | uses: actions/configure-pages@v5 102 | 103 | - name: Deploy to GitHub Pages 104 | id: deployment 105 | uses: actions/deploy-pages@v4 106 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 
6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contributes to creating a positive environment include: 10 | 11 | * Using welcoming and inclusive language 12 | * Being respectful of differing viewpoints and experiences 13 | * Gracefully accepting constructive criticism 14 | * Focusing on what is best for the community 15 | * Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | * The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | * Trolling, insulting/derogatory comments, and personal or political attacks 21 | * Public or private harassment 22 | * Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | * Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. 34 | 35 | ## Enforcement 36 | 37 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at lukas.heinrich@cern.ch. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 38 | 39 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
40 | 41 | ## Attribution 42 | 43 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 44 | 45 | [homepage]: http://contributor-covenant.org 46 | [version]: http://contributor-covenant.org/version/1/4/ 47 | -------------------------------------------------------------------------------- /tests/test_awkward.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import skhep_testdata 3 | 4 | import pylhe 5 | 6 | TEST_FILE_WITHOUT_WEIGHTS = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 7 | TEST_FILE_WITH_WEIGHTS = skhep_testdata.data_path("pylhe-testlhef3.lhe") 8 | 9 | 10 | def test_to_awkward(): 11 | arr = pylhe.to_awkward(pylhe.read_lhe_with_attributes(TEST_FILE_WITHOUT_WEIGHTS)) 12 | assert len(arr) == 791 13 | assert len(arr) == len(arr.particles) 14 | assert len(arr) == len(arr.eventinfo) 15 | for field in arr.particles.fields: 16 | assert len(arr) == len(arr.particles[field]) 17 | for field in arr.eventinfo.fields: 18 | assert len(arr) == len(arr.eventinfo[field]) 19 | assert "weights" not in arr.fields 20 | 21 | arr = pylhe.to_awkward(pylhe.read_lhe_with_attributes(TEST_FILE_WITH_WEIGHTS)) 22 | assert len(arr) == 59 23 | assert len(arr) == len(arr.particles) 24 | assert len(arr) == len(arr.eventinfo) 25 | for field in arr.particles.fields: 26 | assert len(arr) == len(arr.particles[field]) 27 | for field in arr.eventinfo.fields: 28 | assert len(arr) == len(arr.eventinfo[field]) 29 | assert "weights" in arr.fields 30 | assert len(arr) == len(arr.weights) 31 | assert arr.weights.fields == [ 32 | "1001", 33 | "1002", 34 | "1003", 35 | "1004", 36 | "1005", 37 | "1006", 38 | "1007", 39 | "1008", 40 | "1009", 41 | ] 42 | assert arr.weights["1001"][0] == pytest.approx(0.50109e02) 43 | assert arr.weights["1002"][0] == pytest.approx(0.45746e02) 44 | assert arr.weights["1003"][0] == pytest.approx(0.52581e02) 45 | assert arr.weights["1004"][0] == pytest.approx(0.50109e02) 46 | assert arr.weights["1005"][0] == pytest.approx(0.45746e02) 47 | assert arr.weights["1006"][0] == pytest.approx(0.52581e02) 48 | assert arr.weights["1007"][0] == pytest.approx(0.50109e02) 49 | assert arr.weights["1008"][0] == pytest.approx(0.45746e02) 50 | assert arr.weights["1009"][0] == pytest.approx(0.52581e02) 51 | for field in arr.weights.fields: 52 | assert len(arr) == len(arr.weights[field]) 53 | 54 | 55 | def test_awkward_registration(): 56 | arr = pylhe.to_awkward(pylhe.read_lhe_with_attributes(TEST_FILE_WITHOUT_WEIGHTS)) 57 | assert len(arr.particles.vector.mass) == len(arr.particles) 58 | 59 | arr = pylhe.to_awkward(pylhe.read_lhe_with_attributes(TEST_FILE_WITH_WEIGHTS)) 60 | assert len(arr.particles.vector.mass) == len(arr.particles) 61 | assert len(arr.particles.vector.mass) == len(arr.weights) 62 | 63 | 64 | def test_to_awkward_vector(): 65 | """ 66 | Test numeric equality of momenta represented by vectors. 
67 | """ 68 | arr = pylhe.to_awkward(pylhe.read_lhe_with_attributes(TEST_FILE_WITHOUT_WEIGHTS)) 69 | 70 | assert arr.particles.vector.px[0][0] == pytest.approx(-3.1463804033e-01) 71 | assert arr.particles.vector.x[0][0] == pytest.approx(-3.1463804033e-01) 72 | 73 | assert arr.particles.vector.py[0][0] == pytest.approx(-6.3041724109e-01) 74 | assert arr.particles.vector.y[0][0] == pytest.approx(-6.3041724109e-01) 75 | 76 | assert arr.particles.vector.pz[0][0] == pytest.approx(8.5343193374e00) 77 | assert arr.particles.vector.z[0][0] == pytest.approx(8.5343193374e00) 78 | 79 | assert arr.particles.vector.e[0][0] == pytest.approx(8.5644657479e00) 80 | assert arr.particles.vector.t[0][0] == pytest.approx(8.5644657479e00) 81 | -------------------------------------------------------------------------------- /src/pylhe/awkward.py: -------------------------------------------------------------------------------- 1 | """ 2 | `Awkward array `_ interface for `pylhe`. 3 | """ 4 | 5 | from collections.abc import Iterable 6 | 7 | import awkward as ak # type: ignore[import-untyped] 8 | import vector 9 | 10 | from pylhe import LHEEvent 11 | 12 | __all__ = ["to_awkward"] 13 | 14 | 15 | def __dir__() -> list[str]: 16 | return __all__ 17 | 18 | 19 | def to_awkward(event_iterable: Iterable[LHEEvent]) -> ak.Array: 20 | """Convert an iterable of LHEEvent instances to an Awkward-Array. 21 | 22 | Uses Awkward's ArrayBuilder to construct the array by iterating over the events. 23 | The events_iterable should yield instances of LHEEvent. 24 | This is typically created by one of the reading functions pylhe provides like 25 | pylhe.LHEFile.fromfile(filepath).events. 26 | 27 | Args: 28 | event_iterable (iterable): An iterable of LHEEvent instances. 29 | 30 | Returns: 31 | awkward.Array: An Awkward array of all the events. 
32 | """ 33 | 34 | builder = ak.ArrayBuilder() 35 | for event in event_iterable: 36 | with builder.record(name="Event"): 37 | builder.field("eventinfo") 38 | with builder.record(name="EventInfo"): 39 | ei = event.eventinfo 40 | builder.field("nparticles").integer(ei.nparticles) 41 | builder.field("pid").integer(ei.pid) 42 | builder.field("weight").real(ei.weight) 43 | builder.field("scale").real(ei.scale) 44 | builder.field("aqed").real(ei.aqed) 45 | builder.field("aqcd").real(ei.aqcd) 46 | if event.weights != {}: 47 | builder.field("weights") 48 | with builder.record(name="Weights"): 49 | for label, w in event.weights.items(): 50 | builder.field(label).real(w) 51 | builder.field("particles") 52 | with builder.list(): 53 | for particle in event.particles: 54 | with builder.record(name="Particle"): 55 | builder.field("vector") 56 | with builder.record(name="Momentum4D"): 57 | builder.field("px").real(particle.px) 58 | builder.field("py").real(particle.py) 59 | builder.field("pz").real(particle.pz) 60 | builder.field("e").real(particle.e) 61 | builder.field("id").integer(particle.id) 62 | builder.field("status").integer(particle.status) 63 | builder.field("mother1").integer(particle.mother1) 64 | builder.field("mother2").integer(particle.mother2) 65 | builder.field("color1").integer(particle.color1) 66 | builder.field("color2").integer(particle.color2) 67 | builder.field("m").real(particle.m) 68 | builder.field("lifetime").real(particle.lifetime) 69 | builder.field("spin").real(particle.spin) 70 | return builder.snapshot() # build the final awkward array 71 | 72 | 73 | # Used to register Awkward behaviors 74 | class Particle: 75 | pass 76 | 77 | 78 | class Event: 79 | pass 80 | 81 | 82 | class EventInfo: 83 | pass 84 | 85 | 86 | class Weights: 87 | pass 88 | 89 | 90 | # Register Awkward behaviors 91 | # See https://awkward-array.org/doc/main/reference/generated/ak.mixin_class.html 92 | # and https://awkward-array.org/doc/main/reference/ak.behavior.html#mixin-decorators 93 | vector.register_awkward() 94 | ak.mixin_class(ak.behavior)(Particle) 95 | ak.mixin_class(ak.behavior)(Event) 96 | ak.mixin_class(ak.behavior)(EventInfo) 97 | ak.mixin_class(ak.behavior)(Weights) 98 | -------------------------------------------------------------------------------- /.all-contributorsrc: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "README.md" 4 | ], 5 | "imageSize": 100, 6 | "commit": false, 7 | "commitConvention": "angular", 8 | "contributors": [ 9 | { 10 | "login": "matthewfeickert", 11 | "name": "Matthew Feickert", 12 | "avatar_url": "https://avatars.githubusercontent.com/u/5142394?v=4", 13 | "profile": "http://www.matthewfeickert.com/", 14 | "contributions": [ 15 | "maintenance", 16 | "design", 17 | "code", 18 | "doc" 19 | ] 20 | }, 21 | { 22 | "login": "lukasheinrich", 23 | "name": "Lukas", 24 | "avatar_url": "https://avatars.githubusercontent.com/u/2318083?v=4", 25 | "profile": "http://www.lukasheinrich.com", 26 | "contributions": [ 27 | "maintenance", 28 | "design", 29 | "code", 30 | "doc" 31 | ] 32 | }, 33 | { 34 | "login": "eduardo-rodrigues", 35 | "name": "Eduardo Rodrigues", 36 | "avatar_url": "https://avatars.githubusercontent.com/u/5013581?v=4", 37 | "profile": "http://cern.ch/eduardo.rodrigues", 38 | "contributions": [ 39 | "maintenance", 40 | "code", 41 | "doc" 42 | ] 43 | }, 44 | { 45 | "login": "APN-Pucky", 46 | "name": "Alexander Puck Neuwirth", 47 | "avatar_url": "https://avatars.githubusercontent.com/u/4533248?v=4", 48 | 
"profile": "https://github.com/APN-Pucky", 49 | "contributions": [ 50 | "maintenance", 51 | "design", 52 | "code", 53 | "doc" 54 | ] 55 | }, 56 | { 57 | "login": "8me", 58 | "name": "Johannes Schumann", 59 | "avatar_url": "https://avatars.githubusercontent.com/u/17862090?v=4", 60 | "profile": "https://github.com/8me", 61 | "contributions": [ 62 | "code" 63 | ] 64 | }, 65 | { 66 | "login": "henryiii", 67 | "name": "Henry Schreiner", 68 | "avatar_url": "https://avatars.githubusercontent.com/u/4616906?v=4", 69 | "profile": "http://iscinumpy.dev", 70 | "contributions": [ 71 | "code" 72 | ] 73 | }, 74 | { 75 | "login": "ariaradick", 76 | "name": "ariaradick", 77 | "avatar_url": "https://avatars.githubusercontent.com/u/53235605?v=4", 78 | "profile": "https://github.com/ariaradick", 79 | "contributions": [ 80 | "code" 81 | ] 82 | }, 83 | { 84 | "login": "jhgoh", 85 | "name": "Junghwan John Goh", 86 | "avatar_url": "https://avatars.githubusercontent.com/u/4388926?v=4", 87 | "profile": "https://github.com/jhgoh", 88 | "contributions": [ 89 | "code" 90 | ] 91 | }, 92 | { 93 | "login": "fuenfundachtzig", 94 | "name": "fuenfundachtzig", 95 | "avatar_url": "https://avatars.githubusercontent.com/u/8006302?v=4", 96 | "profile": "https://github.com/fuenfundachtzig", 97 | "contributions": [ 98 | "code" 99 | ] 100 | }, 101 | { 102 | "login": "shantanu-gontia", 103 | "name": "Shantanu Gontia", 104 | "avatar_url": "https://avatars.githubusercontent.com/u/4872525?v=4", 105 | "profile": "https://shantanu-gontia.github.io", 106 | "contributions": [ 107 | "code" 108 | ] 109 | }, 110 | { 111 | "login": "tomeichlersmith", 112 | "name": "Tom Eichlersmith", 113 | "avatar_url": "https://avatars.githubusercontent.com/u/31970302?v=4", 114 | "profile": "https://github.com/tomeichlersmith", 115 | "contributions": [ 116 | "code" 117 | ] 118 | }, 119 | { 120 | "login": "ikrommyd", 121 | "name": "Iason Krommydas", 122 | "avatar_url": "https://avatars.githubusercontent.com/u/82155404?v=4", 123 | "profile": "https://github.com/ikrommyd", 124 | "contributions": [ 125 | "code", 126 | "doc" 127 | ] 128 | } 129 | ], 130 | "contributorsPerLine": 7, 131 | "skipCi": true, 132 | "repoType": "github", 133 | "repoHost": "https://github.com", 134 | "projectName": "pylhe", 135 | "projectOwner": "scikit-hep", 136 | "commitType": "docs" 137 | } 138 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "hatchling>=1.13.0", 4 | "hatch-vcs>=0.3.0", 5 | ] 6 | build-backend = "hatchling.build" 7 | 8 | [project] 9 | name = "pylhe" 10 | dynamic = ["version"] 11 | description = "A small package to get structured data out of Les Houches Event files" 12 | readme = "README.md" 13 | license = { text = "Apache-2.0" } # SPDX short identifier 14 | requires-python = ">=3.9" 15 | authors = [ 16 | { name = "Lukas Heinrich", email = "lukas.heinrich@cern.ch" }, 17 | { name = "Matthew Feickert", email = "matthew.feickert@cern.ch" }, 18 | { name = "Eduardo Rodrigues", email = "eduardo.rodrigues@cern.ch" }, 19 | { name = "Alexander Puck Neuwirth", email = "a_neuw01@uni-muenster.de" }, 20 | ] 21 | maintainers = [ {name = "The Scikit-HEP admins", email = "scikit-hep-admins@googlegroups.com"} ] 22 | keywords = [ 23 | "lhe", 24 | "physics", 25 | ] 26 | classifiers = [ 27 | "Development Status :: 4 - Beta", 28 | "Intended Audience :: Developers", 29 | "Intended Audience :: Science/Research", 30 | 
"License :: OSI Approved :: Apache Software License", 31 | "Programming Language :: Python :: 3", 32 | "Programming Language :: Python :: 3 :: Only", 33 | "Programming Language :: Python :: 3.9", 34 | "Programming Language :: Python :: 3.10", 35 | "Programming Language :: Python :: 3.11", 36 | "Programming Language :: Python :: 3.12", 37 | "Programming Language :: Python :: 3.13", 38 | "Programming Language :: Python :: 3.14", 39 | "Topic :: Scientific/Engineering", 40 | "Topic :: Scientific/Engineering :: Physics", 41 | ] 42 | dependencies = [ 43 | "awkward>=1.2.0", 44 | "graphviz>=0.19", 45 | "particle>=0.16", 46 | "vector>=0.8.1", 47 | ] 48 | 49 | [project.optional-dependencies] 50 | lint = [ 51 | "ruff", 52 | "mypy>=1.0.0", 53 | ] 54 | test = [ 55 | "pydocstyle", 56 | "pytest>=6.0", 57 | "pytest-benchmark", 58 | "pytest-cov>=2.5.1", 59 | "scikit-hep-testdata>=0.5.5", 60 | ] 61 | develop = [ 62 | "pylhe[lint,test]", 63 | "pre-commit", 64 | "tbump>=6.7.0", 65 | ] 66 | docs = [ 67 | "sphinx", 68 | "sphinx-copybutton", 69 | "sphinx-rtd-theme>=0.5.0", 70 | "nbsphinx", 71 | "jupyter-sphinx", 72 | "sphinx-autoapi", 73 | "sphinx-autobuild", 74 | "sphinx-math-dollar", 75 | "pandoc", 76 | "myst-nb", 77 | "myst-parser", 78 | "sphinxcontrib-bibtex", 79 | "toml", 80 | ] 81 | 82 | [project.urls] 83 | Homepage = "https://github.com/scikit-hep/pylhe" 84 | "Source Code" = "https://github.com/scikit-hep/pylhe" 85 | "Issue Tracker" = "https://github.com/scikit-hep/pylhe/issues" 86 | "Releases" = "https://github.com/scikit-hep/pylhe/releases" 87 | 88 | [tool.hatch.version] 89 | source = "vcs" 90 | 91 | [tool.hatch.version.raw-options] 92 | local_scheme = "no-local-version" 93 | 94 | [tool.hatch.build.hooks.vcs] 95 | version-file = "src/pylhe/_version.py" 96 | 97 | [tool.hatch.build.targets.sdist] 98 | # hatchling always includes: 99 | # pyproject.toml, .gitignore, any README, any LICENSE, AUTHORS 100 | include = [ 101 | "/src", 102 | "/tests", 103 | "/CITATION.cff", 104 | ] 105 | 106 | [tool.hatch.build.targets.wheel] 107 | packages = ["src/pylhe"] 108 | 109 | [tool.mypy] 110 | warn_unused_configs = true 111 | warn_unused_ignores = true 112 | python_version = "3.9" 113 | files = "src" 114 | strict = true 115 | warn_unreachable = true 116 | enable_error_code = [ 117 | "ignore-without-code", 118 | "redundant-expr", 119 | "truthy-bool", 120 | ] 121 | 122 | [tool.pytest.ini_options] 123 | minversion = "6.0" 124 | testpaths = ["tests"] 125 | log_level = "INFO" 126 | xfail_strict = true 127 | addopts = [ 128 | "-ra", 129 | "--strict-markers", 130 | "--strict-config", 131 | ] 132 | 133 | [tool.ruff.lint] 134 | extend-select = [ 135 | "B", # flake8-bugbear 136 | "I", # isort 137 | "ARG", # flake8-unused-arguments 138 | "C4", # flake8-comprehensions 139 | "EM", # flake8-errmsg 140 | "ICN", # flake8-import-conventions 141 | "G", # flake8-logging-format 142 | "PGH", # pygrep-hooks 143 | "PIE", # flake8-pie 144 | "PL", # pylint 145 | "PT", # flake8-pytest-style 146 | "RET", # flake8-return 147 | "RUF", # Ruff-specific 148 | "SIM", # flake8-simplify 149 | "UP", # pyupgrade 150 | "YTT", # flake8-2020 151 | "EXE", # flake8-executable 152 | "NPY", # NumPy specific rules 153 | "PD", # pandas-vet 154 | "FURB", # refurb 155 | "PYI", # flake8-pyi 156 | ] 157 | ignore = [ 158 | "PLR09", # Too many <...> 159 | "PLR2004", # Magic value used in comparison 160 | "ISC001", # Conflicts with formatter 161 | "RUF012", # TODO: mutable class attributes 162 | "SIM115", # TODO: use context manager for opening files 163 | ] 164 | 165 | 
[tool.codespell] 166 | # https://github.com/codespell-project/codespell/issues/2801 167 | # Ignore very long base64-like strings 168 | ignore-regex = '[A-Za-z0-9+/]{100,}' 169 | skip = "*.bib" 170 | -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "pylhe: A Lightweight Python interface to Les Houches Event files" 3 | tags: 4 | - Python 5 | - physics 6 | - high energy physics 7 | authors: 8 | - name: Alexander Puck Neuwirth^[Corresponding author.] 9 | orcid: 0000-0002-2484-1328 10 | affiliation: "1, 2" 11 | - name: Matthew Feickert 12 | orcid: 0000-0003-4124-7862 13 | affiliation: 3 14 | - name: Lukas Heinrich 15 | orcid: 0000-0002-4048-7584 16 | affiliation: 4 17 | - name: Eduardo Rodrigues 18 | orcid: 0000-0003-2846-7625 19 | affiliation: 5 20 | 21 | affiliations: 22 | - name: University of Milan Bicocca 23 | index: 1 24 | - name: INFN Milan Bicocca 25 | index: 2 26 | - name: University of Wisconsin-Madison 27 | index: 3 28 | - name: Technical University of Munich 29 | index: 4 30 | - name: University of Liverpool 31 | index: 5 32 | 33 | date: 26 November 2025 # APN TODO: update date 34 | bibliography: paper.bib 35 | --- 36 | 37 | # Summary 38 | 39 | Historically, the first standards for event representation in high-energy physics were the HEPEVT and HEPRUP common blocks [@Boos:2001cv], which provided a Fortran-based structure for storing event information. 40 | As the complexity of Monte Carlo event generators increased, the need for a more flexible and extensible format led to the development of the Les Houches Event (LHE) file format [@Alwall:2006yp]. 41 | LHE introduced an XML structure that allowed for better organization of event data and facilitated interoperability between different tools. 42 | Typically, LHE files are used to describe parton-level events generated by matrix element generators that are passed to parton shower and hadronization programs producing HepMC files [@Dobbs:2001ck;@Verbytskyi:2020sus;@hans_dembinski_2022_7013498], or directly to analysis frameworks such as Rivet [@Buckley:2010ar;@Bierlich:2019rhm;@Bierlich:2024vqo]. 43 | In contrast to LHE files, HepMC stores fewer details about the particles, but also contains many more particles per event. 44 | Both files are plain text-based formats, making them human-readable but potentially large. 45 | Consequently, compression using gzip is common practice for both formats. 46 | In the future a binary format such as HDF5 [@hdf5] could be considered for event storage to improve read/write performance and reduce file sizes further. 47 | 48 | The LHE format stores information within `` and multiple `` blocks consisting of whitespace-separated values designed historically for straightforward parsing in Fortran. 49 | Further details can be found in the original definition of the Les Houches Event file standard [@Alwall:2006yp]. 50 | Following the original publication there were two extensions to the LHE format, version 2.0 in 2009 [@Butterworth:2010ym] and version 3.0 in 2013 [@Andersen:2014efa]. 51 | However, `pylhe` currently only implements the widely adopted extension on top of version 1.0, that is the addition of multiple weights via ``, ``, ``, ``, ``, and ``. 52 | In the future, if there is a demand for ``, ``, ``, or ``, support for these can be added as well. 
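As a minimal, illustrative sketch (not prescriptive; the example file ships with `scikit-hep-testdata` and its use here is purely an assumption for demonstration), reading such a file with `pylhe` can look like this:

```python
# Minimal sketch: stream events from a gzipped LHE file and build a columnar view.
# The example file name from scikit-hep-testdata is an assumption for illustration.
from skhep_testdata import data_path

import pylhe

lhe_path = data_path("pylhe-drell-yan-ll-lhe.gz")

# Events are yielded one at a time, keeping memory use flat even for large files.
events = pylhe.read_lhe(lhe_path)
first = next(events)
print(first.eventinfo.nparticles, first.eventinfo.weight)

# Convert the events to an Awkward Array for vectorized analysis.
array = pylhe.to_awkward(pylhe.LHEFile.fromfile(lhe_path).events)
```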
53 | 54 | # Statement of need 55 | 56 | The LHE format is used by all major Monte Carlo event generators such as MadGraph [@Alwall:2014hca], POWHEG-BOX [@Nason:2004rx;@Frixione:2007vw;@Alioli:2010xd], Sherpa [@Gleisberg:2008ta;@Sherpa:2019gpd], HERWIG [@Corcella:2000bw;@Bahr:2008pv;@Bellm:2015jjp;@Bellm:2019zci;@Bewick:2023tfi], Pythia [@Sjostrand:2006za;@Sjostrand:2007gs;@Sjostrand:2014zea;@Bierlich:2022pfr], Whizard [@Kilian:2007gr;@Moretti:2001zz]. 57 | While interfaces for C/C++/Fortran exist in the respective generators, a lightweight and easy-to-use Python interface was missing until the inception of `pylhe` in 2015. 58 | 59 | `pylhe` allows for easy reading and writing of `.lhe` and `.lhe.gz` files in Python, enabling seamless integration into modern data analysis workflows in high-energy physics. 60 | The pythonic event yielding approach allows for memory-efficient processing of arbitrarily large LHE files by streaming events one at a time rather than loading all of them at once into memory. 61 | The library facilitates quick validation of event files through programmatic access to event structure and particle properties, making it straightforward to perform sanity checks on generated events. 62 | This can be done for example via the integration with Awkward Array [@Pivarski_Awkward_Array_2018] through the `to_awkward()` function, which converts LHE events into columnar data structures optimized for vectorized operations and efficient analysis of large datasets. 63 | Additionally, `pylhe` can serve as a crucial interface for emerging machine learning applications in particle physics, allowing researchers to efficiently extract event data for training neural networks and other ML models used in event classification, anomaly detection, and physics analysis. 64 | 65 | ## Impact 66 | 67 | `pylhe` has already been used in various research projects and publications within high-energy physics. 68 | It has been cited in Higgs studies [@Brehmer:2019gmn;@Stylianou:2023tgg;@Feuerstake:2024uxs], SUSY / BSM / dark matter searches [@Beresford:2018pbt;@Kling:2020iar;@Anisha:2023xmh;@Zhou:2022jgj;@Zhou:2024fjf;@Cheung:2024oxh;@Beresford:2024dsc], forward physics [@Kling:2022ykt;@Kelly:2021mcd;@Kling:2020mch], but also in methodological studies involving machine learning techniques for event generation and analysis [@Brehmer:2019xox;@Kofler:2024efb]. 69 | 70 | # Acknowledgements 71 | 72 | We would additionally like to thank the contributors of pylhe and the Scikit-HEP community for their support. 73 | 74 | # References 75 | -------------------------------------------------------------------------------- /.github/workflows/publish-package.yml: -------------------------------------------------------------------------------- 1 | name: publish distributions 2 | on: 3 | push: 4 | branches: 5 | - main 6 | tags: 7 | - v* 8 | pull_request: 9 | branches: 10 | - main 11 | - release/v* 12 | release: 13 | types: [published] 14 | workflow_dispatch: 15 | inputs: 16 | publish: 17 | type: choice 18 | description: 'Publish to TestPyPI?' 
19 | options: 20 | - false 21 | - true 22 | 23 | concurrency: 24 | group: ${{ github.workflow }}-${{ github.ref }} 25 | cancel-in-progress: true 26 | 27 | permissions: 28 | contents: read 29 | 30 | jobs: 31 | build: 32 | name: Build Python distribution 33 | runs-on: ubuntu-latest 34 | permissions: 35 | id-token: write 36 | attestations: write 37 | 38 | steps: 39 | - uses: actions/checkout@v6 40 | with: 41 | fetch-depth: 0 42 | 43 | - name: Set up Python 44 | uses: actions/setup-python@v6 45 | with: 46 | python-version: '3.x' 47 | 48 | - name: Install dependencies 49 | run: | 50 | python -m pip install uv 51 | 52 | - name: Build a sdist and a wheel 53 | run: | 54 | pipx run build --installer uv . 55 | 56 | - name: Verify history available for dev versions 57 | run: | 58 | wheel_name=$(find dist/ -iname "*.whl" -printf "%f\n") 59 | if [[ "${wheel_name}" == *"pylhe-0.1.dev"* ]]; then 60 | echo "python-build incorrectly named built distribution: ${wheel_name}" 61 | echo "python-build is lacking the history and tags required to determine version number" 62 | echo "intentionally erroring with 'return 1' now" 63 | return 1 64 | fi 65 | echo "python-build named built distribution: ${wheel_name}" 66 | 67 | - name: Verify the distribution 68 | run: pipx run twine check --strict dist/* 69 | 70 | - name: List contents of sdist 71 | run: python -m tarfile --list dist/pylhe-*.tar.gz 72 | 73 | - name: List contents of wheel 74 | run: python -m zipfile --list dist/pylhe-*.whl 75 | 76 | - name: Generate artifact attestation for sdist and wheel 77 | # If publishing to TestPyPI or PyPI 78 | if: >- 79 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pylhe') 80 | || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pylhe') 81 | || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pylhe') 82 | uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0 83 | with: 84 | subject-path: "dist/pylhe-*" 85 | 86 | - name: Upload distribution artifact 87 | uses: actions/upload-artifact@v6 88 | with: 89 | name: dist-artifact 90 | path: dist 91 | 92 | publish: 93 | name: Publish Python distribution to (Test)PyPI 94 | if: github.event_name != 'pull_request' 95 | needs: build 96 | runs-on: ubuntu-latest 97 | # Mandatory for publishing with a trusted publisher 98 | # c.f. 
https://docs.pypi.org/trusted-publishers/using-a-publisher/ 99 | permissions: 100 | id-token: write 101 | # Restrict to the environment set for the trusted publisher 102 | environment: 103 | name: publish-package 104 | 105 | steps: 106 | - name: Download distribution artifact 107 | uses: actions/download-artifact@v7 108 | with: 109 | name: dist-artifact 110 | path: dist 111 | 112 | - name: List all files 113 | run: ls -lh dist 114 | 115 | - name: Verify sdist artifact attestation 116 | # If publishing to TestPyPI or PyPI 117 | if: >- 118 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pylhe') 119 | || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pylhe') 120 | || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pylhe') 121 | env: 122 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 123 | run: gh attestation verify dist/pylhe-*.tar.gz --repo ${{ github.repository }} 124 | 125 | - name: Verify wheel artifact attestation 126 | # If publishing to TestPyPI or PyPI 127 | if: >- 128 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pylhe') 129 | || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pylhe') 130 | || (github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pylhe') 131 | env: 132 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 133 | run: gh attestation verify dist/pylhe-*.whl --repo ${{ github.repository }} 134 | 135 | - name: Publish distribution 📦 to Test PyPI 136 | # Publish to TestPyPI on tag events of if manually triggered 137 | # Compare to 'true' string as booleans get turned into strings in the console 138 | if: >- 139 | (github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') && github.repository == 'scikit-hep/pylhe') 140 | || (github.event_name == 'workflow_dispatch' && github.event.inputs.publish == 'true' && github.repository == 'scikit-hep/pylhe') 141 | uses: pypa/gh-action-pypi-publish@v1.13.0 142 | with: 143 | repository-url: https://test.pypi.org/legacy/ 144 | print-hash: true 145 | attestations: true 146 | 147 | - name: Publish distribution 📦 to PyPI 148 | if: github.event_name == 'release' && github.event.action == 'published' && github.repository == 'scikit-hep/pylhe' 149 | uses: pypa/gh-action-pypi-publish@v1.13.0 150 | with: 151 | print-hash: true 152 | attestations: true 153 | -------------------------------------------------------------------------------- /docs/source/examples/93_parquet_cache.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "257f4ff7-0a37-45e7-a701-bab07d2b757f", 6 | "metadata": {}, 7 | "source": [ 8 | "# Speed Up Analysis Code with Parquet Cache\n", 9 | "Looping through the XML-like LHE text file format and reconstructing the objects in memory is a slow process. If the in-memory analysis tool you use for studying the LHE files is the awkward library, one can avoid this by caching the awkward-form of the LHE data in a data file format that is much faster to read than the raw LHE file.\n", 10 | "\n", 11 | "The code below is a small function that will store a parquet cache file alongside any LHE file you wish to read, so any subsequent reads can go through the faster parquet. 
The parquet cache file will be re-created if anything modifies the original LHE file." 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 5, 17 | "id": "e2ae414e-d09a-4792-a60f-b8c4d1a8644e", 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import os\n", 22 | "\n", 23 | "import awkward as ak\n", 24 | "from platformdirs import user_cache_dir\n", 25 | "\n", 26 | "import pylhe\n", 27 | "\n", 28 | "\n", 29 | "def _parquet_cache(lhe_fp):\n", 30 | " \"\"\"Return a stable, user-writable cache path for a given LHE file.\"\"\"\n", 31 | " app_cache_dir = user_cache_dir(\n", 32 | " \"pylhe\"\n", 33 | " ) # ~/.cache/pylhe on Linux, ~/Library/Caches/pylhe on macOS, %LOCALAPPDATA%\\pylhe on Windows\n", 34 | " os.makedirs(app_cache_dir, exist_ok=True)\n", 35 | "\n", 36 | " base = os.path.splitext(os.path.basename(lhe_fp))[0]\n", 37 | " return os.path.join(app_cache_dir, base + \".parquet\")\n", 38 | "\n", 39 | "\n", 40 | "def _from_pylhe(lhe_fp):\n", 41 | " \"\"\"Read an LHE file into an awkward array in memory.\"\"\"\n", 42 | " return pylhe.to_awkward(pylhe.LHEFile.fromfile(lhe_fp).events)\n", 43 | "\n", 44 | "\n", 45 | "def convert_to_parquet(lhe_fp):\n", 46 | " \"\"\"Convert the input LHE file into a parquet file of the same name and location\n", 47 | " but with the extension updated.\n", 48 | "\n", 49 | " Converting the LHE file to a parquet file is beneficial because the resulting\n", 50 | " parquet file is about the same size as the gzipped LHE file but it offers about\n", 51 | " 2 orders of magnitude speed up when reading the data back into an awkward array\n", 52 | " in memory.\n", 53 | "\n", 54 | " Parameters\n", 55 | " ----------\n", 56 | " lhe_fp : str\n", 57 | " path to LHE file to convert\n", 58 | " \"\"\"\n", 59 | "\n", 60 | " ak.to_parquet(_from_pylhe(lhe_fp), _parquet_cache(lhe_fp))\n", 61 | "\n", 62 | "\n", 63 | "def from_lhe(filepath, *, parquet_cache=True):\n", 64 | " \"\"\"Load an awkward array of the events in the passed LHE file\n", 65 | "\n", 66 | " Parameters\n", 67 | " ----------\n", 68 | " filepath : str\n", 69 | " Path to LHE file to load\n", 70 | " parquet_cache : bool, optional\n", 71 | " If true, use a parquet file alongside the LHE file to cache the parsing.\n", 72 | " This caching makes sure to update the cache if the LHE file timestamp is\n", 73 | " newer than the parquet cache timestamp. 
If false, never use a cache.\n", 74 | " \"\"\"\n", 75 | "\n", 76 | " # need the file to exist\n", 77 | " if not os.path.exists(filepath):\n", 78 | " msg = f\"Input LHE file {filepath} does not exist.\"\n", 79 | " raise FileNotFoundError(msg)\n", 80 | "\n", 81 | " # leave early without even thinking about cache if user doesn't want it\n", 82 | " if not parquet_cache:\n", 83 | " return _from_pylhe(filepath)\n", 84 | "\n", 85 | " # if cache doesn't exist or its last modification time is earlier than\n", 86 | " # the last modification time of the original LHE file, we need to create\n", 87 | " # the cache file\n", 88 | " cache_fp = _parquet_cache(filepath)\n", 89 | " if not os.path.exists(cache_fp) or os.path.getmtime(cache_fp) < os.path.getmtime(\n", 90 | " filepath\n", 91 | " ):\n", 92 | " convert_to_parquet(filepath)\n", 93 | "\n", 94 | " # load the data from the cache\n", 95 | " return ak.from_parquet(cache_fp)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "id": "63c527ef-4bb9-4982-badc-2145ff81d031", 101 | "metadata": {}, 102 | "source": [ 103 | "Just as an example, we can use the scikit-hep test data to show how much faster the parquet reading is." 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 6, 109 | "id": "705a9b59-3044-456c-b9b9-3a0e1f5bf711", 110 | "metadata": {}, 111 | "outputs": [ 112 | { 113 | "name": "stdout", 114 | "output_type": "stream", 115 | "text": [ 116 | "CPU times: user 7.71 s, sys: 19.9 ms, total: 7.73 s\n", 117 | "Wall time: 7.73 s\n", 118 | "CPU times: user 8.12 s, sys: 96.7 ms, total: 8.22 s\n", 119 | "Wall time: 8.2 s\n", 120 | "CPU times: user 103 ms, sys: 16.7 ms, total: 120 ms\n", 121 | "Wall time: 103 ms\n" 122 | ] 123 | } 124 | ], 125 | "source": [ 126 | "from skhep_testdata import data_path\n", 127 | "\n", 128 | "lhe_file = data_path(\"pylhe-drell-yan-ll-lhe.gz\")\n", 129 | "\n", 130 | "%time events = _from_pylhe(lhe_file)\n", 131 | "# first run needs to generate the cache\n", 132 | "# so it will be about as slow as normal LHE reading\n", 133 | "%time events = from_lhe(lhe_file)\n", 134 | "# later runs will be faster\n", 135 | "%time events = from_lhe(lhe_file)" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "id": "f7efdbf7-f40c-4b29-8b00-f455b4a25684", 142 | "metadata": {}, 143 | "outputs": [], 144 | "source": [] 145 | } 146 | ], 147 | "metadata": { 148 | "kernelspec": { 149 | "display_name": "Python 3 (ipykernel)", 150 | "language": "python", 151 | "name": "python3" 152 | }, 153 | "language_info": { 154 | "codemirror_mode": { 155 | "name": "ipython", 156 | "version": 3 157 | }, 158 | "file_extension": ".py", 159 | "mimetype": "text/x-python", 160 | "name": "python", 161 | "nbconvert_exporter": "python", 162 | "pygments_lexer": "ipython3", 163 | "version": "3.13.5" 164 | } 165 | }, 166 | "nbformat": 4, 167 | "nbformat_minor": 5 168 | } 169 | -------------------------------------------------------------------------------- /docs/source/lhe.rst: -------------------------------------------------------------------------------- 1 | Les Houches Event Format 2 | ======================== 3 | 4 | The Les Houches Event (LHE) format uses an XML-like structure, but the content within the ``` and ```` blocks consists of whitespace-separated values designed for straightforward parsing in Fortran. 5 | Its first version was defined in :cite:`Alwall:2006yp`. 6 | The ``
<header>`` block can contain arbitrary XML content, usually metadata or comments explaining how the events were generated. 7 | The following skeleton example illustrates the overall structure of an LHE file using the ``pylhe`` naming of the attributes: 8 | 9 | .. code-block:: xml 10 | 11 | <LesHouchesEvents version="1.0"> 12 | <header> <!-- arbitrary XML content, e.g. generator metadata --> </header>
13 | <init> 14 | beamA beamB energyA energyB PDFgroupA PDFgroupB PDFsetA PDFsetB weightingStrategy numProcesses 15 | xSection error unitWeight procId 16 | ... 17 | # additional hash-commented information can go here 18 | </init> 19 | <event> 20 | nparticles pid weight scale aqed aqcd 21 | id status mother1 mother2 color1 color2 px py pz e m lifetime spin 22 | ... 23 | # additional hash-commented information can go here 24 | </event> 25 | ... 26 | </LesHouchesEvents>
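As a minimal sketch (assuming the ``pylhe-testlhef3.lhe`` file from ``scikit-hep-testdata``), the attributes named in the skeleton above and summarized in the table below can be accessed directly from Python:

.. code-block:: python

    from skhep_testdata import data_path

    import pylhe

    lhe_path = data_path("pylhe-testlhef3.lhe")

    # Run-level information from the <init> block, with dict-like access.
    init = pylhe.read_lhe_init(lhe_path)
    print(init["initInfo"]["beamA"], init["initInfo"]["energyA"])

    # Events are streamed one at a time from the <event> blocks.
    events = pylhe.read_lhe(lhe_path)
    event = next(events)
    print(event.eventinfo.nparticles, event.eventinfo.scale)
    for particle in event.particles:
        print(particle.id, particle.status, particle.px, particle.py, particle.pz, particle.e)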
27 | 28 | The table below summarizes the main parameters found in LHE files grouped by their ``dataclass`` representation in ``pylhe``. 29 | 30 | +-------------------+-------+--------------------------------------+------+ 31 | | Parameter | Type | Description | Unit | 32 | +===================+=======+======================================+======+ 33 | | :py:class:`pylhe.LHEInitInfo` | 34 | +-------------------+-------+--------------------------------------+------+ 35 | | beamA | int | PDG ID of first beam particle | - | 36 | +-------------------+-------+--------------------------------------+------+ 37 | | beamB | int | PDG ID of second beam particle | - | 38 | +-------------------+-------+--------------------------------------+------+ 39 | | energyA | float | Energy of first beam particle | GeV | 40 | +-------------------+-------+--------------------------------------+------+ 41 | | energyB | float | Energy of second beam particle | GeV | 42 | +-------------------+-------+--------------------------------------+------+ 43 | | PDFgroupA | int | PDF group ID for first beam | - | 44 | +-------------------+-------+--------------------------------------+------+ 45 | | PDFgroupB | int | PDF group ID for second beam | - | 46 | +-------------------+-------+--------------------------------------+------+ 47 | | PDFsetA | int | PDF set ID for first beam | - | 48 | +-------------------+-------+--------------------------------------+------+ 49 | | PDFsetB | int | PDF set ID for second beam | - | 50 | +-------------------+-------+--------------------------------------+------+ 51 | | weightingStrategy | int | Weighting strategy | - | 52 | +-------------------+-------+--------------------------------------+------+ 53 | | numProcesses | int | Number of processes | - | 54 | +-------------------+-------+--------------------------------------+------+ 55 | | :py:class:`pylhe.LHEProcInfo` | 56 | +-------------------+-------+--------------------------------------+------+ 57 | | xSection | float | Cross section | pb | 58 | +-------------------+-------+--------------------------------------+------+ 59 | | error | float | Cross section uncertainty | pb | 60 | +-------------------+-------+--------------------------------------+------+ 61 | | unitWeight | float | Maximum cross section | pb | 62 | +-------------------+-------+--------------------------------------+------+ 63 | | procId | int | Process ID | - | 64 | +-------------------+-------+--------------------------------------+------+ 65 | | :py:class:`pylhe.LHEEventInfo` | 66 | +-------------------+-------+--------------------------------------+------+ 67 | | nparticles | int | Number of particles in event | - | 68 | +-------------------+-------+--------------------------------------+------+ 69 | | pid | int | Process ID for this event | - | 70 | +-------------------+-------+--------------------------------------+------+ 71 | | weight | float | Event weight | - | 72 | +-------------------+-------+--------------------------------------+------+ 73 | | scale | float | Factorization/renormalization scale | GeV | 74 | +-------------------+-------+--------------------------------------+------+ 75 | | aqed | float | QED coupling constant | - | 76 | +-------------------+-------+--------------------------------------+------+ 77 | | aqcd | float | QCD coupling constant | - | 78 | +-------------------+-------+--------------------------------------+------+ 79 | | :py:class:`pylhe.LHEParticle` | 80 | 
+-------------------+-------+--------------------------------------+------+ 81 | | id | int | PDG particle ID | - | 82 | +-------------------+-------+--------------------------------------+------+ 83 | | status | int | Particle status code | - | 84 | +-------------------+-------+--------------------------------------+------+ 85 | | mother1 | int | Index of first mother particle | - | 86 | +-------------------+-------+--------------------------------------+------+ 87 | | mother2 | int | Index of second mother particle | - | 88 | +-------------------+-------+--------------------------------------+------+ 89 | | color1 | int | First color line index | - | 90 | +-------------------+-------+--------------------------------------+------+ 91 | | color2 | int | Second color line index | - | 92 | +-------------------+-------+--------------------------------------+------+ 93 | | px | float | x-component of momentum | GeV | 94 | +-------------------+-------+--------------------------------------+------+ 95 | | py | float | y-component of momentum | GeV | 96 | +-------------------+-------+--------------------------------------+------+ 97 | | pz | float | z-component of momentum | GeV | 98 | +-------------------+-------+--------------------------------------+------+ 99 | | e | float | Energy | GeV | 100 | +-------------------+-------+--------------------------------------+------+ 101 | | m | float | Mass | GeV | 102 | +-------------------+-------+--------------------------------------+------+ 103 | | lifetime | float | Proper lifetime | mm | 104 | +-------------------+-------+--------------------------------------+------+ 105 | | spin | float | Spin information. 9.0 for unpolarized| - | 106 | +-------------------+-------+--------------------------------------+------+ 107 | 108 | Further details can be found in the original definition of the LHE file standard. 109 | Besides the original publication there were two extensions to the LHE format, version 2.0 in 2009 :cite:`Butterworth:2010ym` and version 3.0 in 2013 :cite:`Andersen:2014efa`. 110 | However, ``pylhe`` currently only implements the widely adopted extension from version 1.0, that is the addition of multiple weights via ````, ````, ````, ````, ````, and ````. 111 | If in the future there is a demand for ````, ````, ````, or ````, support for these can be added as well. 112 | -------------------------------------------------------------------------------- /tests/test_classes.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | 3 | import pytest 4 | import skhep_testdata 5 | 6 | from pylhe import ( 7 | LHEEventInfo, 8 | LHEFile, 9 | LHEInit, 10 | LHEInitInfo, 11 | LHEParticle, 12 | LHEProcInfo, 13 | read_lhe, 14 | ) 15 | 16 | TEST_FILE = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 17 | 18 | 19 | def test_LHEEvent(): 20 | events = read_lhe(TEST_FILE) 21 | event = next(events) # it contains 8 pions and a proton 22 | 23 | assert event.eventinfo is not None 24 | 25 | assert len(event.particles) == 9 26 | 27 | for p in event.particles: 28 | assert p.event == event 29 | 30 | assert event._graph is None 31 | 32 | 33 | def test_LHEEventInfo_no_default_init(): 34 | with pytest.raises(TypeError): 35 | _ = LHEEventInfo() 36 | 37 | 38 | def test_LHEEventInfo_fromstring(): 39 | """ 40 | Data taken from the first block of scikit-hep-testdata's package 41 | "pylhe-testlhef3.lhe" file. 
42 | """ 43 | data = "5 66 0.50109093E+02 0.14137688E+03 0.75563862E-02 0.12114027E+00" 44 | event_info = LHEEventInfo.fromstring(data) 45 | 46 | assert event_info.nparticles == 5 47 | assert event_info.pid == 66 48 | assert event_info.weight == pytest.approx(0.50109093e02) 49 | assert event_info.scale == pytest.approx(0.14137688e03) 50 | assert event_info.aqed == pytest.approx(0.75563862e-02) 51 | assert event_info.aqcd == pytest.approx(0.12114027e00) 52 | 53 | 54 | def test_LHEEventInfo_backwards_compatibility(): 55 | """ 56 | Test backwards-compatibility of fieldnames. 57 | """ 58 | event_info = LHEEventInfo( 59 | nparticles=6, pid=67, weight=0.6, scale=0.2, aqed=0.8, aqcd=0.2 60 | ) 61 | 62 | assert event_info.fieldnames == [ 63 | "nparticles", 64 | "pid", 65 | "weight", 66 | "scale", 67 | "aqed", 68 | "aqcd", 69 | ] 70 | 71 | 72 | def test_LHEFile_no_default_init(): 73 | with pytest.raises(TypeError): 74 | _ = LHEFile() 75 | 76 | 77 | def test_LHEInit_no_default_init(): 78 | with pytest.raises(TypeError): 79 | _ = LHEInit() 80 | 81 | 82 | def test_LHEInit_fromstring(): 83 | """ 84 | Data taken from the block of scikit-hep-testdata's package 85 | "pylhe-testlhef3.lhe" file. 86 | """ 87 | data = "2212 2212 0.40000000E+04 0.40000000E+04 -1 -1 21100 21100 -4 1" 88 | result = { 89 | "beamA": 2212.0, 90 | "beamB": 2212.0, 91 | "energyA": 4000.0, 92 | "energyB": 4000.0, 93 | "PDFgroupA": -1.0, 94 | "PDFgroupB": -1.0, 95 | "PDFsetA": 21100.0, 96 | "PDFsetB": 21100.0, 97 | "weightingStrategy": -4.0, 98 | "numProcesses": 1.0, 99 | } 100 | assert dataclasses.asdict(LHEInitInfo.fromstring(data)) == result 101 | 102 | 103 | def test_LHEParticle_no_default_init(): 104 | with pytest.raises(TypeError): 105 | _ = LHEParticle() 106 | 107 | 108 | def test_LHEParticle_fromstring(): 109 | """ 110 | Data taken from the first block of scikit-hep-testdata's package 111 | "pylhe-testlhef3.lhe" file. 
112 | """ 113 | particles = [ 114 | " 5 -1 0 0 501 0 0.00000000E+00 0.00000000E+00 0.14322906E+03 0.14330946E+03 0.48000000E+01 0.0000E+00 0.0000E+00", 115 | " 2 -1 0 0 502 0 0.00000000E+00 0.00000000E+00 -.93544317E+03 0.93544323E+03 0.33000000E+00 0.0000E+00 0.0000E+00", 116 | " 24 1 1 2 0 0 -.84258804E+02 -.15708566E+03 -.10629600E+03 0.22257162E+03 0.80398000E+02 0.0000E+00 0.0000E+00", 117 | " 5 1 1 2 501 0 -.13668073E+03 -.36307424E+02 -.40614473E+02 0.14721558E+03 0.48000000E+01 0.0000E+00 0.0000E+00", 118 | " 1 1 1 2 502 0 0.22093954E+03 0.19339308E+03 -.64530364E+03 0.70896548E+03 0.33000000E+00 0.0000E+00 0.0000E+00", 119 | ] 120 | 121 | particle_objs = [LHEParticle.fromstring(p) for p in particles] 122 | 123 | assert [p.id for p in particle_objs] == [5, 2, 24, 5, 1] 124 | assert [p.status for p in particle_objs] == [-1.0, -1.0, 1.0, 1.0, 1.0] 125 | assert [p.mother1 for p in particle_objs] == [0.0, 0.0, 1.0, 1.0, 1.0] 126 | assert [p.mother2 for p in particle_objs] == [0.0, 0.0, 2.0, 2.0, 2.0] 127 | assert [p.color1 for p in particle_objs] == [501.0, 502.0, 0.0, 501.0, 502.0] 128 | assert [p.color2 for p in particle_objs] == [0.0, 0.0, 0.0, 0.0, 0.0] 129 | assert [p.px for p in particle_objs] == [ 130 | 0.0, 131 | 0.0, 132 | -84.258804, 133 | -136.68073, 134 | 220.93954, 135 | ] 136 | assert [p.py for p in particle_objs] == [ 137 | 0.0, 138 | 0.0, 139 | -157.08566, 140 | -36.307424, 141 | 193.39308, 142 | ] 143 | assert [p.pz for p in particle_objs] == [ 144 | 143.22906, 145 | -935.44317, 146 | -106.296, 147 | -40.614473, 148 | -645.30364, 149 | ] 150 | assert [p.e for p in particle_objs] == [ 151 | 143.30946, 152 | 935.44323, 153 | 222.57162, 154 | 147.21558, 155 | 708.96548, 156 | ] 157 | assert [p.m for p in particle_objs] == [4.8, 0.33, 80.398, 4.8, 0.33] 158 | assert [p.lifetime for p in particle_objs] == [0.0, 0.0, 0.0, 0.0, 0.0] 159 | assert [p.spin for p in particle_objs] == [0.0, 0.0, 0.0, 0.0, 0.0] 160 | 161 | 162 | def test_LHEParticle_backwards_compatibility(): 163 | """ 164 | Test backwards-compatibility of fieldnames. 165 | """ 166 | particle = LHEParticle( 167 | id=5, 168 | status=-1, 169 | mother1=0, 170 | mother2=0, 171 | color1=501, 172 | color2=0, 173 | px=0, 174 | py=0, 175 | pz=143.22906, 176 | e=143.30946, 177 | m=4.8, 178 | lifetime=0, 179 | spin=0, 180 | ) 181 | 182 | assert particle.fieldnames == [ 183 | "id", 184 | "status", 185 | "mother1", 186 | "mother2", 187 | "color1", 188 | "color2", 189 | "px", 190 | "py", 191 | "pz", 192 | "e", 193 | "m", 194 | "lifetime", 195 | "spin", 196 | ] 197 | 198 | # particle is not associated to an event thus mothers should raise a ValueError 199 | with pytest.raises(ValueError, match=r"Particle is not associated to an event."): 200 | _ = particle.mothers() 201 | 202 | 203 | def test_LHEProcInfo_no_default_init(): 204 | with pytest.raises(TypeError): 205 | _ = LHEProcInfo() 206 | 207 | 208 | def test_LHEProcInfo_fromstring(): 209 | """ 210 | Data taken from the block of scikit-hep-testdata's package 211 | "pylhe-testlhef3.lhe" file. 212 | """ 213 | data = "0.50109086E+02 0.89185414E-01 0.50109093E+02 66" 214 | result = { 215 | "xSection": 50.109086, 216 | "error": 0.089185414, 217 | "unitWeight": 50.109093, 218 | "procId": 66.0, 219 | } 220 | assert dataclasses.asdict(LHEProcInfo.fromstring(data)) == result 221 | 222 | 223 | def test_LHEProcInfo_backwards_compatibility(): 224 | """ 225 | Test backwards-compatibility of dict like access and fieldnames. 
226 | """ 227 | proc_info = LHEProcInfo( 228 | xSection=50.109086, error=0.089185414, unitWeight=50.109093, procId=66.0 229 | ) 230 | 231 | assert proc_info.fieldnames == ["xSection", "error", "unitWeight", "procId"] 232 | 233 | assert proc_info["xSection"] == pytest.approx(50.109086) 234 | assert proc_info["error"] == pytest.approx(0.089185414) 235 | assert proc_info["unitWeight"] == pytest.approx(50.109093) 236 | assert proc_info["procId"] == pytest.approx(66.0) 237 | 238 | proc_info["xSection"] = 60.0 239 | proc_info["error"] = 0.1 240 | proc_info["unitWeight"] = 60.0 241 | proc_info["procId"] = 67.0 242 | 243 | assert proc_info["xSection"] == pytest.approx(60.0) 244 | assert proc_info["error"] == pytest.approx(0.1) 245 | assert proc_info["unitWeight"] == pytest.approx(60.0) 246 | assert proc_info["procId"] == pytest.approx(67.0) 247 | 248 | 249 | def test_LHEInitInfo_backwards_compatibility(): 250 | """ 251 | Test backwards-compatibility of dict like access and fieldnames. 252 | """ 253 | lheii = LHEInitInfo( 254 | beamA=1, 255 | beamB=2, 256 | energyA=3.0, 257 | energyB=4.0, 258 | PDFgroupA=-1, 259 | PDFgroupB=-1, 260 | PDFsetA=21100, 261 | PDFsetB=21100, 262 | weightingStrategy=1, 263 | numProcesses=1, 264 | ) 265 | 266 | assert lheii.fieldnames == [ 267 | "beamA", 268 | "beamB", 269 | "energyA", 270 | "energyB", 271 | "PDFgroupA", 272 | "PDFgroupB", 273 | "PDFsetA", 274 | "PDFsetB", 275 | "weightingStrategy", 276 | "numProcesses", 277 | ] 278 | 279 | assert lheii["beamA"] == 1 280 | assert lheii["beamB"] == 2 281 | assert lheii["energyA"] == 3.0 282 | assert lheii["energyB"] == 4.0 283 | assert lheii["PDFgroupA"] == -1 284 | assert lheii["PDFgroupB"] == -1 285 | assert lheii["PDFsetA"] == 21100 286 | assert lheii["PDFsetB"] == 21100 287 | assert lheii["weightingStrategy"] == 1 288 | assert lheii["numProcesses"] == 1 289 | 290 | lheii["beamA"] = 5 291 | lheii["beamB"] = 6 292 | lheii["energyA"] = 7.0 293 | lheii["energyB"] = 8.0 294 | lheii["PDFgroupA"] = -2 295 | lheii["PDFgroupB"] = -2 296 | lheii["PDFsetA"] = 21101 297 | lheii["PDFsetB"] = 21101 298 | lheii["weightingStrategy"] = 2 299 | lheii["numProcesses"] = 2 300 | 301 | assert lheii["beamA"] == 5 302 | assert lheii["beamB"] == 6 303 | assert lheii["energyA"] == 7.0 304 | assert lheii["energyB"] == 8.0 305 | assert lheii["PDFgroupA"] == -2 306 | assert lheii["PDFgroupB"] == -2 307 | assert lheii["PDFsetA"] == 21101 308 | assert lheii["PDFsetB"] == 21101 309 | assert lheii["weightingStrategy"] == 2 310 | assert lheii["numProcesses"] == 2 311 | -------------------------------------------------------------------------------- /tests/test_lhe_writer.py: -------------------------------------------------------------------------------- 1 | import skhep_testdata 2 | 3 | import pylhe 4 | 5 | TEST_FILE_LHE_v1 = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 6 | TEST_FILE_LHE_v3 = skhep_testdata.data_path("pylhe-testlhef3.lhe") 7 | TEST_FILE_LHE_INITRWGT_WEIGHTS = skhep_testdata.data_path( 8 | "pylhe-testfile-powheg-box-v2-hvq.lhe" 9 | ) 10 | TEST_FILE_LHE_RWGT_WGT = skhep_testdata.data_path("pylhe-testfile-powheg-box-v2-W.lhe") 11 | TEST_FILES_LHE_POWHEG = [ 12 | skhep_testdata.data_path(f"pylhe-testfile-powheg-box-v2-{proc}.lhe") 13 | for proc in ["Z", "W", "Zj", "trijet", "directphoton", "hvq"] 14 | ] 15 | 16 | 17 | def test_backwards_compatibility_lheinit(): 18 | init = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 19 | assert init["initInfo"]["beamA"] == init["beamA"] 20 | init["beamA"] = 11 21 | assert init["initInfo"]["beamA"] == 11 
22 | 23 | 24 | def test_write_lhe_eventline(): 25 | """ 26 | Test that the event line is written correctly. 27 | """ 28 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 29 | e = next(events) 30 | assert ( 31 | e.particles[0].tolhe() 32 | == " 5 -1 0 0 501 0 0.00000000e+00 0.00000000e+00 1.43229060e+02 1.43309460e+02 4.80000000e+00 0.0000e+00 0.0000e+00" 33 | ) 34 | 35 | 36 | def test_write_lhe_eventinfo(): 37 | """ 38 | Test that the event info is written correctly. 39 | """ 40 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 41 | e = next(events) 42 | assert ( 43 | e.eventinfo.tolhe() 44 | == " 5 66 5.0109093000e+01 1.4137688000e+02 7.5563862000e-03 1.2114027000e-01" 45 | ) 46 | 47 | 48 | def test_write_lhe_event(): 49 | """ 50 | Test that the event is written correctly. 51 | """ 52 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 53 | e = next(events) 54 | assert ( 55 | e.tolhe() 56 | == """ 57 | 5 66 5.0109093000e+01 1.4137688000e+02 7.5563862000e-03 1.2114027000e-01 58 | 5 -1 0 0 501 0 0.00000000e+00 0.00000000e+00 1.43229060e+02 1.43309460e+02 4.80000000e+00 0.0000e+00 0.0000e+00 59 | 2 -1 0 0 502 0 0.00000000e+00 0.00000000e+00 -9.35443170e+02 9.35443230e+02 3.30000000e-01 0.0000e+00 0.0000e+00 60 | 24 1 1 2 0 0 -8.42588040e+01 -1.57085660e+02 -1.06296000e+02 2.22571620e+02 8.03980000e+01 0.0000e+00 0.0000e+00 61 | 5 1 1 2 501 0 -1.36680730e+02 -3.63074240e+01 -4.06144730e+01 1.47215580e+02 4.80000000e+00 0.0000e+00 0.0000e+00 62 | 1 1 1 2 502 0 2.20939540e+02 1.93393080e+02 -6.45303640e+02 7.08965480e+02 3.30000000e-01 0.0000e+00 0.0000e+00 63 | 64 | 5.0109e+01 65 | 4.5746e+01 66 | 5.2581e+01 67 | 5.0109e+01 68 | 4.5746e+01 69 | 5.2581e+01 70 | 5.0109e+01 71 | 4.5746e+01 72 | 5.2581e+01 73 | 74 | """ 75 | ) 76 | 77 | 78 | def test_write_lhe_init(): 79 | """ 80 | Test that the block is written correctly. 81 | """ 82 | init = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 83 | 84 | assert ( 85 | init["initInfo"].tolhe() 86 | == " 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1" 87 | ) 88 | assert ( 89 | init["procInfo"][0].tolhe() 90 | == " 5.0109086e+01 8.9185414e-02 5.0109093e+01 66" 91 | ) 92 | 93 | assert ( 94 | init.tolhe() 95 | == """ 96 | 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1 97 | 5.0109086e+01 8.9185414e-02 5.0109093e+01 66 98 | 99 | 100 | muR=0.10000E+01 muF=0.10000E+01 101 | muR=0.10000E+01 muF=0.20000E+01 102 | muR=0.10000E+01 muF=0.50000E+00 103 | muR=0.20000E+01 muF=0.10000E+01 104 | muR=0.20000E+01 muF=0.20000E+01 105 | muR=0.20000E+01 muF=0.50000E+00 106 | muR=0.50000E+00 muF=0.10000E+01 107 | muR=0.50000E+00 muF=0.20000E+01 108 | muR=0.50000E+00 muF=0.50000E+00 109 | 110 | 111 | """ 112 | ) 113 | assert init.tolhe() == pylhe.LHEFile.fromstring(init.tolhe()).init.tolhe() 114 | 115 | 116 | def test_write_lhe(): 117 | """ 118 | Test that the LHE file is written correctly. 
119 | """ 120 | init = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 121 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 122 | # single test event 123 | events = [next(events)] 124 | 125 | assert ( 126 | pylhe.write_lhe_string(init, events) 127 | == """ 128 | 129 | 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1 130 | 5.0109086e+01 8.9185414e-02 5.0109093e+01 66 131 | 132 | 133 | muR=0.10000E+01 muF=0.10000E+01 134 | muR=0.10000E+01 muF=0.20000E+01 135 | muR=0.10000E+01 muF=0.50000E+00 136 | muR=0.20000E+01 muF=0.10000E+01 137 | muR=0.20000E+01 muF=0.20000E+01 138 | muR=0.20000E+01 muF=0.50000E+00 139 | muR=0.50000E+00 muF=0.10000E+01 140 | muR=0.50000E+00 muF=0.20000E+01 141 | muR=0.50000E+00 muF=0.50000E+00 142 | 143 | 144 | 145 | 146 | 5 66 5.0109093000e+01 1.4137688000e+02 7.5563862000e-03 1.2114027000e-01 147 | 5 -1 0 0 501 0 0.00000000e+00 0.00000000e+00 1.43229060e+02 1.43309460e+02 4.80000000e+00 0.0000e+00 0.0000e+00 148 | 2 -1 0 0 502 0 0.00000000e+00 0.00000000e+00 -9.35443170e+02 9.35443230e+02 3.30000000e-01 0.0000e+00 0.0000e+00 149 | 24 1 1 2 0 0 -8.42588040e+01 -1.57085660e+02 -1.06296000e+02 2.22571620e+02 8.03980000e+01 0.0000e+00 0.0000e+00 150 | 5 1 1 2 501 0 -1.36680730e+02 -3.63074240e+01 -4.06144730e+01 1.47215580e+02 4.80000000e+00 0.0000e+00 0.0000e+00 151 | 1 1 1 2 502 0 2.20939540e+02 1.93393080e+02 -6.45303640e+02 7.08965480e+02 3.30000000e-01 0.0000e+00 0.0000e+00 152 | 153 | 5.0109e+01 154 | 4.5746e+01 155 | 5.2581e+01 156 | 5.0109e+01 157 | 4.5746e+01 158 | 5.2581e+01 159 | 5.0109e+01 160 | 4.5746e+01 161 | 5.2581e+01 162 | 163 | 164 | """ 165 | ) 166 | 167 | assert ( 168 | pylhe.write_lhe_string(init, events, rwgt=False, weights=True) 169 | == """ 170 | 171 | 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1 172 | 5.0109086e+01 8.9185414e-02 5.0109093e+01 66 173 | 174 | 175 | muR=0.10000E+01 muF=0.10000E+01 176 | muR=0.10000E+01 muF=0.20000E+01 177 | muR=0.10000E+01 muF=0.50000E+00 178 | muR=0.20000E+01 muF=0.10000E+01 179 | muR=0.20000E+01 muF=0.20000E+01 180 | muR=0.20000E+01 muF=0.50000E+00 181 | muR=0.50000E+00 muF=0.10000E+01 182 | muR=0.50000E+00 muF=0.20000E+01 183 | muR=0.50000E+00 muF=0.50000E+00 184 | 185 | 186 | 187 | 188 | 5 66 5.0109093000e+01 1.4137688000e+02 7.5563862000e-03 1.2114027000e-01 189 | 5 -1 0 0 501 0 0.00000000e+00 0.00000000e+00 1.43229060e+02 1.43309460e+02 4.80000000e+00 0.0000e+00 0.0000e+00 190 | 2 -1 0 0 502 0 0.00000000e+00 0.00000000e+00 -9.35443170e+02 9.35443230e+02 3.30000000e-01 0.0000e+00 0.0000e+00 191 | 24 1 1 2 0 0 -8.42588040e+01 -1.57085660e+02 -1.06296000e+02 2.22571620e+02 8.03980000e+01 0.0000e+00 0.0000e+00 192 | 5 1 1 2 501 0 -1.36680730e+02 -3.63074240e+01 -4.06144730e+01 1.47215580e+02 4.80000000e+00 0.0000e+00 0.0000e+00 193 | 1 1 1 2 502 0 2.20939540e+02 1.93393080e+02 -6.45303640e+02 7.08965480e+02 3.30000000e-01 0.0000e+00 0.0000e+00 194 | 195 | 5.0109e+01 196 | 4.5746e+01 197 | 5.2581e+01 198 | 5.0109e+01 199 | 4.5746e+01 200 | 5.2581e+01 201 | 5.0109e+01 202 | 4.5746e+01 203 | 5.2581e+01 204 | 205 | 206 | """ 207 | ) 208 | 209 | 210 | def test_write_lhe_twice(tmpdir): 211 | file1 = tmpdir.join("test1.lhe") 212 | file2 = tmpdir.join("test2.lhe") 213 | 214 | init = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 215 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 216 | # single test event 217 | events = [next(events)] 218 | 219 | # write the file 220 | pylhe.write_lhe_file(init, events, filepath=file1.strpath) 221 | 222 | # read it again 223 | init = 
pylhe.read_lhe_init(file1) 224 | events = pylhe.read_lhe_with_attributes(file1) 225 | 226 | # write it again 227 | pylhe.write_lhe_file_path(pylhe.LHEFile(init, events), filepath=file2.strpath) 228 | 229 | # assert that the files are the same 230 | assert file1.read() == file2.read() 231 | 232 | 233 | def test_write_lhe_gzip(tmpdir): 234 | file1 = tmpdir.join("test1.lhe.gz") 235 | 236 | init = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 237 | assert init is not None 238 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 239 | # single test event 240 | events = [next(events)] 241 | 242 | # write the file 243 | pylhe.write_lhe_file(init, events, filepath=file1.strpath, gz=True) 244 | 245 | # read it again 246 | init = pylhe.read_lhe_init(file1) 247 | -------------------------------------------------------------------------------- /.github/workflows/bump-version.yml: -------------------------------------------------------------------------------- 1 | name: Bump version 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | part: 7 | description: 'Semver type of new version (major | minor | patch)' 8 | required: true 9 | type: choice 10 | options: 11 | - patch 12 | - minor 13 | - major 14 | release_candidate: 15 | type: choice 16 | description: 'Release candidate?' 17 | options: 18 | - false 19 | - true 20 | new_version: 21 | description: 'New version to bump to' 22 | required: true 23 | force: 24 | type: choice 25 | description: 'Force override check?' 26 | options: 27 | - false 28 | - true 29 | dry_run: 30 | type: choice 31 | description: 'Perform a dry run to check?' 32 | options: 33 | - true 34 | - false 35 | 36 | jobs: 37 | bump-version: 38 | runs-on: ubuntu-latest 39 | if: github.repository == 'scikit-hep/pylhe' 40 | 41 | steps: 42 | # Use GitHub PAT to authenticate so other workflows trigger 43 | - name: Checkout code 44 | uses: actions/checkout@v6 45 | with: 46 | fetch-depth: 0 47 | token: ${{ secrets.ACCESS_TOKEN }} 48 | 49 | - name: Verify new version bump step is valid 50 | if: github.event.inputs.force == 'false' 51 | id: script 52 | shell: bash 53 | run: | 54 | current_tag="$(git describe --tags --abbrev=0)" 55 | current_tag="${current_tag:1}" 56 | 57 | latest_stable_tag="$(git tag | grep --invert-match 'rc' | tail -n 1)" 58 | latest_stable_tag="${latest_stable_tag:1}" 59 | 60 | echo "* Current version: ${current_tag}" 61 | echo "* Latest stable version: ${latest_stable_tag}" 62 | 63 | if [ ${{ github.event.inputs.release_candidate }} == 'true' ]; then 64 | echo "* Attempting a ${{ github.event.inputs.part }} version release candidate bump from ${current_tag} to: ${{ github.event.inputs.new_version }}" 65 | else 66 | # For ease of use, set current tag to latest stable 67 | current_tag="${latest_stable_tag}" 68 | 69 | echo "* Attempting a ${{ github.event.inputs.part }} version bump from ${current_tag} to: ${{ github.event.inputs.new_version }}" 70 | fi 71 | 72 | echo "* Validating bump target version matches SemVer..." 
73 | 74 | # IFS is single character, so split on the 'r' in "rc" 75 | IFS='r' read current_tag_read current_rc <- 200 | github.event_name == 'workflow_dispatch' 201 | && ( 202 | github.event.sender.login == 'lukasheinrich' || 203 | github.event.sender.login == 'matthewfeickert' || 204 | github.event.sender.login == 'eduardo-rodrigues' || 205 | github.event.sender.login == 'APN-Pucky' 206 | ) 207 | shell: bash 208 | run: | 209 | tbump --non-interactive --no-push ${{ github.event.inputs.new_version }} 210 | 211 | - name: Update the Git tag annotation 212 | if: ${{ github.event.inputs.dry_run }} == 'false' 213 | shell: bash 214 | run: | 215 | OLD_TAG=${{ steps.script.outputs.old_tag }} 216 | git tag -n99 --list "${OLD_TAG}" 217 | 218 | NEW_TAG=v${{ github.event.inputs.new_version }} 219 | git tag -n99 --list "${NEW_TAG}" 220 | 221 | CHANGES=$(git log --pretty=format:'%s' "${OLD_TAG}"..HEAD --regexp-ignore-case --extended-regexp --grep='^([a-z]*?):') 222 | CHANGES_NEWLINE="$(echo "${CHANGES}" | sed -e 's/^/ - /')" 223 | SANITIZED_CHANGES=$(echo "${CHANGES}" | sed -e 's/^/
  • /' -e 's|$|
  • |' -e 's/(#[0-9]\+)//' -e 's/"/'"'"'/g') 224 | NUM_CHANGES=$(echo -n "${CHANGES}" | grep -c '^') 225 | 226 | if [ ${{ github.event.inputs.release_candidate }} == 'true' ]; then 227 | git tag "${NEW_TAG}" "${NEW_TAG}"^{} -f -m "$(printf "This is a ${{ github.event.inputs.part }} release candidate from ${OLD_TAG} → ${NEW_TAG}.\n\nChanges:\n${CHANGES_NEWLINE}")" 228 | else 229 | git tag "${NEW_TAG}" "${NEW_TAG}"^{} -f -m "$(printf "This is a ${{ github.event.inputs.part }} release from ${OLD_TAG} → ${NEW_TAG}.\n\nChanges:\n${CHANGES_NEWLINE}")" 230 | fi 231 | 232 | git tag -n99 --list "${NEW_TAG}" 233 | 234 | - name: Show annotated Git tag 235 | shell: bash 236 | run: | 237 | git show v${{ github.event.inputs.new_version }} 238 | 239 | - name: Push new tag back to GitHub 240 | shell: bash 241 | run: | 242 | if [ ${{ github.event.inputs.dry_run }} == 'true' ]; then 243 | echo "# DRY RUN" 244 | else 245 | git push origin main --tags 246 | fi 247 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | Copyright 2018 Lukas Heinrich 179 | 180 | Licensed under the Apache License, Version 2.0 (the "License"); 181 | you may not use this file except in compliance with the License. 182 | You may obtain a copy of the License at 183 | 184 | http://www.apache.org/licenses/LICENSE-2.0 185 | 186 | Unless required by applicable law or agreed to in writing, software 187 | distributed under the License is distributed on an "AS IS" BASIS, 188 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 189 | See the License for the specific language governing permissions and 190 | limitations under the License. 
191 | -------------------------------------------------------------------------------- /tests/test_errors.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | import tempfile 4 | from tempfile import NamedTemporaryFile 5 | 6 | import pytest 7 | 8 | import pylhe 9 | 10 | 11 | def test_missing_init_block_error(): 12 | """Test that ValueError is raised when no block is found in LHE file.""" 13 | # Create an invalid LHE file content without an block 14 | invalid_lhe_content = """ 15 | 16 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 17 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 18 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 19 | 20 | """ 21 | 22 | # Test with string buffer 23 | with pytest.raises(ValueError, match=r"No block found in the LHE file"): 24 | pylhe.LHEFile.fromstring(invalid_lhe_content) 25 | 26 | # Test with file-like object 27 | buffer = io.StringIO(invalid_lhe_content) 28 | with pytest.raises(ValueError, match=r"No block found in the LHE file"): 29 | pylhe.LHEFile.frombuffer(buffer) 30 | 31 | 32 | def test_missing_init_block_error_with_file(): 33 | """Test that ValueError is raised when reading a file without block.""" 34 | # Create an invalid LHE file content without an block 35 | invalid_lhe_content = """ 36 | 37 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 38 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 39 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 40 | 41 | """ 42 | 43 | # Create a temporary file with invalid content 44 | with NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 45 | tmp_file.write(invalid_lhe_content) 46 | tmp_file_path = tmp_file.name 47 | 48 | try: 49 | # Test reading the file through read_lhe_init function 50 | with pytest.raises(ValueError, match=r"No block found in the LHE file"): 51 | pylhe.read_lhe_init(tmp_file_path) 52 | finally: 53 | os.unlink(tmp_file_path) 54 | 55 | 56 | def test_missing_init_block_error_with_only_events(): 57 | """Test that ValueError is raised when file contains only events without init.""" 58 | # Create LHE content with events but no init block 59 | events_only_content = """ 60 | 61 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 62 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 63 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 64 | 65 | 66 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 67 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 68 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 69 | 70 | """ 71 | 72 | with pytest.raises(ValueError, match=r"No block found in the LHE file"): 73 | pylhe.LHEFile.fromstring(events_only_content) 74 | 75 | 76 | def test_dataclass_delete_field_error(): 77 | """Test that TypeError is raised when attempting to delete a dataclass field.""" 78 | # Create a simple LHEEventInfo instance to test deletion 79 | eventinfo = 
pylhe.LHEEventInfo( 80 | nparticles=2, pid=1, weight=1.0, scale=100.0, aqed=0.007, aqcd=0.1 81 | ) 82 | 83 | # Test that attempting to delete a field raises TypeError 84 | with pytest.raises( 85 | TypeError, match=r"Cannot delete field 'nparticles' from dataclass instance" 86 | ): 87 | del eventinfo["nparticles"] 88 | 89 | # Test with a different field 90 | with pytest.raises( 91 | TypeError, match=r"Cannot delete field 'weight' from dataclass instance" 92 | ): 93 | del eventinfo["weight"] 94 | 95 | # Test with a non-existent field (should also raise TypeError) 96 | with pytest.raises( 97 | TypeError, match=r"Cannot delete field 'nonexistent' from dataclass instance" 98 | ): 99 | del eventinfo["nonexistent"] 100 | 101 | 102 | def test_empty_init_block_error(): 103 | """Test that ValueError is raised when block has no text content.""" 104 | # Create LHE content with an empty block 105 | empty_init_content = """ 106 | 107 | 108 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 109 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 110 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 111 | 112 | """ 113 | 114 | # Test with string buffer 115 | with pytest.raises(ValueError, match=r" block has no text"): 116 | pylhe.LHEFile.fromstring(empty_init_content) 117 | 118 | # Test with file-like object 119 | buffer = io.StringIO(empty_init_content) 120 | with pytest.raises(ValueError, match=r" block has no text"): 121 | pylhe.LHEFile.frombuffer(buffer) 122 | 123 | 124 | def test_empty_event_block_error(): 125 | """Test that ValueError is raised when block has no text content.""" 126 | # Create LHE content with valid init but empty event block 127 | empty_event_content = """ 128 | 129 | 2212 2212 6.500000e+03 6.500000e+03 0 0 0 0 3 1 130 | 1.000000e+00 0.000000e+00 1.000000e+00 1 131 | 132 | 133 | """ 134 | 135 | # Test reading events with empty event block 136 | with tempfile.NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 137 | tmp_file.write(empty_event_content) 138 | tmp_file_path = tmp_file.name 139 | 140 | try: 141 | with pytest.raises(ValueError, match=r" block has no text"): 142 | list(pylhe.read_lhe(tmp_file_path)) 143 | finally: 144 | os.unlink(tmp_file_path) 145 | 146 | 147 | def test_empty_weights_block_error(): 148 | """Test that ValueError is raised when block has no text content.""" 149 | # Create LHE content with valid init and event but empty weights block 150 | empty_weights_content = """ 151 | 152 | 2212 2212 6.500000e+03 6.500000e+03 0 0 0 0 3 1 153 | 1.000000e+00 0.000000e+00 1.000000e+00 1 154 | 155 | 156 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 157 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 158 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 159 | 160 | 161 | """ 162 | 163 | # Test reading events with empty weights block 164 | with NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 165 | tmp_file.write(empty_weights_content) 166 | tmp_file_path = tmp_file.name 167 | 168 | try: 169 | with pytest.raises(ValueError, match=r" block has no text"): 170 | list(pylhe.read_lhe_with_attributes(tmp_file_path)) 171 | finally: 172 | os.unlink(tmp_file_path) 173 | 174 | 175 | def test_empty_wgt_block_error(): 176 | 
"""Test that ValueError is raised when block has no text content.""" 177 | # Create LHE content with valid init and event but empty wgt block 178 | empty_wgt_content = """ 179 | 180 | 2212 2212 6.500000e+03 6.500000e+03 0 0 0 0 3 1 181 | 1.000000e+00 0.000000e+00 1.000000e+00 1 182 | 183 | 184 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 185 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 186 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 187 | 188 | 189 | 190 | 191 | """ 192 | 193 | # Test reading events with empty wgt block 194 | with NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 195 | tmp_file.write(empty_wgt_content) 196 | tmp_file_path = tmp_file.name 197 | 198 | try: 199 | with pytest.raises(ValueError, match=r" block has no text"): 200 | list(pylhe.read_lhe_with_attributes(tmp_file_path)) 201 | finally: 202 | os.unlink(tmp_file_path) 203 | 204 | 205 | def test_whitespace_only_wgt_block_error(): 206 | """Test that ValueError is raised when block has only whitespace content.""" 207 | # Create LHE content with valid init and event but whitespace-only wgt block 208 | whitespace_wgt_content = """ 209 | 210 | 2212 2212 6.500000e+03 6.500000e+03 0 0 0 0 3 1 211 | 1.000000e+00 0.000000e+00 1.000000e+00 1 212 | 213 | 214 | 2 0 +1.0000000e+00 9.11884000e+01 -1.00000000e+00 -1.00000000e+00 215 | 21 -1 0 0 501 502 +0.00000000e+00 +0.00000000e+00 +4.56308892e+02 +4.56308892e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 216 | 21 -1 0 0 502 501 -0.00000000e+00 -0.00000000e+00 -2.24036073e+02 +2.24036073e+02 +0.00000000e+00 0.0000e+00 9.0000e+00 217 | 218 | 219 | 220 | 221 | """ 222 | 223 | # Test reading events with whitespace-only wgt block 224 | with NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 225 | tmp_file.write(whitespace_wgt_content) 226 | tmp_file_path = tmp_file.name 227 | 228 | try: 229 | with pytest.raises( 230 | ValueError, 231 | match=r"could not convert string to float|invalid literal for float", 232 | ): 233 | list(pylhe.read_lhe_with_attributes(tmp_file_path)) 234 | finally: 235 | os.unlink(tmp_file_path) 236 | 237 | 238 | def test_count_events_parse_error(): 239 | """Test that ParseError warning is issued and -1 returned when counting events in malformed LHE file.""" 240 | # Create a temporary file with invalid XML content 241 | with tempfile.NamedTemporaryFile(mode="w", suffix=".lhe", delete=True) as f: 242 | # Write invalid XML that will cause a parse error 243 | f.write('\n') 244 | f.write("\n") 245 | f.write("invalid xml content without proper closing\n") 246 | # Missing and tags 247 | 248 | f.flush() 249 | 250 | # Test that a RuntimeWarning is issued and -1 is returned 251 | with pytest.warns(RuntimeWarning, match=r"Parse Error:"): 252 | assert pylhe.LHEFile.count_events(f.name) == -1 253 | 254 | 255 | def test_fromfile_parse_error(): 256 | """Test that ParseError warning is issued when loading malformed LHE file with fromfile.""" 257 | # Create a temporary file with invalid XML content 258 | with tempfile.NamedTemporaryFile(mode="w", suffix=".lhe", delete=True) as f: 259 | # Write invalid XML that will cause a parse error 260 | f.write('\n') 261 | f.write("\n") 262 | f.write("invalid xml content without proper closing\n") 263 | # Missing and tags 264 | 265 | f.flush() 266 | 267 | # Test that a RuntimeWarning is issued when trying to load the malformed file 268 
| # and potentially a ValueError if the generator stops without yielding 269 | with ( 270 | pytest.warns(RuntimeWarning, match=r"Parse Error:"), 271 | pytest.raises( 272 | ValueError, match=r"No or faulty block found in the LHE file" 273 | ), 274 | ): 275 | pylhe.LHEFile.fromfile(f.name) 276 | -------------------------------------------------------------------------------- /tests/test_warnings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from tempfile import NamedTemporaryFile 3 | 4 | import pytest 5 | 6 | import pylhe 7 | 8 | 9 | def test_lhe_weight_info_getitem_deprecation_warning(): 10 | """Test that DeprecationWarning is raised when using __getitem__ on LHEWeightInfo.""" 11 | weight_info = pylhe.LHEWeightInfo( 12 | attrib={"id": "test_weight"}, name="Test weight", index=0 13 | ) 14 | 15 | with pytest.warns( 16 | DeprecationWarning, 17 | match=r'Access by `object\["attrib"\]` is deprecated and will be removed in a future version\. Use `object\.attrib` instead\.', 18 | ): 19 | _ = weight_info["attrib"] 20 | 21 | 22 | def test_lhe_weight_info_setitem_deprecation_warning(): 23 | """Test that DeprecationWarning is raised when using __setitem__ on LHEWeightInfo.""" 24 | weight_info = pylhe.LHEWeightInfo( 25 | attrib={"id": "test_weight"}, name="Test weight", index=0 26 | ) 27 | 28 | with pytest.warns( 29 | DeprecationWarning, 30 | match=r'Access by `object\["name"\]` is deprecated and will be removed in a future version\. Use `object\.name` instead\.', 31 | ): 32 | weight_info["name"] = "New test weight" 33 | 34 | 35 | def test_lhe_weight_info_iter_deprecation_warning(): 36 | """Test that DeprecationWarning is raised when using __iter__ on LHEWeightInfo.""" 37 | weight_info = pylhe.LHEWeightInfo( 38 | attrib={"id": "test_weight"}, name="Test weight", index=0 39 | ) 40 | 41 | with pytest.warns( 42 | DeprecationWarning, 43 | match=r"Dict-like iteration is deprecated and will be removed in a future version\. Use `asdict\(object\)` instead\.", 44 | ): 45 | _ = list(weight_info) 46 | 47 | 48 | def test_lhe_weight_info_len_deprecation_warning(): 49 | """Test that DeprecationWarning is raised when using __len__ on LHEWeightInfo.""" 50 | weight_info = pylhe.LHEWeightInfo( 51 | attrib={"id": "test_weight"}, name="Test weight", index=0 52 | ) 53 | 54 | with pytest.warns( 55 | DeprecationWarning, 56 | match=r"Dict-like length is deprecated and will be removed in a future version\. Use `asdict\(object\)` instead\.", 57 | ): 58 | _ = len(weight_info) 59 | 60 | 61 | def test_lhe_weight_group_getitem_deprecation_warning(): 62 | """Test that DeprecationWarning is raised when using __getitem__ on LHEWeightGroup.""" 63 | weight_group = pylhe.LHEWeightGroup(attrib={"type": "test_group"}, weights={}) 64 | 65 | with pytest.warns( 66 | DeprecationWarning, 67 | match=r'Access by `object\["attrib"\]` is deprecated and will be removed in a future version\. Use `object\.attrib` instead\.', 68 | ): 69 | _ = weight_group["attrib"] 70 | 71 | 72 | def test_lhe_weight_group_setitem_deprecation_warning(): 73 | """Test that DeprecationWarning is raised when using __setitem__ on LHEWeightGroup.""" 74 | weight_group = pylhe.LHEWeightGroup(attrib={"type": "test_group"}, weights={}) 75 | 76 | weight_info = pylhe.LHEWeightInfo( 77 | attrib={"id": "test_weight"}, name="Test weight", index=0 78 | ) 79 | 80 | with pytest.warns( 81 | DeprecationWarning, 82 | match=r'Access by `object\["weights"\]` is deprecated and will be removed in a future version\. 
Use `object\.weights` instead\.', 83 | ): 84 | weight_group["weights"] = weight_info 85 | 86 | 87 | def test_lhe_init_info_getitem_deprecation_warning(): 88 | """Test that DeprecationWarning is raised when using __getitem__ on LHEInitInfo.""" 89 | init_info = pylhe.LHEInitInfo( 90 | beamA=2212, 91 | beamB=2212, 92 | energyA=6500.0, 93 | energyB=6500.0, 94 | PDFgroupA=10800, 95 | PDFgroupB=10800, 96 | PDFsetA=0, 97 | PDFsetB=0, 98 | weightingStrategy=3, 99 | numProcesses=1, 100 | ) 101 | 102 | with pytest.warns( 103 | DeprecationWarning, 104 | match=r'Access by `object\["beamA"\]` is deprecated and will be removed in a future version\. Use `object\.beamA` instead\.', 105 | ): 106 | _ = init_info["beamA"] 107 | 108 | 109 | def test_lhe_init_info_setitem_deprecation_warning(): 110 | """Test that DeprecationWarning is raised when using __setitem__ on LHEInitInfo.""" 111 | init_info = pylhe.LHEInitInfo( 112 | beamA=2212, 113 | beamB=2212, 114 | energyA=6500.0, 115 | energyB=6500.0, 116 | PDFgroupA=10800, 117 | PDFgroupB=10800, 118 | PDFsetA=0, 119 | PDFsetB=0, 120 | weightingStrategy=3, 121 | numProcesses=1, 122 | ) 123 | 124 | with pytest.warns( 125 | DeprecationWarning, 126 | match=r'Access by `object\["beamA"\]` is deprecated and will be removed in a future version\. Use `object\.beamA` instead\.', 127 | ): 128 | init_info["beamA"] = 11 129 | 130 | 131 | def test_lhe_proc_info_getitem_deprecation_warning(): 132 | """Test that DeprecationWarning is raised when using __getitem__ on LHEProcInfo.""" 133 | proc_info = pylhe.LHEProcInfo(xSection=1.0, error=0.1, unitWeight=1.0, procId=1) 134 | 135 | with pytest.warns( 136 | DeprecationWarning, 137 | match=r'Access by `object\["xSection"\]` is deprecated and will be removed in a future version\. Use `object\.xSection` instead\.', 138 | ): 139 | _ = proc_info["xSection"] 140 | 141 | 142 | def test_lhe_proc_info_setitem_deprecation_warning(): 143 | """Test that DeprecationWarning is raised when using __setitem__ on LHEProcInfo.""" 144 | proc_info = pylhe.LHEProcInfo(xSection=1.0, error=0.1, unitWeight=1.0, procId=1) 145 | 146 | with pytest.warns( 147 | DeprecationWarning, 148 | match=r'Access by `object\["xSection"\]` is deprecated and will be removed in a future version\. Use `object\.xSection` instead\.', 149 | ): 150 | proc_info["xSection"] = 2.0 151 | 152 | 153 | def test_lhe_event_fieldnames_deprecation_warning(): 154 | """Test that DeprecationWarning is raised when using fieldnames property on LHEEvent.""" 155 | event_info = pylhe.LHEEventInfo( 156 | nparticles=1, pid=0, weight=1.0, scale=91.188, aqed=-1.0, aqcd=-1.0 157 | ) 158 | particle = pylhe.LHEParticle( 159 | id=21, 160 | status=-1, 161 | mother1=0, 162 | mother2=0, 163 | color1=501, 164 | color2=502, 165 | px=0.0, 166 | py=0.0, 167 | pz=456.3, 168 | e=456.3, 169 | m=0.0, 170 | lifetime=0.0, 171 | spin=9.0, 172 | ) 173 | event = pylhe.LHEEvent(eventinfo=event_info, particles=[particle]) 174 | 175 | with pytest.warns( 176 | DeprecationWarning, 177 | match=r"The fieldnames property is deprecated and will be removed in a future version\. 
Use `asdict\(object\)` instead\.", 178 | ): 179 | _ = event.fieldnames 180 | 181 | 182 | def test_lhe_event_iter_deprecation_warning(): 183 | """Test that DeprecationWarning is raised when using __iter__ on LHEEvent.""" 184 | event_info = pylhe.LHEEventInfo( 185 | nparticles=1, pid=0, weight=1.0, scale=91.188, aqed=-1.0, aqcd=-1.0 186 | ) 187 | particle = pylhe.LHEParticle( 188 | id=21, 189 | status=-1, 190 | mother1=0, 191 | mother2=0, 192 | color1=501, 193 | color2=502, 194 | px=0.0, 195 | py=0.0, 196 | pz=456.3, 197 | e=456.3, 198 | m=0.0, 199 | lifetime=0.0, 200 | spin=9.0, 201 | ) 202 | event = pylhe.LHEEvent(eventinfo=event_info, particles=[particle]) 203 | 204 | with pytest.warns( 205 | DeprecationWarning, 206 | match=r"Dict-like iteration is deprecated and will be removed in a future version\. Use `asdict\(object\)` instead\.", 207 | ): 208 | _ = list(event) 209 | 210 | 211 | def test_lhe_event_len_deprecation_warning(): 212 | """Test that DeprecationWarning is raised when using __len__ on LHEEvent.""" 213 | event_info = pylhe.LHEEventInfo( 214 | nparticles=1, pid=0, weight=1.0, scale=91.188, aqed=-1.0, aqcd=-1.0 215 | ) 216 | particle = pylhe.LHEParticle( 217 | id=21, 218 | status=-1, 219 | mother1=0, 220 | mother2=0, 221 | color1=501, 222 | color2=502, 223 | px=0.0, 224 | py=0.0, 225 | pz=456.3, 226 | e=456.3, 227 | m=0.0, 228 | lifetime=0.0, 229 | spin=9.0, 230 | ) 231 | event = pylhe.LHEEvent(eventinfo=event_info, particles=[particle]) 232 | 233 | with pytest.warns( 234 | DeprecationWarning, 235 | match=r"Dict-like length is deprecated and will be removed in a future version\. Use `asdict\(object\)` instead\.", 236 | ): 237 | _ = len(event) 238 | 239 | 240 | def test_lhe_init_getitem_deprecation_warning(): 241 | """Test that DeprecationWarning is raised when using __getitem__ on LHEInit.""" 242 | init_info = pylhe.LHEInitInfo( 243 | beamA=2212, 244 | beamB=2212, 245 | energyA=6500.0, 246 | energyB=6500.0, 247 | PDFgroupA=10800, 248 | PDFgroupB=10800, 249 | PDFsetA=0, 250 | PDFsetB=0, 251 | weightingStrategy=3, 252 | numProcesses=1, 253 | ) 254 | 255 | lhe_init = pylhe.LHEInit( 256 | initInfo=init_info, procInfo=[], weightgroup={}, LHEVersion="1.0" 257 | ) 258 | 259 | with pytest.warns( 260 | DeprecationWarning, match=r"Access by `lheinit\[\"initInfo\"\]` is deprecated" 261 | ): 262 | _ = lhe_init["initInfo"] 263 | 264 | 265 | def test_lhe_init_setitem_deprecation_warning(): 266 | """Test that DeprecationWarning is raised when using __setitem__ on LHEInit.""" 267 | init_info = pylhe.LHEInitInfo( 268 | beamA=2212, 269 | beamB=2212, 270 | energyA=6500.0, 271 | energyB=6500.0, 272 | PDFgroupA=10800, 273 | PDFgroupB=10800, 274 | PDFsetA=0, 275 | PDFsetB=0, 276 | weightingStrategy=3, 277 | numProcesses=1, 278 | ) 279 | 280 | lhe_init = pylhe.LHEInit( 281 | initInfo=init_info, procInfo=[], weightgroup={}, LHEVersion="1.0" 282 | ) 283 | 284 | with pytest.warns( 285 | DeprecationWarning, match=r"Access by `lheinit\[\"LHEVersion\"\]` is deprecated" 286 | ): 287 | lhe_init["LHEVersion"] = "2.0" 288 | 289 | 290 | def test_write_lhe_string_deprecation_warning(): 291 | """Test that DeprecationWarning is raised when using write_lhe_string function.""" 292 | init_info = pylhe.LHEInitInfo( 293 | beamA=2212, 294 | beamB=2212, 295 | energyA=6500.0, 296 | energyB=6500.0, 297 | PDFgroupA=10800, 298 | PDFgroupB=10800, 299 | PDFsetA=0, 300 | PDFsetB=0, 301 | weightingStrategy=3, 302 | numProcesses=1, 303 | ) 304 | 305 | proc_info = pylhe.LHEProcInfo(xSection=1.0, error=0.1, unitWeight=1.0, procId=1) 
306 | 307 | lhe_init = pylhe.LHEInit( 308 | initInfo=init_info, procInfo=[proc_info], weightgroup={}, LHEVersion="1.0" 309 | ) 310 | 311 | event_info = pylhe.LHEEventInfo( 312 | nparticles=2, pid=0, weight=1.0, scale=91.188, aqed=-1.0, aqcd=-1.0 313 | ) 314 | 315 | particles = [ 316 | pylhe.LHEParticle( 317 | id=21, 318 | status=-1, 319 | mother1=0, 320 | mother2=0, 321 | color1=501, 322 | color2=502, 323 | px=0.0, 324 | py=0.0, 325 | pz=456.3, 326 | e=456.3, 327 | m=0.0, 328 | lifetime=0.0, 329 | spin=9.0, 330 | ), 331 | pylhe.LHEParticle( 332 | id=21, 333 | status=-1, 334 | mother1=0, 335 | mother2=0, 336 | color1=502, 337 | color2=501, 338 | px=0.0, 339 | py=0.0, 340 | pz=-224.0, 341 | e=224.0, 342 | m=0.0, 343 | lifetime=0.0, 344 | spin=9.0, 345 | ), 346 | ] 347 | 348 | events = [pylhe.LHEEvent(eventinfo=event_info, particles=particles)] 349 | 350 | with pytest.warns( 351 | DeprecationWarning, 352 | match=r"`write_lhe_string` is deprecated and will be removed in a future version", 353 | ): 354 | pylhe.write_lhe_string(lhe_init, events) 355 | 356 | 357 | def test_write_lhe_file_deprecation_warning(): 358 | """Test that DeprecationWarning is raised when using write_lhe_file function.""" 359 | init_info = pylhe.LHEInitInfo( 360 | beamA=2212, 361 | beamB=2212, 362 | energyA=6500.0, 363 | energyB=6500.0, 364 | PDFgroupA=10800, 365 | PDFgroupB=10800, 366 | PDFsetA=0, 367 | PDFsetB=0, 368 | weightingStrategy=3, 369 | numProcesses=1, 370 | ) 371 | 372 | proc_info = pylhe.LHEProcInfo(xSection=1.0, error=0.1, unitWeight=1.0, procId=1) 373 | 374 | lhe_init = pylhe.LHEInit( 375 | initInfo=init_info, procInfo=[proc_info], weightgroup={}, LHEVersion="1.0" 376 | ) 377 | 378 | event_info = pylhe.LHEEventInfo( 379 | nparticles=2, pid=0, weight=1.0, scale=91.188, aqed=-1.0, aqcd=-1.0 380 | ) 381 | 382 | particles = [ 383 | pylhe.LHEParticle( 384 | id=21, 385 | status=-1, 386 | mother1=0, 387 | mother2=0, 388 | color1=501, 389 | color2=502, 390 | px=0.0, 391 | py=0.0, 392 | pz=456.3, 393 | e=456.3, 394 | m=0.0, 395 | lifetime=0.0, 396 | spin=9.0, 397 | ) 398 | ] 399 | 400 | events = [pylhe.LHEEvent(eventinfo=event_info, particles=particles)] 401 | 402 | with NamedTemporaryFile(mode="w", suffix=".lhe", delete=False) as tmp_file: 403 | tmp_file_path = tmp_file.name 404 | 405 | try: 406 | with pytest.warns( 407 | DeprecationWarning, 408 | match=r"`write_lhe_file` is deprecated and will be removed in a future version", 409 | ): 410 | pylhe.write_lhe_file(lhe_init, events, tmp_file_path) 411 | finally: 412 | os.unlink(tmp_file_path) 413 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pylhe: A Lightweight Python interface to Les Houches Event files 2 | 3 | pylhe logo 4 | 5 | [![GitHub Project](https://img.shields.io/badge/GitHub--blue?style=social&logo=GitHub)](https://github.com/scikit-hep/pylhe) 6 | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.1217031.svg)](https://doi.org/10.5281/zenodo.1217031) 7 | [![Scikit-HEP](https://scikit-hep.org/assets/images/Scikit--HEP-Project-blue.svg)](https://scikit-hep.org/) 8 | 9 | 10 | [![RTD Docs](https://readthedocs.org/projects/pylhe/badge/?version=latest)](https://pylhe.readthedocs.io/en/latest/?badge=latest) 11 | [![Live Docs](https://img.shields.io/badge/docs-main-blue.svg)](https://scikit-hep.org/pylhe/) 12 | 13 | [![PyPI version](https://img.shields.io/pypi/v/pylhe.svg)](https://pypi.org/project/pylhe/) 14 | [![Conda-forge 
version](https://img.shields.io/conda/vn/conda-forge/pylhe.svg)](https://github.com/conda-forge/pylhe-feedstock) 15 | [![Supported Python versions](https://img.shields.io/pypi/pyversions/pylhe.svg)](https://pypi.org/project/pylhe/) 16 | 17 | [![GitHub Actions Status](https://github.com/scikit-hep/pylhe/actions/workflows/ci.yml/badge.svg)](https://github.com/scikit-hep/pylhe/actions/workflows/ci.yml?query=branch%3Amain) 18 | [![Code Coverage](https://codecov.io/gh/scikit-hep/pylhe/branch/main/graph/badge.svg)](https://app.codecov.io/gh/scikit-hep/pylhe/tree/main) 19 | [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/scikit-hep/pylhe/main.svg)](https://results.pre-commit.ci/latest/github/scikit-hep/pylhe/main) 20 | [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 21 | 22 | [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/scikit-hep/pylhe/main?urlpath=lab/tree/docs/source/examples/00_quickstart.ipynb) 23 | 24 | Small and thin Python interface to read [Les Houches Event (LHE)](https://inspirehep.net/literature/725284) files 25 | 26 | 27 | ## Tested Monte Carlo Generators' LHE output 28 | 29 | | Generator | Tested Versions | 30 | |------------------------------------------------|-----------------------| 31 | | [MadGraph5](https://launchpad.net/mg5amcnlo) | 2.0.0, 2.2.1, 3.5.8 | 32 | | [POWHEG-BOX-V2](http://powhegbox.mib.infn.it/) | r4027 | 33 | | [Pythia](https://pythia.org/) | 6.413, 8.3.14 | 34 | | [Sherpa](https://sherpa-team.gitlab.io/) | 3.0.1 | 35 | | [Whizard](https://whizard.hepforge.org/) | 3.1.4 | 36 | 37 | Note: test files are provided via the [scikit-hep-testdata](https://github.com/scikit-hep/scikit-hep-testdata) package. Please open an issue or directly create a PR to include new files for testing. 38 | 39 | ## Install 40 | 41 | To install `pylhe` from PyPI you can just do 42 | 43 | ``` 44 | python -m pip install pylhe 45 | ``` 46 | 47 | The visualization capabilities require [Graphviz](https://graphviz.org/) as an external dependency. 48 | 49 | ## Get started 50 | 51 | The example below provides a simple overview. 52 | Full functionality can be inspected from the functions provided in the `pylhe` module. 53 | For more information about the LHE format, see the [LHE format documentation](https://pylhe.readthedocs.io/en/latest/lhe.html). 54 | 55 | ### Reading 56 | 57 | ```python 58 | import itertools 59 | 60 | # You can use LHE files from scikit-hep-testdata 61 | from skhep_testdata import data_path 62 | 63 | import pylhe 64 | 65 | lhe_file = data_path("pylhe-testlhef3.lhe") 66 | events = pylhe.LHEFile.fromfile(lhe_file).events 67 | print(f"Number of events: {pylhe.LHEFile.count_events(lhe_file)}") 68 | 69 | # Get event 1 70 | event = next(itertools.islice(events, 1, 2)) 71 | 72 | # A DOT language graph of the event can be inspected as follows 73 | print(event.graph.source) 74 | 75 | # The graph is nicely displayed as SVG in Jupyter notebooks 76 | event 77 | 78 | # To save a DOT graph render the graph to a supported image format 79 | # (refer to the Graphviz documentation for more) 80 | event.graph.render(filename="test", format="png", cleanup=True) 81 | event.graph.render(filename="test", format="pdf", cleanup=True) 82 | ``` 83 |
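The events from the reading example can also be converted into an [Awkward Array](https://awkward-array.org/) for columnar analysis with `pylhe.to_awkward`, the same helper used in the multiple-files documentation example. A minimal sketch, not part of the upstream README, assuming `awkward` and `scikit-hep-testdata` are installed:

```python
from skhep_testdata import data_path

import pylhe

lhe_file = data_path("pylhe-testlhef3.lhe")

# Convert the lazy event generator into an Awkward Array of event records
events = pylhe.LHEFile.fromfile(lhe_file, with_attributes=True).events
array = pylhe.to_awkward(events)

print(len(array))                  # number of events in the file
print(array.eventinfo.weight[:5])  # per-event weights as a columnar field
```

Columnar access such as `array.eventinfo.weight` avoids an explicit Python loop over events and is the form used by the parquet-caching examples in the documentation.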
84 | ### Writing 85 | 86 | For a full example see [write](examples/03_write_monte_carlo_example.ipynb) or [filter](examples/02_filter_events_example.ipynb). 87 | The values in the sketch below are intentionally left empty since they depend on the use-case. 88 | The data structure of `pylhe` is: 89 | 90 | ```python 91 | import pylhe 92 | 93 | file = pylhe.LHEFile( 94 | init=pylhe.LHEInit( 95 | initInfo=pylhe.LHEInitInfo( 96 | beamA=..., 97 | beamB=..., 98 | energyA=..., 99 | energyB=..., 100 | PDFgroupA=..., 101 | PDFgroupB=..., 102 | PDFsetA=..., 103 | PDFsetB=..., 104 | weightingStrategy=..., 105 | numProcesses=..., 106 | ), 107 | procInfo=pylhe.LHEProcInfo( 108 | xSection=..., 109 | error=..., 110 | unitWeight=..., 111 | procId=..., 112 | ), 113 | ), 114 | events=[ 115 | pylhe.LHEEvent( 116 | eventinfo=pylhe.LHEEventInfo( 117 | nparticles=..., 118 | pid=..., 119 | weight=..., 120 | scale=..., 121 | aqed=..., 122 | aqcd=..., 123 | ), 124 | particles=[ 125 | pylhe.LHEParticle( 126 | id=..., 127 | status=..., 128 | mother1=..., 129 | mother2=..., 130 | color1=..., 131 | color2=..., 132 | px=..., 133 | py=..., 134 | pz=..., 135 | e=..., 136 | m=..., 137 | lifetime=..., 138 | spin=..., 139 | ), 140 | ], 141 | weights=None, 142 | attributes=None, 143 | optional=None, 144 | ), 145 | ], 146 | ) 147 | 148 | # write to file, compressed if gz/gzip suffix 149 | pylhe.write_lhe_file(file.init, file.events, "myevents.lhe.gz", rwgt=True, weights=False) 150 | ``` 151 | 152 | 153 | ## Citation 154 | 155 | The preferred BibTeX entry for citation of `pylhe` is 156 | 157 | ```bibtex 158 | @software{pylhe, 159 | author = {Lukas Heinrich and Matthew Feickert and Eduardo Rodrigues and Alexander Puck Neuwirth}, 160 | title = "{pylhe: v1.0.2}", 161 | version = {v1.0.2}, 162 | doi = {10.5281/zenodo.1217031}, 163 | url = {https://github.com/scikit-hep/pylhe}, 164 | } 165 | ``` 166 | 167 | `pylhe` has been referenced in: 168 | 169 | * [Flow Annealed Importance Sampling Bootstrap meets Differentiable Particle Physics](https://inspirehep.net/literature/2851739) (2024) 170 | * [Interference effects in resonant di-Higgs production at the LHC in the Higgs singlet extension](https://inspirehep.net/literature/2826958) (2024) 171 | * [Search for nearly degenerate higgsinos via photon fusion with the semileptonic channel at the LHC](https://inspirehep.net/literature/2788228) (2024) 172 | * [Strategy to measure tau via photon fusion in LHC proton collisions](https://inspirehep.net/literature/2767190) (2024) 173 | * [Probing dark photons from a light scalar at Belle II](https://inspirehep.net/literature/2744442) (2024) 174 | * [Constraints on the trilinear and quartic Higgs couplings from triple Higgs production at the LHC and beyond](https://inspirehep.net/literature/2734125) (2023) 175 | * [BSM reach of four-top production at the LHC](https://inspirehep.net/literature/2633019) (2023) 176 | * [Probing compressed higgsinos with forward protons at the LHC](https://inspirehep.net/literature/2140007) (2023) 177 | * [FLArE up dark sectors with EM form factors at the LHC Forward Physics Facility](https://inspirehep.net/literature/2085195) (2022) 178 | * [Probing Neutrino-Portal Dark Matter at the Forward Physics Facility](https://inspirehep.net/literature/1966337) (2021) 179 | * [Looking forward to test the KOTO anomaly with FASER](https://inspirehep.net/literature/1801897) (2020) 180 | * [Probing Light Gauge Bosons in Tau Neutrino Experiments](https://inspirehep.net/literature/1794757) (2020) 181 | * [Benchmarking simplified template cross sections in WH production](https://inspirehep.net/literature/1750323) (2019) 182 | * [MadMiner: Machine learning-based inference for particle
physics](https://inspirehep.net/literature/1746275) (2019) 183 | * [Search Strategy for Sleptons and Dark Matter Using the LHC as a Photon Collider](https://inspirehep.net/literature/1703804) (2018) 184 | 185 | 186 | ## Contributors 187 | 188 | We hereby acknowledge the contributors that made this project possible ([emoji key](https://allcontributors.org/docs/en/emoji-key)): 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 |
* Matthew Feickert: 🚧 🎨 💻 📖
* Lukas: 🚧 🎨 💻 📖
* Eduardo Rodrigues: 🚧 💻 📖
* Alexander Puck Neuwirth: 🚧 🎨 💻 📖
* Johannes Schumann: 💻
* Henry Schreiner: 💻
* ariaradick: 💻
* Junghwan John Goh: 💻
* fuenfundachtzig: 💻
* Shantanu Gontia: 💻
* Tom Eichlersmith: 💻
* Iason Krommydas: 💻 📖
    212 | 213 | 214 | 215 | 216 | 217 | 218 | This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. 219 | -------------------------------------------------------------------------------- /tests/test_lhe_reader.py: -------------------------------------------------------------------------------- 1 | import gzip 2 | import os 3 | import shutil 4 | from pathlib import Path 5 | from tempfile import NamedTemporaryFile 6 | 7 | import pytest 8 | import skhep_testdata 9 | 10 | import pylhe 11 | from pylhe import LHEEvent 12 | 13 | TEST_FILE_LHE_v1 = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 14 | TEST_FILE_LHE_v3 = skhep_testdata.data_path("pylhe-testlhef3.lhe") 15 | TEST_FILE_LHE_INITRWGT_WEIGHTS = skhep_testdata.data_path( 16 | "pylhe-testfile-powheg-box-v2-hvq.lhe" 17 | ) 18 | TEST_FILE_LHE_RWGT_WGT = skhep_testdata.data_path("pylhe-testfile-powheg-box-v2-W.lhe") 19 | TEST_FILES_LHE_POWHEG = [ 20 | skhep_testdata.data_path(f"pylhe-testfile-powheg-box-v2-{proc}.lhe") 21 | for proc in ["Z", "W", "Zj", "trijet", "directphoton", "hvq"] 22 | ] 23 | TEST_FILES_LHE_MADGRAPH = [ 24 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.0.0-wbj.lhe"), 25 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-ckkwl.lhe.gz"), 26 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-fxfx.lhe.gz"), 27 | skhep_testdata.data_path("pylhe-testfile-madgraph-2.2.1-Z-mlm.lhe.gz"), 28 | skhep_testdata.data_path("pylhe-testfile-madgraph5-3.5.8-pp_to_jj.lhe.gz"), 29 | ] 30 | TEST_FILES_LHE_PYTHIA = [ 31 | skhep_testdata.data_path("pylhe-testfile-pythia-6.413-ttbar.lhe"), 32 | skhep_testdata.data_path("pylhe-testfile-pythia-8.3.14-weakbosons.lhe"), 33 | ] 34 | TEST_FILES_LHE_SHERPA = [ 35 | skhep_testdata.data_path("pylhe-testfile-sherpa-3.0.1-eejjj.lhe"), 36 | ] 37 | TEST_FILES_LHE_WHIZARD = [ 38 | skhep_testdata.data_path("pylhe-testfile-whizard-3.1.4-eeWW.lhe"), 39 | ] 40 | TEST_FILES_LHE_GENERATORS = [ 41 | *TEST_FILES_LHE_MADGRAPH, 42 | *TEST_FILES_LHE_POWHEG, 43 | *TEST_FILES_LHE_PYTHIA, 44 | *TEST_FILES_LHE_SHERPA, 45 | *TEST_FILES_LHE_WHIZARD, 46 | ] 47 | TEST_FILES_LHE_ALL = [ 48 | TEST_FILE_LHE_v1, 49 | TEST_FILE_LHE_v3, 50 | *TEST_FILES_LHE_GENERATORS, 51 | ] 52 | 53 | 54 | @pytest.fixture(scope="session") 55 | def testdata_gzip_file(): 56 | test_data = skhep_testdata.data_path("pylhe-testfile-pr29.lhe") 57 | tmp_path = Path(NamedTemporaryFile().name) 58 | 59 | # create what is basically pylhe-testfile-pr29.lhe.gz 60 | with open(test_data, "rb") as readfile, gzip.open(tmp_path, "wb") as writefile: 61 | shutil.copyfileobj(readfile, writefile) 62 | yield tmp_path 63 | 64 | # teardown 65 | os.remove(tmp_path) 66 | 67 | 68 | def test_gzip_open(testdata_gzip_file): 69 | assert pylhe._extract_fileobj(TEST_FILE_LHE_v1) 70 | assert pylhe._extract_fileobj(testdata_gzip_file) 71 | 72 | # Needs path-like object, not a fileobj 73 | with pytest.raises(TypeError), open(TEST_FILE_LHE_v1, "rb") as fileobj: 74 | pylhe._extract_fileobj(fileobj) 75 | 76 | with open(TEST_FILE_LHE_v1, "rb") as fileobj: 77 | assert isinstance(pylhe._extract_fileobj(TEST_FILE_LHE_v1), type(fileobj)) 78 | assert isinstance(pylhe._extract_fileobj(Path(TEST_FILE_LHE_v1)), type(fileobj)) 79 | assert isinstance(pylhe._extract_fileobj(testdata_gzip_file), gzip.GzipFile) 80 | assert isinstance(pylhe._extract_fileobj(Path(testdata_gzip_file)), gzip.GzipFile) 81 | 82 | 83 | def test_read_num_events(testdata_gzip_file): 84 | assert pylhe.read_num_events(TEST_FILE_LHE_v1) == 791 85 
| assert pylhe.read_num_events(TEST_FILE_LHE_v1) == pylhe.read_num_events( 86 | testdata_gzip_file 87 | ) 88 | 89 | 90 | @pytest.mark.parametrize("file", TEST_FILES_LHE_ALL) 91 | def test_count_events(file): 92 | assert pylhe.LHEFile.count_events(file) == sum( 93 | 1 for _ in pylhe.LHEFile.fromfile(file).events 94 | ) 95 | 96 | 97 | def test_read_lhe_init_gzipped_file(testdata_gzip_file): 98 | assert pylhe.read_lhe_init(TEST_FILE_LHE_v1) == pylhe.read_lhe_init( 99 | testdata_gzip_file 100 | ) 101 | 102 | 103 | def test_read_lhe_init_v1(): 104 | """ 105 | Test method read_lhe_init() on a LesHouchesEvents version="1.0" file. 106 | """ 107 | init_data = pylhe.read_lhe_init(TEST_FILE_LHE_v1) 108 | 109 | assert init_data["LHEVersion"] == "1.0" 110 | 111 | init_info = init_data["initInfo"] 112 | assert init_info["beamA"] == pytest.approx(1.0) 113 | assert init_info["beamB"] == pytest.approx(2.0) 114 | assert init_info["energyA"] == pytest.approx(1.234567) 115 | assert init_info["energyB"] == pytest.approx(2.345678) 116 | assert init_info["PDFgroupA"] == pytest.approx(3.0) 117 | assert init_info["PDFgroupB"] == pytest.approx(4.0) 118 | assert init_info["PDFsetA"] == pytest.approx(5.0) 119 | assert init_info["PDFsetB"] == pytest.approx(6.0) 120 | assert init_info["weightingStrategy"] == pytest.approx(7.0) 121 | assert init_info["numProcesses"] == pytest.approx(8.0) 122 | 123 | assert init_data["procInfo"] == [] 124 | 125 | 126 | def test_read_lhe_init_v3(): 127 | """ 128 | Test method read_lhe_init() on a LesHouchesEvents version="3.0" file. 129 | """ 130 | init_data = pylhe.read_lhe_init(TEST_FILE_LHE_v3) 131 | 132 | assert len(init_data["weightgroup"]) == 1 133 | assert len(init_data["weightgroup"]["scale_variation"]["weights"]) == 9 134 | 135 | 136 | def test_read_lhe_v1(): 137 | """ 138 | Test method read_lhe() on a LesHouchesEvents version="1.0" file. 139 | """ 140 | events = pylhe.read_lhe(TEST_FILE_LHE_v1) 141 | 142 | assert events 143 | for e in events: 144 | assert isinstance(e, LHEEvent) 145 | 146 | 147 | def test_read_lhe_v3(): 148 | """ 149 | Test method read_lhe() on a LesHouchesEvents version="3.0" file. 150 | """ 151 | events = pylhe.read_lhe(TEST_FILE_LHE_v3) 152 | 153 | assert events 154 | for e in events: 155 | assert isinstance(e, LHEEvent) 156 | 157 | 158 | def test_read_lhe_with_attributes_v1(): 159 | """ 160 | Test method read_lhe_with_attributes() on a LesHouchesEvents version="1.0" file. 161 | """ 162 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v1) 163 | 164 | assert events 165 | for e in events: 166 | assert isinstance(e, LHEEvent) 167 | 168 | 169 | def test_read_lhe_with_attributes_v3(): 170 | """ 171 | Test method read_lhe_with_attributes() on a LesHouchesEvents version="3.0" file. 172 | """ 173 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3) 174 | 175 | assert events 176 | for e in events: 177 | assert isinstance(e, LHEEvent) 178 | 179 | 180 | @pytest.mark.parametrize("file", TEST_FILES_LHE_GENERATORS) 181 | def test_read_lhe_generator(file): 182 | """ 183 | Test method read_lhe() on several types of LesHouchesEvents generator files. 184 | """ 185 | events = pylhe.read_lhe(file) 186 | 187 | assert events 188 | for e in events: 189 | assert isinstance(e, LHEEvent) 190 | 191 | 192 | @pytest.mark.parametrize("file", TEST_FILES_LHE_GENERATORS) 193 | def test_read_lhe_with_attributes_generator(file): 194 | """ 195 | Test method read_lhe_with_attributes() on several types of LesHouchesEvents generator files. 
196 | """ 197 | events = pylhe.read_lhe_with_attributes(file) 198 | 199 | assert events 200 | for e in events: 201 | assert isinstance(e, LHEEvent) 202 | 203 | 204 | @pytest.mark.parametrize( 205 | "file", [TEST_FILE_LHE_INITRWGT_WEIGHTS, TEST_FILE_LHE_RWGT_WGT] 206 | ) 207 | def test_read_lhe_file(file): 208 | """ 209 | Test that the read_lhe_file function works as the individual reads. 210 | """ 211 | lhefile = pylhe.read_lhe_file(file, with_attributes=False) 212 | lheinit = pylhe.read_lhe_init(file) 213 | lheevents = pylhe.read_lhe(file) 214 | 215 | assert lheinit == lhefile.init 216 | assert next(lheevents).tolhe() == next(lhefile.events).tolhe() 217 | 218 | lhefile = pylhe.read_lhe_file(file, with_attributes=True) 219 | lheevents = pylhe.read_lhe_with_attributes(file) 220 | 221 | assert lheinit == lhefile.init 222 | assert next(lheevents).tolhe() == next(lhefile.events).tolhe() 223 | 224 | 225 | @pytest.mark.parametrize( 226 | "file", [TEST_FILE_LHE_INITRWGT_WEIGHTS, TEST_FILE_LHE_RWGT_WGT] 227 | ) 228 | def test_read_buffer(file): 229 | with open(file, "rb") as f: 230 | lhef1 = pylhe.LHEFile.frombuffer(f, with_attributes=True) 231 | next(lhef1.events) 232 | next(lhef1.events) 233 | 234 | 235 | def test_read_lhe_initrwgt_weights(): 236 | """ 237 | Test the weights from initrwgt with a weights list. 238 | """ 239 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_INITRWGT_WEIGHTS) 240 | 241 | assert events 242 | for e in events: 243 | assert isinstance(e, LHEEvent) 244 | assert len(e.weights) > 0 245 | 246 | 247 | def test_read_lhe_rwgt_wgt(): 248 | """ 249 | Test the weights from rwgt with a wgt list. 250 | """ 251 | events = pylhe.read_lhe_with_attributes(TEST_FILE_LHE_RWGT_WGT) 252 | 253 | assert events 254 | for e in events: 255 | assert isinstance(e, LHEEvent) 256 | assert len(e.weights) > 0 257 | 258 | 259 | def test_issue_102(): 260 | """ 261 | Test a file containing lines starting with "#aMCatNLO". 262 | """ 263 | assert pylhe.read_num_events(TEST_FILE_LHE_v3) == 59 264 | assert len(list(pylhe.read_lhe(TEST_FILE_LHE_v3))) == len( 265 | list(pylhe.read_lhe_with_attributes(TEST_FILE_LHE_v3)) 266 | ) 267 | 268 | 269 | def test_read_lhe_init_raises(): 270 | """ 271 | Test that the block raises AttributeErrors on faulty inputs. 272 | """ 273 | 274 | with pytest.raises( 275 | AttributeError, match=r"weightgroup must have attribute 'type' or 'name'." 
276 | ): 277 | pylhe.LHEFile.fromstring( 278 | """ 279 | 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1 280 | 5.0109086e+01 8.9185414e-02 5.0109093e+01 66 281 | 282 | 283 | muR=0.10000E+01 muF=0.10000E+01 284 | muR=0.10000E+01 muF=0.20000E+01 285 | muR=0.10000E+01 muF=0.50000E+00 286 | muR=0.20000E+01 muF=0.10000E+01 287 | muR=0.20000E+01 muF=0.20000E+01 288 | muR=0.20000E+01 muF=0.50000E+00 289 | muR=0.50000E+00 muF=0.10000E+01 290 | muR=0.50000E+00 muF=0.20000E+01 291 | muR=0.50000E+00 muF=0.50000E+00 292 | 293 | 294 | """ 295 | ) 296 | 297 | with pytest.raises(AttributeError, match=r"weight must have attribute 'id'"): 298 | pylhe.LHEFile.fromstring( 299 | """ 300 | 2212 2212 4.0000000e+03 4.0000000e+03 -1 -1 21100 21100 -4 1 301 | 5.0109086e+01 8.9185414e-02 5.0109093e+01 66 302 | 303 | 304 | muR=0.10000E+01 muF=0.10000E+01 305 | muR=0.10000E+01 muF=0.10000E+01 306 | muR=0.10000E+01 muF=0.20000E+01 307 | muR=0.10000E+01 muF=0.50000E+00 308 | muR=0.20000E+01 muF=0.10000E+01 309 | muR=0.20000E+01 muF=0.20000E+01 310 | muR=0.20000E+01 muF=0.50000E+00 311 | muR=0.50000E+00 muF=0.10000E+01 312 | muR=0.50000E+00 muF=0.20000E+01 313 | muR=0.50000E+00 muF=0.50000E+00 314 | 315 | 316 | """ 317 | ) 318 | 319 | 320 | def test_event_at_position_5(): 321 | """ 322 | Test that the event at position 5 has the expected values. 323 | The element at position 5 in the LHE file is: 324 | 325 | 326 | 5 0 1.554392E-03 0.000000E+00 0.000000E+00 0.000000E+00 327 | 111 0 0 0 0 0 -9.7035523745E-01 -9.8435906372E-01 5.1424008917E+00 5.3267140888E+00 1.3800000000E-01 0. 9. 328 | 211 0 0 0 0 0 -2.1024089632E-01 -3.2883303721E-02 3.1406432734E+00 3.1508676134E+00 1.3800000000E-01 0. 9. 329 | -211 0 0 0 0 0 3.9695103971E-02 -2.2872518121E-01 1.0496526207E-01 2.8974577830E-01 1.3800000000E-01 0. 9. 330 | 2212 0 0 0 0 0 -8.6160811751E-01 -7.3819273849E-01 7.4246467306E+00 7.5762050386E+00 9.9307937557E-01 0. 9. 331 | 211 0 0 0 0 0 -4.7889071830E-01 -2.8340027352E-01 1.5195379346E+00 1.6240971553E+00 1.3800000000E-01 0. 9. 
332 | # 5 34 1.5543917618E-03 3.6288856778E+01 0.0000000000E+00 0.0000000000E+00 3.6288856778E+01 1.9388062018E+01 2.3242767182E+00 2.1256769904E+00 1.9130504008E+01 9.0374892691E-01 -1.4114 333 | 334 | """ 335 | events = pylhe.read_lhe(TEST_FILE_LHE_v1) 336 | 337 | # Get the event at position 5 (0-indexed) 338 | target_event = None 339 | for i, event in enumerate(events): 340 | if i == 5: 341 | target_event = event 342 | break 343 | 344 | assert target_event is not None, "Event at position 5 should exist" 345 | 346 | # Test event info values from the LHE format comment 347 | assert target_event.eventinfo.nparticles == pytest.approx(5.0) 348 | assert target_event.eventinfo.pid == pytest.approx(0.0) 349 | assert target_event.eventinfo.weight == pytest.approx(1.554392e-03) 350 | assert target_event.eventinfo.scale == pytest.approx(0.0) 351 | assert target_event.eventinfo.aqed == pytest.approx(0.0) 352 | assert target_event.eventinfo.aqcd == pytest.approx(0.0) 353 | 354 | # Test that we have the expected number of particles 355 | assert len(target_event.particles) == 5 356 | 357 | # Test particle properties based on the LHE event data 358 | # First particle: 111 (pi0) 359 | first_particle = target_event.particles[0] 360 | assert first_particle.id == pytest.approx(111.0) # pi0 361 | assert first_particle.status == pytest.approx(0.0) 362 | assert first_particle.mother1 == pytest.approx(0.0) 363 | assert first_particle.mother2 == pytest.approx(0.0) 364 | assert first_particle.px == pytest.approx(-9.7035523745e-01) 365 | assert first_particle.py == pytest.approx(-9.8435906372e-01) 366 | assert first_particle.pz == pytest.approx(5.1424008917e00) 367 | assert first_particle.e == pytest.approx(5.3267140888e00) 368 | assert first_particle.m == pytest.approx(1.3800000000e-01) 369 | 370 | # Second particle: 211 (pi+) 371 | second_particle = target_event.particles[1] 372 | assert second_particle.id == pytest.approx(211.0) # pi+ 373 | assert second_particle.status == pytest.approx(0.0) 374 | assert second_particle.px == pytest.approx(-2.1024089632e-01) 375 | assert second_particle.py == pytest.approx(-3.2883303721e-02) 376 | assert second_particle.pz == pytest.approx(3.1406432734e00) 377 | assert second_particle.e == pytest.approx(3.1508676134e00) 378 | assert second_particle.m == pytest.approx(1.3800000000e-01) 379 | 380 | # Third particle: -211 (pi-) 381 | third_particle = target_event.particles[2] 382 | assert third_particle.id == pytest.approx(-211.0) # pi- 383 | assert third_particle.status == pytest.approx(0.0) 384 | assert third_particle.px == pytest.approx(3.9695103971e-02) 385 | assert third_particle.py == pytest.approx(-2.2872518121e-01) 386 | assert third_particle.pz == pytest.approx(1.0496526207e-01) 387 | assert third_particle.e == pytest.approx(2.8974577830e-01) 388 | assert third_particle.m == pytest.approx(1.3800000000e-01) 389 | 390 | # Fourth particle: 2212 (proton) 391 | fourth_particle = target_event.particles[3] 392 | assert fourth_particle.id == pytest.approx(2212.0) # proton 393 | assert fourth_particle.status == pytest.approx(0.0) 394 | assert fourth_particle.px == pytest.approx(-8.6160811751e-01) 395 | assert fourth_particle.py == pytest.approx(-7.3819273849e-01) 396 | assert fourth_particle.pz == pytest.approx(7.4246467306e00) 397 | assert fourth_particle.e == pytest.approx(7.5762050386e00) 398 | assert fourth_particle.m == pytest.approx(9.9307937557e-01) 399 | 400 | # Fifth particle: 211 (pi+) 401 | fifth_particle = target_event.particles[4] 402 | assert fifth_particle.id == 
pytest.approx(211.0) # pi+ 403 | assert fifth_particle.status == pytest.approx(0.0) 404 | assert fifth_particle.px == pytest.approx(-4.7889071830e-01) 405 | assert fifth_particle.py == pytest.approx(-2.8340027352e-01) 406 | assert fifth_particle.pz == pytest.approx(1.5195379346e00) 407 | assert fifth_particle.e == pytest.approx(1.6240971553e00) 408 | assert fifth_particle.m == pytest.approx(1.3800000000e-01) 409 | 410 | # Test that all particles have proper parent-child relationships 411 | for particle in target_event.particles: 412 | assert hasattr(particle, "event") 413 | assert particle.event is target_event 414 | -------------------------------------------------------------------------------- /docs/source/examples/92_multiple_files.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "5eea1686-99e8-48b9-9e4a-700f64775697", 6 | "metadata": {}, 7 | "source": [ 8 | "# Dealing with Multiple LHE Files\n", 9 | "\n", 10 | "Oftentimes, you may wish to juggle many LHE files that have been generated using the same (or extremely similar) methods and you wish to combine all of these LHE files into one \"sample\" which you can analyze with a single set of analysis code. This can be done rather easily and quickly by utilizing an intermediate parquet file which is supported by [awkward](https://awkward-array.org/doc/main/user-guide/how-to-convert-arrow.html)." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "id": "a9acf232-2a50-4207-8b83-fbd23abfb3b2", 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/plain": [ 22 | "\n", 23 | " created_by: parquet-cpp-arrow version 19.0.1\n", 24 | " num_columns: 164\n", 25 | " num_rows: 30000\n", 26 | " num_row_groups: 1\n", 27 | " format_version: 2.6\n", 28 | " serialized_size: 0" 29 | ] 30 | }, 31 | "execution_count": 1, 32 | "metadata": {}, 33 | "output_type": "execute_result" 34 | } 35 | ], 36 | "source": [ 37 | "import awkward as ak\n", 38 | "\n", 39 | "# Use an example LHE file from package scikit-hep-testdata\n", 40 | "from skhep_testdata import data_path\n", 41 | "\n", 42 | "import pylhe\n", 43 | "\n", 44 | "lhe_file = data_path(\"pylhe-drell-yan-ll-lhe.gz\")\n", 45 | "\n", 46 | "# Our input files will simply be multiple copies of the same file for the sake of this example,\n", 47 | "# but you can imagine doing the same process below with actually different LHE files\n", 48 | "list_of_input_files = [lhe_file for _ in range(3)]\n", 49 | "\n", 50 | "# get arrays for each file\n", 51 | "unmerged_arrays = [\n", 52 | " pylhe.to_awkward(pylhe.LHEFile.fromfile(f, with_attributes=True).events)\n", 53 | " for f in list_of_input_files\n", 54 | "]\n", 55 | "# merge arrays into single mega-array\n", 56 | "array = ak.concatenate(unmerged_arrays)\n", 57 | "# store merged array into cache parquet file\n", 58 | "ak.to_parquet(array, \"merged.parquet\")\n", 59 | "# any below analysis code can retrieve array using ak.from_parquent('merged.parquet')" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "id": "b068205f-c06a-4810-9d1f-bda5a02e13df", 65 | "metadata": {}, 66 | "source": [ 67 | "Now all the analysis code can utilize the merged file which only needs to be regenerated if more files want to be included or the source LHE files change." 
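A minimal sketch, not part of the notebook itself, of the retrieval step described above: downstream analysis code reloads the cache with `ak.from_parquet`.

```python
import awkward as ak

# Reload the merged events cached by the cell above
array = ak.from_parquet("merged.parquet")

# The merged array behaves like the concatenated in-memory array
print(len(array), "events")
print(array.eventinfo.weight[:3])
```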
68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "id": "b1b13ca5-7945-470d-b305-fb38891f0c66", 74 | "metadata": {}, 75 | "outputs": [ 76 | { 77 | "data": { 78 | "text/html": [ 79 | "
    [{eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     80 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     81 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     82 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     83 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     84 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     85 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     86 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     87 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     88 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     89 |        " ...,\n",
     90 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     91 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     92 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     93 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     94 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     95 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     96 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     97 |        " {eventinfo: {nparticles: 4, pid: 1, ...}, weights: {...}, particles: ..., ...},\n",
     98 |        " {eventinfo: {nparticles: 5, pid: 1, ...}, weights: {...}, particles: ..., ...}]\n",
     99 |        "---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------\n",
    100 |        "backend: cpu\n",
    101 |        "nbytes: 50.2 MB\n",
    102 |        "type: 30000 * Event[\n",
    103 |        "    eventinfo: EventInfo[\n",
    104 |        "        nparticles: float64,\n",
    105 |        "        pid: float64,\n",
    106 |        "        weight: float64,\n",
    107 |        "        scale: float64,\n",
    108 |        "        aqed: float64,\n",
    109 |        "        aqcd: float64\n",
    110 |        "    ],\n",
    111 |        "    weights: Weights[\n",
     112 |        "        \"1\": float64,\n",
     113 |        "        \"2\": float64,\n",
     114 |        "        \"3\": float64,\n",
     115 |        "        \"4\": float64,\n",
     116 |        "        \"5\": float64,\n",
     117 |        "        \"6\": float64,\n",
     118 |        "        \"7\": float64,\n",
     119 |        "        \"8\": float64,\n",
     120 |        "        \"9\": float64,\n",
     121 |        "        \"10\": float64,\n",
     122 |        "        \"11\": float64,\n",
     123 |        "        \"12\": float64,\n",
     124 |        "        \"13\": float64,\n",
     125 |        "        \"14\": float64,\n",
     126 |        "        \"15\": float64,\n",
     127 |        "        \"16\": float64,\n",
     128 |        "        \"17\": float64,\n",
     129 |        "        \"18\": float64,\n",
     130 |        "        \"19\": float64,\n",
     131 |        "        \"20\": float64,\n",
     132 |        "        \"21\": float64,\n",
     133 |        "        \"22\": float64,\n",
     134 |        "        \"23\": float64,\n",
     135 |        "        \"24\": float64,\n",
     136 |        "        \"25\": float64,\n",
     137 |        "        \"26\": float64,\n",
     138 |        "        \"27\": float64,\n",
     139 |        "        \"28\": float64,\n",
     140 |        "        \"29\": float64,\n",
     141 |        "        \"30\": float64,\n",
     142 |        "        \"31\": float64,\n",
     143 |        "        \"32\": float64,\n",
     144 |        "        \"33\": float64,\n",
     145 |        "        \"34\": float64,\n",
     146 |        "        \"35\": float64,\n",
     147 |        "        \"36\": float64,\n",
     148 |        "        \"37\": float64,\n",
     149 |        "        \"38\": float64,\n",
     150 |        "        \"39\": float64,\n",
     151 |        "        \"40\": float64,\n",
     152 |        "        \"41\": float64,\n",
     153 |        "        \"42\": float64,\n",
     154 |        "        \"43\": float64,\n",
     155 |        "        \"44\": float64,\n",
     156 |        "        \"45\": float64,\n",
     157 |        "        \"46\": float64,\n",
     158 |        "        \"47\": float64,\n",
     159 |        "        \"48\": float64,\n",
     160 |        "        \"49\": float64,\n",
     161 |        "        \"50\": float64,\n",
     162 |        "        \"51\": float64,\n",
     163 |        "        \"52\": float64,\n",
     164 |        "        \"53\": float64,\n",
     165 |        "        \"54\": float64,\n",
     166 |        "        \"55\": float64,\n",
     167 |        "        \"56\": float64,\n",
     168 |        "        \"57\": float64,\n",
     169 |        "        \"58\": float64,\n",
     170 |        "        \"59\": float64,\n",
     171 |        "        \"60\": float64,\n",
     172 |        "        \"61\": float64,\n",
     173 |        "        \"62\": float64,\n",
     174 |        "        \"63\": float64,\n",
     175 |        "        \"64\": float64,\n",
     176 |        "        \"65\": float64,\n",
     177 |        "        \"66\": float64,\n",
     178 |        "        \"67\": float64,\n",
     179 |        "        \"68\": float64,\n",
     180 |        "        \"69\": float64,\n",
     181 |        "        \"70\": float64,\n",
     182 |        "        \"71\": float64,\n",
     183 |        "        \"72\": float64,\n",
     184 |        "        \"73\": float64,\n",
     185 |        "        \"74\": float64,\n",
     186 |        "        \"75\": float64,\n",
     187 |        "        \"76\": float64,\n",
     188 |        "        \"77\": float64,\n",
     189 |        "        \"78\": float64,\n",
     190 |        "        \"79\": float64,\n",
     191 |        "        \"80\": float64,\n",
     192 |        "        \"81\": float64,\n",
     193 |        "        \"82\": float64,\n",
     194 |        "        \"83\": float64,\n",
     195 |        "        \"84\": float64,\n",
     196 |        "        \"85\": float64,\n",
     197 |        "        \"86\": float64,\n",
     198 |        "        \"87\": float64,\n",
     199 |        "        \"88\": float64,\n",
     200 |        "        \"89\": float64,\n",
     201 |        "        \"90\": float64,\n",
     202 |        "        \"91\": float64,\n",
     203 |        "        \"92\": float64,\n",
     204 |        "        \"93\": float64,\n",
     205 |        "        \"94\": float64,\n",
     206 |        "        \"95\": float64,\n",
     207 |        "        \"96\": float64,\n",
     208 |        "        \"97\": float64,\n",
     209 |        "        \"98\": float64,\n",
     210 |        "        \"99\": float64,\n",
     211 |        "        \"100\": float64,\n",
     212 |        "        \"101\": float64,\n",
     213 |        "        \"102\": float64,\n",
     214 |        "        \"103\": float64,\n",
     215 |        "        \"104\": float64,\n",
     216 |        "        \"105\": float64,\n",
     217 |        "        \"106\": float64,\n",
     218 |        "        \"107\": float64,\n",
     219 |        "        \"108\": float64,\n",
     220 |        "        \"109\": float64,\n",
     221 |        "        \"110\": float64,\n",
     222 |        "        \"111\": float64,\n",
     223 |        "        \"112\": float64,\n",
     224 |        "        \"113\": float64,\n",
     225 |        "        \"114\": float64,\n",
     226 |        "        \"115\": float64,\n",
     227 |        "        \"116\": float64,\n",
     228 |        "        \"117\": float64,\n",
     229 |        "        \"118\": float64,\n",
     230 |        "        \"119\": float64,\n",
     231 |        "        \"120\": float64,\n",
     232 |        "        \"121\": float64,\n",
     233 |        "        \"122\": float64,\n",
     234 |        "        \"123\": float64,\n",
     235 |        "        \"124\": float64,\n",
     236 |        "        \"125\": float64,\n",
     237 |        "        \"126\": float64,\n",
     238 |        "        \"127\": float64,\n",
     239 |        "        \"128\": float64,\n",
     240 |        "        \"129\": float64,\n",
     241 |        "        \"130\": float64,\n",
     242 |        "        \"131\": float64,\n",
     243 |        "        \"132\": float64,\n",
     244 |        "        \"133\": float64,\n",
     245 |        "        \"134\": float64,\n",
     246 |        "        \"135\": float64,\n",
     247 |        "        \"136\": float64,\n",
     248 |        "        \"137\": float64,\n",
     249 |        "        \"138\": float64,\n",
     250 |        "        \"139\": float64,\n",
     251 |        "        \"140\": float64,\n",
     252 |        "        \"141\": float64,\n",
     253 |        "        \"142\": float64,\n",
     254 |        "        \"143\": float64,\n",
     255 |        "        \"144\": float64,\n",
     256 |        "        \"145\": float64\n",
    257 |        "    ],\n",
    258 |        "    particles: var * Particle[\n",
    259 |        "        vector: Momentum4D[\n",
    260 |        "            px: float64,\n",
    261 |        "            py: float64,\n",
    262 |        "            pz: float64,\n",
    263 |        "            e: float64\n",
    264 |        "        ],\n",
    265 |        "        id: float64,\n",
    266 |        "        status: float64,\n",
    267 |        "        mother1: float64,\n",
    268 |        "        mother2: float64,\n",
    269 |        "        color1: float64,\n",
    270 |        "        color2: float64,\n",
    271 |        "        m: float64,\n",
    272 |        "        lifetime: float64,\n",
    273 |        "        spin: float64\n",
    274 |        "    ]\n",
    275 |        "]
    " 276 | ], 277 | "text/plain": [ 278 | "" 279 | ] 280 | }, 281 | "execution_count": 2, 282 | "metadata": {}, 283 | "output_type": "execute_result" 284 | } 285 | ], 286 | "source": [ 287 | "ak.from_parquet(\"merged.parquet\")" 288 | ] 289 | }, 290 | { 291 | "cell_type": "code", 292 | "execution_count": null, 293 | "id": "e3daa456-460a-43a8-90df-518cc417fb6b", 294 | "metadata": {}, 295 | "outputs": [], 296 | "source": [] 297 | } 298 | ], 299 | "metadata": { 300 | "kernelspec": { 301 | "display_name": "Python 3 (ipykernel)", 302 | "language": "python", 303 | "name": "python3" 304 | }, 305 | "language_info": { 306 | "codemirror_mode": { 307 | "name": "ipython", 308 | "version": 3 309 | }, 310 | "file_extension": ".py", 311 | "mimetype": "text/x-python", 312 | "name": "python", 313 | "nbconvert_exporter": "python", 314 | "pygments_lexer": "ipython3", 315 | "version": "3.13.5" 316 | } 317 | }, 318 | "nbformat": 4, 319 | "nbformat_minor": 5 320 | } 321 | -------------------------------------------------------------------------------- /docs/source/examples/03_write_monte_carlo_example.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "f40b7065-5763-4abc-91ff-40437cf86517", 6 | "metadata": {}, 7 | "source": [ 8 | "# Simple $e^+e^- \\to \\mu^+ \\mu^- $ Monte Carlo Event Generator example" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "215df39b-69fc-4f74-b882-3c96f5526783", 14 | "metadata": {}, 15 | "source": [ 16 | "Differential cross section:\n", 17 | "$$\n", 18 | "\\frac{d \\sigma}{ d \\Omega} = \\frac{\\alpha^2}{4 s } ( 1+ \\cos^2(\\theta))\n", 19 | "$$" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 1, 25 | "id": "f7124c5f-9f86-4a0a-ac08-6e30cc55e01d", 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "import math\n", 30 | "import random\n", 31 | "\n", 32 | "import hist\n", 33 | "import numpy as np\n", 34 | "\n", 35 | "import pylhe" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 2, 41 | "id": "f2553a00-1d72-4d4b-ba6d-c552ca0a7b59", 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "alpha = 1 / 127.4 # alpha QED\n", 46 | "aqcd = 0.1075 # alpha QCD\n", 47 | "EB = 209\n", 48 | "s = (2 * EB) ** 2\n", 49 | "theta_min = 0\n", 50 | "theta_max = math.pi\n", 51 | "phi_min = 0\n", 52 | "phi_max = 2 * math.pi" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 3, 58 | "id": "7242869e-7fbb-4eee-9d49-8a9d63724e35", 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "# https://equation-database.readthedocs.io/en/latest/_autosummary/equation_database.isbn_9780471887416.html#equation_database.isbn_9780471887416.equation_6_32\n", 63 | "\n", 64 | "\n", 65 | "def dsigma(s, theta, _phi):\n", 66 | " return (math.cos(theta) ** 2 + 1) / 4 * alpha**2 / s" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 4, 72 | "id": "147dfe17-a5d9-4b68-a07f-6f8d8aa390a1", 73 | "metadata": {}, 74 | "outputs": [ 75 | { 76 | "name": "stdout", 77 | "output_type": "stream", 78 | "text": [ 79 | "Estimated integral: 1.4779947196668872e-09\n", 80 | "Real integral: 1.477056777521761e-09\n" 81 | ] 82 | } 83 | ], 84 | "source": [ 85 | "# Monte Carlo integration\n", 86 | "\n", 87 | "\n", 88 | "def monte_carlo_integration(\n", 89 | " func, s, theta_min, theta_max, phi_min, phi_max, num_samples\n", 90 | "):\n", 91 | " theta_samples = [random.uniform(theta_min, theta_max) for _ in 
range(num_samples)]\n", 92 | " phi_samples = [random.uniform(phi_min, phi_max) for _ in range(num_samples)]\n", 93 | " func_values = [\n", 94 | " (phi_max - phi_min)\n", 95 | " * (theta_max - theta_min)\n", 96 | " * func(s, theta, phi)\n", 97 | " * math.sin(theta)\n", 98 | " for theta, phi in zip(theta_samples, phi_samples)\n", 99 | " ]\n", 100 | " maximum = np.max(func_values)\n", 101 | " integral = np.mean(func_values)\n", 102 | " return integral, maximum\n", 103 | "\n", 104 | "\n", 105 | "# Parameters\n", 106 | "\n", 107 | "num_samples = 1_000_000\n", 108 | "\n", 109 | "# Perform integration\n", 110 | "result, maximum = monte_carlo_integration(\n", 111 | " dsigma, s, theta_min, theta_max, phi_min, phi_max, num_samples\n", 112 | ")\n", 113 | "print(f\"Estimated integral: {result}\")\n", 114 | "\n", 115 | "# https://equation-database.readthedocs.io/en/latest/_autosummary/equation_database.isbn_9780471887416.html#equation_database.isbn_9780471887416.equation_6_33\n", 116 | "sigma = 4 * math.pi / 3 * alpha**2 / s\n", 117 | "print(f\"Real integral: {sigma}\")" 118 | ] 119 | }, 120 | { 121 | "cell_type": "markdown", 122 | "id": "692b7fad-a519-49b8-9dbc-d7d8c27eaba9", 123 | "metadata": {}, 124 | "source": [ 125 | "## Weighted Events\n", 126 | "\n", 127 | "The information about the distribution is carried by the weights of the events." 128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 7, 133 | "id": "5c26e5e3-8e1e-470f-96a6-376a6e294bf9", 134 | "metadata": {}, 135 | "outputs": [ 136 | { 137 | "name": "stdout", 138 | "output_type": "stream", 139 | "text": [ 140 | "\n", 141 | " 5 0 1.8882916063e-15 1.7472400000e+05 7.8492935636e-03 1.0750000000e-01\n", 142 | " 11 -1 0 0 0 0 0.00000000e+00 0.00000000e+00 2.09000000e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 143 | " -11 -1 0 0 0 0 0.00000000e+00 0.00000000e+00 -2.09000000e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 144 | " 22 2 1 2 0 0 0.00000000e+00 0.00000000e+00 0.00000000e+00 4.18000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 145 | " 13 1 3 3 0 0 -7.52255283e+01 -1.61873499e+02 -1.08715639e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 146 | " -13 1 3 3 0 0 7.52255283e+01 1.61873499e+02 1.08715639e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 147 | "\n" 148 | ] 149 | } 150 | ], 151 | "source": [ 152 | "num_samples = 1_000_000\n", 153 | "integ = 0.0\n", 154 | "lheevents = []\n", 155 | "for _i in range(num_samples):\n", 156 | " theta = random.uniform(theta_min, theta_max)\n", 157 | " phi = random.uniform(phi_min, phi_max)\n", 158 | " sig = (\n", 159 | " (phi_max - phi_min)\n", 160 | " * (theta_max - theta_min)\n", 161 | " * dsigma(s, theta, phi)\n", 162 | " * math.sin(theta)\n", 163 | " / num_samples\n", 164 | " )\n", 165 | " integ += sig\n", 166 | " # Fill the LHE event\n", 167 | " e = pylhe.LHEEvent(\n", 168 | " eventinfo=pylhe.LHEEventInfo(\n", 169 | " nparticles=5,\n", 170 | " pid=0,\n", 171 | " weight=sig, # The individual weight per event\n", 172 | " scale=s,\n", 173 | " aqed=alpha,\n", 174 | " aqcd=aqcd,\n", 175 | " ),\n", 176 | " particles=[\n", 177 | " pylhe.LHEParticle(\n", 178 | " id=11,\n", 179 | " status=-1,\n", 180 | " mother1=0,\n", 181 | " mother2=0,\n", 182 | " color1=0,\n", 183 | " color2=0,\n", 184 | " px=0.0,\n", 185 | " py=0.0,\n", 186 | " pz=EB,\n", 187 | " e=EB,\n", 188 | " m=0.0,\n", 189 | " lifetime=0,\n", 190 | " spin=9.0,\n", 191 | " ),\n", 192 | " pylhe.LHEParticle(\n", 193 | " id=-11,\n", 194 | " status=-1,\n", 195 | " 
mother1=0,\n", 196 | " mother2=0,\n", 197 | " color1=0,\n", 198 | " color2=0,\n", 199 | " px=0.0,\n", 200 | " py=0.0,\n", 201 | " pz=-EB,\n", 202 | " e=EB,\n", 203 | " m=0.0,\n", 204 | " lifetime=0,\n", 205 | " spin=9.0,\n", 206 | " ),\n", 207 | " pylhe.LHEParticle(\n", 208 | " id=22,\n", 209 | " status=2,\n", 210 | " mother1=1,\n", 211 | " mother2=2,\n", 212 | " color1=0,\n", 213 | " color2=0,\n", 214 | " px=0.0,\n", 215 | " py=0.0,\n", 216 | " pz=EB - EB,\n", 217 | " e=EB + EB,\n", 218 | " m=0.0,\n", 219 | " lifetime=0,\n", 220 | " spin=9.0,\n", 221 | " ),\n", 222 | " pylhe.LHEParticle(\n", 223 | " id=13,\n", 224 | " status=1,\n", 225 | " mother1=3,\n", 226 | " mother2=3,\n", 227 | " color1=0,\n", 228 | " color2=0,\n", 229 | " px=EB * math.sin(theta) * math.cos(phi),\n", 230 | " py=EB * math.sin(theta) * math.sin(phi),\n", 231 | " pz=EB * math.cos(theta),\n", 232 | " e=EB,\n", 233 | " m=0,\n", 234 | " lifetime=0,\n", 235 | " spin=9.0,\n", 236 | " ),\n", 237 | " pylhe.LHEParticle(\n", 238 | " id=-13,\n", 239 | " status=1,\n", 240 | " mother1=3,\n", 241 | " mother2=3,\n", 242 | " color1=0,\n", 243 | " color2=0,\n", 244 | " px=-EB * math.sin(theta) * math.cos(phi),\n", 245 | " py=-EB * math.sin(theta) * math.sin(phi),\n", 246 | " pz=-EB * math.cos(theta),\n", 247 | " e=EB,\n", 248 | " m=0,\n", 249 | " lifetime=0,\n", 250 | " spin=9.0,\n", 251 | " ),\n", 252 | " ],\n", 253 | " )\n", 254 | " lheevents.append(e)\n", 255 | "# Fill the LHE init\n", 256 | "lheinit = pylhe.LHEInit(\n", 257 | " initInfo=pylhe.LHEInitInfo(\n", 258 | " beamA=11,\n", 259 | " beamB=-11,\n", 260 | " energyA=EB,\n", 261 | " energyB=EB,\n", 262 | " PDFgroupA=0,\n", 263 | " PDFgroupB=0,\n", 264 | " PDFsetA=0,\n", 265 | " PDFsetB=0,\n", 266 | " weightingStrategy=3,\n", 267 | " numProcesses=1,\n", 268 | " ),\n", 269 | " procInfo=[\n", 270 | " pylhe.LHEProcInfo(\n", 271 | " xSection=integ,\n", 272 | " error=0,\n", 273 | " unitWeight=1,\n", 274 | " procId=1,\n", 275 | " )\n", 276 | " ],\n", 277 | " weightgroup={},\n", 278 | " LHEVersion=3,\n", 279 | ")\n", 280 | "lhef = pylhe.LHEFile(lheinit, lheevents)\n", 281 | "print(lhef.events[0].tolhe(rwgt=False, weights=False))\n", 282 | "lhef.tofile(\"weighted.lhe.gz\", rwgt=False, weights=False)" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": null, 288 | "id": "2fca49b0-2d93-476a-9b6b-835b72a24bb0", 289 | "metadata": {}, 290 | "outputs": [], 291 | "source": [] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": null, 296 | "id": "c94b9451-e7cc-4f40-9c1e-63dd8e180595", 297 | "metadata": {}, 298 | "outputs": [], 299 | "source": [ 300 | "arr = pylhe.to_awkward(\n", 301 | " pylhe.LHEFile.fromfile(\"weighted.lhe.gz\", with_attributes=True).event\n", 302 | ")\n", 303 | "hist1 = hist.Hist.new.Reg(20, 0, math.pi).Weight()\n", 304 | "hist1.fill(arr.particles.vector[:, -1].theta, weight=arr.eventinfo.weight)" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": null, 310 | "id": "b43498cd-1fe7-49c1-a24c-5b66cf4e257f", 311 | "metadata": {}, 312 | "outputs": [], 313 | "source": [ 314 | "hist1.plot()" 315 | ] 316 | }, 317 | { 318 | "cell_type": "markdown", 319 | "id": "77d19d7a-07c7-4da3-81c6-31e716f3b15d", 320 | "metadata": {}, 321 | "source": [ 322 | "## Unweighted Events\n", 323 | "\n", 324 | "The information about the distribution is carried by the distribution/number of the events." 
325 | ] 326 | }, 327 | { 328 | "cell_type": "code", 329 | "execution_count": 10, 330 | "id": "a43da3e7-5263-4557-8024-cb08ae00f01b", 331 | "metadata": {}, 332 | "outputs": [ 333 | { 334 | "name": "stdout", 335 | "output_type": "stream", 336 | "text": [ 337 | "\n", 338 | " 5 0 1.4779947197e-15 1.7472400000e+05 7.8492935636e-03 1.0750000000e-01\n", 339 | " 11 -1 0 0 0 0 0.00000000e+00 0.00000000e+00 2.09000000e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 340 | " -11 -1 0 0 0 0 0.00000000e+00 0.00000000e+00 -2.09000000e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 341 | " 22 2 1 2 0 0 0.00000000e+00 0.00000000e+00 0.00000000e+00 4.18000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 342 | " 13 1 3 3 0 0 1.31998175e+02 -7.87144118e+01 -1.41638706e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 343 | " -13 1 3 3 0 0 -1.31998175e+02 7.87144118e+01 1.41638706e+02 2.09000000e+02 0.00000000e+00 0.0000e+00 9.0000e+00\n", 344 | "\n" 345 | ] 346 | } 347 | ], 348 | "source": [ 349 | "num_samples = 1_000_000\n", 350 | "integ = 0.0\n", 351 | "lheevents = []\n", 352 | "while len(lheevents) < num_samples:\n", 353 | " theta = random.uniform(theta_min, theta_max)\n", 354 | " phi = random.uniform(phi_min, phi_max)\n", 355 | " sig = (\n", 356 | " (phi_max - phi_min)\n", 357 | " * (theta_max - theta_min)\n", 358 | " * dsigma(s, theta, phi)\n", 359 | " * math.sin(theta)\n", 360 | " )\n", 361 | " if sig / maximum > random.uniform(\n", 362 | " 0, 1\n", 363 | " ): # Pick events randomly according to their contribution\n", 364 | " e = pylhe.LHEEvent(\n", 365 | " eventinfo=pylhe.LHEEventInfo(\n", 366 | " nparticles=5,\n", 367 | " pid=0,\n", 368 | " weight=result / num_samples, # The same weight per event\n", 369 | " scale=s,\n", 370 | " aqed=alpha,\n", 371 | " aqcd=aqcd,\n", 372 | " ),\n", 373 | " particles=[\n", 374 | " pylhe.LHEParticle(\n", 375 | " id=11,\n", 376 | " status=-1,\n", 377 | " mother1=0,\n", 378 | " mother2=0,\n", 379 | " color1=0,\n", 380 | " color2=0,\n", 381 | " px=0.0,\n", 382 | " py=0.0,\n", 383 | " pz=EB,\n", 384 | " e=EB,\n", 385 | " m=0.0,\n", 386 | " lifetime=0,\n", 387 | " spin=9.0,\n", 388 | " ),\n", 389 | " pylhe.LHEParticle(\n", 390 | " id=-11,\n", 391 | " status=-1,\n", 392 | " mother1=0,\n", 393 | " mother2=0,\n", 394 | " color1=0,\n", 395 | " color2=0,\n", 396 | " px=0.0,\n", 397 | " py=0.0,\n", 398 | " pz=-EB,\n", 399 | " e=EB,\n", 400 | " m=0.0,\n", 401 | " lifetime=0,\n", 402 | " spin=9.0,\n", 403 | " ),\n", 404 | " pylhe.LHEParticle(\n", 405 | " id=22,\n", 406 | " status=2,\n", 407 | " mother1=1,\n", 408 | " mother2=2,\n", 409 | " color1=0,\n", 410 | " color2=0,\n", 411 | " px=0.0,\n", 412 | " py=0.0,\n", 413 | " pz=EB - EB,\n", 414 | " e=EB + EB,\n", 415 | " m=0.0,\n", 416 | " lifetime=0,\n", 417 | " spin=9.0,\n", 418 | " ),\n", 419 | " pylhe.LHEParticle(\n", 420 | " id=13,\n", 421 | " status=1,\n", 422 | " mother1=3,\n", 423 | " mother2=3,\n", 424 | " color1=0,\n", 425 | " color2=0,\n", 426 | " px=EB * math.sin(theta) * math.cos(phi),\n", 427 | " py=EB * math.sin(theta) * math.sin(phi),\n", 428 | " pz=EB * math.cos(theta),\n", 429 | " e=EB,\n", 430 | " m=0,\n", 431 | " lifetime=0,\n", 432 | " spin=9.0,\n", 433 | " ),\n", 434 | " pylhe.LHEParticle(\n", 435 | " id=-13,\n", 436 | " status=1,\n", 437 | " mother1=3,\n", 438 | " mother2=3,\n", 439 | " color1=0,\n", 440 | " color2=0,\n", 441 | " px=-EB * math.sin(theta) * math.cos(phi),\n", 442 | " py=-EB * math.sin(theta) * math.sin(phi),\n", 443 | " pz=-EB * math.cos(theta),\n", 444 | " 
e=EB,\n", 445 | " m=0,\n", 446 | " lifetime=0,\n", 447 | " spin=9.0,\n", 448 | " ),\n", 449 | " ],\n", 450 | " )\n", 451 | " lheevents.append(e)\n", 452 | "lheinit = pylhe.LHEInit(\n", 453 | " initInfo=pylhe.LHEInitInfo(\n", 454 | " beamA=11,\n", 455 | " beamB=-11,\n", 456 | " energyA=EB,\n", 457 | " energyB=EB,\n", 458 | " PDFgroupA=0,\n", 459 | " PDFgroupB=0,\n", 460 | " PDFsetA=0,\n", 461 | " PDFsetB=0,\n", 462 | " weightingStrategy=3,\n", 463 | " numProcesses=1,\n", 464 | " ),\n", 465 | " procInfo=[\n", 466 | " pylhe.LHEProcInfo(\n", 467 | " xSection=result,\n", 468 | " error=0,\n", 469 | " unitWeight=1,\n", 470 | " procId=1,\n", 471 | " )\n", 472 | " ],\n", 473 | " weightgroup={},\n", 474 | " LHEVersion=3,\n", 475 | ")\n", 476 | "lhef = pylhe.LHEFile(lheinit, lheevents)\n", 477 | "print(lhef.events[0].tolhe(rwgt=False, weights=False))\n", 478 | "lhef.tofile(\"unweighted.lhe\", rwgt=False, weights=False)" 479 | ] 480 | }, 481 | { 482 | "cell_type": "code", 483 | "execution_count": 11, 484 | "id": "e840fba3-e797-497c-9ad8-e48451928614", 485 | "metadata": {}, 486 | "outputs": [ 487 | { 488 | "ename": "AttributeError", 489 | "evalue": "'LHEFile' object has no attribute 'event'", 490 | "output_type": "error", 491 | "traceback": [ 492 | "\u001b[31m---------------------------------------------------------------------------\u001b[39m", 493 | "\u001b[31mAttributeError\u001b[39m Traceback (most recent call last)", 494 | "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[11]\u001b[39m\u001b[32m, line 1\u001b[39m\n\u001b[32m----> \u001b[39m\u001b[32m1\u001b[39m arr = pylhe.to_awkward(\u001b[43mpylhe\u001b[49m\u001b[43m.\u001b[49m\u001b[43mLHEFile\u001b[49m\u001b[43m.\u001b[49m\u001b[43mfromfile\u001b[49m\u001b[43m(\u001b[49m\u001b[33;43m\"\u001b[39;49m\u001b[33;43munweighted.lhe\u001b[39;49m\u001b[33;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mwith_attributes\u001b[49m\u001b[43m=\u001b[49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m.\u001b[49m\u001b[43mevent\u001b[49m)\n\u001b[32m 2\u001b[39m hist2 = hist.Hist.new.Reg(\u001b[32m20\u001b[39m, \u001b[32m0\u001b[39m, math.pi).Weight()\n\u001b[32m 3\u001b[39m hist2.fill(arr.particles.vector[:, -\u001b[32m1\u001b[39m].theta, weight=arr.eventinfo.weight)\n", 495 | "\u001b[31mAttributeError\u001b[39m: 'LHEFile' object has no attribute 'event'" 496 | ] 497 | } 498 | ], 499 | "source": [ 500 | "arr = pylhe.to_awkward(\n", 501 | " pylhe.LHEFile.fromfile(\"unweighted.lhe\", with_attributes=True).event\n", 502 | ")\n", 503 | "hist2 = hist.Hist.new.Reg(20, 0, math.pi).Weight()\n", 504 | "hist2.fill(arr.particles.vector[:, -1].theta, weight=arr.eventinfo.weight)" 505 | ] 506 | }, 507 | { 508 | "cell_type": "code", 509 | "execution_count": null, 510 | "id": "c9ab56a1-50bc-405a-b7e5-c7d6b61881de", 511 | "metadata": {}, 512 | "outputs": [], 513 | "source": [ 514 | "hist2.plot()" 515 | ] 516 | }, 517 | { 518 | "cell_type": "code", 519 | "execution_count": null, 520 | "id": "c9be8352-6ab8-45c2-a4ac-fa458ee41652", 521 | "metadata": {}, 522 | "outputs": [], 523 | "source": [ 524 | "hist1.plot_ratio(hist2)" 525 | ] 526 | }, 527 | { 528 | "cell_type": "code", 529 | "execution_count": null, 530 | "id": "932f8f9b-b7d9-4a6e-9f34-f2f90aeb86a3", 531 | "metadata": {}, 532 | "outputs": [], 533 | "source": [] 534 | } 535 | ], 536 | "metadata": { 537 | "kernelspec": { 538 | "display_name": "Python 3 (ipykernel)", 539 | "language": "python", 540 | "name": "python3" 541 | }, 542 | "language_info": { 543 | 
"codemirror_mode": { 544 | "name": "ipython", 545 | "version": 3 546 | }, 547 | "file_extension": ".py", 548 | "mimetype": "text/x-python", 549 | "name": "python", 550 | "nbconvert_exporter": "python", 551 | "pygments_lexer": "ipython3", 552 | "version": "3.13.5" 553 | } 554 | }, 555 | "nbformat": 4, 556 | "nbformat_minor": 5 557 | } 558 | --------------------------------------------------------------------------------