├── .editorconfig
├── .github
│   ├── CODEOWNERS
│   ├── ISSUE_TEMPLATE
│   │   ├── BUG_REPORT.yaml
│   │   └── FEATURE_REQUEST.yaml
│   ├── PULL_REQUEST_TEMPLATE.md
│   ├── actions
│   │   ├── install-application
│   │   │   └── action.yml
│   │   └── install-main-dependencies
│   │       └── action.yml
│   └── workflows
│       └── main.yml
├── .gitignore
├── .mailmap
├── .pylintdict
├── .pylintrc
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── Makefile
├── README.md
├── docs
│   ├── .nojekyll
│   ├── Makefile
│   ├── conf.py
│   ├── finance
│   │   └── index.rst
│   ├── index.rst
│   ├── lowercase_filter.py
│   ├── machine_learning
│   │   └── index.rst
│   ├── nature
│   │   └── index.rst
│   └── optimization
│       └── index.rst
├── finance
│   ├── Makefile
│   ├── __init__.py
│   ├── asv.conf.json
│   └── benchmarks
│       └── __init__.py
├── machine_learning
│   ├── Makefile
│   ├── __init__.py
│   ├── asv.conf.json
│   └── benchmarks
│       ├── CCPP_data.csv
│       ├── __init__.py
│       ├── base_classifier_benchmark.py
│       ├── base_regressor_benchmark.py
│       ├── circuit_qnn_base_classifier_benchmark.py
│       ├── circuit_qnn_classifier_benchmark.py
│       ├── circuit_qnn_classifier_fit_benchmark.py
│       ├── datasets.py
│       ├── opflow_qnn_base_classifier_benchmark.py
│       ├── opflow_qnn_classifier_benchmark.py
│       ├── opflow_qnn_classifier_fit_benchmark.py
│       ├── opflow_qnn_regressor_benchmark.py
│       ├── opflow_qnn_regressor_fit_benchmark.py
│       ├── vqc_base_benchmark.py
│       ├── vqc_benchmark.py
│       └── vqc_fit_benchmark.py
├── mypy.ini
├── nature
│   ├── Makefile
│   ├── __init__.py
│   ├── asv.conf.json
│   └── benchmarks
│       ├── __init__.py
│       ├── jordan_wigner_benchmark.py
│       ├── jordan_wigner_benchmark_driver_H2.hdf5
│       ├── jordan_wigner_benchmark_driver_H2O.hdf5
│       ├── jordan_wigner_benchmark_driver_LiH.hdf5
│       └── linear_mapper_benchmark.py
├── optimization
│   ├── Makefile
│   ├── __init__.py
│   ├── asv.conf.json
│   └── benchmarks
│       ├── __init__.py
│       ├── knapsack.py
│       └── maxcut.py
├── pyproject.toml
├── requirements-dev.txt
└── tools
    ├── benchmarks.sh
    ├── check_copyright.py
    ├── check_version.py
    ├── cron_script.sh
    ├── install_rust.sh
    ├── main_script.sh
    ├── ml_unittests.sh
    ├── send_notification.py
    └── verify_headers.py
/.editorconfig:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2017, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # EditorConfig sets project-wide editor defaults: https://EditorConfig.org
14 |
15 | # top-most EditorConfig file, stop looking higher in the tree
16 | root = true
17 |
18 | # Default settings can be overridden by an editorconfig file in a subdir
19 | # or by a specific glob later in this file
20 | [*]
21 | end_of_line = lf
22 | insert_final_newline = true
23 | charset = utf-8
24 | indent_style = space
25 | trim_trailing_whitespace = true
26 |
27 | # Python
28 | [*.py]
29 | indent_size = 4
30 |
31 | # Javascript
32 | [*.{js,json}]
33 | indent_style = space
34 | indent_size = 2
35 |
36 | ## Windows files
37 | # [*.bat]
38 | # end_of_line = crlf
39 |
40 | # Makefile
41 | [Makefile]
42 | indent_style = tab
43 |
44 | # Markdown
45 | [*.md]
46 | # trailing whitespace is used for line breaks in md (yuck)
47 | trim_trailing_whitespace = false
48 |
49 | # YAML
50 | [*.{yaml,yml}]
51 | indent_size = 2
52 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # This file defines the persons who will be assigned as reviewers for PRs that
2 | # modify particular files in the repo. The PR can be merged when approved by at
3 | # least one codeowner. However, all Qiskit team members can (and should!) review the PRs.
4 |
5 | # Global rule, unless specialized by a later one
6 | * @stefan-woerner @manoelmarques @woodsp-ibm @ElePT @mrossinek
7 |
8 | # Application folders
9 | finance/ @stefan-woerner @manoelmarques @woodsp-ibm @ElePT @mrossinek
10 | machine_learning/ @stefan-woerner @manoelmarques @woodsp-ibm @ElePT @mrossinek @adekusar-drl
11 | nature/ @stefan-woerner @manoelmarques @woodsp-ibm @ElePT @mrossinek
12 | optimization/ @stefan-woerner @manoelmarques @woodsp-ibm @ElePT @mrossinek @t-imamichi
13 |
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/BUG_REPORT.yaml:
--------------------------------------------------------------------------------
1 | name: 🐛 Bug report
2 | description: Create a report to help us improve 🤔.
3 | labels: ["bug"]
4 |
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: Thank you for reporting! Please also use the search to see if there are any other relevant issues or pull requests.
9 |
10 | - type: textarea
11 | attributes:
12 | label: Environment
13 | description: For the version of the application, please give the actual version number (_e.g._ 0.18.3) if you are using a release version, or the first 7-8 characters of the commit hash if you have installed from `git`. If anything else is relevant, you can add it to the list.
14 | # The trailing spaces on the following lines are to make filling the form
15 | # in easier. The type is 'textarea' rather than three separate 'input's
16 | # to make the resulting issue body less noisy with headings.
17 | value: |
18 | - **Qiskit Finance version**:
19 | - **Qiskit Machine Learning version**:
20 | - **Qiskit Nature version**:
21 | - **Qiskit Optimization version**:
22 | - **Python version**:
23 | - **Operating system**:
24 | validations:
25 | required: true
26 |
27 | - type: textarea
28 | attributes:
29 | label: What is happening?
30 | description: A short description of what is going wrong, in words.
31 | validations:
32 | required: true
33 |
34 | - type: textarea
35 | attributes:
36 | label: How can we reproduce the issue?
37 | description: Give some steps that show the bug. A [minimal working example](https://stackoverflow.com/help/minimal-reproducible-example) of code with output is best. If you are copying in code, please remember to enclose it in triple backticks (` ``` [multiline code goes here] ``` `) so that it [displays correctly](https://docs.github.com/en/github/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax).
38 | validations:
39 | required: true
40 |
41 | - type: textarea
42 | attributes:
43 | label: What should happen?
44 | description: A short description, including about the expected output of any code in the previous section.
45 | validations:
46 | required: true
47 |
48 | - type: textarea
49 | attributes:
50 | label: Any suggestions?
51 | description: Not required, but if you have suggestions for how a contributor should fix this, or any problems we should be aware of, let us know.
52 | validations:
53 | required: false
54 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.yaml:
--------------------------------------------------------------------------------
1 | name: 🚀 Feature request
2 | description: Suggest an idea for this project 💡!
3 | labels: ["type: feature request"]
4 |
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: Please make sure to browse the opened and closed issues to make sure that this idea has not previously been discussed.
9 |
10 | - type: textarea
11 | attributes:
12 | label: What should we add?
13 | validations:
14 | required: true
15 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
11 |
12 | ### Summary
13 |
14 |
15 |
16 | ### Details and comments
17 |
18 |
19 |
--------------------------------------------------------------------------------
/.github/actions/install-application/action.yml:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | name: 'Install Qiskit Application'
14 | description: 'Installs Qiskit Application from Main'
15 | inputs:
16 | os:
17 | description: 'OS'
18 | required: true
19 | python-version:
20 | description: 'Python version'
21 | required: true
22 | application-name:
23 | description: 'Application name'
24 | required: true
25 | runs:
26 | using: "composite"
27 | steps:
28 | - name: Get main last commit id
29 | run: |
30 | echo "APPLICATION_HASH=$(git ls-remote --heads https://github.com/Qiskit/qiskit-${{ inputs.application-name }}.git refs/heads/main | awk '{print $1}')" >> $GITHUB_ENV
31 | shell: bash
32 | - name: ${{ inputs.application-name }} Cache
33 | env:
34 | CACHE_VERSION: v1
35 | id: application-cache
36 | uses: actions/cache@v3
37 | with:
38 | path: ${{ inputs.application-name }}-cache
39 | key: ${{ inputs.application-name }}-${{ inputs.os }}-${{ inputs.python-version }}-${{ env.APPLICATION_HASH }}-${{ env.CACHE_VERSION }}
40 | - name: Install ${{ inputs.application-name }} from Main
41 | run: |
42 | echo "Install ${{ inputs.application-name }} from Main"
43 | BASE_DIR=${{ inputs.application-name }}-cache
44 | build_from_main=true
45 | cache_hit=${{ steps.application-cache.outputs.cache-hit }}
46 | echo "cache hit: ${cache_hit}"
47 | pip install -U wheel
48 | pip install "setuptools<67.0.0"
49 | if [ "$cache_hit" == "true" ]; then
50 | pip_result=0
51 | pushd "${BASE_DIR}"
52 | python -m pip install *.whl && pip_result=$? || pip_result=$?
53 | popd
54 | if [ $pip_result == 0 ]; then
55 | build_from_main=false
56 | fi
57 | else
58 | mkdir -p ${BASE_DIR}
59 | fi
60 | if [ "$build_from_main" == "true" ]; then
61 | echo 'Create wheel file from main'
62 | git clone --depth 1 --branch main https://github.com/Qiskit/qiskit-${{ inputs.application-name }}.git /tmp/qiskit-${{ inputs.application-name }}
63 | pushd /tmp/qiskit-${{ inputs.application-name }}
64 | python setup.py bdist_wheel
65 | popd
66 | cp -rf /tmp/qiskit-${{ inputs.application-name }}/dist/*.whl "${BASE_DIR}"
67 | pushd "${BASE_DIR}"
68 | python -m pip install *.whl
69 | popd
70 | fi
71 | shell: bash
72 |
--------------------------------------------------------------------------------
/.github/actions/install-main-dependencies/action.yml:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2023.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | name: 'Install Qiskit App.Benchmarks Main Dependencies'
14 | description: 'Installs Python dependencies from Main'
15 | inputs:
16 | os:
17 | description: 'OS'
18 | required: true
19 | python-version:
20 | description: 'Python version'
21 | required: true
22 | runs:
23 | using: "composite"
24 | steps:
25 | - name: Get main last commit ids
26 | run: |
27 | echo "TERRA_HASH=$(git ls-remote --heads https://github.com/Qiskit/qiskit-terra.git refs/heads/main | awk '{print $1}')" >> $GITHUB_ENV
28 | echo "AER_HASH=$(git ls-remote --heads https://github.com/Qiskit/qiskit-aer.git refs/heads/main | awk '{print $1}')" >> $GITHUB_ENV
29 | shell: bash
30 | - name: Terra Cache
31 | env:
32 | CACHE_VERSION: v1
33 | id: terra-cache
34 | uses: actions/cache@v3
35 | with:
36 | path: terra-cache
37 | key: terra-cache-${{ inputs.os }}-${{ inputs.python-version }}-${{ env.TERRA_HASH }}-${{ env.CACHE_VERSION }}
38 | - name: Aer Cache
39 | env:
40 | CACHE_VERSION: v1
41 | id: aer-cache
42 | uses: actions/cache@v3
43 | with:
44 | path: aer-cache
45 | key: aer-cache-${{ inputs.os }}-${{ inputs.python-version }}-${{ env.AER_HASH }}-${{ env.CACHE_VERSION }}
46 | - name: Install Terra from Main
47 | env:
48 | MACOSX_DEPLOYMENT_TARGET: 10.15
49 | run: |
50 | echo 'Install Terra from Main'
51 | BASE_DIR=terra-cache
52 | build_from_main=true
53 | cache_hit=${{ steps.terra-cache.outputs.cache-hit }}
54 | echo "cache hit: ${cache_hit}"
55 | if [ "$cache_hit" == "true" ]; then
56 | pip_result=0
57 | pushd "${BASE_DIR}"
58 | python -m pip install *.whl && pip_result=$? || pip_result=$?
59 | popd
60 | if [ $pip_result == 0 ]; then
61 | build_from_main=false
62 | fi
63 | else
64 | mkdir -p ${BASE_DIR}
65 | fi
66 | if [ "$build_from_main" == "true" ]; then
67 | echo 'Create wheel file from main'
68 | pip install -U wheel setuptools_rust
69 | git clone --depth 1 --branch main https://github.com/Qiskit/qiskit-terra.git /tmp/qiskit-terra
70 | pushd /tmp/qiskit-terra
71 | python setup.py bdist_wheel
72 | popd
73 | cp -rf /tmp/qiskit-terra/dist/*.whl "${BASE_DIR}"
74 | pushd "${BASE_DIR}"
75 | python -m pip install *.whl
76 | popd
77 | pip uninstall -y setuptools_rust
78 | fi
79 | shell: bash
80 | - name: Install Aer from Main
81 | env:
82 | MACOSX_DEPLOYMENT_TARGET: 10.16
83 | run: |
84 | echo 'Install Aer from Main'
85 | if [ "${{ inputs.os }}" == "ubuntu-latest" ]; then
86 | export DISABLE_CONAN=1
87 | sudo apt-get -y install nlohmann-json3-dev
88 | sudo apt-get -y install libspdlog-dev
89 | sudo apt-get -y install libmuparserx-dev
90 | fi
91 | git clone --depth 1 --branch main https://github.com/Qiskit/qiskit-aer.git /tmp/qiskit-aer
92 | BASE_DIR=aer-cache
93 | build_from_main=true
94 | cache_hit=${{ steps.aer-cache.outputs.cache-hit }}
95 | echo "cache hit: ${cache_hit}"
96 | if [ "$cache_hit" == "true" ]; then
97 | pip_result=0
98 | pushd "${BASE_DIR}"
99 | python -m pip install *.whl && pip_result=$? || pip_result=$?
100 | popd
101 | if [ $pip_result == 0 ]; then
102 | echo 'Verifying cached Aer with tools/verify_wheels.py ...'
103 | verify_result=0
104 | pushd /tmp/qiskit-aer
105 | python tools/verify_wheels.py && verify_result=$? || verify_result=$?
106 | popd
107 | if [ $verify_result == 0 ]; then
108 | echo 'Cached Aer passed verification.'
109 | build_from_main=false
110 | else
111 | echo 'Cached Aer failed verification.'
112 | pip uninstall -y qiskit-aer
113 | fi
114 | fi
115 | else
116 | mkdir -p ${BASE_DIR}
117 | fi
118 | if [ "$build_from_main" == "true" ]; then
119 | echo 'Create wheel file from main'
120 | pip install -U wheel
121 | pushd /tmp/qiskit-aer
122 | pip install -U "cmake!=3.17.1,!=3.17.0"
123 | pip install "scikit-build>=0.11.0"
124 | pip install pybind11
125 | if [ "${{ inputs.os }}" == "windows-2019" ]; then
126 | python setup.py bdist_wheel -- -G 'Visual Studio 16 2019'
127 | elif [ "${{ inputs.os }}" == "macos-latest" ]; then
128 | pip install -U -c constraints.txt -r requirements-dev.txt
129 | python setup.py bdist_wheel --plat-name macosx-10.16-x86_64
130 | else
131 | python setup.py bdist_wheel
132 | fi
133 | popd
134 | cp -rf /tmp/qiskit-aer/dist/*.whl "${BASE_DIR}"
135 | pushd "${BASE_DIR}"
136 | python -m pip install *.whl
137 | popd
138 | fi
139 | shell: bash
140 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2023.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | name: Application Benchmarks Tests
14 |
15 | on:
16 | push:
17 | branches:
18 | - main
19 | pull_request:
20 | branches:
21 | - main
22 | schedule:
23 | # run every day at 1AM
24 | - cron: '0 1 * * *'
25 |
26 | concurrency:
27 | group: ${{ github.repository }}-${{ github.ref }}-${{ github.head_ref }}-${{ github.workflow }}
28 | cancel-in-progress: true
29 |
30 | jobs:
31 | Checks:
32 | runs-on: ${{ matrix.os }}
33 | strategy:
34 | matrix:
35 | os: [ubuntu-latest]
36 | python-version: [3.8]
37 | steps:
38 | - name: Print Concurrency Group
39 | env:
40 | CONCURRENCY_GROUP: ${{ github.repository }}-${{ github.ref }}-${{ github.head_ref }}-${{ github.workflow }}
41 | run: |
42 | echo -e "\033[31;1;4mConcurrency Group\033[0m"
43 | echo -e "$CONCURRENCY_GROUP\n"
44 | shell: bash
45 | - uses: actions/checkout@v3
46 | with:
47 | fetch-depth: 0
48 | - uses: actions/setup-python@v4
49 | with:
50 | python-version: ${{ matrix.python-version }}
51 | cache: 'pip'
52 | cache-dependency-path: requirements-dev.txt
53 | - uses: ./.github/actions/install-main-dependencies
54 | with:
55 | os: ${{ matrix.os }}
56 | python-version: ${{ matrix.python-version }}
57 | - name: Install Dependencies
58 | run: |
59 | pip install -U -r requirements-dev.txt
60 | sudo apt-get -y install pandoc graphviz
61 | sudo apt-get -y install python3-enchant
62 | sudo apt-get -y install hunspell-en-us
63 | pip install pyenchant
64 | shell: bash
65 | - name: Install Optimization
66 | uses: ./.github/actions/install-application
67 | with:
68 | os: ${{ matrix.os }}
69 | python-version: ${{ matrix.python-version }}
70 | application-name: optimization
71 | - name: Install Finance
72 | uses: ./.github/actions/install-application
73 | with:
74 | os: ${{ matrix.os }}
75 | python-version: ${{ matrix.python-version }}
76 | application-name: finance
77 | - name: Install Machine Learning
78 | uses: ./.github/actions/install-application
79 | with:
80 | os: ${{ matrix.os }}
81 | python-version: ${{ matrix.python-version }}
82 | application-name: machine-learning
83 | - name: Install Nature
84 | uses: ./.github/actions/install-application
85 | with:
86 | os: ${{ matrix.os }}
87 | python-version: ${{ matrix.python-version }}
88 | application-name: nature
89 | - name: Install Extras
90 | run: |
91 | echo 'Install optimization extras'
92 | pip install qiskit-optimization[cplex,cvx,matplotlib,gurobi]
93 | echo 'Install machine learning extras'
94 | pip install qiskit-machine-learning[torch,sparse]
95 | echo 'Install nature extras'
96 | pip install qiskit-nature[pyscf]
97 | shell: bash
98 | - run: pip check
99 | if: ${{ !cancelled() }}
100 | shell: bash
101 | - name: Copyright Check
102 | run: |
103 | python tools/check_copyright.py -check
104 | if: ${{ !cancelled() }}
105 | shell: bash
106 | - run: make spell
107 | if: ${{ !cancelled() }}
108 | shell: bash
109 | - name: Style Check
110 | run: |
111 | make clean_sphinx
112 | make style
113 | if: ${{ !cancelled() }}
114 | shell: bash
115 | - run: make lint
116 | if: ${{ !cancelled() }}
117 | shell: bash
118 | - run: make mypy
119 | if: ${{ !cancelled() }}
120 | shell: bash
121 | - name: Run make html
122 | run: |
123 | make clean_sphinx
124 | make html
125 | cd docs/_build/html
126 | mkdir artifacts
127 | tar -zcvf artifacts/documentation.tar.gz --exclude=./artifacts .
128 | if: ${{ !cancelled() }}
129 | shell: bash
130 | - name: Run upload documentation
131 | uses: actions/upload-artifact@v3
132 | with:
133 | name: documentation
134 | path: docs/_build/html/artifacts/documentation.tar.gz
135 | if: ${{ !cancelled() }}
136 | Benchmarks-Dev:
137 | runs-on: ${{ matrix.os }}
138 | strategy:
139 | fail-fast: false
140 | matrix:
141 | os: [ubuntu-latest]
142 | python-version: [3.8]
143 | domain: ["finance", "machine_learning", "nature", "optimization"]
144 | steps:
145 | - uses: actions/checkout@v3
146 | - uses: actions/setup-python@v4
147 | with:
148 | python-version: ${{ matrix.python-version }}
149 | - name: Install Dependencies
150 | run: |
151 | pushd ${{ matrix.domain }}
152 | if [ -n "$(find benchmarks/* -not -name '__*' | head -1)" ]; then
153 | pip install -U asv virtualenv
154 | # for qiskit-aer build under asv
155 | sudo apt-get -y install libopenblas-dev
156 | fi
157 | popd
158 | shell: bash
159 | - name: Benchmarks
160 | run: |
161 | pushd ${{ matrix.domain }}
162 | if [ -n "$(find benchmarks/* -not -name '__*' | head -1)" ]; then
163 | asv machine --yes
164 | asv update || true
165 | asv run --quick --show-stderr
166 | fi
167 | popd
168 | shell: bash
169 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # MacOSX
2 | .DS_Store
3 |
4 | # Dolphin KDE
5 | .directory
6 |
7 | # editor files
8 | .vscode/
9 | .idea/
10 |
11 | # asv venvs and results
12 | .asv/
13 | html/
14 | qiskit-finance/
15 | qiskit-machine-learning/
16 | qiskit-nature/
17 | qiskit-optimization/
18 |
19 | # Byte-compiled / optimized / DLL files
20 | __pycache__/
21 | *.py[cod]
22 | *$py.class
23 |
24 | # C extensions
25 | *.so
26 |
27 | # Distribution / packaging
28 | .Python
29 | build/
30 | develop-eggs/
31 | dist/
32 | downloads/
33 | eggs/
34 | .eggs/
35 | lib/
36 | lib64/
37 | parts/
38 | sdist/
39 | var/
40 | wheels/
41 | pip-wheel-metadata/
42 | share/python-wheels/
43 | *.egg-info/
44 | .installed.cfg
45 | *.egg
46 | MANIFEST
47 |
48 | # PyInstaller
49 | # Usually these files are written by a python script from a template
50 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
51 | *.manifest
52 | *.spec
53 |
54 | # Installer logs
55 | pip-log.txt
56 | pip-delete-this-directory.txt
57 |
58 | # Unit test / coverage reports
59 | htmlcov/
60 | .tox/
61 | .nox/
62 | .coverage
63 | .coverage.*
64 | .cache
65 | nosetests.xml
66 | coverage.xml
67 | *.cover
68 | *.py,cover
69 | .hypothesis/
70 | .pytest_cache/
71 |
72 | # Translations
73 | *.mo
74 | *.pot
75 |
76 | # Django stuff:
77 | *.log
78 | local_settings.py
79 | db.sqlite3
80 | db.sqlite3-journal
81 |
82 | # Flask stuff:
83 | instance/
84 | .webassets-cache
85 |
86 | # Scrapy stuff:
87 | .scrapy
88 |
89 | # Sphinx documentation
90 | docs/_build/
91 |
92 | # PyBuilder
93 | target/
94 |
95 | # Jupyter Notebook
96 | .ipynb_checkpoints
97 |
98 | # IPython
99 | profile_default/
100 | ipython_config.py
101 |
102 | # pyenv
103 | .python-version
104 |
105 | # pipenv
106 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
107 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
108 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
109 | # install all needed dependencies.
110 | #Pipfile.lock
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
--------------------------------------------------------------------------------
/.mailmap:
--------------------------------------------------------------------------------
1 | # Entries in this file are made for two reasons:
2 | # 1) to merge multiple git commit authors that correspond to a single author
3 | # 2) to change the canonical name and/or email address of an author.
4 | #
5 | # Format is:
6 | # Canonical Name <canonical email> commit name <commit email>
7 | # \--------------+---------------/ \----------+-------------/
8 | #             replace                        find
9 | # See also: 'git shortlog --help' and 'git check-mailmap --help'.
10 | #
11 | # If you don't like the way your name is cited by qiskit, please feel free to
12 | # open a pull request against this file to set your preferred naming.
13 | #
14 | # Note that each qiskit optimization element uses its own mailmap so it may be necessary to
15 | # propagate changes in other repos for consistency.
16 |
17 | Abdón Rodríguez Davila
18 | Ali Javadi-Abhari
19 | Ali Javadi-Abhari
20 | Albert Frisch
21 | Albert Frisch
22 | Alejandro Pozas-Kerstjens
23 | Almudena Carrera Vazquez
24 | Anna Phan <9410731+attp@users.noreply.github.com>
25 | Christa Zoufal
26 | Christian Clauss
27 | Antonio Mezzacapo <30698465+antoniomezzacapo@users.noreply.github.com>
28 | Donny Greenberg
29 | Donny Greenberg
30 | Gawel Kus
31 | Gawel Kus
32 | Ian Gould
33 | Igor Olegovich Sokolov
34 | Ikko Hamamura
35 | Isabel Haide
36 | Jan Müggenburg
37 | Jan Müggenburg
38 | Jay M. Gambetta
39 | Juan Cruz-Benito
40 | Julien Gacon
41 | Julien Gacon
42 | Karel Dumon
43 | Marco Pistoia
44 | Max Rossmannek
45 | Nick Singstock <35930611+Nick-Singstock@users.noreply.github.com>
46 | Panagiotis Barkoutsos
47 | Panagiotis Barkoutsos
48 | Peng Liu <34400304+liupibm@users.noreply.github.com>
49 | Shaohan Hu
50 | Shaohan Hu
51 | Stefan Woerner
52 | Stefan Woerner <41292468+stefan-woerner@users.noreply.github.com>
53 | Stephen Wood <40241007+woodsp-ibm@users.noreply.github.com>
54 | Vivek Krishnan
55 | Yael Ben-Haim
56 | Yotam Vaknin
57 |
--------------------------------------------------------------------------------
/.pylintdict:
--------------------------------------------------------------------------------
1 | adjoint
2 | al
3 | ancilla
4 | ancillas
5 | angiotensin
6 | ansatz
7 | args
8 | asmatrix
9 | asv
10 | attr
11 | autograd
12 | autosummary
13 | backend
14 | backpropagation
15 | bergholm
16 | bitstring
17 | bitstrings
18 | bool
19 | cargs
20 | cbit
21 | chuang
22 | clbit
23 | clbits
24 | codec
25 | cohen
26 | config
27 | contravariance
28 | creg
29 | crossentropyloss
30 | csr
31 | ctrl
32 | ctx
33 | currentmodule
34 | cvs
35 | cx
36 | data's
37 | datapoints
38 | dataset
39 | datasets
40 | decrypts
41 | deepcopy
42 | diag
43 | dicts
44 | dir
45 | discretize
46 | discretized
47 | discriminative
48 | distro
49 | dt
50 | eigenstates
51 | endian
52 | entangler
53 | et
54 | eval
55 | formatter
56 | gaussian
57 | getter
58 | gh
59 | grover
60 | hamiltonian
61 | hashable
62 | hilbert
63 | hoc
64 | html
65 | init
66 | inlier
67 | inplace
68 | instantiation
69 | instantiations
70 | isometry
71 | iten
72 | iterable
73 | jupyter
74 | kwarg
75 | kwargs
76 | langle
77 | macos
78 | makefile
79 | matmul
80 | maxcut
81 | maxiter
82 | mcrx
83 | mcry
84 | mcrz
85 | mpl
86 | multiclass
87 | multioutput
88 | mxd
89 | mypy
90 | ndarray
91 | neuropeptide
92 | nielsen
93 | nn
94 | noancilla
95 | nonlocal
96 | nosignatures
97 | np
98 | ns
99 | num
100 | numpy
101 | nxd
102 | nxm
103 | observables
104 | opflow
105 | optim
106 | optimizer's
107 | optimizers
108 | orthonormal
109 | otimes
110 | ovo
111 | ovr
112 | param
113 | parametrized
114 | params
115 | pauli
116 | pre
117 | precomputed
118 | precomputes
119 | precomputing
120 | priori
121 | ps
122 | pxd
123 | py
124 | pytorch
125 | qae
126 | qaoa
127 | qarg
128 | qargs
129 | qasm
130 | qc
131 | qgan
132 | qgans
133 | qiskit
134 | qiskit's
135 | qnn
136 | qsvc
137 | qsvr
138 | qubit
139 | qubits
140 | qubo
141 | rangle
142 | readme
143 | regressor
144 | regressors
145 | regs
146 | repo
147 | rescale
148 | rhs
149 | rx
150 | ry
151 | rz
152 | scikit
153 | scipy
154 | semidefinite
155 | shende
156 | sigmoid
157 | sklearn
158 | softmax
159 | sparsearray
160 | statevector
161 | statevectors
162 | stdlib
163 | stdout
164 | str
165 | subcircuits
166 | submodules
167 | subobjects
168 | svc
169 | svm
170 | svr
171 | sx
172 | sy
173 | tensored
174 | th
175 | toctree
176 | traceback
177 | transpilation
178 | uci
179 | uncompiled
180 | unitaries
181 | univariate
182 | unsymmetric
183 | utils
184 | variational
185 | vec
186 | vqc
187 | vqe
188 | vqr
189 | vx
190 | vy
191 | vz
192 | wrt
193 | zoufal
194 | zsh
195 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # Code of Conduct
4 | All members of this project agree to adhere to the Qiskit Code of Conduct listed at [https://github.com/Qiskit/qiskit/blob/master/CODE_OF_CONDUCT.md](https://github.com/Qiskit/qiskit/blob/master/CODE_OF_CONDUCT.md)
5 |
6 | ----
7 |
8 | License: [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/),
9 | Copyright Contributors to Qiskit.
10 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | **We appreciate all kinds of help, so thank you!**
4 |
5 | First please read the overall project contributing guidelines. These are
6 | included in the Qiskit documentation here:
7 |
8 | https://qiskit.org/documentation/contributing_to_qiskit.html
9 |
10 | ## Contributing to Qiskit Application Benchmarks
11 |
12 | In addition to the general guidelines above there are specific details for
13 | contributing to Qiskit Application Benchmarks; these are documented below.
14 |
15 | ### Project Code Style.
16 |
17 | Code in Qiskit Application Benchmarks should conform to PEP8 and style/lint checks are run to validate
18 | this. Line length must be limited to no more than 100 characters. Docstrings
19 | should be written using the Google docstring format.
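As a hedged illustration of that convention, here is a minimal Google-style docstring sketch (the function name and parameters are hypothetical, not part of this repo):

```python
def run_fit_benchmark(num_samples: int, seed: int = 42) -> float:
    """Run one fit benchmark iteration (hypothetical helper, shown only for docstring style).

    Args:
        num_samples: Number of training samples to generate.
        seed: Random seed used for reproducibility.

    Returns:
        Wall-clock time of the fit, in seconds.

    Raises:
        ValueError: If ``num_samples`` is not positive.
    """
    if num_samples <= 0:
        raise ValueError("num_samples must be positive")
    return 0.0
```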
20 |
21 | Every Benchmark class should have a `version` property. If the benchmark class changes in a way that would invalidate previous
22 | results, the `version` should change in order to reset previous results and start fresh from the next commit.
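For illustration, a minimal sketch of what this looks like under asv's attribute-based benchmark discovery (the class below is hypothetical, not one of the suites in this repo):

```python
class ExampleBenchmark:
    """Minimal asv benchmark suite sketch."""

    # Bump this whenever a change would invalidate previously collected results;
    # asv then discards the old numbers and starts fresh from the next commit.
    version = 1
    params = [10, 100]
    param_names = ["size"]

    def setup(self, size):
        # Prepare the data consumed by the timed method.
        self.data = list(range(size))

    def time_sum(self, size):
        # asv times methods whose names start with ``time_``.
        sum(self.data)
```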
23 |
24 | ### Pull request checklist
25 |
26 | When submitting a pull request and you feel it is ready for review,
27 | please ensure that:
28 |
29 | 1. The code follows the _code style_ of this project and successfully
30 | passes the _unit tests_. Application Benchmarks uses [Pylint](https://www.pylint.org) and
31 | [PEP8](https://www.python.org/dev/peps/pep-0008) style guidelines.
32 |
33 | You can run
34 | ```shell script
35 | make lint
36 | make style
37 | ```
38 | from the root of the Application Benchmarks repository clone for lint and style conformance checks.
39 |
40 | If your code fails the local style checks (specifically the black
41 | code formatting check) you can use `make black` to automatically
42 | update the code formatting.
43 |
44 | 2. The documentation has been updated accordingly. In particular, if a
45 | function or class has been modified during the PR, please update the
46 | *docstring* accordingly.
47 |
48 | You can run `make spell` locally to check spelling though you would need to
49 | [install pyenchant](https://pyenchant.github.io/pyenchant/install.html) and be using
50 | hunspell-en-us as is used by the CI.
51 |
52 | For some words, such as names, technical terms, references to method parameters etc.,
53 | that are not in the en-us dictionary and get flagged as misspelled despite being correct,
54 | there is a [.pylintdict](./.pylintdict) custom word list file, in the root of the Application Benchmarks repo,
55 | where such words can be added, in alphabetical order, as needed.
56 |
57 | 3. If it makes sense for your change, ensure that you have added new tests that
58 | cover the changes and any new function.
59 |
60 | 4. Ensure all code has the copyright header. The copyright
61 | date will be checked by the CI build. The format of the date(s) is _year of creation,
62 | last year changed_. So for example:
63 |
64 | > \# (C) Copyright IBM 2018, 2021.
65 |
66 | If the _year of creation_ is the same as _last year changed_ then only
67 | one date is needed, for example:
68 |
69 | > \# (C) Copyright IBM 2021.
70 |
71 | If code is changed in a file make sure the copyright includes the current year.
72 | If there is just one date and it's a prior year then add the current year as the 2nd date,
73 | otherwise simply change the 2nd date to the current year. The _year of creation_ date is
74 | never changed.
75 |
76 | ### Branches
77 |
78 | * `main`:
79 |
80 | The main branch is used for development of the next version of qiskit-app-benchmarks.
81 | It will be updated frequently and should not be considered stable. The API
82 | can and will change on main as we introduce and refine new features.
83 |
84 | * `stable/*`:
85 | The stable branches are used to maintain the most recent released versions of
86 | qiskit-app-benchmarks. Each contains the version of the code corresponding to the minor
87 | version release in the branch name. The API on these branches is
88 | stable and the only changes merged to them are bugfixes.
89 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Copyright 2017 IBM and its contributors
2 |
3 | Apache License
4 | Version 2.0, January 2004
5 | http://www.apache.org/licenses/
6 |
7 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
8 |
9 | 1. Definitions.
10 |
11 | "License" shall mean the terms and conditions for use, reproduction,
12 | and distribution as defined by Sections 1 through 9 of this document.
13 |
14 | "Licensor" shall mean the copyright owner or entity authorized by
15 | the copyright owner that is granting the License.
16 |
17 | "Legal Entity" shall mean the union of the acting entity and all
18 | other entities that control, are controlled by, or are under common
19 | control with that entity. For the purposes of this definition,
20 | "control" means (i) the power, direct or indirect, to cause the
21 | direction or management of such entity, whether by contract or
22 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
23 | outstanding shares, or (iii) beneficial ownership of such entity.
24 |
25 | "You" (or "Your") shall mean an individual or Legal Entity
26 | exercising permissions granted by this License.
27 |
28 | "Source" form shall mean the preferred form for making modifications,
29 | including but not limited to software source code, documentation
30 | source, and configuration files.
31 |
32 | "Object" form shall mean any form resulting from mechanical
33 | transformation or translation of a Source form, including but
34 | not limited to compiled object code, generated documentation,
35 | and conversions to other media types.
36 |
37 | "Work" shall mean the work of authorship, whether in Source or
38 | Object form, made available under the License, as indicated by a
39 | copyright notice that is included in or attached to the work
40 | (an example is provided in the Appendix below).
41 |
42 | "Derivative Works" shall mean any work, whether in Source or Object
43 | form, that is based on (or derived from) the Work and for which the
44 | editorial revisions, annotations, elaborations, or other modifications
45 | represent, as a whole, an original work of authorship. For the purposes
46 | of this License, Derivative Works shall not include works that remain
47 | separable from, or merely link (or bind by name) to the interfaces of,
48 | the Work and Derivative Works thereof.
49 |
50 | "Contribution" shall mean any work of authorship, including
51 | the original version of the Work and any modifications or additions
52 | to that Work or Derivative Works thereof, that is intentionally
53 | submitted to Licensor for inclusion in the Work by the copyright owner
54 | or by an individual or Legal Entity authorized to submit on behalf of
55 | the copyright owner. For the purposes of this definition, "submitted"
56 | means any form of electronic, verbal, or written communication sent
57 | to the Licensor or its representatives, including but not limited to
58 | communication on electronic mailing lists, source code control systems,
59 | and issue tracking systems that are managed by, or on behalf of, the
60 | Licensor for the purpose of discussing and improving the Work, but
61 | excluding communication that is conspicuously marked or otherwise
62 | designated in writing by the copyright owner as "Not a Contribution."
63 |
64 | "Contributor" shall mean Licensor and any individual or Legal Entity
65 | on behalf of whom a Contribution has been received by Licensor and
66 | subsequently incorporated within the Work.
67 |
68 | 2. Grant of Copyright License. Subject to the terms and conditions of
69 | this License, each Contributor hereby grants to You a perpetual,
70 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
71 | copyright license to reproduce, prepare Derivative Works of,
72 | publicly display, publicly perform, sublicense, and distribute the
73 | Work and such Derivative Works in Source or Object form.
74 |
75 | 3. Grant of Patent License. Subject to the terms and conditions of
76 | this License, each Contributor hereby grants to You a perpetual,
77 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
78 | (except as stated in this section) patent license to make, have made,
79 | use, offer to sell, sell, import, and otherwise transfer the Work,
80 | where such license applies only to those patent claims licensable
81 | by such Contributor that are necessarily infringed by their
82 | Contribution(s) alone or by combination of their Contribution(s)
83 | with the Work to which such Contribution(s) was submitted. If You
84 | institute patent litigation against any entity (including a
85 | cross-claim or counterclaim in a lawsuit) alleging that the Work
86 | or a Contribution incorporated within the Work constitutes direct
87 | or contributory patent infringement, then any patent licenses
88 | granted to You under this License for that Work shall terminate
89 | as of the date such litigation is filed.
90 |
91 | 4. Redistribution. You may reproduce and distribute copies of the
92 | Work or Derivative Works thereof in any medium, with or without
93 | modifications, and in Source or Object form, provided that You
94 | meet the following conditions:
95 |
96 | (a) You must give any other recipients of the Work or
97 | Derivative Works a copy of this License; and
98 |
99 | (b) You must cause any modified files to carry prominent notices
100 | stating that You changed the files; and
101 |
102 | (c) You must retain, in the Source form of any Derivative Works
103 | that You distribute, all copyright, patent, trademark, and
104 | attribution notices from the Source form of the Work,
105 | excluding those notices that do not pertain to any part of
106 | the Derivative Works; and
107 |
108 | (d) If the Work includes a "NOTICE" text file as part of its
109 | distribution, then any Derivative Works that You distribute must
110 | include a readable copy of the attribution notices contained
111 | within such NOTICE file, excluding those notices that do not
112 | pertain to any part of the Derivative Works, in at least one
113 | of the following places: within a NOTICE text file distributed
114 | as part of the Derivative Works; within the Source form or
115 | documentation, if provided along with the Derivative Works; or,
116 | within a display generated by the Derivative Works, if and
117 | wherever such third-party notices normally appear. The contents
118 | of the NOTICE file are for informational purposes only and
119 | do not modify the License. You may add Your own attribution
120 | notices within Derivative Works that You distribute, alongside
121 | or as an addendum to the NOTICE text from the Work, provided
122 | that such additional attribution notices cannot be construed
123 | as modifying the License.
124 |
125 | You may add Your own copyright statement to Your modifications and
126 | may provide additional or different license terms and conditions
127 | for use, reproduction, or distribution of Your modifications, or
128 | for any such Derivative Works as a whole, provided Your use,
129 | reproduction, and distribution of the Work otherwise complies with
130 | the conditions stated in this License.
131 |
132 | 5. Submission of Contributions. Unless You explicitly state otherwise,
133 | any Contribution intentionally submitted for inclusion in the Work
134 | by You to the Licensor shall be under the terms and conditions of
135 | this License, without any additional terms or conditions.
136 | Notwithstanding the above, nothing herein shall supersede or modify
137 | the terms of any separate license agreement you may have executed
138 | with Licensor regarding such Contributions.
139 |
140 | 6. Trademarks. This License does not grant permission to use the trade
141 | names, trademarks, service marks, or product names of the Licensor,
142 | except as required for reasonable and customary use in describing the
143 | origin of the Work and reproducing the content of the NOTICE file.
144 |
145 | 7. Disclaimer of Warranty. Unless required by applicable law or
146 | agreed to in writing, Licensor provides the Work (and each
147 | Contributor provides its Contributions) on an "AS IS" BASIS,
148 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
149 | implied, including, without limitation, any warranties or conditions
150 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
151 | PARTICULAR PURPOSE. You are solely responsible for determining the
152 | appropriateness of using or redistributing the Work and assume any
153 | risks associated with Your exercise of permissions under this License.
154 |
155 | 8. Limitation of Liability. In no event and under no legal theory,
156 | whether in tort (including negligence), contract, or otherwise,
157 | unless required by applicable law (such as deliberate and grossly
158 | negligent acts) or agreed to in writing, shall any Contributor be
159 | liable to You for damages, including any direct, indirect, special,
160 | incidental, or consequential damages of any character arising as a
161 | result of this License or out of the use or inability to use the
162 | Work (including but not limited to damages for loss of goodwill,
163 | work stoppage, computer failure or malfunction, or any and all
164 | other commercial damages or losses), even if such Contributor
165 | has been advised of the possibility of such damages.
166 |
167 | 9. Accepting Warranty or Additional Liability. While redistributing
168 | the Work or Derivative Works thereof, You may choose to offer,
169 | and charge a fee for, acceptance of support, warranty, indemnity,
170 | or other liability obligations and/or rights consistent with this
171 | License. However, in accepting such obligations, You may act only
172 | on Your own behalf and on Your sole responsibility, not on behalf
173 | of any other Contributor, and only if You agree to indemnify,
174 | defend, and hold each Contributor harmless for any liability
175 | incurred by, or claims asserted against, such Contributor by reason
176 | of your accepting any such warranty or additional liability.
177 |
178 | END OF TERMS AND CONDITIONS
179 |
180 | APPENDIX: How to apply the Apache License to your work.
181 |
182 | To apply the Apache License to your work, attach the following
183 | boilerplate notice, with the fields enclosed by brackets "[]"
184 | replaced with your own identifying information. (Don't include
185 | the brackets!) The text should be enclosed in the appropriate
186 | comment syntax for the file format. We also recommend that a
187 | file or class name and description of purpose be included on the
188 | same "printed page" as the copyright notice for easier
189 | identification within third-party archives.
190 |
191 | Copyright 2017 IBM and its contributors.
192 |
193 | Licensed under the Apache License, Version 2.0 (the "License");
194 | you may not use this file except in compliance with the License.
195 | You may obtain a copy of the License at
196 |
197 | http://www.apache.org/licenses/LICENSE-2.0
198 |
199 | Unless required by applicable law or agreed to in writing, software
200 | distributed under the License is distributed on an "AS IS" BASIS,
201 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
202 | See the License for the specific language governing permissions and
203 | limitations under the License.
204 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set those variables from the command line.
14 | TARGET =
15 | ASVCMD =
16 | ASVOPTS =
17 | SPHINXOPTS =
18 |
19 | .PHONY: asv lint mypy style black spell copyright html clean_sphinx clean
20 |
21 | asv:
22 | make -C $(TARGET) asv ASVCMD=$(ASVCMD) ASVOPTS="$(ASVOPTS)"
23 |
24 | lint:
25 | python -m pylint -rn --ignore=.asv finance machine_learning nature optimization tools
26 | python tools/verify_headers.py finance machine_learning nature optimization tools
27 | python tools/check_version.py finance machine_learning nature optimization
28 |
29 | mypy:
30 | python -m mypy finance machine_learning nature optimization tools
31 |
32 | style:
33 | python -m black --check finance machine_learning nature optimization tools docs
34 |
35 | black:
36 | python -m black finance machine_learning nature optimization tools docs
37 |
38 | spell:
39 | python -m pylint -rn --disable=all --enable=spelling --spelling-dict=en_US --spelling-private-dict-file=.pylintdict --ignore=.asv finance machine_learning nature optimization tools
40 | make -C docs spell SPHINXOPTS=$(SPHINXOPTS)
41 |
42 | copyright:
43 | python tools/check_copyright.py
44 |
45 | html:
46 | make -C docs html SPHINXOPTS=$(SPHINXOPTS)
47 |
48 | clean_sphinx:
49 | make -C docs clean
50 |
51 | clean: clean_sphinx
52 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Qiskit Application Benchmarks
2 |
3 | [](https://opensource.org/licenses/Apache-2.0)[](https://github.com/Qiskit/qiskit-app-benchmarks/actions?query=workflow%3A"Application%20Benchmarks%20Tests"+branch%3Amain+event%3Apush)
4 |
5 | ## Usage
6 |
7 | In order to run benchmarks, run:
8 |
9 | * Finance: `make asv TARGET=finance ASVCMD=run`
10 | * Machine Learning: `make asv TARGET=machine_learning ASVCMD=run`
11 | * Optimization: `make asv TARGET=optimization ASVCMD=run`
12 | * Nature: `make asv TARGET=nature ASVCMD=run`
13 |
14 | Before any benchmarking, you need to set your machine info once.
15 | If you accept the defaults, for finance for instance, run `make asv TARGET=finance ASVCMD=machine ASVOPTS=--yes`.
16 | Another option is to run in development mode as a validation: `make asv TARGET=machine_learning ASVCMD=dev`.
17 |
18 | Documentation for asv can be found at [ASV Documentation](https://asv.readthedocs.io/en/stable/)
19 |
20 | The benchmark results can be found at [ASV Results](https://qiskit.github.io/qiskit-app-benchmarks/)
21 |
22 | ----------------------------------------------------------------------------------------------------
23 |
24 | ## Contribution Guidelines
25 |
26 | If you'd like to contribute to Qiskit, please take a look at our
27 | [contribution guidelines](./CONTRIBUTING.md).
28 | This project adheres to Qiskit's [code of conduct](./CODE_OF_CONDUCT.md).
29 | By participating, you are expected to uphold this code.
30 |
31 | We use [GitHub issues](https://github.com/Qiskit/qiskit-app-benchmarks/issues) for tracking requests and bugs. Please
32 | [join the Qiskit Slack community](https://qisk.it/join-slack)
33 | for discussion and simple questions.
34 | For questions that are more suited for a forum, we use the **Qiskit** tag in [Stack Overflow](https://stackoverflow.com/questions/tagged/qiskit).
35 |
36 | ## Authors and Citation
37 |
38 | Application Benchmarks were inspired, authored and brought about by the collective work of a team of researchers.
39 | Application Benchmarks continues to grow with the help and work of
40 | [many people](https://github.com/Qiskit/qiskit-app-benchmarks/graphs/contributors), who contribute
41 | to the project at different levels.
42 | If you use Qiskit, please cite as per the provided
43 | [BibTeX file](https://github.com/Qiskit/qiskit/blob/master/Qiskit.bib).
44 |
45 | Please note that if you do not like the way your name is cited in the BibTeX file then consult
46 | the information found in the [.mailmap](https://github.com/Qiskit/qiskit-app-benchmarks/blob/main/.mailmap)
47 | file.
48 |
49 | ## License
50 |
51 | This project uses the [Apache License 2.0](LICENSE.txt).
52 |
53 |
54 |
--------------------------------------------------------------------------------
/docs/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qiskit-community/qiskit-app-benchmarks/07fe387ef338dc0d22aa7cbc13ae21ed5d7a5a10/docs/.nojekyll
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2018, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set these variables from the command line.
14 | SPHINXOPTS =
15 | SPHINXBUILD = sphinx-build
16 | SOURCEDIR = .
17 | BUILDDIR = _build
18 | STUBSDIR = stubs
19 |
20 | # Put it first so that "make" without argument is like "make help".
21 | help:
22 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
23 |
24 | spell:
25 | @$(SPHINXBUILD) -M spelling "$(SOURCEDIR)" "$(BUILDDIR)" -W -T --keep-going $(SPHINXOPTS) $(O)
26 |
27 | clean:
28 | rm -rf $(BUILDDIR)
29 | rm -rf $(STUBSDIR)
30 |
31 | .PHONY: help spell clean Makefile
32 |
33 | # Catch-all target: route all unknown targets to Sphinx using the new
34 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
35 | %: Makefile
36 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" -W -T --keep-going $(SPHINXOPTS) $(O)
37 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # pylint: disable=invalid-name
14 | # Configuration file for the Sphinx documentation builder.
15 | #
16 | # This file does only contain a selection of the most common options. For a
17 | # full list see the documentation:
18 | # http://www.sphinx-doc.org/en/master/config
19 |
20 | # -- Path setup --------------------------------------------------------------
21 |
22 | # If extensions (or modules to document with autodoc) are in another directory,
23 | # add these directories to sys.path here. If the directory is relative to the
24 | # documentation root, use os.path.abspath to make it absolute, like shown here.
25 | #
26 | import os
27 | import sys
28 | from datetime import date
29 |
30 | sys.path.append(os.path.abspath("."))
31 |
32 | """
33 | Sphinx documentation builder
34 | """
35 |
36 | # -- Project information -----------------------------------------------------
37 | project = "Qiskit Application Benchmarks"
38 | copyright = (
39 | f"2021, {date.today().year}, Qiskit Development Team" # pylint: disable=redefined-builtin
40 | )
41 | author = "Qiskit Development Team"
42 |
43 | # -- General configuration ---------------------------------------------------
44 |
45 | master_doc = "index"
46 |
47 | # If your documentation needs a minimal Sphinx version, state it here.
48 | #
49 | # needs_sphinx = '1.0'
50 |
51 | # Add any Sphinx extension module names here, as strings. They can be
52 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
53 | # ones.
54 | extensions = [
55 | "sphinx.ext.githubpages",
56 | ]
57 |
58 | spelling_word_list_filename = "../.pylintdict"
59 | spelling_filters = ["lowercase_filter.LowercaseFilter"]
60 |
61 | # The language for content autogenerated by Sphinx. Refer to documentation
62 | # for a list of supported languages.
63 | #
64 | # This is also used if you do content translation via gettext catalogs.
65 | # Usually you set "language" from the command line for these cases.
66 | language = "en"
67 |
68 | # List of patterns, relative to source directory, that match files and
69 | # directories to ignore when looking for source files.
70 | # This pattern also affects html_static_path and html_extra_path.
71 | exclude_patterns = []
72 |
73 | # The name of the Pygments (syntax highlighting) style to use.
74 | pygments_style = "colorful"
75 |
76 | # A boolean that decides whether module names are prepended to all object names
77 | # (for object types where a “module” of some kind is defined), e.g. for
78 | # py:function directives.
79 | add_module_names = False
80 |
81 | # A list of prefixes that are ignored for sorting the Python module index
82 | # (e.g., if this is set to ['foo.'], then foo.bar is shown under B, not F).
83 | # This can be handy if you document a project that consists of a single
84 | # package. Works only for the HTML builder currently.
85 | modindex_common_prefix = []
86 |
87 | # -- Configuration for extlinks extension ------------------------------------
88 | # Refer to https://www.sphinx-doc.org/en/master/usage/extensions/extlinks.html
89 |
90 |
91 | # -- Options for HTML output -------------------------------------------------
92 |
93 | # The theme to use for HTML and HTML Help pages. See the documentation for
94 | # a list of builtin themes.
95 | #
96 | #
97 | html_theme = "alabaster"
98 |
99 | # Theme options are theme-specific and customize the look and feel of a theme
100 | # further. For a list of options available for each theme, see the
101 | # documentation.
102 | #
103 | html_theme_options = {
104 | # Disable showing the sidebar. Defaults to 'false'
105 | "nosidebar": True,
106 | "show_powered_by": False,
107 | }
108 |
--------------------------------------------------------------------------------
/docs/finance/index.rst:
--------------------------------------------------------------------------------
1 | #########################
2 | Qiskit Finance Benchmarks
3 | #########################
4 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | #############################
2 | Qiskit Application Benchmarks
3 | #############################
4 |
5 | .. toctree::
6 | :maxdepth: 1
7 |
8 | Finance
9 | Machine Learning
10 | Nature
11 | Optimization
12 |
--------------------------------------------------------------------------------
/docs/lowercase_filter.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """ Implements a Lower Case Filter for Sphinx spelling """
14 |
15 | from enchant import tokenize
16 |
17 |
18 | class LowercaseFilter(tokenize.Filter):
19 | """Lower Case Filter"""
20 |
21 | def _split(self, word):
22 | """Filter method for sub-tokenization of tokens.
23 |
24 | This method must be a tokenization function that will split the
25 | given word into sub-tokens according to the needs of the filter.
26 | The default behavior is not to split any words.
27 | """
28 | # Don't split, just lower case to test against lowercase dict
29 | return super()._split(word.lower())
30 |
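A minimal standalone sketch (not part of the repository) of how this filter can be exercised directly, assuming pyenchant with an en_US dictionary is installed, that get_tokenizer accepts a filters argument as used by sphinxcontrib-spelling, and that the script runs from the docs directory so lowercase_filter is importable:

from enchant.tokenize import get_tokenizer

from lowercase_filter import LowercaseFilter

# Each token is lower-cased before the spell check, so "Qiskit" is looked up as
# "qiskit", matching the all-lowercase entries in ../.pylintdict used by docs/conf.py.
tokenizer = get_tokenizer("en_US", filters=[LowercaseFilter])
print([word for word, _position in tokenizer("Qiskit Application Benchmarks")])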
--------------------------------------------------------------------------------
/docs/machine_learning/index.rst:
--------------------------------------------------------------------------------
1 | ##################################
2 | Qiskit Machine Learning Benchmarks
3 | ##################################
4 |
--------------------------------------------------------------------------------
/docs/nature/index.rst:
--------------------------------------------------------------------------------
1 | ########################
2 | Qiskit Nature Benchmarks
3 | ########################
4 |
--------------------------------------------------------------------------------
/docs/optimization/index.rst:
--------------------------------------------------------------------------------
1 | ##############################
2 | Qiskit Optimization Benchmarks
3 | ##############################
4 |
--------------------------------------------------------------------------------
/finance/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set these variables from the command line.
14 | ASVCMD =
15 | ASVOPTS =
16 |
17 | .PHONY: asv machine dev run publish preview
18 |
19 | asv:
20 | python -m asv $(ASVCMD) $(ASVOPTS) || true
21 |
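The asv target simply forwards ASVCMD and ASVOPTS to `python -m asv`, so an invocation such as `make asv ASVCMD=run ASVOPTS=--quick` runs `asv run --quick` in this directory; the `|| true` keeps make from failing on a non-zero asv exit code. A rough Python equivalent of that invocation, for illustration only (assumes asv is installed in the current environment):

import subprocess

# Equivalent of `make asv ASVCMD=run ASVOPTS=--quick`; check=False mirrors the `|| true`.
subprocess.run(["python", "-m", "asv", "run", "--quick"], check=False)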
--------------------------------------------------------------------------------
/finance/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/finance/asv.conf.json:
--------------------------------------------------------------------------------
1 | {
2 | // The version of the config file format. Do not change, unless
3 | // you know what you are doing.
4 | "version": 1,
5 |
6 | // The name of the project being benchmarked
7 | "project": "qiskit-finance",
8 |
9 | // The project's homepage
10 | "project_url": "https://qiskit.org/documentation/finance/",
11 |
12 | // The URL or local path of the source code repository for the
13 | // project being benchmarked
14 | "repo": "https://github.com/Qiskit/qiskit-finance.git",
15 |
16 | // The Python project's subdirectory in your repo. If missing or
17 | // the empty string, the project is assumed to be located at the root
18 | // of the repository.
19 | // "repo_subdir": "",
20 |
21 | // Customizable commands for building, installing, and
22 | // uninstalling the project. See asv.conf.json documentation.
23 | //
24 | "install_command": [
25 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/build')\"",
26 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_finance.egg-info')\"",
27 | "python -mpip install git+https://github.com/Qiskit/qiskit-terra",
28 | "python -mpip install git+https://github.com/Qiskit/qiskit-aer",
29 | "python -mpip install git+https://github.com/Qiskit/qiskit-optimization",
30 | "python -mpip install {wheel_file}",
31 | ],
32 | "uninstall_command": [
33 | "return-code=any python -mpip uninstall -y {project}",
34 | "return-code=any python -mpip uninstall -y qiskit-aer qiskit-terra qiskit-optimization"
35 | ],
36 | "build_command": [
37 | "python setup.py build",
38 | "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
39 | ],
40 |
41 | // List of branches to benchmark. If not provided, defaults to "master"
42 | // (for git) or "default" (for mercurial).
43 | "branches": ["main"], // for git
44 | // "branches": ["default"], // for mercurial
45 |
46 | // The DVCS being used. If not set, it will be automatically
47 | // determined from "repo" by looking at the protocol in the URL
48 | // (if remote), or by looking for special directories, such as
49 | // ".git" (if local).
50 | "dvcs": "git",
51 |
52 | // The tool to use to create environments. May be "conda",
53 | // "virtualenv" or other value depending on the plugins in use.
54 | // If missing or the empty string, the tool will be automatically
55 | // determined by looking for tools on the PATH environment
56 | // variable.
57 | "environment_type": "virtualenv",
58 |
59 | // timeout in seconds for installing any dependencies in environment
60 | // defaults to 10 min
61 | //"install_timeout": 600,
62 |
63 | // the base URL to show a commit for the project.
64 | "show_commit_url": "http://github.com/Qiskit/qiskit-finance/commit/",
65 |
66 | // The Pythons you'd like to test against. If not provided, defaults
67 | // to the current version of Python used to run `asv`.
68 | "pythons": ["3.8"],
69 |
70 | // The list of conda channel names to be searched for benchmark
71 | // dependency packages in the specified order
72 | // "conda_channels": ["conda-forge", "defaults"],
73 |
74 | // The matrix of dependencies to test. Each key is the name of a
75 | // package (in PyPI) and the values are version numbers. An empty
76 | // list or empty string indicates to just test against the default
77 | // (latest) version. null indicates that the package is to not be
78 | // installed. If the package to be tested is only available from
79 |     // PyPI, and the 'environment_type' is conda, then you can preface
80 | // the package name by 'pip+', and the package will be installed via
81 | // pip (with all the conda available packages installed first,
82 | // followed by the pip installed packages).
83 | //
84 | "matrix": {
85 | "cplex" : "",
86 | "cvxpy" : "",
87 | "matplotlib" : "",
88 | "gurobipy" : "",
89 | },
90 |
91 | // Combinations of libraries/python versions can be excluded/included
92 | // from the set to test. Each entry is a dictionary containing additional
93 | // key-value pairs to include/exclude.
94 | //
95 | // An exclude entry excludes entries where all values match. The
96 | // values are regexps that should match the whole string.
97 | //
98 | // An include entry adds an environment. Only the packages listed
99 | // are installed. The 'python' key is required. The exclude rules
100 | // do not apply to includes.
101 | //
102 | // In addition to package names, the following keys are available:
103 | //
104 | // - python
105 | // Python version, as in the *pythons* variable above.
106 | // - environment_type
107 | // Environment type, as above.
108 | // - sys_platform
109 | // Platform, as in sys.platform. Possible values for the common
110 | // cases: 'linux2', 'win32', 'cygwin', 'darwin'.
111 | //
112 | // "exclude": [
113 | // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
114 | // {"environment_type": "conda", "six": null}, // don't run without six on conda
115 | // ],
116 | //
117 | // "include": [
118 | // // additional env for python2.7
119 | // {"python": "2.7", "numpy": "1.8"},
120 | // // additional env if run on windows+conda
121 | // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
122 | // ],
123 | "exclude": [
124 | {"python": "3.9", "cplex": ""},
125 | ],
126 |
127 | // The directory (relative to the current directory) that benchmarks are
128 | // stored in. If not provided, defaults to "benchmarks"
129 | "benchmark_dir": "benchmarks",
130 |
131 | // The directory (relative to the current directory) to cache the Python
132 | // environments in. If not provided, defaults to "env"
133 | "env_dir": ".asv/env",
134 |
135 | // The directory (relative to the current directory) that raw benchmark
136 | // results are stored in. If not provided, defaults to "results".
137 | "results_dir": ".asv/results",
138 |
139 | // The directory (relative to the current directory) that the html tree
140 | // should be written to. If not provided, defaults to "html".
141 | "html_dir": ".asv/html",
142 |
143 | // The number of characters to retain in the commit hashes.
144 | // "hash_length": 8,
145 |
146 | // `asv` will cache results of the recent builds in each
147 | // environment, making them faster to install next time. This is
148 | // the number of builds to keep, per environment.
149 | // "build_cache_size": 2,
150 |
151 | // The commits after which the regression search in `asv publish`
152 | // should start looking for regressions. Dictionary whose keys are
153 | // regexps matching to benchmark names, and values corresponding to
154 | // the commit (exclusive) after which to start looking for
155 | // regressions. The default is to start from the first commit
156 | // with results. If the commit is `null`, regression detection is
157 | // skipped for the matching benchmark.
158 | //
159 | // "regressions_first_commits": {
160 | // "some_benchmark": "352cdf", // Consider regressions only after this commit
161 | // "another_benchmark": null, // Skip regression detection altogether
162 | // },
163 |
164 | // The thresholds for relative change in results, after which `asv
165 | // publish` starts reporting regressions. Dictionary of the same
166 | // form as in ``regressions_first_commits``, with values
167 | // indicating the thresholds. If multiple entries match, the
168 | // maximum is taken. If no entry matches, the default is 5%.
169 | //
170 | // "regressions_thresholds": {
171 | // "some_benchmark": 0.01, // Threshold of 1%
172 | // "another_benchmark": 0.5, // Threshold of 50%
173 | // },
174 | }
175 |
--------------------------------------------------------------------------------
/finance/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/machine_learning/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set these variables from the command line.
14 | ASVCMD =
15 | ASVOPTS =
16 |
17 | .PHONY: asv machine dev run publish preview
18 |
19 | asv:
20 | python -m asv $(ASVCMD) $(ASVOPTS) || true
21 |
--------------------------------------------------------------------------------
/machine_learning/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/machine_learning/asv.conf.json:
--------------------------------------------------------------------------------
1 | {
2 | // The version of the config file format. Do not change, unless
3 | // you know what you are doing.
4 | "version": 1,
5 |
6 | // The name of the project being benchmarked
7 | "project": "qiskit-machine-learning",
8 |
9 | // The project's homepage
10 | "project_url": "https://qiskit.org/documentation/machine-learning/",
11 |
12 | // The URL or local path of the source code repository for the
13 | // project being benchmarked
14 | "repo": "https://github.com/Qiskit/qiskit-machine-learning.git",
15 |
16 | // The Python project's subdirectory in your repo. If missing or
17 | // the empty string, the project is assumed to be located at the root
18 | // of the repository.
19 | // "repo_subdir": "",
20 |
21 | // Customizable commands for building, installing, and
22 | // uninstalling the project. See asv.conf.json documentation.
23 | //
24 | "install_command": [
25 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/build')\"",
26 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_machine_learning.egg-info')\"",
27 | "python -mpip install git+https://github.com/Qiskit/qiskit-terra",
28 | "python -mpip install git+https://github.com/Qiskit/qiskit-aer",
29 | "python -mpip install pandas",
30 | "python -mpip install {wheel_file}",
31 | ],
32 | "uninstall_command": [
33 | "return-code=any python -mpip uninstall -y {project}",
34 | "return-code=any python -mpip uninstall -y qiskit-aer qiskit-terra",
35 | ],
36 | "build_command": [
37 | "python setup.py build",
38 | "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
39 | ],
40 |
41 | // List of branches to benchmark. If not provided, defaults to "master"
42 | // (for git) or "default" (for mercurial).
43 | "branches": ["main"], // for git
44 | // "branches": ["default"], // for mercurial
45 |
46 | // The DVCS being used. If not set, it will be automatically
47 | // determined from "repo" by looking at the protocol in the URL
48 | // (if remote), or by looking for special directories, such as
49 | // ".git" (if local).
50 | "dvcs": "git",
51 |
52 | // The tool to use to create environments. May be "conda",
53 | // "virtualenv" or other value depending on the plugins in use.
54 | // If missing or the empty string, the tool will be automatically
55 | // determined by looking for tools on the PATH environment
56 | // variable.
57 | "environment_type": "virtualenv",
58 |
59 | // timeout in seconds for installing any dependencies in environment
60 | // defaults to 10 min
61 | //"install_timeout": 600,
62 |
63 | // the base URL to show a commit for the project.
64 | "show_commit_url": "http://github.com/Qiskit/qiskit-machine-learning/commit/",
65 |
66 | // The Pythons you'd like to test against. If not provided, defaults
67 | // to the current version of Python used to run `asv`.
68 | "pythons": ["3.8"],
69 |
70 | // The list of conda channel names to be searched for benchmark
71 | // dependency packages in the specified order
72 | // "conda_channels": ["conda-forge", "defaults"],
73 |
74 | // The matrix of dependencies to test. Each key is the name of a
75 | // package (in PyPI) and the values are version numbers. An empty
76 | // list or empty string indicates to just test against the default
77 | // (latest) version. null indicates that the package is to not be
78 | // installed. If the package to be tested is only available from
79 |     // PyPI, and the 'environment_type' is conda, then you can preface
80 | // the package name by 'pip+', and the package will be installed via
81 | // pip (with all the conda available packages installed first,
82 | // followed by the pip installed packages).
83 | //
84 | "matrix": {
85 | "torch" : "",
86 | "sparse" : "",
87 | },
88 |
89 | // Combinations of libraries/python versions can be excluded/included
90 | // from the set to test. Each entry is a dictionary containing additional
91 | // key-value pairs to include/exclude.
92 | //
93 | // An exclude entry excludes entries where all values match. The
94 | // values are regexps that should match the whole string.
95 | //
96 | // An include entry adds an environment. Only the packages listed
97 | // are installed. The 'python' key is required. The exclude rules
98 | // do not apply to includes.
99 | //
100 | // In addition to package names, the following keys are available:
101 | //
102 | // - python
103 | // Python version, as in the *pythons* variable above.
104 | // - environment_type
105 | // Environment type, as above.
106 | // - sys_platform
107 | // Platform, as in sys.platform. Possible values for the common
108 | // cases: 'linux2', 'win32', 'cygwin', 'darwin'.
109 | //
110 | // "exclude": [
111 | // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
112 | // {"environment_type": "conda", "six": null}, // don't run without six on conda
113 | // ],
114 | //
115 | // "include": [
116 | // // additional env for python2.7
117 | // {"python": "2.7", "numpy": "1.8"},
118 | // // additional env if run on windows+conda
119 | // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
120 | // ],
121 |
122 | // The directory (relative to the current directory) that benchmarks are
123 | // stored in. If not provided, defaults to "benchmarks"
124 | "benchmark_dir": "benchmarks",
125 |
126 | // The directory (relative to the current directory) to cache the Python
127 | // environments in. If not provided, defaults to "env"
128 | "env_dir": ".asv/env",
129 |
130 | // The directory (relative to the current directory) that raw benchmark
131 | // results are stored in. If not provided, defaults to "results".
132 | "results_dir": ".asv/results",
133 |
134 | // The directory (relative to the current directory) that the html tree
135 | // should be written to. If not provided, defaults to "html".
136 | "html_dir": ".asv/html",
137 |
138 | // The number of characters to retain in the commit hashes.
139 | // "hash_length": 8,
140 |
141 | // `asv` will cache results of the recent builds in each
142 | // environment, making them faster to install next time. This is
143 | // the number of builds to keep, per environment.
144 | // "build_cache_size": 2,
145 |
146 | // The commits after which the regression search in `asv publish`
147 | // should start looking for regressions. Dictionary whose keys are
148 | // regexps matching to benchmark names, and values corresponding to
149 | // the commit (exclusive) after which to start looking for
150 | // regressions. The default is to start from the first commit
151 | // with results. If the commit is `null`, regression detection is
152 | // skipped for the matching benchmark.
153 | //
154 | // "regressions_first_commits": {
155 | // "some_benchmark": "352cdf", // Consider regressions only after this commit
156 | // "another_benchmark": null, // Skip regression detection altogether
157 | // },
158 |
159 | // The thresholds for relative change in results, after which `asv
160 | // publish` starts reporting regressions. Dictionary of the same
161 | // form as in ``regressions_first_commits``, with values
162 | // indicating the thresholds. If multiple entries match, the
163 | // maximum is taken. If no entry matches, the default is 5%.
164 | //
165 | // "regressions_thresholds": {
166 | // "some_benchmark": 0.01, // Threshold of 1%
167 | // "another_benchmark": 0.5, // Threshold of 50%
168 | // },
169 | }
170 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/base_classifier_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Base for Classifier benchmarks."""
14 |
15 | from abc import ABC
16 | from typing import Tuple, Optional, Union
17 |
18 | import numpy as np
19 | from qiskit import Aer
20 | from qiskit.utils import QuantumInstance, algorithm_globals
21 | from sklearn.base import TransformerMixin
22 | from sklearn.datasets import load_iris
23 | from sklearn.model_selection import train_test_split
24 | from sklearn.pipeline import Pipeline
25 | from sklearn.preprocessing import MinMaxScaler, FunctionTransformer
26 |
27 | from .datasets import (
28 | DATASET_SYNTHETIC_CLASSIFICATION_FEATURES,
29 | DATASET_SYNTHETIC_CLASSIFICATION_LABELS,
30 | )
31 |
32 | DATASET_SYNTHETIC_CLASSIFICATION = "dataset_synthetic"
33 | DATASET_IRIS_CLASSIFICATION = "dataset_iris"
34 |
35 |
36 | class BaseClassifierBenchmark(ABC):
37 | """Base class for Classifier benchmarks."""
38 |
39 | def __init__(
40 | self,
41 | synthetic_label_encoder: Optional[Union[TransformerMixin, Pipeline]] = None,
42 | iris_num_classes: int = 3,
43 | iris_label_encoder: Optional[Union[TransformerMixin, Pipeline]] = None,
44 | ):
45 | algorithm_globals.random_seed = 12345
46 | quantum_instance_statevector = QuantumInstance(
47 | Aer.get_backend("statevector_simulator"),
48 | seed_simulator=algorithm_globals.random_seed,
49 | seed_transpiler=algorithm_globals.random_seed,
50 | )
51 | quantum_instance_qasm = QuantumInstance(
52 | Aer.get_backend("qasm_simulator"),
53 | shots=1024,
54 | seed_simulator=algorithm_globals.random_seed,
55 | seed_transpiler=algorithm_globals.random_seed,
56 | )
57 |
58 | self.backends = {
59 | "statevector_simulator": quantum_instance_statevector,
60 | "qasm_simulator": quantum_instance_qasm,
61 | }
62 |
63 |         # if None, use an identity transformer
64 | synthetic_label_encoder = synthetic_label_encoder or FunctionTransformer()
65 | (
66 | synth_train_features,
67 | synth_test_features,
68 | synth_train_labels,
69 | synth_test_labels,
70 | ) = self._prepare_synthetic(synthetic_label_encoder)
71 |
72 | iris_label_encoder = iris_label_encoder or FunctionTransformer()
73 | (
74 | iris_train_features,
75 | iris_test_features,
76 | iris_train_labels,
77 | iris_test_labels,
78 | ) = self._prepare_iris(iris_num_classes, iris_label_encoder)
79 |
80 | self.datasets = {
81 |             DATASET_SYNTHETIC_CLASSIFICATION: {
82 | "train_features": synth_train_features,
83 | "train_labels": synth_train_labels,
84 | "test_features": synth_test_features,
85 | "test_labels": synth_test_labels,
86 | },
87 |             DATASET_IRIS_CLASSIFICATION: {
88 | "train_features": iris_train_features,
89 | "train_labels": iris_train_labels,
90 | "test_features": iris_test_features,
91 | "test_labels": iris_test_labels,
92 | },
93 | }
94 |
95 | def _prepare_synthetic(
96 | self, label_encoder: Union[TransformerMixin, Pipeline]
97 | ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
98 | synthetic_labels = label_encoder.fit_transform(DATASET_SYNTHETIC_CLASSIFICATION_LABELS)
99 |
100 | (
101 | synth_train_features,
102 | synth_test_features,
103 | synth_train_labels,
104 | synth_test_labels,
105 | ) = train_test_split(
106 | DATASET_SYNTHETIC_CLASSIFICATION_FEATURES,
107 | synthetic_labels,
108 | test_size=5,
109 | shuffle=False,
110 | )
111 | return synth_train_features, synth_test_features, synth_train_labels, synth_test_labels
112 |
113 | def _prepare_iris(
114 | self, num_classes: int, label_encoder: Union[TransformerMixin, Pipeline]
115 | ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
116 | iris_features_all, iris_labels_all = load_iris(return_X_y=True)
117 | size = 25
118 | iris_features = np.zeros((size, 4))
119 | iris_labels = np.zeros(size)
120 | for i in range(size):
121 |             # iris stores 50 samples per class; this interleaves samples from the first num_classes classes
122 | num_samples = 50
123 | index = num_samples * (i % num_classes) + i
124 | iris_features[i, :] = iris_features_all[index]
125 | iris_labels[i] = iris_labels_all[index]
126 | scaler = MinMaxScaler((-1, 1))
127 | iris_features = scaler.fit_transform(iris_features)
128 | iris_labels = label_encoder.fit_transform(iris_labels)
129 |
130 | (
131 | iris_train_features,
132 | iris_test_features,
133 | iris_train_labels,
134 | iris_test_labels,
135 | ) = train_test_split(
136 | iris_features,
137 | iris_labels,
138 | test_size=5,
139 | shuffle=False,
140 | )
141 |
142 | return iris_train_features, iris_test_features, iris_train_labels, iris_test_labels
143 |
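A quick standalone check (illustrative only, not part of the repository) of the index arithmetic in _prepare_iris above: the iris dataset stores 50 consecutive samples per class, so `index = 50 * (i % num_classes) + i` interleaves the first num_classes classes while every index stays within the 150 available samples.

# Mirrors the loop in _prepare_iris with the default num_classes = 3.
num_classes, samples_per_class = 3, 50
indices = [samples_per_class * (i % num_classes) + i for i in range(6)]
print(indices)  # [0, 51, 102, 3, 54, 105] -> classes 0, 1, 2, 0, 1, 2, ...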
--------------------------------------------------------------------------------
/machine_learning/benchmarks/base_regressor_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Base class for regressor benchmarks."""
13 |
14 | from abc import ABC
15 | from typing import Optional, Tuple
16 |
17 | import numpy as np
18 | from qiskit import Aer, QuantumCircuit
19 | from qiskit.algorithms.optimizers import Optimizer
20 | from qiskit.circuit import Parameter
21 | from qiskit.circuit.library import ZZFeatureMap, RealAmplitudes
22 | from qiskit.utils import QuantumInstance, algorithm_globals
23 | from qiskit_machine_learning.algorithms import NeuralNetworkRegressor
24 | from qiskit_machine_learning.neural_networks import TwoLayerQNN
25 | from sklearn.model_selection import train_test_split
26 | from sklearn.preprocessing import MinMaxScaler
27 |
28 | from .datasets import (
29 | DATASET_SYNTHETIC_REGRESSION_FEATURES,
30 | DATASET_SYNTHETIC_REGRESSION_LABELS,
31 | load_ccpp,
32 | )
33 |
34 | DATASET_SYNTHETIC_REGRESSION = "dataset_synthetic_regression"
35 | DATASET_CCPP_REGRESSION = "dataset_ccpp"
36 |
37 |
38 | class BaseRegressorBenchmark(ABC):
39 | """Base class for regressor benchmarks."""
40 |
41 | def __init__(self) -> None:
42 |
43 | quantum_instance_statevector = QuantumInstance(
44 | Aer.get_backend("statevector_simulator"),
45 | seed_simulator=algorithm_globals.random_seed,
46 | seed_transpiler=algorithm_globals.random_seed,
47 | )
48 | quantum_instance_qasm = QuantumInstance(
49 | Aer.get_backend("qasm_simulator"),
50 | shots=1024,
51 | seed_simulator=algorithm_globals.random_seed,
52 | seed_transpiler=algorithm_globals.random_seed,
53 | )
54 |
55 | self.backends = {
56 | "statevector_simulator": quantum_instance_statevector,
57 | "qasm_simulator": quantum_instance_qasm,
58 | }
59 |
60 | # prepare synthetic dataset
61 | (
62 | synth_train_features,
63 | synth_test_features,
64 | synth_train_labels,
65 | synth_test_labels,
66 | ) = train_test_split(
67 | DATASET_SYNTHETIC_REGRESSION_FEATURES,
68 | DATASET_SYNTHETIC_REGRESSION_LABELS,
69 | test_size=5,
70 | shuffle=False,
71 | )
72 |
73 | ccpp_features, ccpp_labels = self._prepare_ccpp()
74 | (
75 | ccpp_train_features,
76 | ccpp_test_features,
77 | ccpp_train_labels,
78 | ccpp_test_labels,
79 | ) = train_test_split(ccpp_features, ccpp_labels, test_size=5, shuffle=False)
80 |
81 | self.datasets = {
82 | DATASET_SYNTHETIC_REGRESSION: {
83 | "train_features": synth_train_features,
84 | "train_labels": synth_train_labels,
85 | "test_features": synth_test_features,
86 | "test_labels": synth_test_labels,
87 | },
88 | DATASET_CCPP_REGRESSION: {
89 | "train_features": ccpp_train_features,
90 | "train_labels": ccpp_train_labels,
91 | "test_features": ccpp_test_features,
92 | "test_labels": ccpp_test_labels,
93 | },
94 | }
95 |
96 | def _prepare_ccpp(self) -> Tuple[np.ndarray, np.ndarray]:
97 | """
98 |         Prepare the CCPP dataset; we can afford only a tiny subset of it for training.
99 | """
100 | ccpp_features, ccpp_labels = load_ccpp()
101 | num_samples = 25
102 | ccpp_features = ccpp_features[:num_samples]
103 | ccpp_labels = ccpp_labels[:num_samples]
104 | scaler = MinMaxScaler((-1, 1))
105 | ccpp_features = scaler.fit_transform(ccpp_features)
106 | ccpp_labels = scaler.fit_transform(ccpp_labels.reshape(-1, 1))
107 | return ccpp_features, ccpp_labels
108 |
109 | def _construct_qnn_synthetic(
110 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
111 | ) -> NeuralNetworkRegressor:
112 | num_inputs = 1
113 | # construct simple feature map
114 | param_x = Parameter("x")
115 | feature_map = QuantumCircuit(1, name="fm")
116 | feature_map.ry(param_x, 0)
117 |
118 | # construct simple ansatz
119 | param_y = Parameter("y")
120 | ansatz = QuantumCircuit(1, name="vf")
121 | ansatz.ry(param_y, 0)
122 |
123 | opflow_qnn = TwoLayerQNN(
124 | num_inputs, feature_map, ansatz, quantum_instance=self.backends[quantum_instance_name]
125 | )
126 |
127 | return NeuralNetworkRegressor(opflow_qnn, optimizer=optimizer)
128 |
129 | def _construct_qnn_ccpp(
130 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
131 | ) -> NeuralNetworkRegressor:
132 | num_inputs = 4
133 | feature_map = ZZFeatureMap(num_inputs)
134 | ansatz = RealAmplitudes(num_inputs)
135 | opflow_qnn = TwoLayerQNN(
136 | num_inputs, feature_map, ansatz, quantum_instance=self.backends[quantum_instance_name]
137 | )
138 |
139 | return NeuralNetworkRegressor(opflow_qnn, optimizer=optimizer)
140 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/circuit_qnn_base_classifier_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Base class for CircuitQNN based Classifier benchmarks."""
14 | from abc import ABC
15 | from typing import Optional, Callable
16 |
17 | from qiskit import QuantumCircuit
18 | from qiskit.algorithms.optimizers import Optimizer
19 | from qiskit.circuit.library import ZZFeatureMap, RealAmplitudes
20 | from qiskit.utils import algorithm_globals
21 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
22 | from qiskit_machine_learning.neural_networks import CircuitQNN
23 |
24 | from .base_classifier_benchmark import BaseClassifierBenchmark
25 |
26 |
27 | class CircuitQnnBaseClassifierBenchmark(BaseClassifierBenchmark, ABC):
28 | """Base class for CircuitQNN Classifier benchmarks."""
29 |
30 | def __init__(self) -> None:
31 | super().__init__()
32 |
33 | def _construct_qnn_classifier_synthetic(
34 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
35 | ) -> NeuralNetworkClassifier:
36 |
37 | # parity maps bitstrings to 0 or 1
38 | def parity(x):
39 | return f"{x:b}".count("1") % 2
40 |
41 | return self._construct_qnn_classifier(
42 | num_inputs=2,
43 | output_shape=2,
44 | interpret=parity,
45 | quantum_instance_name=quantum_instance_name,
46 | optimizer=optimizer,
47 | )
48 |
49 | def _construct_qnn_classifier_iris(
50 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
51 | ) -> NeuralNetworkClassifier:
52 |
53 | # map to three classes
54 | def three_class(x):
55 | return f"{x:b}".count("1") % 3
56 |
57 | return self._construct_qnn_classifier(
58 | num_inputs=4,
59 | output_shape=3,
60 | interpret=three_class,
61 | quantum_instance_name=quantum_instance_name,
62 | optimizer=optimizer,
63 | )
64 |
65 | def _construct_qnn_classifier(
66 | self,
67 | num_inputs: int,
68 | output_shape: int,
69 | interpret: Callable[[int], int],
70 | quantum_instance_name: str,
71 | optimizer: Optional[Optimizer],
72 | ) -> NeuralNetworkClassifier:
73 | feature_map = ZZFeatureMap(num_inputs)
74 |
75 | ansatz = RealAmplitudes(num_inputs)
76 |
77 | qc = QuantumCircuit(num_inputs)
78 | qc.append(feature_map, range(num_inputs))
79 | qc.append(ansatz, range(num_inputs))
80 |
81 | circuit_qnn = CircuitQNN(
82 | circuit=qc,
83 | input_params=feature_map.parameters,
84 | weight_params=ansatz.parameters,
85 | interpret=interpret,
86 | output_shape=output_shape,
87 | quantum_instance=self.backends[quantum_instance_name],
88 | )
89 | initial_point = algorithm_globals.random.random(ansatz.num_parameters)
90 | model = NeuralNetworkClassifier(
91 | neural_network=circuit_qnn, optimizer=optimizer, initial_point=initial_point
92 | )
93 | return model
94 |
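A standalone check (illustrative only) of the interpret callables defined above; each maps an integer measurement outcome to a class label based on the number of 1-bits in its binary representation:

def parity(x):
    return f"{x:b}".count("1") % 2

def three_class(x):
    return f"{x:b}".count("1") % 3

# Outcome 6 == 0b110 has two 1-bits, so parity assigns class 0 and three_class assigns class 2.
assert parity(6) == 0
assert three_class(6) == 2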
--------------------------------------------------------------------------------
/machine_learning/benchmarks/circuit_qnn_classifier_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Circuit QNN Classifier benchmarks."""
13 |
14 | import pickle
15 | from itertools import product
16 | from timeit import timeit
17 | from typing import Optional
18 |
19 | import numpy as np
20 | from qiskit.algorithms.optimizers import COBYLA
21 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
22 | from sklearn.metrics import f1_score, precision_score, recall_score
23 |
24 | from .circuit_qnn_base_classifier_benchmark import CircuitQnnBaseClassifierBenchmark
25 | from .base_classifier_benchmark import (
26 | DATASET_SYNTHETIC_CLASSIFICATION,
27 | DATASET_IRIS_CLASSIFICATION,
28 | )
29 |
30 |
31 | class CircuitQnnClassifierBenchmarks(CircuitQnnBaseClassifierBenchmark):
32 | """Circuit QNN Classifier benchmarks."""
33 |
34 | version = 2
35 | timeout = 1200.0
36 | params = [
37 | [DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION],
38 | ["qasm_simulator", "statevector_simulator"],
39 | ]
40 | param_names = ["dataset", "backend name"]
41 |
42 | def __init__(self) -> None:
43 | super().__init__()
44 | self.train_features: Optional[np.ndarray] = None
45 | self.train_labels: Optional[np.ndarray] = None
46 | self.test_features: Optional[np.ndarray] = None
47 | self.test_labels: Optional[np.ndarray] = None
48 | self.model: Optional[NeuralNetworkClassifier] = None
49 |
50 | def setup(self, dataset: str, quantum_instance_name: str) -> None:
51 | """Set up the benchmark."""
52 |
53 | self.train_features = self.datasets[dataset]["train_features"]
54 | self.train_labels = self.datasets[dataset]["train_labels"]
55 | self.test_features = self.datasets[dataset]["test_features"]
56 | self.test_labels = self.datasets[dataset]["test_labels"]
57 |
58 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
59 | self.model = self._construct_qnn_classifier_synthetic(
60 | quantum_instance_name=quantum_instance_name
61 | )
62 | elif dataset == DATASET_IRIS_CLASSIFICATION:
63 | self.model = self._construct_qnn_classifier_iris(
64 | quantum_instance_name=quantum_instance_name
65 | )
66 | else:
67 | raise ValueError(f"Unsupported dataset: {dataset}")
68 |
69 | file_name = f"circuit_qnn_{dataset}_{quantum_instance_name}.pickle"
70 | with open(file_name, "rb") as file:
71 | self.model._fit_result = pickle.load(file)
72 |
73 | def setup_cache(self) -> None:
74 | """Cache CircuitQNN fitted model."""
75 | for dataset, backend in product(*self.params):
76 | train_features = self.datasets[dataset]["train_features"]
77 | train_labels = self.datasets[dataset]["train_labels"]
78 |
79 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
80 | model = self._construct_qnn_classifier_synthetic(
81 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
82 | )
83 | elif dataset == DATASET_IRIS_CLASSIFICATION:
84 | model = self._construct_qnn_classifier_iris(
85 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
86 | )
87 | else:
88 | raise ValueError(f"Unsupported dataset: {dataset}")
89 |
90 | model.fit(train_features, train_labels)
91 |
92 | file_name = f"circuit_qnn_{dataset}_{backend}.pickle"
93 | with open(file_name, "wb") as file:
94 | pickle.dump(model._fit_result, file)
95 |
96 | # pylint: disable=invalid-name
97 | def time_score_circuit_qnn_classifier(self, _, __):
98 | """Time scoring CircuitQNN classifier on data."""
99 | self.model.score(self.train_features, self.train_labels)
100 |
101 | def time_predict_circuit_qnn_classifier(self, _, __):
102 | """Time predicting with CircuitQNN classifier."""
103 | self.model.predict(self.train_features)
104 |
105 | def track_accuracy_score_circuit_qnn_classifier(self, _, __):
106 | """Tracks the overall accuracy of the classification results."""
107 | return self.model.score(self.test_features, self.test_labels)
108 |
109 | def track_precision_score_circuit_qnn_classifier(self, _, __):
110 | """Tracks the precision score."""
111 | predicts = self.model.predict(self.test_features)
112 | return precision_score(y_true=self.test_labels, y_pred=predicts, average="micro")
113 |
114 | def track_recall_score_circuit_qnn_classifier(self, _, __):
115 |         """Tracks the micro-averaged recall score of the classification results."""
116 | predicts = self.model.predict(self.test_features)
117 | return recall_score(y_true=self.test_labels, y_pred=predicts, average="micro")
118 |
119 | def track_f1_score_circuit_qnn_classifier(self, _, __):
120 |         """Tracks the micro-averaged f1 score of the classification results."""
121 | predicts = self.model.predict(self.test_features)
122 | return f1_score(y_true=self.test_labels, y_pred=predicts, average="micro")
123 |
124 |
125 | if __name__ == "__main__":
126 | bench = CircuitQnnClassifierBenchmarks()
127 | bench.setup_cache()
128 | for dataset_name, backend_name in product(*CircuitQnnClassifierBenchmarks.params):
129 | try:
130 | bench.setup(dataset_name, backend_name)
131 | except NotImplementedError:
132 | continue
133 |
134 | for method in (
135 | "time_score_circuit_qnn_classifier",
136 | "time_predict_circuit_qnn_classifier",
137 | "track_accuracy_score_circuit_qnn_classifier",
138 | "track_precision_score_circuit_qnn_classifier",
139 | "track_recall_score_circuit_qnn_classifier",
140 | "track_f1_score_circuit_qnn_classifier",
141 | ):
142 | elapsed = timeit(
143 | f'bench.{method}("{dataset_name}", "{backend_name}")', number=10, globals=globals()
144 | )
145 | print(f"{method}:\t{elapsed}")
146 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/circuit_qnn_classifier_fit_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Circuit QNN Classifier fit benchmarks."""
13 |
14 | from itertools import product
15 | from timeit import timeit
16 | from typing import Optional
17 |
18 | import numpy as np
19 | from qiskit.algorithms.optimizers import COBYLA, NELDER_MEAD, L_BFGS_B
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
21 |
22 | from .circuit_qnn_base_classifier_benchmark import CircuitQnnBaseClassifierBenchmark
23 | from .base_classifier_benchmark import (
24 | DATASET_SYNTHETIC_CLASSIFICATION,
25 | DATASET_IRIS_CLASSIFICATION,
26 | )
27 |
28 |
29 | class CircuitQnnFitClassifierBenchmarks(CircuitQnnBaseClassifierBenchmark):
30 |     """Circuit QNN Classifier fit benchmarks."""
31 |
32 | version = 2
33 | timeout = 1200.0
34 | params = (
35 | [DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION],
36 | ["qasm_simulator", "statevector_simulator"],
37 | ["cobyla", "nelder-mead", "l-bfgs-b"],
38 | )
39 | param_names = ["dataset", "backend name", "optimizer"]
40 |
41 | def __init__(self) -> None:
42 | super().__init__()
43 |
44 | self.optimizers = {
45 | "cobyla": COBYLA(maxiter=100),
46 | "nelder-mead": NELDER_MEAD(maxiter=50),
47 | "l-bfgs-b": L_BFGS_B(maxiter=20),
48 | }
49 | self.train_features: Optional[np.ndarray] = None
50 | self.train_labels: Optional[np.ndarray] = None
51 | self.test_features: Optional[np.ndarray] = None
52 | self.test_labels: Optional[np.ndarray] = None
53 | self.model: Optional[NeuralNetworkClassifier] = None
54 |
55 | def setup(self, dataset: str, quantum_instance_name: str, optimizer: str) -> None:
56 | """Set up the benchmark."""
57 | self.train_features = self.datasets[dataset]["train_features"]
58 | self.train_labels = self.datasets[dataset]["train_labels"]
59 |
60 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
61 | self.model = self._construct_qnn_classifier_synthetic(
62 | quantum_instance_name=quantum_instance_name,
63 | optimizer=self.optimizers[optimizer],
64 | )
65 | elif dataset == DATASET_IRIS_CLASSIFICATION:
66 | self.model = self._construct_qnn_classifier_iris(
67 | quantum_instance_name=quantum_instance_name,
68 | optimizer=self.optimizers[optimizer],
69 | )
70 | else:
71 | raise ValueError(f"Unsupported dataset: {dataset}")
72 |
73 | # pylint: disable=invalid-name
74 | def time_fit_circuit_qnn_classifier(self, _, __, ___):
75 | """Time fitting CircuitQNN classifier to data."""
76 | self.model.fit(self.train_features, self.train_labels)
77 |
78 |
79 | if __name__ == "__main__":
80 | for dataset_name, backend, optimizer_name in product(*CircuitQnnFitClassifierBenchmarks.params):
81 | bench = CircuitQnnFitClassifierBenchmarks()
82 | try:
83 | bench.setup(dataset_name, backend, optimizer_name)
84 | except NotImplementedError:
85 | continue
86 |
87 | for method in ["time_fit_circuit_qnn_classifier"]:
88 | elapsed = timeit(
89 | f'bench.{method}("{dataset_name}", "{backend}", "{optimizer_name}")',
90 | number=10,
91 | globals=globals(),
92 | )
93 | print(f"{method}:\t{elapsed}")
94 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/datasets.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Dataset definitions for machine learning benchmarks."""
14 |
15 | import pathlib
16 | from typing import Tuple
17 |
18 | import numpy as np
19 | import pandas as pd
20 |
21 |
22 | DATASET_SYNTHETIC_CLASSIFICATION_FEATURES = np.array(
23 | [
24 | [-0.5662535427032733, -0.9506985204545293],
25 | [0.9237926141124049, -0.21024034471405928],
26 | [-0.5363927962538539, -0.6302006194568324],
27 | [-0.2977115347918018, 0.7846970052821041],
28 | [0.21787357489553272, -0.7196039773174467],
29 | [-0.010251183455284751, 0.396711608321084],
30 | [0.42017133504711857, -0.8924068543685355],
31 | [0.13563265892971055, 0.25827213877148214],
32 | [-0.36481296618769576, 0.03628785960039349],
33 | [0.3192543835725403, -0.1446297087477213],
34 | [-0.925488106852753, -0.4419621857074916],
35 | [-0.4224764941409678, -0.30310071927735405],
36 | [-0.7589609745678978, -0.1886743530929469],
37 | [-0.47958582394439997, 0.7546226885186544],
38 | [-0.798675382973272, -0.15556541510766309],
39 | [-0.38321155225715753, -0.023299505759131423],
40 | [-0.7851734061535027, -0.9130207147701899],
41 | [0.3841410493379849, 0.8008340382312655],
42 | [-0.2914294558218786, 0.2355021627215368],
43 | [0.5199932916423333, 0.6951624888684251],
44 | [0.3588191281355948, -0.04488150511315059],
45 | [-0.5102264261410945, -0.7506684295154553],
46 | [-0.9568730382417594, 0.5771183134462541],
47 | [0.2764265583218535, 0.2632603202395736],
48 | [0.8982101657724386, -0.31681068006920854],
49 | ]
50 | )
51 |
52 | DATASET_SYNTHETIC_CLASSIFICATION_LABELS = np.array(
53 | [0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1]
54 | )
55 |
56 |
57 | # Synthetic dataset for regression is generated as a noisy sine wave.
58 | # For more details please refer to the "Neural Network Classifier & Regressor" tutorial
59 | # in Qiskit Machine Learning.
60 | DATASET_SYNTHETIC_REGRESSION_FEATURES = np.array(
61 | [
62 | [-1.0],
63 | [-0.61017247],
64 | [0.3437984],
65 | [-0.57425039],
66 | [-0.15067904],
67 | [0.54669445],
68 | [-0.16936095],
69 | [-0.90157044],
70 | [0.37708312],
71 | [0.99143454],
72 | [-0.49085348],
73 | [0.53117172],
74 | [1.0],
75 | [0.02066802],
76 | [-0.6564818],
77 | [-0.76721681],
78 | [-0.64475023],
79 | [0.43324292],
80 | [0.71212452],
81 | [0.80997021],
82 | [-0.07795483],
83 | [0.32083733],
84 | [0.17454994],
85 | [0.56743491],
86 | [-0.12885006],
87 | ]
88 | )
89 |
90 | DATASET_SYNTHETIC_REGRESSION_LABELS = np.array(
91 | [
92 | [-0.07742259],
93 | [-1.01861621],
94 | [0.9456131],
95 | [-1.15321842],
96 | [-0.60170763],
97 | [1.03743838],
98 | [-0.53921747],
99 | [-0.53976918],
100 | [0.99681555],
101 | [0.29210661],
102 | [-1.09853086],
103 | [0.89345683],
104 | [0.12046636],
105 | [-0.03882522],
106 | [-0.88622102],
107 | [-0.83758335],
108 | [-0.98542812],
109 | [1.07553076],
110 | [0.70927951],
111 | [0.64405885],
112 | [-0.36920025],
113 | [0.89642704],
114 | [0.41763132],
115 | [1.13432333],
116 | [-0.56352804],
117 | ]
118 | )
119 |
120 |
121 | def load_ccpp() -> Tuple[np.ndarray, np.ndarray]:
122 | """
123 |     Loads the Combined Cycle Power Plant dataset. See the UCI Machine Learning
124 |     Repository website for more details.
125 |
126 |
127 | Returns:
128 | a tuple with features and labels as numpy arrays.
129 | """
130 |     # the benchmarks run in a temporary directory, so resolve the dataset file relative to this module
131 | abs_path = pathlib.Path(__file__).parent.resolve()
132 | ccpp_df = pd.read_csv(f"{abs_path}/CCPP_data.csv")
133 | ccpp_features = ccpp_df[["AT", "V", "AP", "RH"]].to_numpy()
134 | ccpp_labels = ccpp_df["PE"].to_numpy()
135 | return ccpp_features, ccpp_labels
136 |
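An illustrative use of load_ccpp (a sketch, assuming CCPP_data.csv sits next to this module as it does in this repository and that the script runs from the benchmarks directory so the module is importable):

from datasets import load_ccpp

# features has one column per AT, V, AP, RH; labels holds the PE target values.
features, labels = load_ccpp()
print(features.shape, labels.shape)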
--------------------------------------------------------------------------------
/machine_learning/benchmarks/opflow_qnn_base_classifier_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Base class for Opflow based classifier benchmarks."""
14 | from abc import ABC
15 | from typing import Optional
16 |
17 | from qiskit.algorithms.optimizers import Optimizer
18 | from qiskit.circuit.library import RealAmplitudes, ZZFeatureMap
19 | from qiskit.utils import algorithm_globals
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
21 | from qiskit_machine_learning.neural_networks import TwoLayerQNN
22 | from sklearn.preprocessing import FunctionTransformer
23 |
24 | from .base_classifier_benchmark import BaseClassifierBenchmark
25 |
26 |
27 | class OpflowQnnBaseClassifierBenchmark(BaseClassifierBenchmark, ABC):
28 | """Base class for Opflow Classifier benchmarks."""
29 |
30 | def __init__(self) -> None:
31 | encoder = FunctionTransformer(lambda x: 2 * x - 1)
32 | super().__init__(
33 | synthetic_label_encoder=encoder, iris_num_classes=2, iris_label_encoder=encoder
34 | )
35 |
36 | def _construct_opflow_classifier_synthetic(
37 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
38 | ) -> NeuralNetworkClassifier:
39 |         """Construct a TwoLayerQNN-based classifier for the synthetic classification dataset."""
40 | return self._construct_opflow_classifier(2, quantum_instance_name, optimizer)
41 |
42 | def _construct_opflow_classifier_iris(
43 | self, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
44 | ) -> NeuralNetworkClassifier:
45 |         """Construct a TwoLayerQNN-based classifier for the iris classification dataset."""
46 | return self._construct_opflow_classifier(4, quantum_instance_name, optimizer)
47 |
48 | def _construct_opflow_classifier(
49 | self, num_inputs: int, quantum_instance_name: str, optimizer: Optional[Optimizer] = None
50 | ) -> NeuralNetworkClassifier:
51 | """Construct a TwoLayerQNN-based classifier."""
52 | feature_map = ZZFeatureMap(num_inputs)
53 | ansatz = RealAmplitudes(num_inputs)
54 |
55 | opflow_qnn = TwoLayerQNN(
56 | num_inputs,
57 | feature_map=feature_map,
58 | ansatz=ansatz,
59 | quantum_instance=self.backends[quantum_instance_name],
60 | )
61 |
62 | initial_point = algorithm_globals.random.random(ansatz.num_parameters)
63 | model = NeuralNetworkClassifier(
64 | opflow_qnn, optimizer=optimizer, initial_point=initial_point
65 | )
66 | return model
67 |
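A quick numeric check (standalone sketch, not part of the repository) of the label encoding set up in __init__ above: the FunctionTransformer maps {0, 1} labels to {-1, +1}, which is why the labels are rescaled for the TwoLayerQNN-based classifiers, whose observable expectation values lie in the ±1 range.

import numpy as np
from sklearn.preprocessing import FunctionTransformer

# Same encoder as in OpflowQnnBaseClassifierBenchmark.__init__.
encoder = FunctionTransformer(lambda x: 2 * x - 1)
print(encoder.fit_transform(np.array([0, 1, 1, 0])))  # -> [-1  1  1 -1]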
--------------------------------------------------------------------------------
/machine_learning/benchmarks/opflow_qnn_classifier_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Opflow Neural Network Classifier benchmarks."""
13 |
14 | import pickle
15 | from itertools import product
16 | from timeit import timeit
17 | from typing import Optional
18 |
19 | import numpy as np
20 | from qiskit.algorithms.optimizers import COBYLA
21 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
22 | from sklearn.metrics import precision_score, recall_score, f1_score
23 |
24 | from .opflow_qnn_base_classifier_benchmark import OpflowQnnBaseClassifierBenchmark
25 | from .base_classifier_benchmark import (
26 | DATASET_SYNTHETIC_CLASSIFICATION,
27 | DATASET_IRIS_CLASSIFICATION,
28 | )
29 |
30 |
31 | class OpflowQnnClassifierBenchmarks(OpflowQnnBaseClassifierBenchmark):
32 | """Opflow QNN Classifier benchmarks."""
33 |
34 | version = 2
35 | timeout = 1200.0
36 | params = [
37 | [DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION],
38 | ["qasm_simulator", "statevector_simulator"],
39 | ]
40 | param_names = ["dataset", "backend name"]
41 |
42 | def __init__(self) -> None:
43 | super().__init__()
44 | self.train_features: Optional[np.ndarray] = None
45 | self.train_labels: Optional[np.ndarray] = None
46 | self.test_features: Optional[np.ndarray] = None
47 | self.test_labels: Optional[np.ndarray] = None
48 | self.model: Optional[NeuralNetworkClassifier] = None
49 |
50 | def setup(self, dataset: str, quantum_instance_name: str) -> None:
51 | """Set up the benchmark."""
52 |
53 | self.train_features = self.datasets[dataset]["train_features"]
54 | self.train_labels = self.datasets[dataset]["train_labels"]
55 | self.test_features = self.datasets[dataset]["test_features"]
56 | self.test_labels = self.datasets[dataset]["test_labels"]
57 |
58 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
59 | self.model = self._construct_opflow_classifier_synthetic(
60 | quantum_instance_name=quantum_instance_name
61 | )
62 | elif dataset == DATASET_IRIS_CLASSIFICATION:
63 | self.model = self._construct_opflow_classifier_iris(
64 | quantum_instance_name=quantum_instance_name
65 | )
66 | else:
67 | raise ValueError(f"Unsupported dataset: {dataset}")
68 |
69 | file_name = f"opflow_qnn_{dataset}_{quantum_instance_name}.pickle"
70 | with open(file_name, "rb") as file:
71 | self.model._fit_result = pickle.load(file)
72 |
73 | def setup_cache(self) -> None:
74 |         """Cache OpflowQNN fitted model."""
75 | for dataset, backend in product(*self.params):
76 | train_features = self.datasets[dataset]["train_features"]
77 | train_labels = self.datasets[dataset]["train_labels"]
78 |
79 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
80 | model = self._construct_opflow_classifier_synthetic(
81 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
82 | )
83 | elif dataset == DATASET_IRIS_CLASSIFICATION:
84 | model = self._construct_opflow_classifier_iris(
85 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
86 | )
87 | else:
88 | raise ValueError(f"Unsupported dataset: {dataset}")
89 |
90 | model.fit(train_features, train_labels)
91 |
92 | file_name = f"opflow_qnn_{dataset}_{backend}.pickle"
93 | with open(file_name, "wb") as file:
94 | pickle.dump(model._fit_result, file)
95 |
96 | # pylint: disable=invalid-name
97 | def time_score_opflow_qnn_classifier(self, _, __):
98 | """Time scoring OpflowQNN classifier on data."""
99 | self.model.score(self.train_features, self.train_labels)
100 |
101 | def time_predict_opflow_qnn_classifier(self, _, __):
102 |         """Time predicting with OpflowQNN classifier."""
103 | self.model.predict(self.train_features)
104 |
105 | def track_accuracy_score_opflow_qnn_classifier(self, _, __):
106 | """Tracks the overall accuracy of the classification results."""
107 | return self.model.score(self.test_features, self.test_labels)
108 |
109 | def track_precision_score_opflow_qnn_classifier(self, _, __):
110 | """Tracks the precision score."""
111 | predicts = self.model.predict(self.test_features)
112 | return precision_score(y_true=self.test_labels, y_pred=predicts, average="micro")
113 |
114 | def track_recall_score_opflow_qnn_classifier(self, _, __):
115 |         """Tracks the micro-averaged recall score of the classification results."""
116 | predicts = self.model.predict(self.test_features)
117 | return recall_score(y_true=self.test_labels, y_pred=predicts, average="micro")
118 |
119 | def track_f1_score_opflow_qnn_classifier(self, _, __):
120 |         """Tracks the micro-averaged f1 score of the classification results."""
121 | predicts = self.model.predict(self.test_features)
122 | return f1_score(y_true=self.test_labels, y_pred=predicts, average="micro")
123 |
124 |
125 | if __name__ == "__main__":
126 | bench = OpflowQnnClassifierBenchmarks()
127 | bench.setup_cache()
128 | for dataset_name, backend_name in product(*OpflowQnnClassifierBenchmarks.params):
129 | try:
130 | bench.setup(dataset_name, backend_name)
131 | except NotImplementedError:
132 | continue
133 |
134 | for method in (
135 | "time_score_opflow_qnn_classifier",
136 | "time_predict_opflow_qnn_classifier",
137 | "track_accuracy_score_opflow_qnn_classifier",
138 | "track_precision_score_opflow_qnn_classifier",
139 | "track_recall_score_opflow_qnn_classifier",
140 | "track_f1_score_opflow_qnn_classifier",
141 | ):
142 | elapsed = timeit(
143 | f'bench.{method}("{dataset_name}", "{backend_name}")', number=10, globals=globals()
144 | )
145 | print(f"{method}:\t{elapsed}")
146 |
--------------------------------------------------------------------------------
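A note on the caching pattern above: `setup_cache()` trains the classifier once per parameter combination and pickles the private `_fit_result` attribute, and `setup()` re-attaches it, so the `time_*`/`track_*` methods measure inference rather than training. Below is a minimal illustrative sketch of that round trip; the helper names `save_fit`/`load_fit` are hypothetical and introduced only for illustration:

```python
import pickle


def save_fit(model, path: str) -> None:
    # Persist only the trained state; this leans on the private _fit_result
    # attribute, exactly as the benchmark above does.
    with open(path, "wb") as file:
        pickle.dump(model._fit_result, file)


def load_fit(model, path: str):
    # Re-attach a previously pickled fit result so score()/predict() can be
    # benchmarked without re-training the model.
    with open(path, "rb") as file:
        model._fit_result = pickle.load(file)
    return model
```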
/machine_learning/benchmarks/opflow_qnn_classifier_fit_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Opflow Neural Network Classifier fit benchmarks."""
13 |
14 | from itertools import product
15 | from timeit import timeit
16 | from typing import Optional
17 |
18 | import numpy as np
19 | from qiskit.algorithms.optimizers import COBYLA, NELDER_MEAD, L_BFGS_B
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
21 |
22 | from .base_classifier_benchmark import (
23 | DATASET_SYNTHETIC_CLASSIFICATION,
24 | DATASET_IRIS_CLASSIFICATION,
25 | )
26 | from .opflow_qnn_base_classifier_benchmark import OpflowQnnBaseClassifierBenchmark
27 |
28 |
29 | class OpflowQnnFitClassifierBenchmarks(OpflowQnnBaseClassifierBenchmark):
30 |     """Opflow QNN Classifier fit benchmarks."""
31 |
32 | version = 2
33 | timeout = 1200.0
34 | params = (
35 | [DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION],
36 | ["qasm_simulator", "statevector_simulator"],
37 | ["cobyla", "nelder-mead", "l-bfgs-b"],
38 | )
39 | param_names = ["dataset", "backend name", "optimizer"]
40 |
41 | def __init__(self) -> None:
42 | super().__init__()
43 |
44 | self.optimizers = {
45 | "cobyla": COBYLA(maxiter=100),
46 | "nelder-mead": NELDER_MEAD(maxiter=50),
47 | "l-bfgs-b": L_BFGS_B(maxiter=20),
48 | }
49 | self.train_features: Optional[np.ndarray] = None
50 | self.train_labels: Optional[np.ndarray] = None
51 | self.test_features: Optional[np.ndarray] = None
52 | self.test_labels: Optional[np.ndarray] = None
53 | self.model: Optional[NeuralNetworkClassifier] = None
54 |
55 | def setup(self, dataset: str, quantum_instance_name: str, optimizer: str) -> None:
56 | """Set up the benchmark."""
57 | self.train_features = self.datasets[dataset]["train_features"]
58 | self.train_labels = self.datasets[dataset]["train_labels"]
59 |
60 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
61 | self.model = self._construct_opflow_classifier_synthetic(
62 | quantum_instance_name=quantum_instance_name,
63 | optimizer=self.optimizers[optimizer],
64 | )
65 | elif dataset == DATASET_IRIS_CLASSIFICATION:
66 | self.model = self._construct_opflow_classifier_iris(
67 | quantum_instance_name=quantum_instance_name,
68 | optimizer=self.optimizers[optimizer],
69 | )
70 | else:
71 | raise ValueError(f"Unsupported dataset: {dataset}")
72 |
73 | # pylint: disable=invalid-name
74 | def time_fit_opflow_qnn_classifier(self, _, __, ___):
75 | """Time fitting OpflowQNN classifier to data."""
76 | self.model.fit(self.train_features, self.train_labels)
77 |
78 |
79 | if __name__ == "__main__":
80 | for dataset_name, backend_name, optimizer_name in product(
81 | *OpflowQnnFitClassifierBenchmarks.params
82 | ):
83 | bench = OpflowQnnFitClassifierBenchmarks()
84 | try:
85 | bench.setup(dataset_name, backend_name, optimizer_name)
86 | except NotImplementedError:
87 | continue
88 |
89 | for method in ["time_fit_opflow_qnn_classifier"]:
90 | elapsed = timeit(
91 | f'bench.{method}("{dataset_name}", "{backend_name}", "{optimizer_name}")',
92 | number=10,
93 | globals=globals(),
94 | )
95 | print(f"{method}:\t{elapsed}")
96 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/opflow_qnn_regressor_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Opflow based neural network regressor benchmarks."""
13 |
14 | import pickle
15 | from itertools import product
16 | from timeit import timeit
17 | from typing import Optional
18 |
19 | from qiskit.algorithms.optimizers.cobyla import COBYLA
20 | from qiskit_machine_learning.algorithms import NeuralNetworkRegressor
21 | from sklearn.metrics import mean_absolute_error, mean_squared_error
22 |
23 | from .base_regressor_benchmark import (
24 | BaseRegressorBenchmark,
25 | DATASET_SYNTHETIC_REGRESSION,
26 | DATASET_CCPP_REGRESSION,
27 | )
28 |
29 |
30 | class OpflowQnnRegressorBenchmarks(BaseRegressorBenchmark):
31 | """Opflow QNN regressor benchmarks."""
32 |
33 | version = 1
34 | timeout = 1200.0
35 | params = [
36 | [DATASET_SYNTHETIC_REGRESSION, DATASET_CCPP_REGRESSION],
37 | ["qasm_simulator", "statevector_simulator"],
38 | ]
39 | param_names = ["dataset", "backend name"]
40 |
41 | def __init__(self) -> None:
42 | super().__init__()
43 | self.model: Optional[NeuralNetworkRegressor] = None
44 | self.train_features = None
45 | self.train_labels = None
46 | self.test_features = None
47 | self.test_labels = None
48 |
49 | def setup_cache(self) -> None:
50 |         """Cache OpflowQNN fitted model."""
51 | for dataset, backend in product(*self.params):
52 | train_features = self.datasets[dataset]["train_features"]
53 | train_labels = self.datasets[dataset]["train_labels"]
54 |
55 | if dataset == DATASET_SYNTHETIC_REGRESSION:
56 | model = self._construct_qnn_synthetic(
57 | quantum_instance_name=backend, optimizer=COBYLA()
58 | )
59 | elif dataset == DATASET_CCPP_REGRESSION:
60 | model = self._construct_qnn_ccpp(
61 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=100)
62 | )
63 | else:
64 | raise ValueError(f"Unsupported dataset: {dataset}")
65 |
66 | model.fit(train_features, train_labels)
67 |
68 | file_name = f"{dataset}_{backend}.pickle"
69 | with open(file_name, "wb") as file:
70 | pickle.dump(model._fit_result, file)
71 |
72 | def setup(self, dataset: str, quantum_instance_name: str) -> None:
73 | """Set up the benchmark."""
74 |
75 | self.train_features = self.datasets[dataset]["train_features"]
76 | self.train_labels = self.datasets[dataset]["train_labels"]
77 | self.test_features = self.datasets[dataset]["test_features"]
78 | self.test_labels = self.datasets[dataset]["test_labels"]
79 |
80 | if dataset == DATASET_SYNTHETIC_REGRESSION:
81 | self.model = self._construct_qnn_synthetic(quantum_instance_name=quantum_instance_name)
82 | elif dataset == DATASET_CCPP_REGRESSION:
83 | self.model = self._construct_qnn_ccpp(quantum_instance_name=quantum_instance_name)
84 | else:
85 | raise ValueError(f"Unsupported dataset: {dataset}")
86 |
87 | file_name = f"{dataset}_{quantum_instance_name}.pickle"
88 | with open(file_name, "rb") as file:
89 | self.model._fit_result = pickle.load(file)
90 |
91 | # pylint: disable=invalid-name
92 | def time_score_opflow_qnn_regressor(self, _, __):
93 | """Time scoring OpflowQNN regressor on data."""
94 | self.model.score(self.train_features, self.train_labels)
95 |
96 | def time_predict_opflow_qnn_regressor(self, _, __):
97 | """Time predicting with OpflowQNN regressor."""
98 | self.model.predict(self.train_features)
99 |
100 | def track_score_opflow_qnn_regressor(self, _, __):
101 | """R2 score of the model on data."""
102 | return self.model.score(self.test_features, self.test_labels)
103 |
104 | def track_mae_opflow_qnn_regressor(self, _, __):
105 | """Mean absolute error of the model on data."""
106 | predicts = self.model.predict(self.test_features)
107 | mae = mean_absolute_error(y_true=self.test_labels, y_pred=predicts)
108 | return mae
109 |
110 | def track_mse_opflow_qnn_regressor(self, _, __):
111 | """Mean squared error of the model on data."""
112 | predicts = self.model.predict(self.test_features)
113 | mse = mean_squared_error(y_true=self.test_labels, y_pred=predicts)
114 | return mse
115 |
116 |
117 | if __name__ == "__main__":
118 | bench = OpflowQnnRegressorBenchmarks()
119 | bench.setup_cache()
120 | for dataset_name, backend_name in product(*OpflowQnnRegressorBenchmarks.params):
121 | try:
122 | bench.setup(dataset_name, backend_name)
123 | except NotImplementedError:
124 | continue
125 |
126 | for method in (
127 | "time_score_opflow_qnn_regressor",
128 | "time_predict_opflow_qnn_regressor",
129 | "track_score_opflow_qnn_regressor",
130 | "track_mae_opflow_qnn_regressor",
131 | "track_mse_opflow_qnn_regressor",
132 | ):
133 | elapsed = timeit(
134 | f'bench.{method}("{dataset_name}", "{backend_name}")', number=10, globals=globals()
135 | )
136 | print(f"{method}:\t{elapsed}")
137 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/opflow_qnn_regressor_fit_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Opflow based neural network regressor fit benchmarks."""
13 |
14 | from typing import Optional
15 | from itertools import product
16 | from timeit import timeit
17 |
18 | from qiskit.algorithms.optimizers import COBYLA, L_BFGS_B, NELDER_MEAD
19 | from qiskit_machine_learning.algorithms import NeuralNetworkRegressor
20 |
21 | from .base_regressor_benchmark import (
22 | BaseRegressorBenchmark,
23 | DATASET_SYNTHETIC_REGRESSION,
24 | DATASET_CCPP_REGRESSION,
25 | )
26 |
27 |
28 | class OpflowQnnFitRegressorBenchmarks(BaseRegressorBenchmark):
29 | """OpflowQNN regressor benchmarks."""
30 |
31 | version = 1
32 | timeout = 1200.0
33 | params = (
34 | [DATASET_SYNTHETIC_REGRESSION, DATASET_CCPP_REGRESSION],
35 | ["qasm_simulator", "statevector_simulator"],
36 | ["cobyla", "nelder-mead", "l-bfgs-b"],
37 | )
38 | param_names = ["dataset", "backend name", "optimizer"]
39 |
40 | def __init__(self) -> None:
41 | super().__init__()
42 | self.optimizers = {
43 | "cobyla": COBYLA(maxiter=100),
44 | "nelder-mead": NELDER_MEAD(maxiter=50),
45 | "l-bfgs-b": L_BFGS_B(maxiter=20),
46 | }
47 | self.train_features = None
48 | self.train_labels = None
49 | self.model: Optional[NeuralNetworkRegressor] = None
50 |
51 | def setup(self, dataset: str, quantum_instance_name: str, optimizer: str):
52 | """Set up the benchmark."""
53 | self.train_features = self.datasets[dataset]["train_features"]
54 | self.train_labels = self.datasets[dataset]["train_labels"]
55 |
56 | if dataset == DATASET_SYNTHETIC_REGRESSION:
57 | self.model = self._construct_qnn_synthetic(
58 | quantum_instance_name, self.optimizers[optimizer]
59 | )
60 | elif dataset == DATASET_CCPP_REGRESSION:
61 | self.model = self._construct_qnn_ccpp(quantum_instance_name, self.optimizers[optimizer])
62 | else:
63 | raise ValueError(f"Unsupported dataset: {dataset}")
64 |
65 | # pylint: disable=invalid-name
66 | def time_fit_opflow_qnn_regressor(self, _, __, ___):
67 | """Time fitting OpflowQNN regressor to data."""
68 | self.model.fit(self.train_features, self.train_labels)
69 |
70 |
71 | if __name__ == "__main__":
72 | for dataset_name, backend, optimizer_name in product(*OpflowQnnFitRegressorBenchmarks.params):
73 | bench = OpflowQnnFitRegressorBenchmarks()
74 | try:
75 | bench.setup(dataset_name, backend, optimizer_name)
76 | except NotImplementedError:
77 | continue
78 |
79 | for method in ["time_fit_opflow_qnn_regressor"]:
80 | elapsed = timeit(
81 | f'bench.{method}("{dataset_name}", "{backend}", "{optimizer_name}")',
82 | number=10,
83 | globals=globals(),
84 | )
85 | print(f"{method}:\t{elapsed}")
86 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/vqc_base_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Base class for VQC based classifier benchmarks."""
14 | from abc import ABC
15 | from typing import Optional
16 |
17 | from qiskit.algorithms.optimizers import Optimizer
18 | from qiskit.circuit.library import RealAmplitudes, ZZFeatureMap
19 | from qiskit.utils import algorithm_globals
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier, VQC
21 | from sklearn.pipeline import Pipeline
22 | from sklearn.preprocessing import OneHotEncoder, FunctionTransformer
23 |
24 | from .base_classifier_benchmark import BaseClassifierBenchmark
25 |
26 |
27 | class VqcBaseClassifierBenchmark(BaseClassifierBenchmark, ABC):
28 |     """Base class for VQC classifier benchmarks."""
29 |
30 | def __init__(self) -> None:
31 | reshaper = FunctionTransformer(lambda x: x.reshape(-1, 1))
32 | encoder = OneHotEncoder(sparse=False)
33 | super().__init__(
34 | synthetic_label_encoder=Pipeline([("reshape", reshaper), ("one hot", encoder)]),
35 | iris_num_classes=2,
36 | iris_label_encoder=Pipeline([("reshape", reshaper), ("one hot", encoder)]),
37 | )
38 |
39 | def _construct_vqc_classifier_synthetic(
40 | self,
41 | quantum_instance_name: str,
42 | optimizer: Optional[Optimizer] = None,
43 | loss_function: str = "cross_entropy",
44 | ) -> NeuralNetworkClassifier:
45 |         """Construct a VQC classifier for the synthetic classification dataset."""
46 | return self._construct_vqc_classifier(2, quantum_instance_name, optimizer, loss_function)
47 |
48 | def _construct_vqc_classifier_iris(
49 | self,
50 | quantum_instance_name: str,
51 | optimizer: Optional[Optimizer] = None,
52 | loss_function: str = "cross_entropy",
53 | ) -> NeuralNetworkClassifier:
54 | """Construct a VQC classifier for iris classification dataset."""
55 | return self._construct_vqc_classifier(4, quantum_instance_name, optimizer, loss_function)
56 |
57 | def _construct_vqc_classifier(
58 | self,
59 | num_inputs: int,
60 | quantum_instance_name: str,
61 | optimizer: Optional[Optimizer] = None,
62 |         loss_function: Optional[str] = None,
63 | ) -> VQC:
64 | """Construct a VQC classifier."""
65 | feature_map = ZZFeatureMap(num_inputs)
66 | ansatz = RealAmplitudes(num_inputs)
67 |
68 | initial_point = algorithm_globals.random.random(ansatz.num_parameters)
69 |
70 | # construct variational quantum classifier
71 | model = VQC(
72 | feature_map=feature_map,
73 | ansatz=ansatz,
74 | loss=loss_function,
75 | optimizer=optimizer,
76 | quantum_instance=self.backends[quantum_instance_name],
77 | initial_point=initial_point,
78 | )
79 |
80 | return model
81 |
--------------------------------------------------------------------------------
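For context, the `Pipeline` built in `__init__` above converts integer class labels into the one-hot form used to train `VQC`. A small self-contained sketch of what that encoder does, assuming the scikit-learn version pinned by this repository (where `OneHotEncoder(sparse=False)` is still accepted):

```python
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import FunctionTransformer, OneHotEncoder

labels = np.array([0, 1, 1, 0])
encoder = Pipeline(
    [
        ("reshape", FunctionTransformer(lambda x: x.reshape(-1, 1))),  # to a column vector
        ("one hot", OneHotEncoder(sparse=False)),  # to a dense one-hot matrix
    ]
)
print(encoder.fit_transform(labels))
# [[1. 0.]
#  [0. 1.]
#  [0. 1.]
#  [1. 0.]]
```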
/machine_learning/benchmarks/vqc_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Variational Quantum Classifier benchmarks."""
13 | import pickle
14 | from itertools import product
15 | from timeit import timeit
16 | from typing import Optional
17 |
18 | import numpy as np
19 | from qiskit.algorithms.optimizers import COBYLA
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
21 | from sklearn.metrics import precision_score, recall_score, f1_score
22 |
23 | from .base_classifier_benchmark import DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION
24 | from .vqc_base_benchmark import VqcBaseClassifierBenchmark
25 |
26 |
27 | class VqcBenchmarks(VqcBaseClassifierBenchmark):
28 | """Variational Quantum Classifier benchmarks."""
29 |
30 | version = 2
31 | timeout = 1200.0
32 | params = [
33 | # Only the synthetic dataset now
34 | [DATASET_SYNTHETIC_CLASSIFICATION],
35 | ["qasm_simulator", "statevector_simulator"],
36 | ]
37 | param_names = ["dataset", "backend name"]
38 |
39 | def __init__(self) -> None:
40 | super().__init__()
41 | self.train_features: Optional[np.ndarray] = None
42 | self.train_labels: Optional[np.ndarray] = None
43 | self.test_features: Optional[np.ndarray] = None
44 | self.test_labels: Optional[np.ndarray] = None
45 | self.model: Optional[NeuralNetworkClassifier] = None
46 |
47 | def setup(self, dataset: str, quantum_instance_name: str) -> None:
48 | """Set up the benchmark."""
49 |
50 | self.train_features = self.datasets[dataset]["train_features"]
51 | self.train_labels = self.datasets[dataset]["train_labels"]
52 | self.test_features = self.datasets[dataset]["test_features"]
53 | self.test_labels = self.datasets[dataset]["test_labels"]
54 |
55 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
56 | self.model = self._construct_vqc_classifier_synthetic(
57 | quantum_instance_name=quantum_instance_name
58 | )
59 | elif dataset == DATASET_IRIS_CLASSIFICATION:
60 | self.model = self._construct_vqc_classifier_iris(
61 | quantum_instance_name=quantum_instance_name
62 | )
63 | else:
64 | raise ValueError(f"Unsupported dataset: {dataset}")
65 |
66 | file_name = f"vqc_{dataset}_{quantum_instance_name}.pickle"
67 | with open(file_name, "rb") as file:
68 | self.model._fit_result = pickle.load(file)
69 |
70 | def setup_cache(self) -> None:
71 | """Cache VQC fitted model."""
72 | for dataset, backend in product(*self.params):
73 | train_features = self.datasets[dataset]["train_features"]
74 | train_labels = self.datasets[dataset]["train_labels"]
75 |
76 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
77 | model = self._construct_vqc_classifier_synthetic(
78 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
79 | )
80 | elif dataset == DATASET_IRIS_CLASSIFICATION:
81 | model = self._construct_vqc_classifier_iris(
82 | quantum_instance_name=backend, optimizer=COBYLA(maxiter=200)
83 | )
84 | else:
85 | raise ValueError(f"Unsupported dataset: {dataset}")
86 |
87 | model.fit(train_features, train_labels)
88 |
89 | file_name = f"vqc_{dataset}_{backend}.pickle"
90 | with open(file_name, "wb") as file:
91 | pickle.dump(model._fit_result, file)
92 |
93 | # pylint: disable=invalid-name
94 | def time_score_vqc_classifier(self, _, __):
95 | """Time scoring VQC on data."""
96 | self.model.score(self.train_features, self.train_labels)
97 |
98 | def time_predict_vqc_classifier(self, _, __):
99 | """Time predicting with VQC."""
100 | self.model.predict(self.train_features)
101 |
102 | def track_accuracy_score_vqc_classifier(self, _, __):
103 | """Tracks the overall accuracy of the classification results."""
104 | return self.model.score(self.test_features, self.test_labels)
105 |
106 | def track_precision_score_vqc_classifier(self, _, __):
107 | """Tracks the precision score."""
108 | predicts = self.model.predict(self.test_features)
109 | return precision_score(y_true=self.test_labels, y_pred=predicts, average="micro")
110 |
111 | def track_recall_score_vqc_classifier(self, _, __):
112 |         """Tracks the micro-averaged recall score of the classification results."""
113 | predicts = self.model.predict(self.test_features)
114 | return recall_score(y_true=self.test_labels, y_pred=predicts, average="micro")
115 |
116 | def track_f1_score_vqc_classifier(self, _, __):
117 |         """Tracks the micro-averaged f1 score of the classification results."""
118 | predicts = self.model.predict(self.test_features)
119 | return f1_score(y_true=self.test_labels, y_pred=predicts, average="micro")
120 |
121 |
122 | if __name__ == "__main__":
123 | bench = VqcBenchmarks()
124 | bench.setup_cache()
125 | for dataset_name, backend_name in product(*VqcBenchmarks.params):
126 | try:
127 | bench.setup(dataset_name, backend_name)
128 | except NotImplementedError:
129 | continue
130 |
131 | for method in (
132 | "time_score_vqc_classifier",
133 | "time_predict_vqc_classifier",
134 | "track_accuracy_score_vqc_classifier",
135 | "track_precision_score_vqc_classifier",
136 | "track_recall_score_vqc_classifier",
137 | "track_f1_score_vqc_classifier",
138 | ):
139 | elapsed = timeit(
140 | f'bench.{method}("{dataset_name}", "{backend_name}")', number=10, globals=globals()
141 | )
142 |             print(f"{method}:\t{elapsed}")
143 |
--------------------------------------------------------------------------------
/machine_learning/benchmarks/vqc_fit_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 | """Variational Quantum Classifier benchmarks."""
13 |
14 | from itertools import product
15 | from timeit import timeit
16 | from typing import Optional
17 |
18 | import numpy as np
19 | from qiskit.algorithms.optimizers import COBYLA, L_BFGS_B, NELDER_MEAD
20 | from qiskit_machine_learning.algorithms import NeuralNetworkClassifier
21 |
22 | from .base_classifier_benchmark import DATASET_SYNTHETIC_CLASSIFICATION, DATASET_IRIS_CLASSIFICATION
23 | from .vqc_base_benchmark import VqcBaseClassifierBenchmark
24 |
25 |
26 | class VqcFitBenchmarks(VqcBaseClassifierBenchmark):
27 | """Variational Quantum Classifier benchmarks."""
28 |
29 | version = 2
30 | timeout = 1200.0
31 | params = (
32 | # Only the synthetic dataset now
33 | [DATASET_SYNTHETIC_CLASSIFICATION],
34 | ["qasm_simulator", "statevector_simulator"],
35 | ["cobyla", "nelder-mead", "l-bfgs-b"],
36 | ["cross_entropy", "squared_error"],
37 | )
38 | param_names = ["dataset", "backend name", "optimizer", "loss function"]
39 |
40 | def __init__(self) -> None:
41 | super().__init__()
42 |
43 | self.optimizers = {
44 | "cobyla": COBYLA(maxiter=100),
45 | "nelder-mead": NELDER_MEAD(maxiter=50),
46 | "l-bfgs-b": L_BFGS_B(maxiter=20),
47 | }
48 | self.train_features: Optional[np.ndarray] = None
49 | self.train_labels: Optional[np.ndarray] = None
50 | self.test_features: Optional[np.ndarray] = None
51 | self.test_labels: Optional[np.ndarray] = None
52 | self.model: Optional[NeuralNetworkClassifier] = None
53 |
54 | def setup(
55 | self, dataset: str, quantum_instance_name: str, optimizer: str, loss_function: str
56 | ) -> None:
57 | """Set up the benchmark."""
58 | self.train_features = self.datasets[dataset]["train_features"]
59 | self.train_labels = self.datasets[dataset]["train_labels"]
60 |
61 | if dataset == DATASET_SYNTHETIC_CLASSIFICATION:
62 | self.model = self._construct_vqc_classifier_synthetic(
63 | quantum_instance_name=quantum_instance_name,
64 | optimizer=self.optimizers[optimizer],
65 | loss_function=loss_function,
66 | )
67 | elif dataset == DATASET_IRIS_CLASSIFICATION:
68 | self.model = self._construct_vqc_classifier_iris(
69 | quantum_instance_name=quantum_instance_name,
70 | optimizer=self.optimizers[optimizer],
71 | loss_function=loss_function,
72 | )
73 | else:
74 | raise ValueError(f"Unsupported dataset: {dataset}")
75 |
76 | # pylint: disable=invalid-name
77 | def time_fit_vqc(self, _, __, ___, ____):
78 | """Time fitting VQC to data."""
79 | self.model.fit(self.train_features, self.train_labels)
80 |
81 |
82 | if __name__ == "__main__":
83 | for dataset_name, backend_name, optimizer_name, loss_function_name in product(
84 | *VqcFitBenchmarks.params
85 | ):
86 | bench = VqcFitBenchmarks()
87 | try:
88 | bench.setup(dataset_name, backend_name, optimizer_name, loss_function_name)
89 | except NotImplementedError:
90 | continue
91 |
92 | for method in ["time_fit_vqc"]:
93 | elapsed = timeit(
94 | f'bench.{method}("{dataset_name}", "{backend_name}", '
95 | f'"{optimizer_name}", "{loss_function_name}")',
96 | number=10,
97 | globals=globals(),
98 | )
99 | print(f"{method}:\t{elapsed}")
100 |
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | warn_unused_configs = True
3 | ignore_missing_imports = True
4 | strict_optional = False
5 | no_implicit_optional = True
6 | warn_redundant_casts = True
7 | warn_unused_ignores = True
8 |
--------------------------------------------------------------------------------
/nature/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set those variables from the command line.
14 | ASVCMD =
15 | ASVOPTS =
16 |
17 | .PHONY: asv machine dev run publish preview
18 |
19 | asv:
20 | python -m asv $(ASVCMD) $(ASVOPTS) || true
21 |
--------------------------------------------------------------------------------
/nature/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/nature/asv.conf.json:
--------------------------------------------------------------------------------
1 | {
2 | // The version of the config file format. Do not change, unless
3 | // you know what you are doing.
4 | "version": 1,
5 |
6 | // The name of the project being benchmarked
7 | "project": "qiskit-nature",
8 |
9 | // The project's homepage
10 | "project_url": "https://qiskit.org/documentation/nature/",
11 |
12 | // The URL or local path of the source code repository for the
13 | // project being benchmarked
14 | "repo": "https://github.com/Qiskit/qiskit-nature.git",
15 |
16 | // The Python project's subdirectory in your repo. If missing or
17 | // the empty string, the project is assumed to be located at the root
18 | // of the repository.
19 | // "repo_subdir": "",
20 |
21 | // Customizable commands for building, installing, and
22 | // uninstalling the project. See asv.conf.json documentation.
23 | //
24 | "install_command": [
25 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/build')\"",
26 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_nature.egg-info')\"",
27 | "python -mpip install git+https://github.com/Qiskit/qiskit-terra",
28 | "python -mpip install git+https://github.com/Qiskit/qiskit-aer",
29 | "python -mpip install {wheel_file}",
30 | ],
31 | "uninstall_command": [
32 | "return-code=any python -mpip uninstall -y {project}",
33 | "return-code=any python -mpip uninstall -y qiskit-aer qiskit-terra",
34 | ],
35 | "build_command": [
36 | "python setup.py build",
37 | "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
38 | ],
39 |
40 | // List of branches to benchmark. If not provided, defaults to "master"
41 | // (for git) or "default" (for mercurial).
42 | "branches": ["main"], // for git
43 | // "branches": ["default"], // for mercurial
44 |
45 | // The DVCS being used. If not set, it will be automatically
46 | // determined from "repo" by looking at the protocol in the URL
47 | // (if remote), or by looking for special directories, such as
48 | // ".git" (if local).
49 | "dvcs": "git",
50 |
51 | // The tool to use to create environments. May be "conda",
52 | // "virtualenv" or other value depending on the plugins in use.
53 | // If missing or the empty string, the tool will be automatically
54 | // determined by looking for tools on the PATH environment
55 | // variable.
56 | "environment_type": "virtualenv",
57 |
58 | // timeout in seconds for installing any dependencies in environment
59 | // defaults to 10 min
60 | //"install_timeout": 600,
61 |
62 | // the base URL to show a commit for the project.
63 | "show_commit_url": "http://github.com/Qiskit/qiskit-nature/commit/",
64 |
65 | // The Pythons you'd like to test against. If not provided, defaults
66 | // to the current version of Python used to run `asv`.
67 | "pythons": ["3.8"],
68 |
69 | // The list of conda channel names to be searched for benchmark
70 | // dependency packages in the specified order
71 | // "conda_channels": ["conda-forge", "defaults"],
72 |
73 | // The matrix of dependencies to test. Each key is the name of a
74 | // package (in PyPI) and the values are version numbers. An empty
75 | // list or empty string indicates to just test against the default
76 | // (latest) version. null indicates that the package is to not be
77 | // installed. If the package to be tested is only available from
78 | // PyPi, and the 'environment_type' is conda, then you can preface
79 | // the package name by 'pip+', and the package will be installed via
80 | // pip (with all the conda available packages installed first,
81 | // followed by the pip installed packages).
82 | //
83 | "matrix": {
84 | "pyscf": "",
85 | },
86 |
87 | // Combinations of libraries/python versions can be excluded/included
88 | // from the set to test. Each entry is a dictionary containing additional
89 | // key-value pairs to include/exclude.
90 | //
91 | // An exclude entry excludes entries where all values match. The
92 | // values are regexps that should match the whole string.
93 | //
94 | // An include entry adds an environment. Only the packages listed
95 | // are installed. The 'python' key is required. The exclude rules
96 | // do not apply to includes.
97 | //
98 | // In addition to package names, the following keys are available:
99 | //
100 | // - python
101 | // Python version, as in the *pythons* variable above.
102 | // - environment_type
103 | // Environment type, as above.
104 | // - sys_platform
105 | // Platform, as in sys.platform. Possible values for the common
106 | // cases: 'linux2', 'win32', 'cygwin', 'darwin'.
107 | //
108 | // "exclude": [
109 | // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
110 | // {"environment_type": "conda", "six": null}, // don't run without six on conda
111 | // ],
112 | //
113 | // "include": [
114 | // // additional env for python2.7
115 | // {"python": "2.7", "numpy": "1.8"},
116 | // // additional env if run on windows+conda
117 | // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
118 | // ],
119 |
120 | // The directory (relative to the current directory) that benchmarks are
121 | // stored in. If not provided, defaults to "benchmarks"
122 | "benchmark_dir": "benchmarks",
123 |
124 | // The directory (relative to the current directory) to cache the Python
125 | // environments in. If not provided, defaults to "env"
126 | "env_dir": ".asv/env",
127 |
128 | // The directory (relative to the current directory) that raw benchmark
129 | // results are stored in. If not provided, defaults to "results".
130 | "results_dir": ".asv/results",
131 |
132 | // The directory (relative to the current directory) that the html tree
133 | // should be written to. If not provided, defaults to "html".
134 | "html_dir": ".asv/html",
135 |
136 | // The number of characters to retain in the commit hashes.
137 | // "hash_length": 8,
138 |
139 | // `asv` will cache results of the recent builds in each
140 | // environment, making them faster to install next time. This is
141 | // the number of builds to keep, per environment.
142 | // "build_cache_size": 2,
143 |
144 | // The commits after which the regression search in `asv publish`
145 | // should start looking for regressions. Dictionary whose keys are
146 | // regexps matching to benchmark names, and values corresponding to
147 | // the commit (exclusive) after which to start looking for
148 | // regressions. The default is to start from the first commit
149 | // with results. If the commit is `null`, regression detection is
150 | // skipped for the matching benchmark.
151 | //
152 | // "regressions_first_commits": {
153 | // "some_benchmark": "352cdf", // Consider regressions only after this commit
154 | // "another_benchmark": null, // Skip regression detection altogether
155 | // },
156 |
157 | // The thresholds for relative change in results, after which `asv
158 | // publish` starts reporting regressions. Dictionary of the same
159 | // form as in ``regressions_first_commits``, with values
160 | // indicating the thresholds. If multiple entries match, the
161 | // maximum is taken. If no entry matches, the default is 5%.
162 | //
163 | // "regressions_thresholds": {
164 | // "some_benchmark": 0.01, // Threshold of 1%
165 | // "another_benchmark": 0.5, // Threshold of 50%
166 | // },
167 | }
168 |
--------------------------------------------------------------------------------
/nature/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/nature/benchmarks/jordan_wigner_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022, 2023.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Jordan-Wigner Mapper Benchmarks."""
14 | import argparse
15 | from typing import List
16 | from timeit import timeit
17 | from pathlib import Path
18 | import h5py
19 | from qiskit_nature.second_q.drivers import PySCFDriver
20 | from qiskit_nature.second_q.formats.qcschema import QCSchema
21 | from qiskit_nature.second_q.formats import qcschema_to_problem
22 | from qiskit_nature.second_q.mappers import JordanWignerMapper
23 | from qiskit_nature.settings import settings
24 | from qiskit_nature.units import DistanceUnit
25 |
26 | settings.use_pauli_sum_op = False
27 |
28 | # pylint: disable=redefined-outer-name, invalid-name, attribute-defined-outside-init
29 |
30 |
31 | class JordanWignerMapperBenchmarks:
32 | """Jordan-Wigner Mapper Benchmarks."""
33 |
34 | version = 1
35 | params: List[str] = [
36 | "H2 ElectronicEnergy",
37 | "H2 AngularMomentum",
38 | "H2 Magnetization",
39 | "H2 ParticleNumber",
40 | "H2 XDipole",
41 | "H2 YDipole",
42 | "H2 ZDipole",
43 | "H2O ElectronicEnergy",
44 | "H2O AngularMomentum",
45 | "H2O Magnetization",
46 | "H2O ParticleNumber",
47 | "H2O XDipole",
48 | "H2O YDipole",
49 | "H2O ZDipole",
50 | "LiH ElectronicEnergy",
51 | "LiH AngularMomentum",
52 | "LiH Magnetization",
53 | "LiH ParticleNumber",
54 | "LiH XDipole",
55 | "LiH YDipole",
56 | "LiH ZDipole",
57 | ]
58 | param_names = ["operator_type"]
59 |
60 | _hdf5_files = [
61 | ("H .0 .0 .0; H .0 .0 0.735", "jordan_wigner_benchmark_driver_H2.hdf5"),
62 | (
63 | "O 0.0 0.0 0.0; H 0.758602 0.0 0.504284; H 0.758602 0.0 -0.504284",
64 | "jordan_wigner_benchmark_driver_H2O.hdf5",
65 | ),
66 | ("Li 0.0 0.0 0.0; H 0.0 0.0 1.5474", "jordan_wigner_benchmark_driver_LiH.hdf5"),
67 | ]
68 |
69 | @staticmethod
70 | def make_hdf5_file():
71 | """create hdf5 files"""
72 |
73 | for _, (atom, file_name) in enumerate(JordanWignerMapperBenchmarks._hdf5_files):
74 | _driver = PySCFDriver(
75 | atom=atom,
76 | unit=DistanceUnit.ANGSTROM,
77 | basis="sto3g",
78 | )
79 | _driver.run_pyscf()
80 | _qcschema = _driver.to_qcschema(include_dipole=True)
81 | with h5py.File(file_name, "w") as file:
82 | _qcschema.to_hdf5(file)
83 |
84 | def setup_cache(self):
85 | """setup cache"""
86 |
87 | source_path = Path(__file__).resolve()
88 | source_dir = source_path.parent
89 | second_q_ops_list = []
90 | for _, file_name in JordanWignerMapperBenchmarks._hdf5_files:
91 | file_path = Path(source_dir, file_name)
92 | _qcschema = QCSchema.from_hdf5(file_path)
93 | _problem = qcschema_to_problem(_qcschema)
94 | hamil, aux_ops = _problem.second_q_ops()
95 | second_q_ops_list.append(hamil)
96 | for _, item in sorted(aux_ops.items()):
97 | second_q_ops_list.append(item)
98 |
99 | return second_q_ops_list
100 |
101 | def setup(self, second_q_ops_list, operator):
102 | """setup"""
103 | self.op_number = self.params.index(operator)
104 | self.second_q_ops_list = second_q_ops_list
105 | self.jw_mapper = JordanWignerMapper()
106 |
107 | def time_map(self, _, __):
108 | """time map"""
109 | return self.jw_mapper.map(self.second_q_ops_list[self.op_number])
110 |
111 |
112 | if __name__ == "__main__":
113 | PARSER = argparse.ArgumentParser(description="Create hdf5 files")
114 | PARSER.add_argument(
115 | "-hdf5create", action="store_true", help="creates hdf5 file with PYSCFDriver results"
116 | )
117 |
118 | ARGS = PARSER.parse_args()
119 | if ARGS.hdf5create:
120 | JordanWignerMapperBenchmarks.make_hdf5_file()
121 | else:
122 | bench = JordanWignerMapperBenchmarks()
123 | second_q_ops_list = bench.setup_cache()
124 | for operator in JordanWignerMapperBenchmarks.params:
125 | bench = JordanWignerMapperBenchmarks()
126 | try:
127 | bench.setup(second_q_ops_list, operator)
128 | except NotImplementedError:
129 | continue
130 |
131 | for method in set(dir(JordanWignerMapperBenchmarks)):
132 | if method.startswith("time_"):
133 | elapsed = timeit(
134 | f"bench.{method}(None, '{operator}')",
135 | number=10,
136 | globals=globals(),
137 | )
138 | print(f"bench.{method} : \t{elapsed}")
139 |
--------------------------------------------------------------------------------
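For a quick sanity check of the mapper outside the asv harness, here is a minimal sketch (assuming the same qiskit-nature `second_q` API used above) that maps a tiny hand-built fermionic operator:

```python
from qiskit_nature.second_q.mappers import JordanWignerMapper
from qiskit_nature.second_q.operators import FermionicOp
from qiskit_nature.settings import settings

settings.use_pauli_sum_op = False  # mirror the benchmark above

# Number operator a†_0 a_0 on a register of two spin orbitals.
op = FermionicOp({"+_0 -_0": 1.0}, num_spin_orbitals=2)
print(JordanWignerMapper().map(op))  # SparsePauliOp on 2 qubits
```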
/nature/benchmarks/jordan_wigner_benchmark_driver_H2.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qiskit-community/qiskit-app-benchmarks/07fe387ef338dc0d22aa7cbc13ae21ed5d7a5a10/nature/benchmarks/jordan_wigner_benchmark_driver_H2.hdf5
--------------------------------------------------------------------------------
/nature/benchmarks/jordan_wigner_benchmark_driver_H2O.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qiskit-community/qiskit-app-benchmarks/07fe387ef338dc0d22aa7cbc13ae21ed5d7a5a10/nature/benchmarks/jordan_wigner_benchmark_driver_H2O.hdf5
--------------------------------------------------------------------------------
/nature/benchmarks/jordan_wigner_benchmark_driver_LiH.hdf5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/qiskit-community/qiskit-app-benchmarks/07fe387ef338dc0d22aa7cbc13ae21ed5d7a5a10/nature/benchmarks/jordan_wigner_benchmark_driver_LiH.hdf5
--------------------------------------------------------------------------------
/nature/benchmarks/linear_mapper_benchmark.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2022, 2023.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """Linear Mapper Benchmarks."""
14 | from timeit import timeit
15 | import rustworkx
16 | from qiskit_nature.second_q.mappers import LinearMapper
17 | from qiskit_nature.second_q.hamiltonians import IsingModel
18 | from qiskit_nature.second_q.hamiltonians.lattices import Lattice
19 | from qiskit_nature.settings import settings
20 |
21 | settings.use_pauli_sum_op = False
22 |
23 | # pylint: disable=redefined-outer-name, invalid-name, attribute-defined-outside-init
24 |
25 |
26 | class LinearMapperBenchmarks:
27 | """Linear Mapper Benchmarks."""
28 |
29 | version = 1
30 | timeout = 120.0
31 | params = [50, 80, 100]
32 | param_names = ["Number of nodes"]
33 |
34 | def setup_cache(self):
35 | """setup cache"""
36 |
37 | second_q_ops_list = []
38 | edge_arr = list((n**11) % 100 for n in range(100))
39 |
40 | for index, _ in enumerate(self.params):
41 | graph = rustworkx.PyGraph(multigraph=False)
42 | graph.add_nodes_from(list(range(self.params[index])))
43 |
44 | for i in range(self.params[index]):
45 | for j in range(i + 1, self.params[index]):
46 | graph.add_edge(i, j, edge_arr[i])
47 |
48 | lattice = Lattice(graph)
49 | ising_model = IsingModel(lattice)
50 | second_q_op = ising_model.second_q_op()
51 | second_q_ops_list.append(second_q_op)
52 |
53 | return second_q_ops_list
54 |
55 | def setup(self, second_q_ops_list, num_nodes):
56 | """setup"""
57 | self.second_q_ops_list = second_q_ops_list
58 | self.op_number = self.params.index(num_nodes)
59 | self.linear_mapper = LinearMapper()
60 |
61 | def time_map(self, _, __):
62 | """time map"""
63 | return self.linear_mapper.map(self.second_q_ops_list[self.op_number])
64 |
65 |
66 | if __name__ == "__main__":
67 | bench = LinearMapperBenchmarks()
68 | second_q_ops_list = bench.setup_cache()
69 | for num_nodes in LinearMapperBenchmarks.params:
70 | bench = LinearMapperBenchmarks()
71 | try:
72 | bench.setup(second_q_ops_list, num_nodes)
73 | except NotImplementedError:
74 | continue
75 |
76 | for method in set(dir(LinearMapperBenchmarks)):
77 | if method.startswith("time_"):
78 | elapsed = timeit(f"bench.{method}(None, {num_nodes})", number=10, globals=globals())
79 | print(f"bench.{method} : \t{elapsed}")
80 |
--------------------------------------------------------------------------------
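A correspondingly small standalone check for the linear mapper (again assuming the qiskit-nature API used above), which can be handy when the 50-100 node benchmark sizes are too slow to iterate on:

```python
import rustworkx
from qiskit_nature.second_q.hamiltonians import IsingModel
from qiskit_nature.second_q.hamiltonians.lattices import Lattice
from qiskit_nature.second_q.mappers import LinearMapper

graph = rustworkx.PyGraph(multigraph=False)
graph.add_nodes_from(list(range(3)))
graph.add_edge(0, 1, 1.0)  # (node, node, weight)
graph.add_edge(1, 2, 2.0)

spin_op = IsingModel(Lattice(graph)).second_q_op()
print(LinearMapper().map(spin_op))
```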
/optimization/Makefile:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | # You can set those variables from the command line.
14 | ASVCMD =
15 | ASVOPTS =
16 |
17 | .PHONY: asv machine dev run publish preview
18 |
19 | asv:
20 | python -m asv $(ASVCMD) $(ASVOPTS) || true
21 |
--------------------------------------------------------------------------------
/optimization/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/optimization/asv.conf.json:
--------------------------------------------------------------------------------
1 | {
2 | // The version of the config file format. Do not change, unless
3 | // you know what you are doing.
4 | "version": 1,
5 |
6 | // The name of the project being benchmarked
7 | "project": "qiskit-optimization",
8 |
9 | // The project's homepage
10 | "project_url": "https://qiskit.org/documentation/optimization/",
11 |
12 | // The URL or local path of the source code repository for the
13 | // project being benchmarked
14 | "repo": "https://github.com/Qiskit/qiskit-optimization.git",
15 |
16 | // The Python project's subdirectory in your repo. If missing or
17 | // the empty string, the project is assumed to be located at the root
18 | // of the repository.
19 | // "repo_subdir": "",
20 |
21 | // Customizable commands for building, installing, and
22 | // uninstalling the project. See asv.conf.json documentation.
23 | //
24 | "install_command": [
25 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/build')\"",
26 | "return-code=any python -c \"import shutil; shutil.rmtree('{build_dir}/qiskit_optimization.egg-info')\"",
27 | "python -mpip install git+https://github.com/Qiskit/qiskit-terra",
28 | "python -mpip install git+https://github.com/Qiskit/qiskit-aer",
29 | "python -mpip install {wheel_file}",
30 | ],
31 | "uninstall_command": [
32 | "return-code=any python -mpip uninstall -y {project}",
33 | "return-code=any python -mpip uninstall -y qiskit-aer qiskit-terra"
34 | ],
35 | "build_command": [
36 | "python setup.py build",
37 | "PIP_NO_BUILD_ISOLATION=false python -mpip wheel --no-deps --no-index -w {build_cache_dir} {build_dir}"
38 | ],
39 |
40 | // List of branches to benchmark. If not provided, defaults to "master"
41 | // (for git) or "default" (for mercurial).
42 | "branches": ["main"], // for git
43 | // "branches": ["default"], // for mercurial
44 |
45 | // The DVCS being used. If not set, it will be automatically
46 | // determined from "repo" by looking at the protocol in the URL
47 | // (if remote), or by looking for special directories, such as
48 | // ".git" (if local).
49 | "dvcs": "git",
50 |
51 | // The tool to use to create environments. May be "conda",
52 | // "virtualenv" or other value depending on the plugins in use.
53 | // If missing or the empty string, the tool will be automatically
54 | // determined by looking for tools on the PATH environment
55 | // variable.
56 | "environment_type": "virtualenv",
57 |
58 | // timeout in seconds for installing any dependencies in environment
59 | // defaults to 10 min
60 | //"install_timeout": 600,
61 |
62 | // the base URL to show a commit for the project.
63 | "show_commit_url": "http://github.com/Qiskit/qiskit-optimization/commit/",
64 |
65 | // The Pythons you'd like to test against. If not provided, defaults
66 | // to the current version of Python used to run `asv`.
67 | "pythons": ["3.8"],
68 |
69 | // The list of conda channel names to be searched for benchmark
70 | // dependency packages in the specified order
71 | // "conda_channels": ["conda-forge", "defaults"],
72 |
73 | // The matrix of dependencies to test. Each key is the name of a
74 | // package (in PyPI) and the values are version numbers. An empty
75 | // list or empty string indicates to just test against the default
76 | // (latest) version. null indicates that the package is to not be
77 | // installed. If the package to be tested is only available from
78 | // PyPi, and the 'environment_type' is conda, then you can preface
79 | // the package name by 'pip+', and the package will be installed via
80 | // pip (with all the conda available packages installed first,
81 | // followed by the pip installed packages).
82 | //
83 | "matrix": {
84 | "cplex" : "",
85 | "cvxpy" : "",
86 | "matplotlib" : "",
87 | "gurobipy" : "",
88 | },
89 |
90 | // Combinations of libraries/python versions can be excluded/included
91 | // from the set to test. Each entry is a dictionary containing additional
92 | // key-value pairs to include/exclude.
93 | //
94 | // An exclude entry excludes entries where all values match. The
95 | // values are regexps that should match the whole string.
96 | //
97 | // An include entry adds an environment. Only the packages listed
98 | // are installed. The 'python' key is required. The exclude rules
99 | // do not apply to includes.
100 | //
101 | // In addition to package names, the following keys are available:
102 | //
103 | // - python
104 | // Python version, as in the *pythons* variable above.
105 | // - environment_type
106 | // Environment type, as above.
107 | // - sys_platform
108 | // Platform, as in sys.platform. Possible values for the common
109 | // cases: 'linux2', 'win32', 'cygwin', 'darwin'.
110 | //
111 | // "exclude": [
112 | // {"python": "3.2", "sys_platform": "win32"}, // skip py3.2 on windows
113 | // {"environment_type": "conda", "six": null}, // don't run without six on conda
114 | // ],
115 | //
116 | // "include": [
117 | // // additional env for python2.7
118 | // {"python": "2.7", "numpy": "1.8"},
119 | // // additional env if run on windows+conda
120 | // {"platform": "win32", "environment_type": "conda", "python": "2.7", "libpython": ""},
121 | // ],
122 | "exclude": [
123 | {"python": "3.9", "cplex": ""},
124 | ],
125 |
126 | // The directory (relative to the current directory) that benchmarks are
127 | // stored in. If not provided, defaults to "benchmarks"
128 | "benchmark_dir": "benchmarks",
129 |
130 | // The directory (relative to the current directory) to cache the Python
131 | // environments in. If not provided, defaults to "env"
132 | "env_dir": ".asv/env",
133 |
134 | // The directory (relative to the current directory) that raw benchmark
135 | // results are stored in. If not provided, defaults to "results".
136 | "results_dir": ".asv/results",
137 |
138 | // The directory (relative to the current directory) that the html tree
139 | // should be written to. If not provided, defaults to "html".
140 | "html_dir": ".asv/html",
141 |
142 | // The number of characters to retain in the commit hashes.
143 | // "hash_length": 8,
144 |
145 | // `asv` will cache results of the recent builds in each
146 | // environment, making them faster to install next time. This is
147 | // the number of builds to keep, per environment.
148 | // "build_cache_size": 2,
149 |
150 | // The commits after which the regression search in `asv publish`
151 | // should start looking for regressions. Dictionary whose keys are
152 | // regexps matching to benchmark names, and values corresponding to
153 | // the commit (exclusive) after which to start looking for
154 | // regressions. The default is to start from the first commit
155 | // with results. If the commit is `null`, regression detection is
156 | // skipped for the matching benchmark.
157 | //
158 | // "regressions_first_commits": {
159 | // "some_benchmark": "352cdf", // Consider regressions only after this commit
160 | // "another_benchmark": null, // Skip regression detection altogether
161 | // },
162 |
163 | // The thresholds for relative change in results, after which `asv
164 | // publish` starts reporting regressions. Dictionary of the same
165 | // form as in ``regressions_first_commits``, with values
166 | // indicating the thresholds. If multiple entries match, the
167 | // maximum is taken. If no entry matches, the default is 5%.
168 | //
169 | // "regressions_thresholds": {
170 | // "some_benchmark": 0.01, // Threshold of 1%
171 | // "another_benchmark": 0.5, // Threshold of 50%
172 | // },
173 | }
174 |
--------------------------------------------------------------------------------
/optimization/benchmarks/__init__.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
--------------------------------------------------------------------------------
/optimization/benchmarks/knapsack.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """knapsack benchmarks"""
14 |
15 | import random
16 |
17 | from qiskit import Aer
18 | from qiskit.algorithms import QAOA, VQE
19 | from qiskit.algorithms.optimizers import COBYLA
20 | from qiskit.circuit.library import EfficientSU2
21 | from qiskit.utils import algorithm_globals, QuantumInstance
22 |
23 | from qiskit_optimization.algorithms import MinimumEigenOptimizer, GroverOptimizer
24 | from qiskit_optimization.applications import Knapsack
25 | from qiskit_optimization.converters import QuadraticProgramToQubo
26 |
27 | # pylint: disable=redefined-outer-name, invalid-name, attribute-defined-outside-init
28 |
29 |
30 | class KnapsackBenchmarks:
31 | """Knapsack benchmarks"""
32 |
33 | version = 1
34 | params = ([2, 3, 4, 5], [2, 4, 8, 16])
35 | param_names = ["number of items", "max_weights"]
36 |
37 | def setup(self, num_items, max_weights):
38 | """setup"""
39 | seed = 10
40 | algorithm_globals.random_seed = seed
41 | qasm_sim = Aer.get_backend("aer_simulator")
42 | self._qins = QuantumInstance(
43 | backend=qasm_sim, shots=1, seed_simulator=seed, seed_transpiler=seed
44 | )
45 | random.seed(seed)
46 | values = [random.randint(1, max_weights) for _ in range(num_items)]
47 | weights = [random.randint(1, max_weights) for _ in range(num_items)]
48 | self._knapsack = Knapsack(values, weights, max_weights)
49 | self._qp = self._knapsack.to_quadratic_program()
50 |
51 | @staticmethod
52 | def _generate_qubo(knapsack: Knapsack):
53 | q_p = knapsack.to_quadratic_program()
54 | conv = QuadraticProgramToQubo()
55 | qubo = conv.convert(q_p)
56 | return qubo
57 |
58 | def time_generate_qubo(self, _, __):
59 |         """time QUBO generation"""
60 | self._generate_qubo(self._knapsack)
61 |
62 | def time_qaoa(self, _, __):
63 | """time qaoa"""
64 | meo = MinimumEigenOptimizer(
65 | min_eigen_solver=QAOA(optimizer=COBYLA(maxiter=1), quantum_instance=self._qins)
66 | )
67 | meo.solve(self._qp)
68 |
69 | def time_vqe(self, _, __):
70 | """time vqe"""
71 | meo = MinimumEigenOptimizer(
72 | min_eigen_solver=VQE(
73 | optimizer=COBYLA(maxiter=1), ansatz=EfficientSU2(), quantum_instance=self._qins
74 | )
75 | )
76 | meo.solve(self._qp)
77 |
78 | def time_grover(self, _, __):
79 | """time grover"""
80 | meo = GroverOptimizer(
81 | num_value_qubits=self._qp.get_num_vars(),
82 | num_iterations=1,
83 | quantum_instance=self._qins,
84 | )
85 | meo.solve(self._qp)
86 |
--------------------------------------------------------------------------------
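Note: knapsack.py defines no local entry point. For an ad-hoc run outside asv, a hypothetical harness mirroring the __main__ block of maxcut.py below could be appended to the file (the repeat count number=10 is an assumption copied from maxcut.py):

    if __name__ == "__main__":
        from itertools import product
        from timeit import timeit

        for num_items, max_weights in product(*KnapsackBenchmarks.params):
            bench = KnapsackBenchmarks()
            bench.setup(num_items, max_weights)
            # run every time_* benchmark method 10 times and report the total elapsed time
            for method in sorted(m for m in dir(KnapsackBenchmarks) if m.startswith("time_")):
                elapsed = timeit(f"bench.{method}(None, None)", number=10, globals=globals())
                print(f"items={num_items}, max_weights={max_weights}, {method}:\t{elapsed}")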
/optimization/benchmarks/maxcut.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """maxcut benchmarks"""
14 |
15 | from itertools import product
16 | from timeit import timeit
17 |
18 | import networkx as nx
19 | from qiskit import Aer
20 | from qiskit.algorithms import QAOA, VQE
21 | from qiskit.algorithms.optimizers import COBYLA
22 | from qiskit.circuit.library import EfficientSU2
23 | from qiskit.utils import algorithm_globals, QuantumInstance
24 |
25 | from qiskit_optimization.algorithms import MinimumEigenOptimizer, GroverOptimizer
26 | from qiskit_optimization.applications import Maxcut
27 | from qiskit_optimization.converters import QuadraticProgramToQubo
28 |
29 | # pylint: disable=redefined-outer-name, invalid-name, attribute-defined-outside-init
30 |
31 |
32 | class MaxcutBenchmarks:
33 | """Maxcut benchmarks"""
34 |
35 | version = 1
36 | params = ([2, 4, 8, 12], [3, 5, 7, 9])
37 | param_names = ["number of nodes", "degree"]
38 |
39 | def setup(self, n, d):
40 | """setup"""
41 | seed = 123
42 | algorithm_globals.random_seed = seed
43 | qasm_sim = Aer.get_backend("qasm_simulator")
44 | self._qins = QuantumInstance(
45 | backend=qasm_sim, shots=1, seed_simulator=seed, seed_transpiler=seed
46 | )
47 | if n >= d:
48 |             graph = nx.random_regular_graph(n=n, d=d, seed=seed)
49 | self._maxcut = Maxcut(graph=graph)
50 | self._qp = self._maxcut.to_quadratic_program()
51 | else:
52 | raise NotImplementedError
53 |
54 | @staticmethod
55 | def _generate_qubo(maxcut: Maxcut):
56 | q_p = maxcut.to_quadratic_program()
57 | conv = QuadraticProgramToQubo()
58 | qubo = conv.convert(q_p)
59 | return qubo
60 |
61 | def time_generate_qubo(self, _, __):
62 |         """time QUBO generation"""
63 | self._generate_qubo(self._maxcut)
64 |
65 | def time_qaoa(self, _, __):
66 | """time qaoa"""
67 | meo = MinimumEigenOptimizer(
68 | min_eigen_solver=QAOA(optimizer=COBYLA(maxiter=1), quantum_instance=self._qins)
69 | )
70 | meo.solve(self._qp)
71 |
72 | def time_vqe(self, _, __):
73 | """time vqe"""
74 | meo = MinimumEigenOptimizer(
75 | min_eigen_solver=VQE(
76 | optimizer=COBYLA(maxiter=1), ansatz=EfficientSU2(), quantum_instance=self._qins
77 | )
78 | )
79 | meo.solve(self._qp)
80 |
81 | def time_grover(self, _, __):
82 | """time grover"""
83 | meo = GroverOptimizer(
84 | num_value_qubits=self._qp.get_num_vars() // 2,
85 | num_iterations=1,
86 | quantum_instance=self._qins,
87 | )
88 | meo.solve(self._qp)
89 |
90 |
91 | if __name__ == "__main__":
92 | for n, d in product(*MaxcutBenchmarks.params):
93 | if n < d:
94 | continue
95 | bench = MaxcutBenchmarks()
96 | try:
97 | bench.setup(n=n, d=d)
98 | except NotImplementedError:
99 | continue
100 | for method in set(dir(MaxcutBenchmarks)):
101 | if method.startswith("time_"):
102 | elapsed = timeit(f"bench.{method}(None, None)", number=10, globals=globals())
103 | print(f"n={n}, d={d}, {method}:\t{elapsed}")
104 |
--------------------------------------------------------------------------------
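The optimization benchmarks above are normally driven by tools/benchmarks.sh, but they can also be run by hand with the same asv commands that script uses; a minimal sketch from the optimization/ directory (asv machine --yes and the --bench filter are standard asv options, not something the repository scripts use):

    cd optimization
    asv machine --yes                                # first run only: record machine info non-interactively
    asv run --quick --show-stderr --bench "maxcut"   # quick pass, maxcut benchmarks only
    asv publish                                      # build the HTML report under .asv/html
    asv preview                                      # serve the report locally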
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 100
3 | target-version = ['py37', 'py38', 'py39']
4 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | black~=22.0
2 | pylint>=2.8.3,<2.14.0
3 | Sphinx>=1.8.3,!=3.1.0
4 | sphinxcontrib-spelling
5 | mypy>=0.780
6 | mypy-extensions>=0.4.3
7 | asv
8 | virtualenv
9 | requests
10 | types-requests
11 | tox
12 |
--------------------------------------------------------------------------------
/tools/benchmarks.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | # A virtual env named 'benchmark' has been created
15 | # and has all the qiskit-app-benchmarks requirements-dev.txt
16 | # dependencies installed
17 |
18 | # Script parameters
19 | BENCHMARK_BASENAME=${BASH_SOURCE}
20 | GIT_OWNER=$1
21 | GIT_USERID=$2
22 | GIT_PERSONAL_TOKEN=$3
23 |
24 | set -e
25 |
26 | echo "Start script $BENCHMARK_BASENAME."
27 |
28 | echo 'Update benchmarks repository dependencies'
29 | pip install -U -r requirements-dev.txt
30 |
31 | export GIT_ASKPASS=/tmp/.git-askpass
32 | rm -f $GIT_ASKPASS
33 | echo "echo $GIT_PERSONAL_TOKEN" > $GIT_ASKPASS
34 | chmod +x $GIT_ASKPASS
35 |
36 | echo 'qiskit-app-benchmarks has a gh-pages branch with the html benchmarks results in it.'
37 |
38 | make clean_sphinx
39 | make html
40 |
41 | rm -rf /tmp/qiskit-app-benchmarks
42 | git clone https://$GIT_USERID@github.com/$GIT_OWNER/qiskit-app-benchmarks.git /tmp/qiskit-app-benchmarks
43 |
44 | echo 'Copy main docs'
45 |
46 | pushd /tmp/qiskit-app-benchmarks
47 | git config user.name "Qiskit Application Benchmarks Autodeploy"
48 | git config user.email "qiskit@qiskit.org"
49 | git checkout gh-pages
50 | GLOBIGNORE=.git:finance:machine_learning:nature:optimization
51 | rm -rf * .*
52 | unset GLOBIGNORE
53 | popd
54 |
55 | declare -a TARGETS=("finance" "nature" "optimization" "machine_learning")
56 |
57 | # copy base html to benchmarks gh-pages branch
58 | rm -rf /tmp/qiskit-app-benchmarks-html
59 | mkdir /tmp/qiskit-app-benchmarks-html
60 | cp -r docs/_build/html/. /tmp/qiskit-app-benchmarks-html
61 | for TARGET in "${TARGETS[@]}"
62 | do
63 | rm -rf /tmp/qiskit-app-benchmarks-html/$TARGET
64 | done
65 | cp -r /tmp/qiskit-app-benchmarks-html/. /tmp/qiskit-app-benchmarks
66 |
67 |
68 | pushd /tmp/qiskit-app-benchmarks
69 | git add .
70 | # push only if there are changes
71 | if git diff-index --quiet HEAD --; then
72 | echo 'Nothing to commit for the base doc template.'
73 | else
74 | git commit -m "[Benchmarks] Base documentation update"
75 | fi
76 | popd
77 |
78 | echo 'Run Benchmarks for domains'
79 | for TARGET in "${TARGETS[@]}"
80 | do
81 | pushd $TARGET
82 | if [ -n "$(find benchmarks/* -not -name '__*' | head -1)" ]; then
83 | date
84 | asv_result=0
85 | echo "Update Benchmarks for domain $TARGET"
86 | asv update && asv_result=$? || asv_result=$?
87 | echo "asv update returned $asv_result for domain $TARGET"
88 | asv_result=0
89 | if [ -z "$ASV_QUICK" ]; then
90 | echo "Run Benchmarks for domain $TARGET"
91 | asv run --show-stderr --launch-method spawn --record-samples NEW && asv_result=$? || asv_result=$?
92 | else
93 | echo "Run Quick Benchmarks for domain $TARGET"
94 | asv run --quick --show-stderr && asv_result=$? || asv_result=$?
95 | fi
96 | date
97 | echo "asv run returned $asv_result for domain $TARGET"
98 | if [ $asv_result == 0 ]; then
99 | echo "Publish Benchmarks for domain $TARGET"
100 | asv publish && asv_result=$? || asv_result=$?
101 | echo "asv publish returned $asv_result for domain $TARGET"
102 | if [ $asv_result == 0 ]; then
103 | rm -rf /tmp/qiskit-app-benchmarks/$TARGET/*
104 | cp -r .asv/html/. /tmp/qiskit-app-benchmarks/$TARGET
105 | fi
106 | fi
107 | else
108 | rm -rf /tmp/qiskit-app-benchmarks/$TARGET/*
109 | cp -r ../docs/_build/html/$TARGET/. /tmp/qiskit-app-benchmarks/$TARGET
110 | echo "No Benchmark files found for domain $TARGET, run skipped."
111 | fi
112 | popd
113 | pushd /tmp/qiskit-app-benchmarks
114 | git add .
115 | # push only if there are changes
116 | if git diff-index --quiet HEAD --; then
117 | echo "Nothing to push for $TARGET."
118 | else
119 | echo "Push benchmark for $TARGET."
120 | git commit -m "[Benchmarks $TARGET] Automated documentation update"
121 | git push origin gh-pages
122 | fi
123 | popd
124 | done
125 |
126 | echo 'Final Cleanup'
127 | rm -f $GIT_ASKPASS
128 | unset GIT_ASKPASS
129 | rm -rf /tmp/qiskit-app-benchmarks
130 | rm -rf /tmp/qiskit-app-benchmarks-html
131 | echo "End of $BENCHMARK_BASENAME script."
132 |
--------------------------------------------------------------------------------
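benchmarks.sh is sourced by main_script.sh with three positional arguments (GitHub owner, user id and personal token); a hypothetical manual invocation from the repository root, inside an activated virtualenv with requirements-dev.txt installed (all three values are placeholders, ASV_QUICK is the script's own switch for a fast pass):

    export ASV_QUICK=1                      # optional: run asv in quick mode
    . tools/benchmarks.sh <git-owner> <git-userid> <git-personal-token>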
/tools/check_copyright.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2020, 2022.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """ Fix copyright year in header """
14 |
15 | from typing import Tuple, Union, List
16 | import sys
17 | import os
18 | import datetime
19 | import argparse
20 | import subprocess
21 | import traceback
22 |
23 |
24 | class CopyrightChecker:
25 | """Check copyright"""
26 |
27 | _UTF_STRING = "# -*- coding: utf-8 -*-"
28 | _COPYRIGHT_STRING = "# (C) Copyright IBM "
29 |
30 | def __init__(self, root_dir: str, check: bool) -> None:
31 | self._root_dir = root_dir
32 | self._check = check
33 | self._current_year = datetime.datetime.now().year
34 | self._changed_files = self._get_changed_files()
35 |
36 | @staticmethod
37 | def _exception_to_string(excp: Exception) -> str:
38 | stack = traceback.extract_stack()[:-3] + traceback.extract_tb(excp.__traceback__)
39 | pretty = traceback.format_list(stack)
40 | return "".join(pretty) + f"\n {excp.__class__} {excp}"
41 |
42 | @staticmethod
43 |     def _get_year_from_date(date) -> Union[None, int]:
44 | if not date or len(date) < 4:
45 | return None
46 |
47 | return int(date[:4])
48 |
49 | def _cmd_execute(self, args: List[str]) -> Tuple[str, Union[None, str]]:
50 | # execute command
51 | env = {}
52 | for k in ["SYSTEMROOT", "PATH"]:
53 | v = os.environ.get(k)
54 | if v is not None:
55 | env[k] = v
56 | # LANGUAGE is used on win32
57 | env["LANGUAGE"] = "C"
58 | env["LANG"] = "C"
59 | env["LC_ALL"] = "C"
60 | with subprocess.Popen(
61 | args,
62 | cwd=self._root_dir,
63 | env=env,
64 | stdin=subprocess.DEVNULL,
65 | stdout=subprocess.PIPE,
66 | stderr=subprocess.PIPE,
67 | ) as popen:
68 | out, err = popen.communicate()
69 | popen.wait()
70 | out_str = out.decode("utf-8").strip()
71 | err_str = err.decode("utf-8").strip()
72 | err_str = err_str if err_str else None
73 | return out_str, err_str
74 |
75 | def _get_changed_files(self) -> List[str]:
76 | out_str, err_str = self._cmd_execute(["git", "diff", "--name-only", "HEAD"])
77 | if err_str:
78 | raise Exception(err_str)
79 |
80 | return out_str.splitlines()
81 |
82 | def _get_file_last_year(self, relative_path: str) -> int:
83 | last_year = None
84 | errors = []
85 | try:
86 | out_str, err_str = self._cmd_execute(
87 | ["git", "log", "-1", "--format=%cI", relative_path]
88 | )
89 | last_year = CopyrightChecker._get_year_from_date(out_str)
90 | if err_str:
91 | errors.append(err_str)
92 | except Exception as ex: # pylint: disable=broad-except
93 | errors.append(f"'{relative_path}' Last year: {str(ex)}")
94 |
95 | if errors:
96 | raise ValueError(" - ".join(errors))
97 |
98 | return last_year
99 |
100 | def check_copyright(self, file_path) -> Tuple[bool, bool, bool]:
101 | """check copyright for a file"""
102 | file_with_utf8 = False
103 | file_with_invalid_year = False
104 | file_has_header = False
105 | try:
106 | new_line = "# (C) Copyright IBM "
107 | idx_utf8 = -1
108 | idx_new_line = -1
109 | file_lines = None
110 | with open(file_path, "rt", encoding="utf8") as file:
111 | file_lines = file.readlines()
112 | for idx, line in enumerate(file_lines):
113 | relative_path = os.path.relpath(file_path, self._root_dir)
114 | if line.startswith(CopyrightChecker._UTF_STRING):
115 | if self._check:
116 | print(f"File contains utf-8 header: '{relative_path}'")
117 | file_with_utf8 = True
118 | idx_utf8 = idx
119 |
120 | if not line.startswith(CopyrightChecker._COPYRIGHT_STRING):
121 | continue
122 |
123 | file_has_header = True
124 | curr_years = []
125 | for word in line.strip().split():
126 | for year in word.strip().split(","):
127 | if year.startswith("20") and len(year) >= 4:
128 | try:
129 | curr_years.append(int(year[0:4]))
130 | except ValueError:
131 | pass
132 |
133 | header_start_year = None
134 | header_last_year = None
135 | if len(curr_years) > 1:
136 | header_start_year = curr_years[0]
137 | header_last_year = curr_years[1]
138 | elif len(curr_years) == 1:
139 | header_start_year = header_last_year = curr_years[0]
140 |
141 | if relative_path in self._changed_files:
142 | self._changed_files.remove(relative_path)
143 | last_year = self._current_year
144 | else:
145 | last_year = self._get_file_last_year(relative_path)
146 | if last_year and header_last_year != last_year:
147 | if header_start_year and header_start_year != last_year:
148 | new_line += f"{header_start_year}, "
149 |
150 | new_line += f"{self._current_year}.\n"
151 | if self._check:
152 | print(
153 | f"Wrong Copyright Year:'{relative_path}': ",
154 | f"Current:'{line[:-1]}' Correct:'{new_line[:-1]}'",
155 | )
156 | file_with_invalid_year = True
157 | idx_new_line = idx
158 |
159 | break
160 | if not self._check and (idx_utf8 >= 0 or idx_new_line >= 0):
161 | if idx_new_line >= 0:
162 | file_lines[idx_new_line] = new_line
163 | if idx_utf8 >= 0:
164 | del file_lines[idx_utf8]
165 | with open(file_path, "w", encoding="utf8") as file:
166 | file.writelines(file_lines)
167 | if idx_new_line >= 0:
168 | file_with_invalid_year = False
169 | print(f"Fixed copyright year for {relative_path}.")
170 | if idx_utf8 >= 0:
171 | file_with_utf8 = False
172 | print(f"Removed utf-8 header for {relative_path}.")
173 |
174 | except UnicodeDecodeError:
175 | return file_with_utf8, file_with_invalid_year, file_has_header
176 |
177 | return file_with_utf8, file_with_invalid_year, file_has_header
178 |
179 | def check(self) -> Tuple[int, int, int]:
180 | """check copyright"""
181 | return self._check_copyright(self._root_dir)
182 |
183 | def _check_copyright(self, path: str) -> Tuple[int, int, int]:
184 | files_with_utf8 = 0
185 | files_with_invalid_year = 0
186 | files_with_header = 0
187 | for item in os.listdir(path):
188 | fullpath = os.path.join(path, item)
189 | if os.path.isdir(fullpath):
190 | if not item.startswith("."):
191 | files = self._check_copyright(fullpath)
192 | files_with_utf8 += files[0]
193 | files_with_invalid_year += files[1]
194 | files_with_header += files[2]
195 | continue
196 |
197 | if os.path.isfile(fullpath):
198 | # check copyright year
199 | (
200 | file_with_utf8,
201 | file_with_invalid_year,
202 | file_has_header,
203 | ) = self.check_copyright(fullpath)
204 | if file_with_utf8:
205 | files_with_utf8 += 1
206 | if file_with_invalid_year:
207 | files_with_invalid_year += 1
208 | if file_has_header:
209 | files_with_header += 1
210 |
211 | return files_with_utf8, files_with_invalid_year, files_with_header
212 |
213 |
214 | def check_path(path):
215 | """valid path argument"""
216 | if not path or os.path.isdir(path):
217 | return path
218 |
219 | raise argparse.ArgumentTypeError(f"readable_dir:{path} is not a valid path")
220 |
221 |
222 | if __name__ == "__main__":
223 | PARSER = argparse.ArgumentParser(description="Check Copyright Tool")
224 | PARSER.add_argument("-path", type=check_path, metavar="path", help="Root path of project.")
225 | PARSER.add_argument(
226 | "-check",
227 | required=False,
228 | action="store_true",
229 | help="Just check copyright, without fixing it.",
230 | )
231 |
232 | ARGS = PARSER.parse_args()
233 | if not ARGS.path:
234 | ARGS.path = os.getcwd()
235 |
236 | ARGS.path = os.path.abspath(os.path.realpath(os.path.expanduser(ARGS.path)))
237 | INVALID_UTF8, INVALID_YEAR, HAS_HEADER = CopyrightChecker(ARGS.path, ARGS.check).check()
238 | print(f"{INVALID_UTF8} files have utf8 headers.")
239 | print(f"{INVALID_YEAR} of {HAS_HEADER} files with copyright header have wrong years.")
240 |
241 | sys.exit(0 if INVALID_UTF8 == 0 and INVALID_YEAR == 0 else 1)
242 |
--------------------------------------------------------------------------------
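As the argparse block above suggests, check_copyright.py can either report or fix headers; typical invocations from the repository root (-path defaults to the current directory):

    python tools/check_copyright.py -check   # report only, exit code 1 if problems are found
    python tools/check_copyright.py          # fix wrong years / utf-8 headers in place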
/tools/check_version.py:
--------------------------------------------------------------------------------
1 | # This code is part of Qiskit.
2 | #
3 | # (C) Copyright IBM 2021, 2023.
4 | #
5 | # This code is licensed under the Apache License, Version 2.0. You may
6 | # obtain a copy of this license in the LICENSE.txt file in the root directory
7 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
8 | #
9 | # Any modifications or derivative works of this code must retain this
10 | # copyright notice, and modified files need to carry a notice indicating
11 | # that they have been altered from the originals.
12 |
13 | """ Checks that benchmark classes have a version property """
14 |
15 | from abc import ABC
16 | from typing import List
17 | import sys
18 | import os
19 | import argparse
20 | import inspect
21 | import pkgutil
22 | import importlib
23 |
24 |
25 | class VersionChecker:
26 | """Check existence of version property"""
27 |
28 | _VERSION_NAME = "version"
29 |
30 | def __init__(self, root_path: str, package: str) -> None:
31 | self._root_path = root_path
32 | self._package = package
33 |
34 | def check(self) -> int:
35 |         """check version property"""
36 | return self._check(os.path.join(self._root_path, self._package), self._package)
37 |
38 | def _check(self, path: str, package: str) -> int:
39 | ret_code = 0
40 | for _, name, ispackage in pkgutil.iter_modules([path]):
41 | if ispackage:
42 | continue
43 |
44 | # Iterate through the modules
45 | fullname = package + "." + name
46 | modspec = importlib.util.find_spec(fullname) # type: ignore[attr-defined]
47 | mod = importlib.util.module_from_spec(modspec) # type: ignore[attr-defined]
48 | modspec.loader.exec_module(mod)
49 | for _, cls in inspect.getmembers(mod, inspect.isclass):
50 | # Iterate through the classes defined on the module.
51 | if cls.__module__ == modspec.name:
52 | if ABC not in cls.__bases__:
53 | name = ".".join([cls.__module__, cls.__name__])
54 | try:
55 | _ = getattr(cls, VersionChecker._VERSION_NAME)
56 | except AttributeError as ex:
57 | print(f"Error: Class {name}: {ex}")
58 | ret_code = 1
59 |
60 | for item in sorted(os.listdir(path)):
61 | full_path = os.path.join(path, item)
62 | if item not in [
63 | f"qiskit-{self._package.replace('_', '-')}",
64 | "__pycache__",
65 | ".asv",
66 | ] and os.path.isdir(full_path):
67 | ret = self._check(full_path, package + "." + item)
68 | if ret != 0:
69 | ret_code = ret
70 |
71 | return ret_code
72 |
73 |
74 | def _check_version_property(modules: List[str]) -> int:
75 | root = os.path.abspath(".")
76 | sys.path.insert(0, root)
77 | ret_code = 0
78 | for module in modules:
79 | ret = VersionChecker(root, module).check()
80 | if ret != 0:
81 | ret_code = ret
82 |
83 | return ret_code
84 |
85 |
86 | if __name__ == "__main__":
87 | parser = argparse.ArgumentParser(description="Check benchmark versions.")
88 | parser.add_argument(
89 | "modules",
90 | type=str,
91 | nargs="*",
92 | default=["finance", "machine_learning", "nature", "optimization"],
93 | help="Modules to scan",
94 | )
95 | args = parser.parse_args()
96 | RET = _check_version_property(args.modules)
97 | sys.exit(RET)
98 |
--------------------------------------------------------------------------------
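check_version.py expects to be started from the repository root (it puts the current directory on sys.path before importing the benchmark modules); example runs:

    python tools/check_version.py                                 # all four default domains
    python tools/check_version.py optimization machine_learning   # selected domains only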
/tools/cron_script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2021, 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | # Script parameters
15 | CRON_BASENAME=$0
16 | GIT_OWNER=$1
17 | GIT_USERID=$2
18 | GIT_PERSONAL_TOKEN=$3
19 |
20 | set -e
21 |
22 | echo "Start script $CRON_BASENAME."
23 |
24 | # lock file with this file name and containing the pid
25 | CRON_LOCKFILE=/tmp/`basename $CRON_BASENAME`.lock
26 |
27 | if [ -f $CRON_LOCKFILE ]; then
28 | if ps -p `cat $CRON_LOCKFILE` > /dev/null 2>&1; then
29 | echo "Script $CRON_BASENAME is still running."
30 | echo "End of $CRON_BASENAME script."
31 | exit 0
32 | fi
33 | fi
34 | echo $$ > $CRON_LOCKFILE
35 |
36 | # Removes the file if:
37 | # EXIT - normal termination
38 | # SIGHUP - termination of the controlling process
39 | # SIGKILL - immediate program termination
40 | # SIGINT - program interrupt INTR character
41 | # SIGQUIT - program interrupt QUIT character
42 | # SIGTERM - program termination by kill
43 | trap 'rm -f "$CRON_LOCKFILE" >/dev/null 2>&1' EXIT HUP KILL INT QUIT TERM
44 |
45 | echo 'Pull latest benchmarks repository files'
46 | git pull origin main --no-rebase
47 |
48 | CRON_SCRIPT_PATH=$(dirname $(readlink -f "${CRON_BASENAME}"))
49 |
50 | echo 'Run main script'
51 | . $CRON_SCRIPT_PATH/main_script.sh $GIT_OWNER $GIT_USERID $GIT_PERSONAL_TOKEN || true
52 |
53 | echo "End of $CRON_BASENAME script."
54 |
--------------------------------------------------------------------------------
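cron_script.sh serializes runs through a lock file and then delegates to main_script.sh, so it is the intended entry point for a scheduler; a hypothetical crontab line (schedule, repository path and credentials are all placeholders):

    0 3 * * * cd /path/to/qiskit-app-benchmarks && bash tools/cron_script.sh <git-owner> <git-userid> <git-personal-token> >> /tmp/cron-logs/cron_wrapper.txt 2>&1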
/tools/install_rust.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | # Script parameters
15 | START_BASENAME=${BASH_SOURCE}
16 |
17 | set -e
18 |
19 | echo "Start script $START_BASENAME."
20 |
21 | echo 'Install Rust'
22 | curl https://sh.rustup.rs -sSf | sh -s -- --default-toolchain stable --profile default --no-modify-path -y
23 |
24 | echo "End of $START_BASENAME script."
25 |
--------------------------------------------------------------------------------
/tools/main_script.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | # Script parameters
15 | MAIN_BASENAME=${BASH_SOURCE}
16 | GIT_OWNER=$1
17 | GIT_USERID=$2
18 | GIT_PERSONAL_TOKEN=$3
19 |
20 | set -e
21 |
22 | echo "Start script $MAIN_BASENAME."
23 |
24 | echo 'Remove previous python environment if it exists'
25 | rm -rf /tmp/benchmarks-env
26 |
27 | echo 'Create python environment'
28 | env_retval=0
29 | python3.8 -m venv /tmp/benchmarks-env && env_retval=$? || env_retval=$?
30 | if [ $env_retval -ne 0 ]; then
31 | echo "Python environment creation failed. Error: $env_retval"
32 | echo "End of $MAIN_BASENAME script."
33 | return $env_retval
34 | else
35 | echo 'Python environment creation succeeded.'
36 | fi
37 |
38 | echo 'Activate python environment'
39 | source /tmp/benchmarks-env/bin/activate
40 |
41 | echo 'Upgrade pip'
42 | pip install -U pip
43 |
44 | echo 'Update benchmarks repository dependencies'
45 | pip install -U -r requirements-dev.txt
46 |
47 | BASE_DIR=/tmp/cron-logs
48 | mkdir -p ${BASE_DIR}
49 | FILE_PREFIX=cron_
50 | FILE_SUFFIX=.txt
51 |
52 | echo 'Remove cron log files older than 30 days'
53 | find ${BASE_DIR} -maxdepth 1 -type f -name "${FILE_PREFIX}*${FILE_SUFFIX}" -mtime +30 -delete
54 |
55 | MAIN_SCRIPT_PATH=$(dirname $(readlink -f "${MAIN_BASENAME}"))
56 | ENC_FILE_PATH=$(dirname $(dirname ${MAIN_SCRIPT_PATH}))/benchmarks-secrets.json.asc
57 |
58 | DATE=$(date +%Y%m%d%H%M%S)
59 |
60 | echo "Environment HOME=$HOME"
61 |
62 | echo 'Install Rust'
63 | export CARGO_HOME=/tmp/cargo
64 | export RUSTUP_HOME=/tmp/rustup
65 | rm -rf $CARGO_HOME
66 | rm -rf $RUSTUP_HOME
67 |
68 | MAIN_LOG_FILE="${BASE_DIR}/${FILE_PREFIX}${DATE}_RUST${FILE_SUFFIX}"
69 |
70 | echo 'Run Install Rust script'
71 | rust_retval=0
72 | . $MAIN_SCRIPT_PATH/install_rust.sh 2>&1 | tee ${MAIN_LOG_FILE} && rust_retval=$? || rust_retval=$?
73 |
74 | export PATH="$PATH:$CARGO_HOME/bin"
75 | echo "Environment PATH=$PATH"
76 |
77 | echo "Posting Rust install to Slack"
78 | retval=0
79 | python $MAIN_SCRIPT_PATH/send_notification.py -key $GIT_PERSONAL_TOKEN -encryptedfile $ENC_FILE_PATH -logfile $MAIN_LOG_FILE && retval=$? || retval=$?
80 | if [ $retval -ne 0 ]; then
81 | echo "Rust Install post to Slack failed. Error: $retval"
82 | else
83 | echo 'Rust Install post to Slack succeeded.'
84 | fi
85 |
86 | if [ $rust_retval -ne 0 ]; then
87 | echo "Rust Install failed. Error: $rust_retval"
88 | echo "End of $MAIN_BASENAME script."
89 | return $rust_retval
90 | fi
91 |
92 | MAIN_LOG_FILE="${BASE_DIR}/${FILE_PREFIX}${DATE}_GPU${FILE_SUFFIX}"
93 |
94 | echo 'Run GPU Unit tests script'
95 | . $MAIN_SCRIPT_PATH/ml_unittests.sh $GIT_PERSONAL_TOKEN 2>&1 | tee ${MAIN_LOG_FILE} || true
96 |
97 | echo "Posting GPU logs to Slack"
98 | retval=0
99 | python $MAIN_SCRIPT_PATH/send_notification.py -key $GIT_PERSONAL_TOKEN -encryptedfile $ENC_FILE_PATH -logfile $MAIN_LOG_FILE && retval=$? || retval=$?
100 | if [ $retval -ne 0 ]; then
101 | echo "GPU Logs post to Slack failed. Error: $retval"
102 | else
103 | echo 'GPU Logs post to Slack succeeded.'
104 | fi
105 |
106 | MAIN_LOG_FILE="${BASE_DIR}/${FILE_PREFIX}${DATE}_ASV${FILE_SUFFIX}"
107 |
108 | echo 'Run benchmarks script'
109 | . $MAIN_SCRIPT_PATH/benchmarks.sh $GIT_OWNER $GIT_USERID $GIT_PERSONAL_TOKEN 2>&1 | tee ${MAIN_LOG_FILE} || true
110 |
111 | echo "Posting Benchmarks logs to Slack"
112 | retval=0
113 | python $MAIN_SCRIPT_PATH/send_notification.py -key $GIT_PERSONAL_TOKEN -encryptedfile $ENC_FILE_PATH -logfile $MAIN_LOG_FILE && retval=$? || retval=$?
114 | if [ $retval -ne 0 ]; then
115 | echo "Benchmarks Logs post to Slack failed. Error: $retval"
116 | else
117 | echo 'Benchmarks Logs post to Slack succeeded.'
118 | fi
119 |
120 | echo "End of $MAIN_BASENAME script."
121 |
--------------------------------------------------------------------------------
/tools/ml_unittests.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | # This script prepares the current environment by installing
15 | # dependencies then runs unit tests
16 |
17 | # Script parameters
18 | ML_BASENAME=${BASH_SOURCE}
19 | GIT_PERSONAL_TOKEN=$1
20 |
21 | set -e
22 |
23 | echo "Start script $ML_BASENAME."
24 |
25 | echo 'Update benchmarks repository dependencies'
26 | pip install -U -r requirements-dev.txt
27 |
28 | BASE_DIR=/tmp/ml
29 | mkdir -p ${BASE_DIR}
30 |
31 | ML_DIR=${BASE_DIR}/qiskit-machine-learning
32 | rm -rf ${ML_DIR}
33 |
34 | echo 'Clone Qiskit Machine Learning'
35 | git clone https://github.com/Qiskit/qiskit-machine-learning.git ${ML_DIR}
36 |
37 | echo 'Run unit tests with tox'
38 | pushd ${ML_DIR}
39 | tox -e gpu || true
40 | popd
41 |
42 | echo 'Final cleanup'
43 | rm -rf ${ML_DIR}
44 | echo "End of $ML_BASENAME script."
45 |
--------------------------------------------------------------------------------
/tools/send_notification.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2022.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | """Utility script to send notifications to Slack"""
15 |
16 | from typing import Tuple, Union, List
17 | import sys
18 | import os
19 | import json
20 | import argparse
21 | from pathlib import Path
22 | import subprocess
23 | import requests
24 |
25 |
26 | def _cmd_execute(args: List[str]) -> Tuple[str, Union[None, str]]:
27 | """execute command"""
28 | env = {}
29 | for k in ["SYSTEMROOT", "PATH"]:
30 | v = os.environ.get(k)
31 | if v is not None:
32 | env[k] = v
33 | # LANGUAGE is used on win32
34 | env["LANGUAGE"] = "C"
35 | env["LANG"] = "C"
36 | env["LC_ALL"] = "C"
37 | with subprocess.Popen(
38 | args,
39 | cwd=os.getcwd(),
40 | env=env,
41 | stdin=subprocess.DEVNULL,
42 | stdout=subprocess.PIPE,
43 | stderr=subprocess.PIPE,
44 | ) as popen:
45 | out, err = popen.communicate()
46 | popen.wait()
47 | out_str = out.decode("utf-8").strip()
48 | err_str = err.decode("utf-8").strip()
49 | err_str = err_str if err_str else None
50 | return out_str, err_str
51 |
52 |
53 | def _get_webhook_url(key: str, encryptedfile: str) -> str:
54 | """decrypts file and return web hook URL"""
55 | path = Path(encryptedfile).resolve()
56 | path_str = str(path)
57 | if not path.exists() or not path.is_file():
58 | raise ValueError(f"GPG error: Invalid file path {path_str}")
59 | out_str, err_str = _cmd_execute(["gpg", "-d", "--batch", "--passphrase", key, path_str])
60 | if not out_str:
61 | if err_str:
62 | raise ValueError(f"GPG error: {err_str}")
63 | raise ValueError("GPG error: empty decrypted data")
64 |
65 | data = json.loads(out_str)
66 | return data["secrets"]["slack-app-url"]
67 |
68 |
69 | def _send_notification(key: str, encryptedfile: str, path: str) -> None:
70 | """Sends notification to Slack"""
71 | webhook_url = _get_webhook_url(key, encryptedfile)
72 | with open(path, "rt", encoding="utf8") as file:
73 | text = file.read()
74 |
75 | blocks = [
76 | {
77 | "type": "rich_text",
78 | "elements": [
79 | {"type": "rich_text_preformatted", "elements": [{"type": "text", "text": text}]}
80 | ],
81 | },
82 | ]
83 | slack_data = {
84 | "blocks": blocks,
85 | }
86 | response = requests.post(
87 | webhook_url, data=json.dumps(slack_data), headers={"Content-Type": "application/json"}
88 | )
89 | if response.status_code != 200:
90 | raise ValueError(
91 | f"Request to Slack returned an error {response.status_code}, "
92 | f"the response is:\n{response.text}"
93 | )
94 |
95 |
96 | def _check_path(path: str) -> str:
97 | """valid path argument"""
98 | if path and os.path.isdir(path):
99 | raise argparse.ArgumentTypeError(f"path:{path} is not a valid file path")
100 | return path
101 |
102 |
103 | if __name__ == "__main__":
104 | PARSER = argparse.ArgumentParser(description="Sends notifications to Slack")
105 | PARSER.add_argument("-key", metavar="key", required=True, help="Encrypted file key")
106 | PARSER.add_argument(
107 | "-encryptedfile",
108 | type=_check_path,
109 | metavar="encryptedfile",
110 | required=True,
111 | help="Encrypted file path.",
112 | )
113 | PARSER.add_argument(
114 | "-logfile",
115 | type=_check_path,
116 | metavar="logfile",
117 | required=True,
118 |         help="Log file path.",
119 | )
120 |
121 | STATUS = 0
122 | try:
123 | ARGS = PARSER.parse_args()
124 | _send_notification(ARGS.key, ARGS.encryptedfile, ARGS.logfile)
125 | except Exception as ex: # pylint: disable=broad-except
126 | print(str(ex))
127 | STATUS = 1
128 |
129 | sys.exit(STATUS)
130 |
--------------------------------------------------------------------------------
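send_notification.py is invoked by main_script.sh after each phase; the same call can be reproduced by hand, e.g. (the token, the encrypted-secrets path and the log file name are placeholders following that script's conventions):

    python tools/send_notification.py \
        -key "$GIT_PERSONAL_TOKEN" \
        -encryptedfile /path/to/benchmarks-secrets.json.asc \
        -logfile /tmp/cron-logs/cron_20220101000000_ASV.txt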
/tools/verify_headers.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # This code is part of Qiskit.
3 | #
4 | # (C) Copyright IBM 2020, 2021.
5 | #
6 | # This code is licensed under the Apache License, Version 2.0. You may
7 | # obtain a copy of this license in the LICENSE.txt file in the root directory
8 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
9 | #
10 | # Any modifications or derivative works of this code must retain this
11 | # copyright notice, and modified files need to carry a notice indicating
12 | # that they have been altered from the originals.
13 |
14 | """Utility script to verify qiskit copyright file headers"""
15 |
16 | import argparse
17 | import multiprocessing
18 | import os
19 | import sys
20 | import re
21 |
22 | # regex for character encoding from PEP 263
23 | pep263 = re.compile(r"^[ \t\f]*#.*?coding[:=][ \t]*([-_.a-zA-Z0-9]+)")
24 |
25 |
26 | def discover_files(code_paths, exclude_folders):
27 | """Find all .py, .pyx, .pxd files in a list of trees"""
28 | out_paths = []
29 | for path in code_paths:
30 | if os.path.isfile(path):
31 | out_paths.append(path)
32 | else:
33 | for directory in os.walk(path):
34 | dir_path = directory[0]
35 | for folder in exclude_folders:
36 | if folder in directory[1]:
37 | directory[1].remove(folder)
38 | for subfile in directory[2]:
39 | if (
40 | subfile.endswith(".py")
41 | or subfile.endswith(".pyx")
42 | or subfile.endswith(".pxd")
43 | ):
44 | out_paths.append(os.path.join(dir_path, subfile))
45 | return out_paths
46 |
47 |
48 | def validate_header(file_path):
49 | """Validate the header for a single file"""
50 | header = """# This code is part of Qiskit.
51 | #
52 | """
53 | apache_text = """#
54 | # This code is licensed under the Apache License, Version 2.0. You may
55 | # obtain a copy of this license in the LICENSE.txt file in the root directory
56 | # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
57 | #
58 | # Any modifications or derivative works of this code must retain this
59 | # copyright notice, and modified files need to carry a notice indicating
60 | # that they have been altered from the originals.
61 | """
62 | count = 0
63 | with open(file_path, encoding="utf8") as code_file:
64 | lines = code_file.readlines()
65 | start = 0
66 | for index, line in enumerate(lines):
67 | count += 1
68 | if count > 5:
69 | return file_path, False, "Header not found in first 5 lines"
70 | if count <= 2 and pep263.match(line):
71 | return file_path, False, "Unnecessary encoding specification (PEP 263, 3120)"
72 | if line == "# This code is part of Qiskit.\n":
73 | start = index
74 | break
75 | if "".join(lines[start : start + 2]) != header:
76 | return file_path, False, f"Header up to copyright line does not match: {header}"
77 | if not lines[start + 2].startswith("# (C) Copyright IBM 20"):
78 | return file_path, False, "Header copyright line not found"
79 | if "".join(lines[start + 3 : start + 11]) != apache_text:
80 | return file_path, False, f"Header apache text string doesn't match:\n {apache_text}"
81 | return file_path, True, None
82 |
83 |
84 | def _main():
85 | default_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
86 | parser = argparse.ArgumentParser(description="Check file headers.")
87 | parser.add_argument(
88 | "paths",
89 | type=str,
90 | nargs="*",
91 | default=[default_path],
92 |         help="Paths to scan; defaults to the repository root (the script's parent directory)",
93 | )
94 | args = parser.parse_args()
95 | files = discover_files(
96 | args.paths,
97 | exclude_folders=[
98 | ".asv",
99 | "qiskit-finance",
100 | "qiskit-machine-learning",
101 | "qiskit-nature",
102 | "qiskit-optimization",
103 | ],
104 | )
105 | with multiprocessing.Pool() as pool:
106 | res = pool.map(validate_header, files)
107 | failed_files = [x for x in res if x[1] is False]
108 | if len(failed_files) > 0:
109 | for failed_file in failed_files:
110 | sys.stderr.write(f"{failed_file[0]} failed header check because:\n")
111 | sys.stderr.write(f"{failed_file[2]}\n\n")
112 | sys.exit(1)
113 | sys.exit(0)
114 |
115 |
116 | if __name__ == "__main__":
117 | _main()
118 |
--------------------------------------------------------------------------------
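verify_headers.py takes zero or more paths and defaults to the repository root; typical runs:

    python tools/verify_headers.py                      # scan the whole repository
    python tools/verify_headers.py optimization tools   # scan selected trees only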