├── .gitattributes ├── .github └── workflows │ ├── joss_draft.yml │ └── python-app.yml ├── .gitignore ├── .readthedocs.yaml ├── LICENSE ├── README.md ├── _config.yml ├── docs ├── .github │ ├── CODE_OF_CONDUCT.md │ └── CONTRIBUTING.md ├── Makefile ├── make.bat ├── requirements.txt └── source │ ├── _static │ └── custom.css │ ├── _templates │ └── breadcrumbs.html │ ├── api_ref │ ├── pownet.core.rst │ ├── pownet.data_model.rst │ ├── pownet.optim_model.constraints.rst │ ├── pownet.optim_model.rst │ ├── pownet.reservoir.rst │ ├── pownet.rst │ └── pownet.stochastic.rst │ ├── conf.py │ ├── examples │ ├── custom_workflow.nblink │ ├── quickstart.nblink │ ├── reservoir_reoperation.nblink │ ├── reservoir_simulation.nblink │ ├── synthetic_load.nblink │ └── synthetic_solar.nblink │ ├── getting_started │ ├── installation.rst │ └── introduction.rst │ ├── index.rst │ ├── reference │ ├── cite_pownet.rst │ ├── contributing.rst │ ├── developers.rst │ ├── glossary.rst │ └── publications.rst │ ├── references.bib │ ├── technical │ ├── dispatchable.rst │ ├── linear_power_flow.rst │ ├── math_formulation.rst │ ├── reservoir_model.rst │ └── time_series_models.rst │ └── user_guide │ └── input_files.rst ├── examples ├── custom_workflow.ipynb ├── quickstart.ipynb ├── reservoir_reoperation.ipynb ├── reservoir_simulation.ipynb ├── synthetic_load.ipynb └── synthetic_solar.ipynb ├── images ├── complex_river.png ├── dummy_system.png ├── hydro_system.png └── solar_ess.png ├── joss_submission ├── overview_pownet.png ├── paper.bib └── paper.md ├── model_library ├── complex_river │ ├── flow_path.csv │ ├── inflow.csv │ ├── minimum_flow.csv │ └── reservoir_unit.csv ├── dummy │ ├── contract_cost.csv │ ├── demand_export.csv │ ├── hydropower.csv │ ├── import.csv │ ├── nondispatch_unit.csv │ ├── thermal_unit.csv │ └── transmission.csv ├── hydro_system │ ├── contract_cost.csv │ ├── demand_export.csv │ ├── hydropower_daily.csv │ ├── nondispatch_unit.csv │ ├── reservoir_data │ │ ├── flow_path.csv │ │ ├── 
inflow.csv │ │ ├── minimum_flow.csv │ │ └── reservoir_unit.csv │ ├── thermal_unit.csv │ └── transmission.csv ├── solar_ess │ ├── contract_cost.csv │ ├── demand_export.csv │ ├── energy_storage.csv │ ├── nondispatch_unit.csv │ ├── solar.csv │ ├── thermal_unit.csv │ └── transmission.csv ├── synthetic_timeseries │ ├── demand_export.csv │ ├── solar.csv │ └── thailand_2023_weather.csv └── test_flow │ ├── contract_cost.csv │ ├── demand_export.csv │ ├── thermal_unit.csv │ └── transmission.csv ├── pyproject.toml ├── src ├── pownet │ ├── __init__.py │ ├── builder │ │ ├── __init__.py │ │ ├── basebuilder.py │ │ ├── energy_storage.py │ │ ├── hydro.py │ │ ├── nondispatch.py │ │ ├── system.py │ │ └── thermal.py │ ├── core │ │ ├── __init__.py │ │ ├── data_processor.py │ │ ├── model_builder.py │ │ ├── output.py │ │ ├── record.py │ │ ├── simulation.py │ │ ├── user_constraint.py │ │ └── visualizer.py │ ├── coupler.py │ ├── data_model │ │ ├── __init__.py │ │ └── reservoir.py │ ├── data_utils.py │ ├── database │ │ ├── fuels.csv │ │ └── transmission_params.csv │ ├── folder_utils.py │ ├── input.py │ ├── optim_model │ │ ├── __init__.py │ │ ├── constraints │ │ │ ├── __init__.py │ │ │ ├── energy_storage_constr.py │ │ │ ├── nondispatch_constr.py │ │ │ ├── system_constr.py │ │ │ └── thermal_unit_constr.py │ │ ├── model.py │ │ ├── objfunc.py │ │ ├── rounding_algo.py │ │ └── variable_func.py │ ├── reservoir │ │ ├── __init__.py │ │ ├── manager.py │ │ ├── reservoir.py │ │ ├── reservoir_functions.py │ │ └── solve_release.py │ └── stochastic │ │ ├── __init__.py │ │ ├── demand.py │ │ ├── kirsch_nowak.py │ │ ├── solar.py │ │ ├── timeseries_model.py │ │ └── timeseries_utils.py └── test_pownet │ ├── __init__.py │ ├── test_builder │ ├── __init__.py │ └── test_basebuilder.py │ ├── test_core │ ├── __init__.py │ ├── test_data_processor.py │ └── test_model_builder.py │ ├── test_coupler.py │ ├── test_data_model │ ├── __init__.py │ └── test_reservoir.py │ ├── test_data_utils.py │ ├── test_folder_utils.py │ 
├── test_model.py │ ├── test_model_library │ └── dummy │ │ ├── contract_cost.csv │ │ ├── demand_export.csv │ │ ├── hydropower.csv │ │ ├── import.csv │ │ ├── nondispatch_unit.csv │ │ ├── pownet_cycle_map.json │ │ ├── pownet_derate_factor.csv │ │ ├── pownet_derated_capacity.csv │ │ ├── pownet_thermal_derated_capacity.csv │ │ ├── pownet_transmission.csv │ │ ├── thermal_unit.csv │ │ └── transmission.csv │ ├── test_optim_model │ ├── __init__.py │ ├── test_constraints │ │ ├── __init__.py │ │ ├── test_energy_storage_constr.py │ │ ├── test_nondispatch_constr.py │ │ ├── test_system_constr.py │ │ └── test_thermal_unit_constr.py │ ├── test_objfunc.py │ ├── test_rounding_algo.py │ └── test_variable_func.py │ ├── test_reservoir │ ├── __init__.py │ └── test_reservoir_functions.py │ └── test_stochastic │ ├── __init__.py │ └── test_timeseries_model.py └── temp └── merra_2019.csv /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/joss_draft.yml: -------------------------------------------------------------------------------- 1 | name: JOSS draft 2 | on: [push] 3 | 4 | jobs: 5 | paper: 6 | runs-on: ubuntu-latest 7 | name: Paper Draft 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v4 11 | - name: Build draft PDF 12 | uses: openjournals/openjournals-draft-action@master 13 | with: 14 | journal: joss 15 | # This should be the path to the paper within your repo. 16 | paper-path: joss_submission/paper.md 17 | - name: Upload 18 | uses: actions/upload-artifact@v4 19 | with: 20 | name: paper 21 | # This is the output path where Pandoc will write the compiled 22 | # PDF. 
Note, this should be the same directory as the input 23 | # paper.md 24 | path: joss_submission/paper.pdf 25 | -------------------------------------------------------------------------------- /.github/workflows/python-app.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python 3 | 4 | name: CI Tests 5 | 6 | on: 7 | push: 8 | branches: [ "master" ] 9 | pull_request: 10 | branches: [ "master" ] 11 | 12 | permissions: 13 | contents: read 14 | 15 | jobs: 16 | build: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - name: Set up Python 3.10 24 | uses: actions/setup-python@v3 25 | with: 26 | python-version: "3.10" 27 | 28 | - name: Install dependencies 29 | run: | 30 | python -m pip install --upgrade pip 31 | pip install flake8 32 | pip install . 33 | 34 | - name: Lint with flake8 35 | run: | 36 | # stop the build if there are Python syntax errors or undefined names 37 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 38 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 39 | flake8 . 
--ignore=E501,W503,E266,C901 --count --exit-zero --max-complexity=10 --statistics 40 | 41 | - name: Unit testing 42 | run: | 43 | coverage run -m unittest discover src/test_pownet 44 | echo "--------------------------------------------------" # For clarity in logs 45 | echo "DEBUG: Coverage text report from CI environment:" 46 | coverage report -m # <<< THIS IS THE CRITICAL OUTPUT NEEDED 47 | echo "--------------------------------------------------" 48 | coverage json 49 | COVERAGE_PERCENT=$(python -c "import json; print(json.load(open('coverage.json'))['totals']['percent_covered_display'])") 50 | echo "Total Coverage: $COVERAGE_PERCENT%" 51 | echo "total=$COVERAGE_PERCENT" >> $GITHUB_ENV 52 | 53 | - name: Code coverage 54 | uses: schneegans/dynamic-badges-action@v1.4.0 55 | with: 56 | auth: ${{ secrets.GIST_TOKEN }} 57 | gistID: 23c151ad08ede7f698ce7cfbc2c09a0a 58 | filename: covbadge.json 59 | label: Coverage 60 | message: ${{ env.total }}% 61 | minColorRange: 50 62 | maxColorRange: 90 63 | valColorRange: ${{ env.total }} 64 | 65 | 66 | 67 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | # Ignore testing script 106 | src/scratch.py 107 | 108 | # Ignore irrelevant files 109 | *.pyc 110 | *.pptx 111 | temp/* 112 | 113 | # Ignore outputs 114 | outputs/* 115 | 116 | # Ignore user inputs 117 | user_inputs/* 118 | 119 | # Ignore subfolders in the analysis folder 120 | src/analysis/decom_files/* 121 | src/analysis/results/* 122 | scripts/* 123 | 124 | # Pownet generated files 125 | /model_library/*/pownet_*.csv 126 | /model_library/*/pownet_*.json 127 | coverage.json 128 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: "2" 2 | 3 | build: 4 | os: "ubuntu-22.04" 5 | tools: 6 | python: "3.10" 7 | 8 | python: 9 | install: 10 | # Install dependencies for Read the Docs 11 | - requirements: 
docs/requirements.txt 12 | # Install PowNet package 13 | - method: pip 14 | path: . 15 | 16 | sphinx: 17 | configuration: docs/source/conf.py 18 | 19 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | 4 | Copyright (c) 2024 Phumthep Bunnak 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a 7 | copy of this software and associated documentation files (the "Software"), to 8 | deal in the Software without restriction, including without limitation the 9 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 10 | sell copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included 14 | in all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 17 | OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 19 | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 21 | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 22 | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |  2 | [](https://github.com/Critical-Infrastructure-Systems-Lab/PowNet/actions) 3 | [](https://github.com/Critical-Infrastructure-Systems-Lab/PowNet/actions) 4 | [](https://github.com/Critical-Infrastructure-Systems-Lab/PowNet/releases/tag/v2.0) 5 | [](https://pownet.readthedocs.io/en/latest/) 6 | [](https://joss.theoj.org/papers/f7509a62fde550bec7ae3d1da0181b7d) 7 | 8 | # PowNet: A Production Cost Modeling Framework for Large-scale Power Systems 9 | PowNet is an open-source production cost model (PCM) framework written in Python, designed to simulate the operational scheduling of large-scale (regional or national) power systems. It determines the least-cost schedule for power generation by solving the unit commitment (UC) and economic dispatch (ED) problems – a process commonly used for applications like day-ahead market simulation. Designed for users focused on power system analysis rather than complex model development, PowNet uses simple spreadsheet files for data inputs, significantly simplifying model setup and modification. As for advanced users, the framework's modular design provides modeling flexibility to implement customized analysis or explore complex modeling algorithms. 
10 | 11 | ## Key functionalities 12 | - Models power systems including generator techno-economic constraints and network limits 13 | - Uses computationally efficient linearized DC power flow for network analysis 14 | - Supports variable renewable energy (VRE) sources, such as hydro, wind, and solar 15 | - Built for computational speed using the high-performance Gurobipy modeling framework with support for the open-source HiGHS solver 16 | - Includes features allowing the analysis of water-energy nexus impacts (e.g., drought effects on thermal units) 17 | 18 | PowNet enables analysts to readily study grid operations, VRE integration, and water-energy interactions in a low-code environment. For advanced users, it retains the flexibility needed to explore complex model configurations and research modeling algorithms. 19 | 20 | Read the Documentation for more information at https://pownet.readthedocs.io/en/latest. 21 | 22 | ## Using PowNet 23 | To use PowNet, a user needs to supply it with CSV files. For guidance on creating these CSV files, please see examples provided [here](https://github.com/Critical-Infrastructure-Systems-Lab/PowNet/tree/master/model_library). Please ensure that column names match those from the examples. Details on preparing some input files can be found [here](https://critical-infrastructure-systems-lab.github.io/manual/docs/CIS-Lab-software). 24 | 25 | As for installing PowNet, there are multiple options depending on whether we want to modify the source code. However, the following step is highly recommended for any user: creating a virtual environment to manage dependencies. If using Conda, we can create an environment with the following command 26 | 27 | ``` 28 | conda create --name your_env_name_here 29 | conda activate your_env_name_here 30 | ``` 31 | 32 | If deciding on a name for the environment takes too long, please feel free to name the environment as "pownet". 
33 | 34 | ### Option 1: Regular user 35 | A regular user is someone who has created their input files and wishes to just run PowNet. In this case, it is best to simply install PowNet as a package from PyPI. We can achieve this with the following command: 36 | 37 | ``` 38 | pip install pownet 39 | ``` 40 | 41 | Once the package has been installed, we can now go to our working directory. In this example, we assume the following folder structure: 42 | 43 | ``` 44 | working_directory/ 45 | ├── scripts/ 46 | │   └── run_quickstart.py 47 | ├── model_library/ 48 | │   └── dummy/ 49 | │       ├── demand_export.csv 50 | │       ├── thermal_unit.csv 51 | │       ├── nondispatch_unit.csv 52 | │       ├── hydropower.csv 53 | │       ├── import.csv 54 | │       ├── contract_cost.csv 55 | │       └── transmission.csv 56 | └── outputs/ 57 | ``` 58 | 59 | A tutorial "[quickstart.ipynb](./examples/quickstart.ipynb)" provides an example of running a simulation, saving the simulation outputs, and visualizing the outputs. 60 | 61 | ### Option 2: Power user (no pun intended) 62 | In case we wish to modify the source code, PowNet should be installed as an editable package. First, download or clone the PowNet repository to your local machine. For example: if we want to clone to "C://user/pownet", 63 | 64 | ``` 65 | git clone https://github.com/your-username/pownet.git C://user/pownet 66 | ``` 67 | 68 | Next, open a terminal and navigate to the directory where we cloned the repository: 69 | 70 | ``` 71 | cd C://user/pownet 72 | ``` 73 | 74 | Now, we can install this PowNet package using pip, which is a manager for Python packages: 75 | 76 | ``` 77 | pip install -e . 78 | ``` 79 | 80 | This command installs the package in "editable" mode (-e) using pyproject.toml that is located in the root directory of PowNet. The editable mode allows us to edit the PowNet codebase when we need to modify or implement new features. The pyproject.toml file specifies the dependencies required to run PowNet. 
81 | 82 | 83 | ## Overview of PowNet 84 | 85 |  86 | 87 | 88 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-cayman -------------------------------------------------------------------------------- /docs/.github/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | 2 | # Contributor Covenant Code of Conduct 3 | 4 | ## Our Pledge 5 | 6 | We pledge to foster a harassment-free community where every person is treated with decency and respect as a fellow human being. 7 | 8 | ## Our Standards 9 | 10 | Examples of behavior that contributes to a positive environment for our 11 | community include: 12 | 13 | * Demonstrating empathy and kindness toward other people 14 | * Being respectful of differing opinions, viewpoints, and experiences 15 | * Giving and gracefully accepting constructive feedback 16 | * Accepting responsibility and apologizing to those affected by our mistakes, 17 | and learning from the experience 18 | * Focusing on what is best not just for us as individuals, but for the overall 19 | community 20 | 21 | Examples of unacceptable behavior include: 22 | 23 | * Conducts which could reasonably be considered inappropriate in a 24 | professional setting 25 | * Trolling, insulting or derogatory comments, and personal or political attacks 26 | * Public or private harassment 27 | * Publishing others' private information, such as a physical or email address, 28 | without their explicit permission 29 | 30 | 31 | ## Enforcement Responsibilities 32 | 33 | Community leaders are responsible for clarifying and enforcing our standards of 34 | acceptable behavior and will take appropriate and fair corrective action in 35 | response to any behavior that they deem inappropriate, threatening, offensive, 36 | or harmful. 
37 | 38 | Community leaders have the right and responsibility to remove, edit, or reject 39 | comments, commits, code, wiki edits, issues, and other contributions that are 40 | not aligned to this Code of Conduct, and will communicate reasons for moderation 41 | decisions when appropriate. 42 | 43 | ## Scope 44 | 45 | This Code of Conduct applies within all community spaces, and also applies when 46 | an individual is officially representing the community in public spaces. 47 | Examples of representing our community include using an official email address, 48 | posting via an official social media account, or acting as an appointed 49 | representative at an online or offline event. 50 | 51 | ## Enforcement 52 | 53 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 54 | reported to the community leaders responsible for enforcement 55 | [here](https://github.com/stefano-galelli). 56 | All complaints will be reviewed and investigated promptly and fairly. 57 | 58 | All community leaders are obligated to respect the privacy and security of the 59 | reporter of any incident. 60 | 61 | ## Enforcement Guidelines 62 | 63 | Community leaders will follow these Community Impact Guidelines in determining 64 | the consequences for any action they deem in violation of this Code of Conduct: 65 | 66 | ### 1. Correction 67 | 68 | **Community Impact**: Use of inappropriate language or other behavior deemed 69 | unprofessional or unwelcome in the community. 70 | 71 | **Consequence**: A private, written warning from community leaders, providing 72 | clarity around the nature of the violation and an explanation of why the 73 | behavior was inappropriate. A public apology may be requested. 74 | 75 | ### 2. Warning 76 | 77 | **Community Impact**: A violation through a single incident or series of 78 | actions. 79 | 80 | **Consequence**: A warning with consequences for continued behavior. 
No 81 | interaction with the people involved, including unsolicited interaction with 82 | those enforcing the Code of Conduct, for a specified period of time. This 83 | includes avoiding interactions in community spaces as well as external channels 84 | like social media. Violating these terms may lead to a temporary or permanent 85 | ban. 86 | 87 | ### 3. Temporary Ban 88 | 89 | **Community Impact**: A serious violation of community standards, including 90 | sustained inappropriate behavior. 91 | 92 | **Consequence**: A temporary ban from any sort of interaction or public 93 | communication with the community for a specified period of time. No public or 94 | private interaction with the people involved, including unsolicited interaction 95 | with those enforcing the Code of Conduct, is allowed during this period. 96 | Violating these terms may lead to a permanent ban. 97 | 98 | ### 4. Permanent Ban 99 | 100 | **Community Impact**: Demonstrating a pattern of violation of community 101 | standards, including sustained inappropriate behavior, harassment of an 102 | individual, or aggression toward or disparagement of classes of individuals. 103 | 104 | **Consequence**: A permanent ban from any sort of public interaction within the 105 | community. 106 | 107 | ## Attribution 108 | 109 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 110 | version 2.1, available at 111 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. 112 | 113 | Community Impact Guidelines were inspired by 114 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. 115 | 116 | For answers to common questions about this code of conduct, see the FAQ at 117 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at 118 | [https://www.contributor-covenant.org/translations][translations]. 
119 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/requirements.txt: -------------------------------------------------------------------------------- 1 | sphinx >= 8.1.0 2 | sphinx-rtd-theme >= 3.0.2 3 | sphinx-mdinclude >= 0.6.2 4 | nbsphinx >= 0.9.7 5 | nbsphinx_link >= 1.3.1 6 | sphinxcontrib-bibtex >= 2.6.3 7 | sphinx_autodoc_typehints 8 | ipykernel 9 | -------------------------------------------------------------------------------- /docs/source/_static/custom.css: -------------------------------------------------------------------------------- 1 | .tight-table table td { 2 | white-space: normal !important; 3 | } 4 | 5 | .bullet-list { 6 | margin-bottom: 100px; /* Adjust the value as needed */ 7 | margin-top: 100px; /* Adjust the value as needed */ 8 | } 9 | -------------------------------------------------------------------------------- /docs/source/_templates/breadcrumbs.html: -------------------------------------------------------------------------------- 1 | {%- if meta is defined and meta is not none %} 2 | {%- set check_meta = True %} 3 | {%- else %} 4 | {%- set check_meta = False %} 5 | {%- endif %} 6 | 7 | {%- if check_meta and 'github_url' in meta %} 8 | {%- set display_github = True %} 9 | {%- endif %} 10 | 11 | {%- if check_meta and 'bitbucket_url' in meta %} 12 | {%- set display_bitbucket = True %} 13 | {%- endif %} 14 | 15 | {%- if check_meta and 'gitlab_url' in meta %} 16 | {%- set display_gitlab = True %} 17 | {%- endif %} 18 | 19 | {%- set display_vcs_links = display_vcs_links if display_vcs_links is defined else True %} 20 | 21 | {#- Translators: This is an ARIA section label for page links, including 
previous/next page link and links to GitHub/GitLab/etc. -#} 22 |
78 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.core.rst: -------------------------------------------------------------------------------- 1 | pownet.core package 2 | =================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pownet.core.data\_processor module 8 | ---------------------------------- 9 | 10 | .. automodule:: pownet.core.data_processor 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pownet.core.model\_builder module 16 | --------------------------------- 17 | 18 | .. automodule:: pownet.core.model_builder 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pownet.core.output module 24 | ------------------------- 25 | 26 | .. automodule:: pownet.core.output 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pownet.core.record module 32 | ------------------------- 33 | 34 | .. automodule:: pownet.core.record 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | pownet.core.simulation module 40 | ----------------------------- 41 | 42 | .. automodule:: pownet.core.simulation 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | pownet.core.user\_constraint module 48 | ----------------------------------- 49 | 50 | .. automodule:: pownet.core.user_constraint 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | pownet.core.visualizer module 56 | ----------------------------- 57 | 58 | .. automodule:: pownet.core.visualizer 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.data_model.rst: -------------------------------------------------------------------------------- 1 | pownet.data\_model package 2 | ========================== 3 | 4 | pownet.data\_model.reservoir module 5 | ----------------------------------- 6 | 7 | .. 
automodule:: pownet.data_model.reservoir 8 | :members: 9 | :no-index: 10 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.optim_model.constraints.rst: -------------------------------------------------------------------------------- 1 | pownet.optim\_model.constraints package 2 | ======================================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pownet.optim\_model.constraints.energy\_storage\_constr module 8 | -------------------------------------------------------------- 9 | 10 | .. automodule:: pownet.optim_model.constraints.energy_storage_constr 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pownet.optim\_model.constraints.nondispatch\_constr module 16 | ---------------------------------------------------------- 17 | 18 | .. automodule:: pownet.optim_model.constraints.nondispatch_constr 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pownet.optim\_model.constraints.system\_constr module 24 | ----------------------------------------------------- 25 | 26 | .. automodule:: pownet.optim_model.constraints.system_constr 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pownet.optim\_model.constraints.thermal\_unit\_constr module 32 | ------------------------------------------------------------ 33 | 34 | .. automodule:: pownet.optim_model.constraints.thermal_unit_constr 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. automodule:: pownet.optim_model.constraints 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.optim_model.rst: -------------------------------------------------------------------------------- 1 | pownet.optim\_model package 2 | =========================== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. 
toctree:: 8 | :maxdepth: 4 9 | 10 | pownet.optim_model.constraints 11 | 12 | Submodules 13 | ---------- 14 | 15 | pownet.optim\_model.model module 16 | -------------------------------- 17 | 18 | .. automodule:: pownet.optim_model.model 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pownet.optim\_model.objfunc module 24 | ---------------------------------- 25 | 26 | .. automodule:: pownet.optim_model.objfunc 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pownet.optim\_model.rounding\_algo module 32 | ----------------------------------------- 33 | 34 | .. automodule:: pownet.optim_model.rounding_algo 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | pownet.optim\_model.variable\_func module 40 | ----------------------------------------- 41 | 42 | .. automodule:: pownet.optim_model.variable_func 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | Module contents 48 | --------------- 49 | 50 | .. automodule:: pownet.optim_model 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.reservoir.rst: -------------------------------------------------------------------------------- 1 | pownet.reservoir package 2 | ======================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | pownet.reservoir.manager module 8 | ------------------------------- 9 | 10 | .. automodule:: pownet.reservoir.manager 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pownet.reservoir.reservoir module 16 | --------------------------------- 17 | 18 | .. automodule:: pownet.reservoir.reservoir 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pownet.reservoir.reservoir\_functions module 24 | -------------------------------------------- 25 | 26 | .. 
automodule:: pownet.reservoir.reservoir_functions 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pownet.reservoir.solve\_release module 32 | -------------------------------------- 33 | 34 | .. automodule:: pownet.reservoir.solve_release 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. automodule:: pownet.reservoir 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.rst: -------------------------------------------------------------------------------- 1 | API Reference 2 | ============== 3 | 4 | pownet.core module 5 | --------------------- 6 | 7 | .. toctree:: 8 | :maxdepth: 0 9 | 10 | pownet.core 11 | pownet.data_model 12 | pownet.optim_model 13 | pownet.reservoir 14 | pownet.stochastic 15 | 16 | .. automodule:: pownet.core 17 | :members: 18 | :undoc-members: 19 | :show-inheritance: 20 | 21 | pownet.coupler module 22 | --------------------- 23 | 24 | .. automodule:: pownet.coupler 25 | :members: 26 | :undoc-members: 27 | :show-inheritance: 28 | 29 | pownet.data\_utils module 30 | ------------------------- 31 | 32 | .. automodule:: pownet.data_utils 33 | :members: 34 | :undoc-members: 35 | :show-inheritance: 36 | 37 | pownet.folder\_utils module 38 | --------------------------- 39 | 40 | .. automodule:: pownet.folder_utils 41 | :members: 42 | :undoc-members: 43 | :show-inheritance: 44 | 45 | pownet.input module 46 | ------------------- 47 | 48 | .. 
automodule:: pownet.input 49 | :members: 50 | :undoc-members: 51 | :show-inheritance: 52 | -------------------------------------------------------------------------------- /docs/source/api_ref/pownet.stochastic.rst: -------------------------------------------------------------------------------- 1 | pownet.stochastic package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | pownet.stochastic.demand module 8 | ------------------------------- 9 | 10 | .. automodule:: pownet.stochastic.demand 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | pownet.stochastic.kirsch\_nowak module 16 | -------------------------------------- 17 | 18 | .. automodule:: pownet.stochastic.kirsch_nowak 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | pownet.stochastic.solar module 24 | ------------------------------ 25 | 26 | .. automodule:: pownet.stochastic.solar 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | pownet.stochastic.timeseries\_model module 32 | ------------------------------------------ 33 | 34 | .. automodule:: pownet.stochastic.timeseries_model 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | pownet.stochastic.timeseries\_utils module 40 | ------------------------------------------ 41 | 42 | .. automodule:: pownet.stochastic.timeseries_utils 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | Module contents 48 | --------------- 49 | 50 | .. 
automodule:: pownet.stochastic 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | from importlib.metadata import version as get_version 4 | 5 | 6 | sys.path.insert(0, os.path.abspath("../../src/")) 7 | 8 | # Import mock modules for readthedocs 9 | autodoc_mock_imports = [ 10 | "__future__", 11 | "abc", 12 | "dataclasses", 13 | "datetime", 14 | "re", 15 | "contextily", 16 | "geopandas", 17 | "gurobipy", 18 | "highspy", 19 | "logging", 20 | "matplotlib", 21 | "math", 22 | "networkx", 23 | "numpy", 24 | "pandas", 25 | "pmdarima", 26 | "scipy", 27 | "shapely", 28 | "sklearn", 29 | "statsmodels", 30 | ] 31 | 32 | # -- Project information 33 | 34 | project = "PowNet" 35 | copyright = "2021-2025, Critical Infrastructure Systems (CIS) Lab, Cornell University" 36 | author = "Critical Infrastructure Systems Lab (CIS), Cornell University" 37 | 38 | # TODO: Show version and release in the documentation 39 | release = get_version("pownet") 40 | version = ".".join(release.split(".")[:1]) 41 | 42 | # -- General configuration 43 | 44 | extensions = [ 45 | "sphinx.ext.autodoc", 46 | "sphinx.ext.autosummary", 47 | "sphinx.ext.doctest", 48 | "sphinx.ext.duration", 49 | "sphinx.ext.extlinks", 50 | "sphinx.ext.intersphinx", 51 | "sphinx.ext.mathjax", 52 | "sphinx.ext.napoleon", 53 | "sphinx.ext.viewcode", 54 | "sphinx_rtd_theme", 55 | "sphinx_mdinclude", 56 | "nbsphinx", 57 | "nbsphinx_link", 58 | "sphinx_autodoc_typehints", 59 | "sphinxcontrib.bibtex", 60 | ] 61 | 62 | templates_path = ["_templates"] 63 | html_static_path = ["_static"] 64 | 65 | # References are found here 66 | bibtex_bibfiles = ["references.bib"] 67 | 68 | # Create the page even when there is an error in the notebook 69 | nbsphinx_allow_errors = True 70 | 71 | # -- Options for HTML output 
72 | html_theme = "sphinx_rtd_theme" 73 | html_theme_options = {} 74 | 75 | html_show_sphinx = False 76 | 77 | 78 | # -- customize the CSS styling 79 | def setup(app): 80 | app.add_css_file("custom.css") 81 | -------------------------------------------------------------------------------- /docs/source/examples/custom_workflow.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/custom_workflow.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/examples/quickstart.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/quickstart.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/examples/reservoir_reoperation.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/reservoir_reoperation.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/examples/reservoir_simulation.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/reservoir_simulation.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/examples/synthetic_load.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/synthetic_load.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/examples/synthetic_solar.nblink: -------------------------------------------------------------------------------- 1 | {"path": "../../../examples/synthetic_solar.ipynb"} 2 | -------------------------------------------------------------------------------- /docs/source/getting_started/installation.rst: 
-------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ####################### 6 | Installation 7 | ####################### 8 | 9 | `PowNet` is a Python package available on `PyPI`. It is designed to be compatible with `Python 3.12` and above. Depending on your needs, you can choose between two installation options: a regular user or a power user. The following sections will guide you through the installation process for both options. 10 | 11 | The following step is highly recommended for any user: creating a virtual environment to manage dependencies. If using `Conda`, we can create an environment with the following command 12 | 13 | .. code-block:: bash 14 | 15 | conda create --name your_env_name_here 16 | conda activate your_env_name_here 17 | 18 | If deciding on a name for the environment takes too long, please feel free to name the environment as "pownet". 19 | 20 | **Option 1: Regular user** 21 | 22 | A regular user is someone who has created their input files and wishes to just run `PowNet`. In this case, it is best to simply install `PowNet` as a package from PyPI. We can achieve this with the following command: 23 | 24 | .. code-block:: bash 25 | 26 | pip install pownet 27 | 28 | 29 | **Option 2: Power user (no pun intended)** 30 | 31 | In case we wish to modify the source code, `PowNet` should be installed as an editable package. First, download or clone the `PowNet` repository to your local machine. For example: if we want to clone to "C://user/pownet", 32 | 33 | .. code-block:: bash 34 | 35 | git clone https://github.com/your-username/pownet.git C://user/pownet 36 | 37 | 38 | Next, open a terminal and navigate to the directory where we cloned the repository: 39 | 40 | .. code-block:: bash 41 | 42 | cd C://user/pownet 
49 | 50 | This command installs the package in "editable" mode (-e) using pyproject.toml that is located in the root directory of `PowNet`. The editable mode allows us to edit `PowNet` codebase when we need to modify or implement new features. The pyproject.toml file specifies the dependencies required to run `PowNet`. 51 | -------------------------------------------------------------------------------- /docs/source/getting_started/introduction.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ################ 6 | Introduction 7 | ################ 8 | 9 | **Background** 10 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 11 | 12 | PowNet is an open-source Production Cost Model (PCM) framework designed to simulate the least-cost operation 13 | of large-scale power systems, such as those spanning entire regions or countries. Written in Python, 14 | it tackles the fundamental challenge of meeting electricity demand subject to techno-economic constraints. 15 | 16 | PowNet is implemented entirely in Python, leveraging popular and established packages. For building and interacting with the optimization model, PowNet uses Gurobipy, the Python API for the high-performance Gurobi optimizer. This choice was driven by Gurobipy's proven performance compared to other frameworks. Other dependencies include Pandas, SciPy, and NetworkX. 17 | 18 | PowNet supports multiple optimization solvers: 19 | * **Gurobi:** A powerful commercial solver (Gurobi provides free academic licenses). 20 | * **HiGHS:** A competitive open-source solver, providing an accessible alternative. 21 | 22 | 23 | 24 | **Target users and their use cases** 25 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 26 | 27 | PowNet caters to both users primarily interested in applying the model and those focused on research and development. 
Simulating a power system is 28 | achieved in a low-code environment as the user defines power system components (generators, lines, nodes), economic parameters, and time-series data 29 | (demand, renewable availability) using simple CSV files (spreadsheets), making model setup accessible even with basic Python knowledge. 30 | 31 | For advanced users, PowNet's modular design allows for easy customization and extension of the model, enabling the integration of new features or constraints. Furthermore, the model's flexibility allows for experimentation with different formulations and algorithms. 32 | 33 | Example use cases based on past research publications can be found :doc:`here `. 34 | 35 | 36 | 37 | **Model functionalities** 38 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 39 | 40 | PowNet incorporates several key functionalities to support comprehensive power system analysis: 41 | 42 | * **CSV Data Input:** Relies on easily understandable CSV files for all model inputs, simplifying data preparation and management 43 | 44 | * **Data Validation:** Includes checks within the workflow to help ensure data consistency 45 | 46 | * **Multi-Solver Support:** Compatible with both commercial (Gurobi) and open-source (HiGHS) solvers 47 | * **Comprehensive output processing:** Generates results (e.g., generator dispatch schedules, costs, line flows) as Pandas DataFrames or CSV files for easy analysis and post-processing. Visualization capabilities are also part of the workflow 48 | * **Reservoir Simulation Module:** Includes a dedicated module to simulate the operation of single or cascaded reservoirs, calculating hourly hydropower energy availability based on inflow data and operational rules. This is crucial for systems with significant hydropower capacity and for water-energy nexus studies 49 | * **Stochastic Time Series Generation:** Provides functionality to generate synthetic time series for inputs like electricity demand or VRE availability, based on historical data patterns. 
This enables Monte Carlo simulations and analysis under uncertainty 50 | * **Custom Constraint API:** While providing a core formulation, PowNet is designed to be extensible, allowing advanced users to define and incorporate custom constraints into the optimization problem 51 | 52 | 53 | .. bibliography:: 54 | 55 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to PowNet's documentation! 2 | =================================== 3 | 4 | PowNet is an open-source Python framework simulating the least-cost operational scheduling of large-scale power systems as 5 | a Production Cost Model (PCM). At its heart, it solves the Unit Commitment (UC) and Economic Dispatch (ED) problems using 6 | Mixed-Integer Linear Programming to determine optimal generator schedules that meet hourly demand while respecting operational 7 | and network constraints. Designed for accessibility with simple CSV inputs and support for both Gurobi and the open-source HiGHS solvers, 8 | PowNet includes advanced features like reservoir simulation for hydropower and stochastic time series generation. 9 | Its modular design facilitates straightforward analysis for basic users while offering flexibility for advanced research, 10 | custom constraints, and model extensions. 11 | 12 | The software is actively maintained by researchers at the Critical Infrastructure Systems Lab, 13 | School of Civil and Environmental Engineering, Cornell University. 14 | 15 | 16 | .. For each section, we list the subsections instead of the table of contents to avoid cluttering the sidebar. 17 | 18 | **Getting started** 19 | 20 | * :doc:`getting_started/introduction` 21 | * :doc:`getting_started/installation` 22 | * :doc:`examples/quickstart` 23 | * :doc:`user_guide/input_files` 24 | 25 | .. 
toctree:: 26 | :hidden: 27 | :caption: Getting started 28 | :maxdepth: 1 29 | 30 | getting_started/introduction 31 | getting_started/installation 32 | examples/quickstart 33 | user_guide/input_files 34 | 35 | 36 | **Examples** 37 | 38 | * :doc:`examples/custom_workflow` 39 | * :doc:`examples/synthetic_load` 40 | * :doc:`examples/synthetic_solar` 41 | * :doc:`examples/reservoir_simulation` 42 | * :doc:`examples/reservoir_reoperation` 43 | 44 | .. toctree:: 45 | :hidden: 46 | :caption: Examples 47 | :maxdepth: 1 48 | 49 | examples/custom_workflow 50 | examples/synthetic_load 51 | examples/synthetic_solar 52 | examples/reservoir_simulation 53 | examples/reservoir_reoperation 54 | 55 | 56 | **Technical explanations** 57 | 58 | * :doc:`technical/math_formulation` 59 | * :doc:`technical/dispatchable` 60 | * :doc:`technical/linear_power_flow` 61 | * :doc:`technical/time_series_models` 62 | * :doc:`technical/reservoir_model` 63 | 64 | .. toctree:: 65 | :hidden: 66 | :caption: Technical explanations 67 | 68 | technical/math_formulation 69 | technical/dispatchable 70 | technical/linear_power_flow 71 | technical/time_series_models 72 | technical/reservoir_model 73 | 74 | 75 | **References** 76 | 77 | * :doc:`reference/contributing` 78 | * :doc:`reference/cite_pownet` 79 | * :doc:`reference/publications` 80 | * :doc:`reference/developers` 81 | * :doc:`reference/glossary` 82 | * :doc:`api_ref/pownet` 83 | 84 | .. toctree:: 85 | :hidden: 86 | :caption: References 87 | :maxdepth: 0 88 | 89 | reference/contributing 90 | reference/cite_pownet 91 | reference/publications 92 | reference/developers 93 | reference/glossary 94 | api_ref/pownet 95 | -------------------------------------------------------------------------------- /docs/source/reference/cite_pownet.rst: -------------------------------------------------------------------------------- 1 | 2 | .. 
autosummary:: 3 | :toctree: _source/ 4 | 5 | ################################ 6 | Citing PowNet 7 | ################################ 8 | 9 | If you use PowNet in your work, we kindly ask that you cite the following publication. 10 | Citing the software helps acknowledge the effort involved in its development 11 | and allows us to track its usage. 12 | 13 | 14 | **Primary Reference:** 15 | 16 | If you are using the latest version of PowNet, please cite the following publication: :: 17 | 18 | @article{bunnak2025bridging, 19 | title={Bridging theory and practice: Efficiently solving the unit commitment problem in production cost models}, 20 | author={Bunnak, Phumthep and Coniglio, Stefano and Galelli, Stefano}, 21 | journal={Energy}, 22 | volume={322}, 23 | pages={135454}, 24 | year={2025}, 25 | publisher={Elsevier}, 26 | doi={https://doi.org/10.1016/j.energy.2025.135454} 27 | } 28 | 29 | 30 | **PowNet Version before 2023** 31 | 32 | If you are using a version of PowNet before 2023, please cite the following publication: :: 33 | 34 | @article{chowdhury2020pownet, 35 | title={PowNet: a network-constrained unit commitment/economic dispatch model for large-scale power systems analysis}, 36 | author={Chowdhury, AFM Kamal and Kern, Jordan and Dang, Thanh Duc and Galelli, Stefano}, 37 | journal={Journal of Open Research Software}, 38 | volume={8}, 39 | number={1}, 40 | year={2020} 41 | } 42 | -------------------------------------------------------------------------------- /docs/source/reference/contributing.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ################################ 6 | Contributing 7 | ################################ 8 | 9 | 10 | .. 
mdinclude:: ../../.github/CONTRIBUTING.md 11 | -------------------------------------------------------------------------------- /docs/source/reference/developers.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ################################ 6 | Development team 7 | ################################ 8 | 9 | PowNet is the result of contributions from several individuals over its development history. 10 | 11 | **Current Development Team (Version 2.0.0 onwards)** 12 | 13 | * Phumthep Bunnak 14 | * Hisham Chowdhury 15 | * Zhuoer Feng 16 | * Stefano Galelli 17 | 18 | 19 | **Initial Development Team (Version 1.0.0)** 20 | 21 | * AFM Kamal Chowdhury 22 | * Rachel Koh 23 | * Jia Yi Ng 24 | * Stefano Galelli 25 | -------------------------------------------------------------------------------- /docs/source/reference/glossary.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Glossary 3 | =============== 4 | .. confval:: Derating Factor 5 | 6 | A reduction factor applied to the nominal capacity or rating of a component, such as a generator, transformer, or transmission line, to account for operating conditions or constraints that may limit its performance such as temperature, humidity, altitude, loading, and system configuration. These deviations can affect the performance and capability of power system components. 7 | 8 | 9 | .. confval:: Economic Dispatch 10 | 11 | Establishes the amount of power supplied by each unit. 12 | 13 | 14 | .. confval:: Generation Mix 15 | 16 | The combination of the various fuels used to generate electricity in a given geographic region. 17 | 18 | 19 | .. confval:: Grid Dispatch 20 | 21 | The process by which transmission system operators decide which power plants and generation facilities to deploy to meet current electricity demand. 22 | 23 | 24 | .. 
confval:: N-1 Criterion 25 | 26 | A reliability criterion used in power systems to ensure grid reliability by assessing the system's ability to withstand the loss of a single component without causing a widespread blackout or cascading failures. 27 | 28 | 29 | .. confval:: Spinning Reserve 30 | 31 | The excess generating capacity that is immediately available and synchronized to the grid (but is not currently generating energy) to meet unexpected increases in electricity demand or compensate for sudden generator or transmission line outages. 32 | 33 | 34 | .. confval:: Susceptance 35 | 36 | Represents how susceptible a circuit is to conducting a changing current — in other words, how readily the circuit allows a changing current to flow. 37 | 38 | 39 | .. confval:: Unit Commitment 40 | 41 | Determines when and which generating units to start-up and shut-down. 42 | -------------------------------------------------------------------------------- /docs/source/reference/publications.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ################################ 6 | Related publications 7 | ################################ 8 | 9 | This page lists research papers and reports that use PowNet 2.0 (or its predecessor). Please let us know if you would like to add your work to this list. 10 | 11 | 12 | 13 | **Research papers** 14 | 15 | - **Bunnak, P., Coniglio, S., & Galelli, S. (2025)**. Bridging theory and practice: Efficiently solving the unit commitment problem in production cost models. Energy, 322, 135454. 16 | 17 | 18 | *Predecessor to PowNet 2.0* 19 | 20 | - **Arnold, W., Giuliani, M., & Castelletti, A. (2024)**. Floating photovoltaics may reduce the risk of hydro-dominated energy development in Africa. Nature Energy, 9(5), 602-611. 22 | 23 | - **Koh, R., & Galelli, S. (2024)**. Evaluating streamflow forecasts in hydro‐dominated power systems—When and why they matter. 
Water Resources Research, 60(3), e2023WR035825. 23 | 24 | - **Koh, R., & Galelli, S. (2023)**. Evaluating Streamflow Forecasts in Hydro-Dominated Power Systems--When and Why They Matter. Authorea Preprints. 25 | 26 | - **Koh, R., Kern, J., & Galelli, S. (2022)**. Hard-coupling water and power system models increases the complementarity of renewable energy sources. Applied Energy, 321, 119386. 27 | 28 | - **Galelli, S., Dang, T. D., Ng, J. Y., Chowdhury, A. K., & Arias, M. E. (2022)**. Opportunities to curb hydrological alterations via dam re-operation in the Mekong. Nature Sustainability, 5(12), 1058-1069. 29 | 30 | - **Chowdhury, A. K., Dang, T. D., Nguyen, H. T., Koh, R., & Galelli, S. (2021)**. The Greater Mekong's climate‐water‐energy nexus: How ENSO‐triggered regional droughts affect power supply and CO2 emissions. Earth's Future, 9(3), e2020EF001814. 31 | 32 | 33 | - **Chowdhury, A. K., Dang, T. D., Bagchi, A., & Galelli, S. (2020)**. Expected benefits of Laos’ hydropower development curbed by hydroclimatic variability and limited transmission capacity: Opportunities to reform. Journal of Water Resources Planning and Management, 146(10), 05020019. 34 | 35 | - **Chowdhury, A. K., Kern, J., Dang, T. D., & Galelli, S. (2020)**. PowNet: a network-constrained unit commitment/economic dispatch model for large-scale power systems analysis. Journal of Open Research Software, 8(1). 36 | 37 | 38 | 39 | **Conference presentations** 40 | 41 | - **Galelli, S., Eldardiry, H., & Bunnak, P. (2025).** Towards real-time operation of interconnected water-energy systems (No. EGU25-7249). Copernicus Meetings. 42 | 43 | - **Galelli, S., Bunnak, P., Eldardiry, H., & Koh, R. (2025).** Evaluating Streamflow Forecasts in Hydro-Dominated Power Systems (No. EGU25-7232). Copernicus Meetings. 44 | 45 | - **Eldardiry, H., Bunnak, P., Thomsen, G., & Galelli, S. (2024)**. Orchestrating Hydropower Dispatch Decisions over Wide-Area Synchronous Grids. AGU24. 
46 | 47 | - **Giuliani, M., Castelletti, A., Carlino, A., & Arnold, W. (2024, April)**. Reconsidering hydropower in the African energy transition. In EGU General Assembly Conference Abstracts (p. 7354). 48 | 49 | - **Leoni, A., Stevanato, N., Carlino, A., Castelletti, A. F., & Giuliani, M. (2024, April)**. From multi-decadal energy planning to hourly power dispatch: evaluating the reliability of energy projections in the Southern African Power Pool. In EGU General Assembly Conference Abstracts (p. 6025). 50 | 51 | - **Bunnak, P., Dang, T. D., & Galelli, S. (2022, December)**. Evaluating the Effect of Climate Change on the Power Systems of Mainland Southeast Asia. In AGU Fall Meeting Abstracts (Vol. 2022, pp. GC14E-01). 52 | 53 | - **Feng, Z., Bunnak, P., & Galelli, S. (2022, December)**. Characterizing Malaysia's Transition into a Liberalized Electricity Market. In AGU Fall Meeting Abstracts (Vol. 2022, pp. GC51E-02). 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /docs/source/references.bib: -------------------------------------------------------------------------------- 1 | @article{bunnak2025bridging, 2 | title={Bridging theory and practice: Efficiently solving the unit commitment problem in production cost models}, 3 | author={Bunnak, Phumthep and Coniglio, Stefano and Galelli, Stefano}, 4 | journal={Energy}, 5 | volume={322}, 6 | pages={135454}, 7 | year={2025}, 8 | publisher={Elsevier} 9 | } 10 | -------------------------------------------------------------------------------- /docs/source/technical/dispatchable.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | **Dispatchable/Non-dispatchable generators** 6 | =============================================== 7 | 8 | **Overview** 9 | 10 | Electric power systems rely on a mix of generation technologies. 
Broadly, these fall into two categories: 11 | * Dispatchable generators: Facilities whose output can be adjusted up or down by the system operator (e.g., natural gas, coal), subject to physical limits (ramping limits, minimum up/down times). 12 | * Non‑dispatchable generators: Units that produce energy according to an exogenous resource profile (e.g., wind turbines, solar PV), with limited or no ability for the operator to increase output beyond what nature provides. 13 | 14 | **Dispatchable generators** 15 | 16 | We model the operation of dispatchable generators using three variables: 17 | 18 | * Dispatch variable :math:`p_{g,t}`: Power output of generator :math:`g` at time :math:`t`. More specifically, it is split into "above-min" generation :math:`p'_{g,t}` and "at-min" generation :math:`\underline{P}_g` 19 | * Commitment variable :math:`u_{g,t} \in \{0,1\}`: Binary indicator if :math:`g` is online. 20 | * Starting/Shutdown variable :math:`v_{g,t}, w_{g,t} \in \{0,1\}`: Binary indicator if :math:`g` is starting up or shutting down at time :math:`t`. 21 | 22 | Each dispatchable generator is subject to constraints: 23 | 24 | * Capacity: The capacity is constrained by the minimum and maximum capacity. 25 | :math:`\underline{P}_g \times u_{g,t} \le p'_{g,t} + \underline{P}_g \le \bar{P}_g \times u_{g,t}` 26 | 27 | * Ramping limit: The change in power output from :math:`t` to :math:`t+1` cannot be over the ramping limit. 28 | * Minimum up/down time: Once started or stopped, the unit remains in that state for the specified duration. 29 | * Must take: If the unit has to be included in the generation profile. 30 | 31 | We then minimize the cost of generation, for which the cost of an individual dispatchable generator is given by: 32 | 33 | * Fixed cost: Fixed cost is a function of rated capacity and fixed cost per unit. 
34 | :math:`c_{g,t}^{fixed} = \bar{P}_g \times {fixed\_cost\_per\_unit}_g \times u_{g,t}` 35 | 36 | * Variable cost: Variable cost is a function of fuel cost, heat rate, and operating cost. 37 | :math:`c_{g,t}^{var} = (({fuel\_price}_g \times {heat\_rate}_g) + {opex}_g) \times p_{g,t}` 38 | 39 | * Startup cost: Startup cost is a function of rated capacity and startup cost per unit. 40 | :math:`c_{g,t}^{start} = \bar{P}_g \times {startup\_cost}_g \times v_{g,t}` 41 | 42 | * Curtailment cost: Curtailing "must-take" thermal output is priced at the same variable rate. 43 | :math:`c_{g,t}^{curt} = (({fuel\_price}_g \times {heat\_rate}_g) + {opex}_g) \times p^{curt}_{g,t}` 44 | 45 | **Non-dispatchable generators** 46 | 47 | For non-dispatchable generators, the model's decision is to dispatch, curtail, or store the renewable energy produced. We therefore have: 48 | 49 | * Dispatched :math:`pdispatch_{g,t}` 50 | * Curtailed :math:`pcurtail_{g,t}` 51 | * Charged :math:`pcharge_{g,t}` 52 | 53 | Non-dispatchable generators are subject to constraints: 54 | 55 | * Available capacity: 56 | :math:`pdispatch_{g,t} \le available\_capacity_{g,t}` 57 | * Energy balance: 58 | :math:`pdispatch_{g,t} + pcurtail_{g,t} + pcharge_{g,t} = available\_capacity_{g,t}` 59 | 60 | We assume non-dispatchable generators do not have a fixed or start-up cost, and we get the variable cost from the contract price: 61 | 62 | * Variable cost: 63 | :math:`c_{g,t}^{var} = {contract\_price}_g \times pdispatch_{g,t}` -------------------------------------------------------------------------------- /docs/source/technical/linear_power_flow.rst: -------------------------------------------------------------------------------- 1 | 2 | .. 
autosummary:: 3 | :toctree: _source/ 4 | 5 | **Linearized DC power flow** 6 | ================================ 7 | 8 | **Overview** 9 | 10 | In alternating‐current (AC) power systems, power is transmitted at high voltages through a network of buses and transmission lines and is a crucial part of an energy system. Each line can be characterized by its series impedance (resistance and reactance). We model power flow in a line by both the voltage magnitudes and angles at its ends, as described by the nonlinear AC power flow equation for real power: 11 | 12 | :math:`P_{ij}=V_i V_j(G_{ij}\cos{(\theta_i-\theta_j)} + B_{ij}\sin{(\theta_i-\theta_j)})-V_i^2G_{ij}` 13 | 14 | where :math:`V_i` is the voltage magnitude at bus :math:`i`, :math:`\theta_i` is the voltage angle, :math:`G_{ij}` and :math:`B_{ij}` are the conductance (1 over resistance) and susceptance (1 over reactance) of the line between bus :math:`i` and bus :math:`j`. 15 | 16 | The full AC equation above captures the full dynamics but is nonlinear due to the trigonometric terms and quadratic dependence on voltage magnitudes. To simplify analysis and enable efficient optimization, we use a DC power flow approximation based on: 17 | 18 | * **Negligible Resistance**: Line losses due to resistance are assumed small compared to reactance for high-voltage large-scale power grids, so resistances are ignored and only reactances remain. 19 | * **Small Angle Differences**: Voltage angle differences are small enough that :math:`\sin{(\theta_i-\theta_j)}` approaches :math:`\theta_i-\theta_j`. 20 | 21 | Therefore, we simplify the real power flow on a line as follows: 22 | 23 | :math:`P_{ij}=B_{ij}(\theta_i-\theta_j)` 24 | 25 | where :math:`P_{ij}` is the power flow in a line, :math:`B_{ij}` is the line susceptance, and :math:`\theta_i` the voltage angle. 
26 | 27 | **Model** 28 | 29 | The above formulation translates into the following in our model: 30 | 31 | * Decision Variables: Our decision variables include :math:`\theta_i` for each bus :math:`i`, representing the phase angle; line flows :math:`F_{ij}` as auxiliary variables representing the real power on each line. 32 | * Parameters: Constant :math:`B_{ij}` representing the line susceptance between bus :math:`i` and :math:`j`. 33 | * Flow: DC approximation of power flow. 34 | :math:`F_{ij}=B_{ij}(\theta_i-\theta_j)` 35 | * Kirchhoff's Current Law: Power Balance at each bus. :math:`P_i^{gen} - P_i^{load}` represents the net power injection (generation minus loads) at bus :math:`i`. 36 | :math:`P_i^{gen} - P_i^{load} = \sum F_{ij} - \sum F_{ji}` 37 | * Line Capacity Constraint: 38 | :math:`-F_{ij}^{max} \le F_{ij} \le F_{ij}^{max}` 39 | 40 | By combining these elements: angle and flow variables, a linear flow, nodal balance equations, and capacity bounds, our code realizes the classic DC power flow model entirely within a (mixed integer) linear framework. -------------------------------------------------------------------------------- /docs/source/technical/math_formulation.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | **Mathematical Formulation** 6 | ============================== 7 | 8 | **The Unit Commitment Problem** 9 | 10 | PowNet solves the unit commitment problem (UCP), which is an optimization problem 11 | to determine the optimal schedule for starting up, shutting down, and outputting power of power stations 12 | (like coal, natural gas, hydro, etc.) over a specific time horizon (typically day-ahead). 
13 | The primary goal is to minimize the total operational cost, which includes fuel costs, start-up/shutdown costs for thermal units, 14 | and potentially other operational expenses, while ensuring that electricity generation consistently meets 15 | the fluctuating demand and respects various operational constraints. 16 | 17 | **Mathematical Formulation** 18 | 19 | The UCP in PowNet is formulated as a Mixed-Integer Linear Program (MILP). This means 20 | the model uses a combination of continuous variables (like power output) and integer (specifically binary) variables, 21 | primarily to represent the on/off status of thermal generators and their start-up/shutdown decisions. 22 | 23 | The model's objective function aims to minimize total system costs, including: 24 | * Variable costs based on fuel consumption (heat rate) and operational expenses 25 | * Fixed costs incurred when thermal units are online 26 | * Start-up costs for thermal units 27 | * Costs associated with renewable generation and imports 28 | * Penalties for failing to meet demand or required operating reserves 29 | * Penalties for violating generation contracts 30 | 31 | This objective is minimized subject to a comprehensive set of constraints representing real-world physical and operational limitations: 32 | * **Thermal Unit Constraints:** Generation limits (min/max power), minimum up-time and down-time requirements, and ramping limits (how quickly units can change output). 33 | * **Renewable/Import Limits:** Constraints on the maximum available power from sources like solar, wind, hydro, and imports at each time step. 34 | * **Transmission Network Constraints:** Modeled using a linearized DC power flow approximation, which balances computational efficiency with network representation. 
35 | * **System Constraints:** Nodal energy balance (ensuring power supply equals demand plus losses at each location) and system-wide spinning reserve requirements (maintaining sufficient online capacity to handle unexpected outages or demand spikes). 36 | 37 | While there are multiple ways to formulate the mathematical problem, the formulations implemented were chosen based on benchmarking exercises for computational efficiency. 38 | Formal mathematical descriptions can be found in :cite:p:`bunnak2025bridging`. 39 | -------------------------------------------------------------------------------- /docs/source/technical/reservoir_model.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | **Reservoir model** 6 | ======================= 7 | 8 | **Reservoir Dynamics** 9 | 10 | Reservoir storage hinges on a simple balance: water in (inflows) minus water out (releases, spills, losses) equals the change in storage. In discrete time (daily or hourly), we enforce: 11 | :math:`S_{t+1}=S_t+I_t-R_t-E_t` 12 | 13 | where :math:`S_t` is the storage, :math:`I_t` the inflow, :math:`R_t` the release, and :math:`E_t` the evaporation. 14 | 15 | In optimization formulations, the balance equation becomes an equality constraint linking decision variables (release, spill) and state variables (storage). Physical limits on storage (dead storage, maximum capacity) are imposed as inequality constraints to ensure feasible reservoir levels. 16 | 17 | Besides storage, elevation is also often used as a measure of the state of a reservoir. The storage-elevation relationship :math:`S = f(h)` and its inverse :math:`h = f^{-1}(S)` can be non-linear and is defined by emprical volume curve :math:`vol\_curve`. 
18 | 19 | **Reservoir Operations** 20 | 21 | We have two core modes of operation: rule-curve scheduling and dispatch-driven re-operation: 22 | 23 | * **Rule-curve scheduling**: Operators follow seasonal guidelines (rule curves) to choose releases to track a daily storage target while satisfying mass balance, spill, and min/max flow constraints. It has a deterministic target, such as daily storage or level targets derived from historical rule curves. Our optimization then minimizes the deviation from the target path. 24 | * **Dispatch-driven re-operation**: We invert a power‐dispatch target into release decisions. We adapt reservoir releases to meet daily or hourly power-generation targets from system dispatch, while still honoring mass balance and environmental rules. Our optimization there for solve for release sequence that minimizes mismatch between computed hydropower and dispatch targets, subject to mass balance, ramp-rate bounds, ecologoical minima, and turbine & grid limits. This is done based on hydropower physics, where we calculate the power :math:`P_t` from :math:`\eta` the turbine efficiency, :math:`\rho` the water density, :math:`g` the gravitational constant, :math:`H_t` the hydraulic head (water level above turbine center), and :math:`Q^{turbine}_t` the water flow through the turbine: 25 | :math:`P_t = \eta \cdot \rho \cdot g \cdot H_t \cdot Q^{turbine}_t` 26 | 27 | **Environmental Flow & Hydropeaking Constraints** 28 | 29 | To protect downstream ecosystems, maintain habitat, and prevent fish stranding, operations must respect minimum flow requirements and limit daily ramp‐rates (hydropeaking): 30 | 31 | * Minimum enviornmental flow: We determine the minimum amount of water that should be released from a reservoir to maintain the health of the downstream ecosystem. The minimum flow is set at different percentages of the inflow, subject to how the inflow compares with the mean annual flow. 
32 | * Hydropeaking: Hydropeaking is the rapid and frequent changes in river flow to optimize hydropower operation. We adjust the release based on a hydropeaking factor and the minimum environmental flow, ensuring daily changes remain within a percentage of capacity. 33 | 34 | **Cascade Coordination & Basin-Level Aggregation** 35 | 36 | In multi‐reservoir systems, upstream releases and spills directly become downstream inflows, creating interdependence. To manage reservoirs in a basin, we follow the modeling principles: 37 | 38 | * Sequential Processing: Order reservoirs by cascade level; each reservoir receives the combined outflow of upstream units as its inflow. 39 | * Simulation: Each reservoir at level uses the aggregated inflow from all upstream nodes. 40 | * Aggregation: Sum across reservoir columns to produce basin‐level time series for water release and hydropower, enabling performance metrics and system modeling. -------------------------------------------------------------------------------- /docs/source/technical/time_series_models.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | **Time series modeling** 6 | ========================= 7 | 8 | **ARIMA Model** 9 | 10 | ARIMA (Autoregressive Integrated Moving Average) is a foundational time-series modeling technique that combines three components: 11 | 12 | * Autoregression (:math:`AR(p)`): Models the value at time t as a linear function of its own p lagged values, capturing persistence and momentum in the series. 13 | * Integration (:math:`I(d)`): Applies differencing of order d to remove trends and achieve stationarity, ensuring the series has constant mean and variance over time. 14 | * Moving Average (:math:`MA(q)`): Represents the value at time t as a linear function of q past forecast errors, capturing short-term shocks and noise. 
15 | 16 | **SARIMAX Model** 17 | 18 | SARIMAX (Seasonal Autoregressive Integrated Moving Average with eXogenous regressors) extends the classic ARIMA framework by explicitly modeling both seasonality and the influence of external variables on the series. In SARIMAX, the observed series :math:`y_t` is first differenced :math:`d` times to achieve stationarity, similar to ARIMA; the residual structure is then captured by: 19 | 20 | * Seasonality: Additional differencing and seasonal AR/MA terms at lag :math:`s` (period) to model repeating patterns (e.g., daily, weekly), by seasonally differencing :math:`D` times with period :math:`s` 21 | * Exogenous Regressors: Incorporates external variables (e.g., temperature, calendar indicators) linearly to explain known drivers of variability. 22 | 23 | **Monthly SARIMAX Model** 24 | 25 | We employ a per-month SARIMAX model for: 26 | 27 | * Non‑Stationary Seasonality across the Year: Daily/weekly cycle shapes (peak sharpness, trough depth) change with season. 28 | * Localized Parameter Tuning: Fitting separate SARIMAX for each calendar month allows tailored (p,d,q)(P,D,Q,s) orders per month. 29 | * Improved Forecast Robustness: Month‑specific models reduce over‑ or under‑differencing risk and avoid a one‑size‑fits‑all seasonal period. 30 | 31 | **Demand Model** 32 | 33 | In short‑term load forecasting, SARIMAX is well suited because demand exhibits strong intra‑day/weekly cycles plus dependencies on weather and calendar effects. We equip the model with the ability to forecase the hourly electricity demand: 34 | 35 | * **Step 1:** We first run an ordinary least squares (OLS) regression on user-supplied exogenous variables (such as temperature) and get the residual series. The OLS isolates the “explained” portion of demand and the residual is treated as a stationary series for further time‐series modeling. 
36 | * **Step 2:** We run a Seasonal-Trend decomposition using LOESS (STL) to decompose a time series into three components: trend, season(al) and residual. 37 | * **Step 3:** We fit a Seasonal Autoregressive Integrated Moving Average with Exogenous Regressors (SARIMAX) model to capture the correlation structure, and store per-month SARIMAX models and residuals. 38 | 39 | We run auto-ARIMA on the detrended, deseasonalized residuals to pick (p,d,q)(P,D,Q,s) orders that best capture autocorrelation left after regression + STL. We can then get synthetic time series by comnbining regression, drawing sample paths from each monthly SARIMAX, and adding seasonal and trend components. 40 | 41 | **Solar Model** 42 | 43 | Solar generation follows a deterministic diurnal "bell curve" and seasonal envelope, with high‑frequency fluctuations due to clouds and atmosphere, therefore we can also use SARIMAX to capture stochastic variability. Similarly, we would like to forecase solar irradiance/PV output. We follow a similar structure to the demand model, but: 44 | 45 | * No pre-regression on exogenous weather, instead, the STL decomposition itself captures the deterministic daily/seasonal envelope and all remaining variation feeds directly into SARIMAX. 46 | * Clip output to nonnegative and force it to zero during night hours. 47 | 48 | **Related Works** 49 | 50 | SARIMAX is a tried-and-tested way of modeling both demand and solar irradiance as demonstrated by the following works: 51 | 52 | * N. Elamin, M. Fukushige. Modeling and forecasting hourly electricity demand by SARIMAX with interactions. *Energy*. Volume 165, Part B, 2018, Pages 257-268, ISSN 0360-5442. 53 | * E. Eskandarnia and M. AlHammad, "Predication of future energy consumption using SARIMAX," *3rd Smart Cities Symposium (SCS 2020)*, 2020, pp. 657-662. 54 | * S. Vagropoulos, G. Chouliaras, E. Kardakos, C. Simoglou and A. 
Bakirtzis, "Comparison of SARIMAX, SARIMA, modified SARIMA and ANN-based models for short-term PV generation forecasting," *2016 IEEE International Energy Conference (ENERGYCON)*, Leuven, Belgium, 2016, pp. 1-6. -------------------------------------------------------------------------------- /docs/source/user_guide/input_files.rst: -------------------------------------------------------------------------------- 1 | 2 | .. autosummary:: 3 | :toctree: _source/ 4 | 5 | ################################ 6 | Preparing PowNet inputs 7 | ################################ 8 | 9 | To run a `PowNet` simulation, you need to provide a set of CSV files that describe your power system. These files should be placed together within a dedicated folder. Based on the files you provide, `PowNet`'s ``DataProcessor`` class may auto-generate additional processed files within the same folder. These generated files typically have the ``pownet_`` prefix and are used directly by the simulation engine. 10 | 11 | **IMPORTANT** Ensure that for files describing techno-economic parameters (like ``thermal_unit.csv``, ``transmission.csv``, etc.), the column names match the expected format exactly as detailed below. It is recommended that a user uses the provided examples as file templates. 12 | 13 | Required User Inputs 14 | ====================== 15 | 16 | These files *must* be provided by the user: 17 | 18 | * ``demand_export.csv``: 19 | * **Description**: An hourly timeseries of electricity demand (in MW) for each load node in the system. 20 | * **Format**: Each column represents a node, and each row represents an hour of the year (8760 rows expected for a standard year). Date/time columns (like year, month, day, hour) can be included for reference but are ignored by `PowNet`. 21 | * **Note**: When your system has only a single node, or all power stations are aggrgated to a node, then this file should contain only a single column being the demand of that node. 
Otherwise, nodes are defined in ``transmission.csv`` as described later in this section. 22 | 23 | * ``contract_cost.csv``: 24 | * **Description**: Specifies the hourly costs (e.g., USD/MWh or USD/MW) associated with different supply or fuel contracts. These contracts are linked to specific generator units via ``thermal_unit.csv`` and ``nondispatch_unit.csv``. 25 | * **Format**: Each column represents a contract name (which must match names used in other files), and each row represents an hour. 26 | 27 | Optional User Inputs 28 | ====================== 29 | 30 | These files describe different components of the power system. While optional, you typically need to provide files relevant to the system components you want to model (e.g., provide ``thermal_unit.csv`` if you have thermal generators). 31 | 32 | * ``thermal_unit.csv``: 33 | * **Description**: Defines the techno-economic parameters for thermal generators. 34 | * **Columns**: 35 | * ``name``: Unique identifier for the thermal unit. 36 | * ``node``: Network node where the unit connects. 37 | * ``unit_type``: User-defined type (e.g., CCGT, OCGT). This is for processing modeling outputs. 38 | * ``fuel_type``: Type of fuel (e.g., 'coal', 'gas', 'oil'). 39 | * ``fuel_contract``: Name of the contract defined in ``contract_cost.csv``. 40 | * ``max_capacity``: Maximum power output (MW). 41 | * ``min_capacity``: Minimum stable power output (MW). 42 | * ``heat_rate``: Heat rate (e.g., MMBtu/MWh or GJ/MWh). 43 | * ``operation_cost``: Variable O&M cost (e.g., USD/MWh). 44 | * ``fixed_cost``: Fixed O&M cost (e.g., USD/MW/hr). 45 | * ``startup_cost``: Cost per startup event (e.g., USD/startup). 46 | * ``ramp_rate``: Maximum ramp up/down rate (MW/hr). 47 | * ``min_uptime``: Minimum hours unit must stay online after starting (hr). 48 | * ``min_downtime``: Minimum hours unit must stay offline after shutting down (hr). 49 | * ``latitude`` (optional): Latitude coordinate. 50 | * ``longitude`` (optional): Longitude coordinate. 
51 | * ``must_take`` (0 or 1): 1 if the unit has a take-or-pay contract. 52 | 53 | * ``nondispatch_unit.csv``: 54 | * **Description**: Defines parameters for non-dispatchable units (hydro, solar, wind, imports). Often used to link these units to cost contracts. 55 | * **Columns**: 56 | * ``name``: Unit name (must match name in the corresponding timeseries file, e.g., ``solar.csv``). 57 | * ``contracted_capacity``: Contracted capacity limit (MW). Use -1 for no explicit contractual limit, so the unit is limited by ints installed capacity. 58 | * ``contract``: Name of the contract defined in ``contract_cost.csv``. 59 | * ``must_take`` (0 or 1): 1 if generation must be taken if available (subject to capacity). 60 | 61 | * ``energy_storage.csv``: 62 | * **Description**: Defines parameters for energy storage systems. 63 | * **Columns**: 64 | * ``name``: Unique identifier for the storage unit. 65 | * ``attach_to``: Name of the solar or wind unit or network node the storage is associated with. 66 | * ``inject_to``: Network node where discharged energy is injected. 67 | * ``max_charge``: Maximum charging rate (MW). 68 | * ``max_discharge``: Maximum discharging rate (MW). 69 | * ``max_capacity``: Maximum energy storage capacity (MWh). 70 | * ``min_capacity``: Minimum energy storage level (MWh). 71 | * ``charge_efficiency``: Charging efficiency factor (0 to 1). 72 | * ``discharge_efficiency``: Discharging efficiency factor (0 to 1). 73 | * ``self_discharge_rate``: Hourly self-discharge rate (0 to 1). 74 | * ``cost_contract``: Name of the contract (from ``contract_cost.csv``) associated with charging/discharging costs. 75 | 76 | * ``transmission.csv``: 77 | * **Description**: Defines a system's nodes and their properties. `PowNet` can calculate line parameters if needed, but user-provided values take precedence. 78 | * **Required Columns**: 79 | * ``source``: Starting node of the line. 80 | * ``sink``: Ending node of the line. 
81 | * ``source_kv``: Voltage level at the source (kV). 82 | * ``sink_kv``: Voltage level at the sink (kV). 83 | * ``distance``: Length of the line (km). 84 | * ``n_circuits``: Number of parallel circuits. 85 | * **Optional Columns**: 86 | * ``user_line_cap``: User-defined line capacity (MW). Use -1 or omit to let `PowNet` calculate. 87 | * ``user_susceptance``: User-defined line susceptance (Siemens). Use -1 or omit to let `PowNet` calculate. 88 | * ``source_lon``, ``source_lat``: Coordinates for the source node (needed for map plotting). 89 | * ``sink_lon``, ``sink_lat``: Coordinates for the sink node (needed for map plotting). 90 | 91 | * ``hydropower.csv`` / ``hydropower_daily.csv``: 92 | * **Description**: Hourly (``hydropower.csv``) or daily (``hydropower_daily.csv``) timeseries of maximum available hydropower generation (e.g., in MW) for each hydro unit. `Do not provide` both for the same unit. 93 | * **Format**: Uses a two-level column header. Level 1: Unit name. Level 2: Node name where the unit connects. Rows correspond to hours or days. 94 | 95 | * ``solar.csv``: 96 | * **Description**: Hourly timeseries of maximum available solar power generation (e.g., in MW) for each solar unit. 97 | * **Format**: Two-level column header (Level 1: Unit name, Level 2: Node name). Rows correspond to hours. 98 | 99 | * ``wind.csv``: 100 | * **Description**: Hourly timeseries of maximum available wind power generation (e.g., in MW) for each wind unit. 101 | * **Format**: Two-level column header (Level 1: Unit name, Level 2: Node name). Rows correspond to hours. 102 | 103 | * ``import.csv``: 104 | * **Description**: Hourly timeseries of maximum available power import (e.g., in MW) for each import source/node. 105 | * **Format**: Two-level column header (Level 1: Source/Unit name, Level 2: Node name). Rows correspond to hours. 
106 | 107 | Auto-Generated Inputs (by `PowNet`'s DataProcessor) 108 | ======================================================= 109 | 110 | These files are automatically generated by `PowNet`'s ``DataProcessor`` based on the user inputs. You do not need to create them manually, but they will appear in your input folder. 111 | 112 | * ``pownet_transmission.csv``: 113 | * Contains processed transmission line data used directly by the model, including calculated or user-provided ``line_capacity`` and ``susceptance``. 114 | 115 | * ``pownet_cycle_map.json``: 116 | * Defines basic cycles in the transmission network, used for the Kirchhoff power flow formulation. 117 | 118 | * ``pownet_thermal_derated_capacity.csv``: 119 | * Hourly maximum power output for each thermal unit, potentially considering derating factors. 120 | 121 | * ``pownet_ess_derated_capacity.csv``: 122 | * Hourly maximum storage capacity (MWh) for energy storage systems, potentially considering derating. 123 | -------------------------------------------------------------------------------- /images/complex_river.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/images/complex_river.png -------------------------------------------------------------------------------- /images/dummy_system.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/images/dummy_system.png -------------------------------------------------------------------------------- /images/hydro_system.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/images/hydro_system.png 
-------------------------------------------------------------------------------- /images/solar_ess.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/images/solar_ess.png -------------------------------------------------------------------------------- /joss_submission/overview_pownet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/joss_submission/overview_pownet.png -------------------------------------------------------------------------------- /joss_submission/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'PowNet 2.0: A production cost modeling framework for large-scale power systems' 3 | tags: 4 | - Python 5 | - mathematical optimization 6 | - unit commitment problem 7 | - sensitivity analysis 8 | authors: 9 | - name: Phumthep Bunnak 10 | orcid: 0000-0001-6412-7017 11 | corresponding: true 12 | affiliation: 1 13 | - name: Hisham Eldardiry 14 | orcid: 0000-0002-2932-7459 15 | affiliation: 1 16 | - name: Matija Pavičević 17 | orcid: 0000-0002-6524-5581 18 | affiliation: 2 19 | - name: Jerry Zhuoer Feng 20 | orcid: 0009-0004-5129-1167 21 | affiliation: 1 22 | - name: Stefano Galelli 23 | orcid: 0000-0003-2316-3243 24 | affiliation: 1 25 | affiliations: 26 | - name: Cornell University, USA 27 | index: 1 28 | - name: Argonne National Laboratory, USA 29 | index: 2 30 | date: 6 October 2024 31 | bibliography: paper.bib 32 | --- 33 | 34 | 35 | # Summary 36 | 37 | Managing the complex network of power stations and transmission lines that deliver electricity across large spatial domains (e.g., country and continental scale) requires a variety of mathematical models. 
Among them, Production Cost Models (PCM) are commonly used for planning the short-term operation of power grids [@garver1962threebin]. Specifically, PCMs schedule the production of electricity by generators considering transmission constraints with planning horizons that range from a few hours to a few days. 38 | 39 | `PowNet 2.0` is a Python-based PCM framework that simulates and analyzes the most cost-effective way of meeting electricity demand using various electricity sources (e.g., coal, natural gas, and renewables) such that sufficient electricity is generated at each hour while considering factors like fuel prices, availability of renewables, and regulatory requirements. The framework also allows us to study the integration between hydropower and power systems. 40 | 41 | The framework is designed to be accessible to a wide range of users, especially those with only basic Python knowledge. To use `PowNet 2.0`, a user can supply data for power stations, transmission lines, and electricity demand as spreadsheets, and run a script to handle the complex calculations and to produce modeling outputs as either data frames or spreadsheet files. While a basic user does not need to modify the codebase, an advanced user can leverage the software's flexible and modular design for more complex tasks, like modeling the interaction between water and power systems or exploring customized optimization algorithms. 42 | 43 | 44 | # Statement of need 45 | 46 | PCMs share similar features [@oikonomou2022core] and are widely used in both industry and academia. While proprietary options like `PLEXOS` and `PROMOD` offer comprehensive features and user-friendly interfaces, they do not follow the Findable, Accessible, Interoperable, and Reusable (FAIR) principles [@wilkinson2016fair], which help improve the transparency and applicability of models. Furthermore, the cost of proprietary PCMs can be prohibitive for many researchers and institutions. 
In response to these limitations, the open-source community has developed PCMs in various programming languages. A detailed taxonomy and comparison of different PCMs can be found in [@hoffmann2024review; @oberle2019open]. Notable examples include `SIENNA` [@lara2021pcm], written in Julia, and Python-based frameworks like `PyPSA` [@brown2017pypsa], `Grid Operations` [@akdemir2024open], and `PowNet` [@chowdhury2020pownet]. Building upon its predecessor, `PowNet 2.0` represents a significant leap forward, particularly as not all existing PCM frameworks have a full suite of features for addressing specific needs within the power systems domain, namely (1) the flexibility to incorporate diverse mathematical formulations and solution algorithms, (2) the capacity to generate stochastic input data (e.g., load, solar availability) to support uncertainty analyses, and (3) the ability to integrate hydropower and power system models. All these features are seamlessly integrated in our software. 47 | 48 | `PowNet 2.0` has a few key functionalities that address the aforementioned needs: 49 | 50 | **Mathematical Formulations**: Selecting a set of computationally efficient mathematical formulations is an ongoing research effort [@tejada2019unit; @chen2022security]. The set of formulations implemented in `PowNet 2.0` was chosen based on thorough benchmarking exercises that compare the runtime of several different formulations [@horsch2018linear; @knueven2020mixed]. `PowNet 2.0` also allows a user to experiment with different formulations, such as representing the direct-current power flow with either the voltage-angle formulation or the Kirchhoff formulation. 51 | 52 | **Solution Method**: `PowNet 2.0` supports both Gurobi and HiGHS as the mathematical solver. While Gurobi is a powerful commercial solver, its free usage is limited to an academic license. Consequently, HiGHs was chosen as an alternative due to its competitive performance among open-source solvers [@parzen2022opt]. 
A user can also explore solution methods like rounding heuristics [@bunnak2024bridging]. 53 | 54 | **Stochastic generation of input variables**: A user can automatically generate stochastic time series of load, solar, wind, and hydropower availability. These time series are created by the SARIMAX model, which requires as input deterministic time series. Through this functionality, the user can then explore the impact of uncertainty in weather and climatic conditions on power system performance. 55 | 56 | **Reservoir simulation**: The reservoir module simulates the operation of hydropower reservoirs, thus providing time series of available hydropower. It requires dam design specifications and inflow as input data. This feature makes the model more detailed, such as avoiding the use of static capacity factors, while reducing reliance on external hydrologic models. 57 | 58 | **Water-power system coupling**: The reservoir simulation module can be either soft- or hard-coupled with the ‘core’ module simulating the unit commitment and economic dispatch problem. In the former case, the hydropower availability is passed as input (unidirectional information flow). In the latter case, the two modules are run in parallel, ensuring that dam release decisions follow the exact needs of the entire power system – representing a tighter integration between water and power systems [@koh2022hard]. 59 | 60 | 61 | # Software Design and Implementation 62 | 63 | `PowNet 2.0` offers an end-to-end modeling framework through functionalities from generating synthetic time series of external forcings (e.g., load, solar, and wind availabilities) to plotting the modeling outputs. `PowNet 2.0` leverages the `Gurobipy` package for building instances of the optimization problem. This package was chosen because of its proven performance when compared to other Python-based modeling frameworks as shown in [@hofmann2023linopy; @gams2023perf]. 
Furthermore, `Gurobipy` allows a user to leverage the Python ecosystem by using popular libraries like `Pandas`, `SciPy`, and `NetworkX`. 64 | 65 | The modeling workflow of `PowNet 2.0` is illustrated in \autoref{fig:workflow}. For each modeling task represented by a rectangle, there is a corresponding module to accomplish the task. This modular design facilitates unit testing of individual class objects and functions, ensuring code reliability and maintainability. It also enables future extension and customization, empowering users to adapt `PowNet 2.0` to their specific needs and contribute to its ongoing development. Current development efforts focus on modeling power purchase agreements, regional electricity trading schemes, and computational algorithms. 66 | 67 |  68 | 69 | # Acknowledgement 70 | 71 | Development of the reservoir module was supported by the US Department of Energy (DOE) Water Power Technologies Office as a part of the HydroWIRES “HydroCHiPPs” project (53165). 72 | 73 | # References 74 | -------------------------------------------------------------------------------- /model_library/complex_river/flow_path.csv: -------------------------------------------------------------------------------- 1 | source,sink,lag_time,flow_fraction 2 | atay,kamchay,0,1 3 | kamchay,kirirom1,0,0.25 4 | kamchay,kirirom2,0,0.75 5 | -------------------------------------------------------------------------------- /model_library/complex_river/reservoir_unit.csv: -------------------------------------------------------------------------------- 1 | name,max_storage,min_level,max_level,max_head,min_day,max_day,max_release,max_generation,turbine_factor 2 | kirirom1,30000000,500,534,373.5,150,310,1728000,12,0.9 3 | kirirom2,30000000,500,540,271,150,310,3456000,18,0.9 4 | kamchay,432000000,500,610,122,180,298,14126400,194,0.9 5 | atay,443800000,510,545,216,158,311,10800000,240,0.9 6 | -------------------------------------------------------------------------------- 
/model_library/dummy/nondispatch_unit.csv: -------------------------------------------------------------------------------- 1 | name,contracted_capacity,contract,must_take 2 | pHydro,45,hydro,0 3 | Supplier,65,supplier,0 4 | -------------------------------------------------------------------------------- /model_library/dummy/thermal_unit.csv: -------------------------------------------------------------------------------- 1 | name,node,unit_type,fuel_type,max_capacity,min_capacity,heat_rate,operation_cost,fuel_contract,fixed_cost,startup_cost,ramp_rate,min_uptime,min_downtime,must_take 2 | pGas,pGas,gas_cc,gas,1200,10,7.65,2,gas,1.2,70,287.67,4,4,0 3 | pOil,pOil,oil_st,oil,35,1,10.19,3.17,oil,1.5,50,6,1,1,0 4 | pBiomass,pBiomass,biomass_st,biomass,9.9,2,14.1,26,biomass,1,100,9.9,2,2,1 5 | -------------------------------------------------------------------------------- /model_library/dummy/transmission.csv: -------------------------------------------------------------------------------- 1 | source,sink,user_line_cap,type,n_circuits,source_kv,sink_kv,distance,source_lon,source_lat,sink_lon,sink_lat,user_susceptance 2 | pGas,Node3,-1,acsr,1,765,275,55,-76.47351909,42.4391233,-76.47127586,42.44726029,-1 3 | Node3,Node1,-1,acsr,2,275,275,175,-76.47127586,42.44726029,-76.48522899,42.44206975,-1 4 | Node1,Node2,-1,acsr,2,275,275,350,-76.48522899,42.44206975,-76.51073276,42.42804445,-1 5 | pHydro,Node2,-1,acsr,2,275,275,300,-76.51223575,42.44692185,-76.51073276,42.42804445,-1 6 | pOil,Node3,-1,acsr,2,275,275,40,-76.47899854,42.45793602,-76.47127586,42.44726029,-1 7 | pBiomass,Node2,-1,acsr,2,275,275,150,-76.49899651,42.42885897,-76.51073276,42.42804445,-1 8 | Buyer,pHydro,-1,acsr,2,275,275,400,-76.51909681,42.44094852,-76.51223575,42.44692185,-1 9 | Supplier,Node1,-1,acsr,2,275,275,350,-76.49770115,42.45325699,-76.48522899,42.44206975,-1 10 | -------------------------------------------------------------------------------- /model_library/hydro_system/nondispatch_unit.csv: 
-------------------------------------------------------------------------------- 1 | name,contracted_capacity,contract,must_take 2 | kirirom,16,kirirom,0 3 | kamchay,160,kamchay,0 4 | atay,200,atay,0 5 | -------------------------------------------------------------------------------- /model_library/hydro_system/reservoir_data/flow_path.csv: -------------------------------------------------------------------------------- 1 | source,sink,lag_time,flow_fraction 2 | atay,kamchay,0,1 3 | -------------------------------------------------------------------------------- /model_library/hydro_system/reservoir_data/minimum_flow.csv: -------------------------------------------------------------------------------- 1 | kirirom,kamchay,atay 2 | 4066,52564,74255 3 | 4029,52084,73551 4 | 3992,50416,72819 5 | 3955,50582,72158 6 | 3918,49877,71487 7 | 3882,49674,70880 8 | 3845,50561,70333 9 | 3814,50656,73828 10 | 3799,51293,77185 11 | 3849,51202,75976 12 | 3792,51137,74200 13 | 3749,49209,72318 14 | 3713,46687,70303 15 | 3696,46048,68719 16 | 3665,46444,73622 17 | 3644,46968,80548 18 | 3620,47211,93806 19 | 3590,46966,113787 20 | 3560,46000,115216 21 | 3545,46570,110980 22 | 3960,62803,114203 23 | 3726,61496,113009 24 | 3551,57443,107492 25 | 3496,55689,99230 26 | 3459,50014,91033 27 | 3424,46135,84040 28 | 3391,44813,89136 29 | 3382,38579,96736 30 | 3354,34644,94234 31 | 3320,34860,89804 32 | 3287,35739,85176 33 | 3255,35174,80909 34 | 3223,33671,77026 35 | 3191,32456,73384 36 | 3160,34406,70178 37 | 3130,35537,67235 38 | 3100,35426,64794 39 | 3070,36942,63095 40 | 3040,35936,61898 41 | 3011,39926,61109 42 | 2983,42544,60557 43 | 2957,39666,59901 44 | 2931,37303,59292 45 | 2906,34423,58654 46 | 2879,34136,58063 47 | 2852,33446,57510 48 | 2836,42570,57141 49 | 3171,43435,73851 50 | 2980,41517,102241 51 | 2863,41589,111858 52 | 2840,39935,106991 53 | 3633,37201,115599 54 | 3483,36665,125394 55 | 3353,35345,129519 56 | 3434,34652,141735 57 | 4811,38779,168453 58 | 5006,41352,200829 59 
| 6794,44452,253546 60 | 7605,44060,283031 61 | 6909,41282,277492 62 | 6066,38915,253124 63 | 5300,36655,227331 64 | 4621,32407,238505 65 | 4070,30945,232124 66 | 3829,31918,230209 67 | 6600,31655,294021 68 | 8358,31578,322685 69 | 8111,35005,336967 70 | 8854,41284,359343 71 | 10697,53960,371072 72 | 11961,66813,392425 73 | 11327,76577,419841 74 | 12775,84960,437503 75 | 11716,83692,412554 76 | 10703,76624,375861 77 | 9599,87620,339172 78 | 10225,91030,330933 79 | 9639,105882,374289 80 | 8616,101647,364605 81 | 7624,96468,330680 82 | 8198,95214,350727 83 | 8302,92509,380906 84 | 11219,89964,456631 85 | 10630,86798,458500 86 | 9711,79791,425204 87 | 8776,73515,406991 88 | 8029,64612,375944 89 | 7205,57783,344752 90 | 7005,52834,312705 91 | 6384,63741,299852 92 | 7042,66889,303633 93 | 8475,73487,342813 94 | 8573,72110,389687 95 | 8791,77603,387330 96 | 8901,71333,391033 97 | 9368,71236,428839 98 | 10495,82721,444632 99 | 9771,107047,429497 100 | 10069,109916,466697 101 | 14953,111487,501442 102 | 14479,110889,485861 103 | 13348,117544,466378 104 | 15126,161935,515897 105 | 17441,195121,539888 106 | 19724,243553,591028 107 | 21673,255726,568095 108 | 24080,277194,576070 109 | 23912,274446,590524 110 | 24757,264385,588126 111 | 25647,260250,624802 112 | 28083,265422,651220 113 | 30617,277536,667224 114 | 31034,300569,748019 115 | 29350,335079,766848 116 | 34227,349429,772700 117 | 33726,369650,787337 118 | 33685,385260,798944 119 | 30821,390774,760966 120 | 33737,372633,730752 121 | 31337,349613,681476 122 | 28334,338777,643638 123 | 25558,326841,598898 124 | 25084,304855,567540 125 | 23677,281503,546894 126 | 22784,264194,535198 127 | 21212,252108,531801 128 | 19394,247817,543809 129 | 20070,244685,573783 130 | 21085,245236,599079 131 | 21229,255102,650352 132 | 22524,265573,696866 133 | 20658,263500,658900 134 | 19049,269891,606710 135 | 19322,280613,589588 136 | 19350,300477,618223 137 | 21767,330090,629658 138 | 24759,351693,699998 139 | 27030,353049,755713 140 | 
26970,361001,766298 141 | 28403,345609,802733 142 | 28526,346290,806253 143 | 29871,337460,852513 144 | 29281,339497,828200 145 | 30882,349940,875524 146 | 30738,349499,884289 147 | 30304,342007,856363 148 | 28024,333387,823498 149 | 26533,342700,862793 150 | 24402,347421,873370 151 | 23330,338573,850456 152 | 23020,343499,816372 153 | 25542,346921,835519 154 | 25387,356857,829867 155 | 28329,401215,860008 156 | 31495,456314,886975 157 | 33406,713351,992225 158 | 45796,1010522,1220956 159 | 53129,1089031,1338025 160 | 48370,1016025,1318580 161 | 43194,905960,1191383 162 | 39169,831000,1081899 163 | 37470,805974,1026570 164 | 36605,796886,1013338 165 | 35462,737368,1039227 166 | 34729,692538,988545 167 | 32110,635013,912068 168 | 29282,577449,841212 169 | 26504,538858,791717 170 | 25189,511409,768389 171 | 26647,503125,802402 172 | 29285,594888,851939 173 | 30669,622067,891105 174 | 30171,644164,932229 175 | 32159,621541,1006095 176 | 30361,606462,962840 177 | 31968,577850,951905 178 | 30755,566816,916318 179 | 29877,538001,884884 180 | 29456,510425,847363 181 | 27756,485408,805464 182 | 25389,458283,758924 183 | 23707,431384,709148 184 | 21529,431203,669615 185 | 27084,457772,755316 186 | 27464,476522,764543 187 | 31017,524019,785388 188 | 33328,578574,822899 189 | 34669,624308,876246 190 | 36088,700551,975205 191 | 39042,699426,1127329 192 | 37999,650570,1093556 193 | 35154,592514,1020434 194 | 32591,544911,1054207 195 | 49781,769408,1381199 196 | 76785,974313,1806114 197 | 81308,1031327,2052633 198 | 73587,943343,1935106 199 | 69803,886234,1794368 200 | 70614,1028963,1765106 201 | 83976,1029211,1906530 202 | 84417,997923,2120097 203 | 81680,915908,2064425 204 | 75141,831185,1899786 205 | 70491,746690,1713443 206 | 63780,679482,1576487 207 | 59832,612873,1456646 208 | 54599,566447,1335063 209 | 49160,519616,1198828 210 | 45873,477737,1130774 211 | 43708,457728,1101000 212 | 42396,433100,1073011 213 | 41326,417174,1012955 214 | 40945,401465,1007087 215 | 
42360,373393,1004827 216 | 39920,348910,934321 217 | 39942,338596,924784 218 | 40019,329445,913831 219 | 38554,318513,857765 220 | 42705,324336,875725 221 | 41610,331383,881995 222 | 40502,333401,886492 223 | 43125,346405,895166 224 | 42794,362363,919212 225 | 41615,358608,921434 226 | 38299,348327,873561 227 | 34745,338881,821977 228 | 31685,335762,777283 229 | 30460,321581,756463 230 | 27995,313915,740573 231 | 26361,305234,718736 232 | 29812,316905,736397 233 | 28337,352199,771069 234 | 30935,414722,851999 235 | 34833,405074,976482 236 | 34700,396804,1030344 237 | 36308,487718,1044675 238 | 37292,554205,1055068 239 | 43100,574793,1183426 240 | 43462,619801,1270466 241 | 44795,654802,1393879 242 | 44292,667813,1437568 243 | 47922,645699,1480770 244 | 47323,629246,1534121 245 | 49729,647161,1587820 246 | 58075,705679,1689979 247 | 57223,818566,1685288 248 | 60255,851122,1762846 249 | 59335,817310,1719224 250 | 55141,732783,1590367 251 | 49782,661374,1445846 252 | 44766,592382,1328748 253 | 44226,562087,1265546 254 | 40798,537802,1173686 255 | 37563,492607,1101618 256 | 37173,474243,1055367 257 | 36749,498261,1026614 258 | 42814,514994,1127483 259 | 47045,522424,1309195 260 | 44052,507210,1395248 261 | 41033,475675,1334003 262 | 41482,449994,1323359 263 | 45095,441308,1334623 264 | 47907,467801,1266365 265 | 44207,480965,1177445 266 | 43089,470340,1141114 267 | 44224,458617,1127324 268 | 42008,481320,1151050 269 | 51933,574080,1211832 270 | 60827,623046,1252591 271 | 61122,612848,1249338 272 | 57421,579800,1164785 273 | 51666,539174,1074489 274 | 48104,511842,989819 275 | 44065,494503,915676 276 | 41240,471293,899433 277 | 39080,445324,865300 278 | 38990,441240,846765 279 | 38789,481285,890380 280 | 35422,510131,876820 281 | 35611,495333,905062 282 | 34800,508883,932047 283 | 40576,536203,970234 284 | 42960,543692,991089 285 | 41464,519737,955013 286 | 38988,499830,947125 287 | 37384,491749,915708 288 | 39920,494362,945265 289 | 38465,523366,1002718 290 | 
59829,679635,1146274 291 | 89992,894341,1561115 292 | 90417,985782,1662728 293 | 87618,957560,1692566 294 | 82296,924083,1610518 295 | 80332,924905,1498603 296 | 87960,908723,1598422 297 | 82228,849750,1505123 298 | 79038,810246,1474955 299 | 74568,792567,1416236 300 | 74101,836370,1393260 301 | 72578,835307,1343016 302 | 69623,783270,1259272 303 | 65362,732958,1283251 304 | 61713,687827,1256089 305 | 57075,688324,1224246 306 | 60286,745012,1308462 307 | 57921,862664,1238731 308 | 57541,918732,1263955 309 | 56212,1133547,1248094 310 | 53262,1078189,1141426 311 | 48744,971587,1075450 312 | 43637,850692,968726 313 | 39731,769026,878816 314 | 36251,689058,791384 315 | 32925,618122,712883 316 | 29638,559746,642616 317 | 26658,504644,578051 318 | 23976,452704,521155 319 | 21562,405393,469694 320 | 19391,367180,426235 321 | 18175,339768,406890 322 | 16420,316962,376081 323 | 14765,289277,341194 324 | 13271,323482,310834 325 | 12424,323394,284367 326 | 11288,306488,260925 327 | 10205,289184,237682 328 | 9177,265955,214976 329 | 8445,244360,205285 330 | 7609,224715,190960 331 | 6828,209062,172659 332 | 6134,200756,156055 333 | 5892,195986,144143 334 | 6402,211546,139109 335 | 6178,232653,151779 336 | 5646,229943,148016 337 | 5071,218053,137841 338 | 4634,201292,126081 339 | 4493,187249,116604 340 | 4445,173464,110550 341 | 4402,159614,104769 342 | 4361,143191,100921 343 | 4320,130106,96362 344 | 4280,117623,91959 345 | 4252,134118,88512 346 | 5403,176588,90170 347 | 5102,183554,91128 348 | 4558,170502,88585 349 | 4284,154241,86388 350 | 4221,145745,84572 351 | 4188,139424,83077 352 | 4166,130253,82351 353 | 4133,118147,81501 354 | 4098,106483,79723 355 | 4063,97003,77970 356 | 4028,88226,76831 357 | 3994,83214,75998 358 | 3961,77848,75342 359 | 4023,78539,78575 360 | 3956,83895,81383 361 | 3914,81832,79645 362 | 3882,75792,76589 363 | 3850,68188,74162 364 | 3819,74490,72875 365 | 3789,66205,72053 366 | 3223,61265,71439 367 | 
-------------------------------------------------------------------------------- /model_library/hydro_system/reservoir_data/reservoir_unit.csv: -------------------------------------------------------------------------------- 1 | name,max_storage,min_level,max_level,max_head,min_day,max_day,max_release,max_generation,turbine_factor 2 | kirirom,30000000,500,540,271,150,310,3456000,18,0.9 3 | kamchay,432000000,500,610,122,180,298,14126400,194,0.9 4 | atay,443800000,510,545,216,158,311,10800000,240,0.9 5 | -------------------------------------------------------------------------------- /model_library/hydro_system/thermal_unit.csv: -------------------------------------------------------------------------------- 1 | name,node,unit_type,fuel_type,fuel_contract,max_capacity,min_capacity,heat_rate,operation_cost,fixed_cost,startup_cost,ramp_rate,min_uptime,min_downtime,latitude,longitude,must_take 2 | pGas,Node1,gas_cc,gas,pGas,1200,10,7.65,2,1.2,70,287.67,4,4,,,0 3 | pBiomass,Node2,biomass_st,biomass,pBiomass,9.9,2,14.1,26,1,1,9.9,2,2,,,0 4 | -------------------------------------------------------------------------------- /model_library/hydro_system/transmission.csv: -------------------------------------------------------------------------------- 1 | source,sink,user_line_cap,type,n_circuits,source_kv,sink_kv,distance,user_susceptance 2 | Node1,Node2,-1,acsr,4,275,275,350,-1 3 | -------------------------------------------------------------------------------- /model_library/solar_ess/energy_storage.csv: -------------------------------------------------------------------------------- 1 | name,attach_to,inject_to,max_charge,max_discharge,max_capacity,min_capacity,charge_efficiency,discharge_efficiency,self_discharge_rate,cost_contract 2 | solar_ess,pSolar,pSolar,100,100,400,0,0.97,1,0.01,ess 3 | -------------------------------------------------------------------------------- /model_library/solar_ess/nondispatch_unit.csv: 
-------------------------------------------------------------------------------- 1 | name,contracted_capacity,contract,must_take 2 | pSolar,150,solar_unit,1 3 | -------------------------------------------------------------------------------- /model_library/solar_ess/thermal_unit.csv: -------------------------------------------------------------------------------- 1 | name,node,unit_type,fuel_type,fuel_contract,max_capacity,min_capacity,heat_rate,operation_cost,fixed_cost,startup_cost,ramp_rate,min_uptime,min_downtime,latitude,longitude,must_take 2 | pGas,pGas,gas_cc,gas,gas_unit,350,10,1,2,0,15,600,2,2,,,0 3 | -------------------------------------------------------------------------------- /model_library/solar_ess/transmission.csv: -------------------------------------------------------------------------------- 1 | source,sink,user_line_cap,type,n_circuits,source_kv,sink_kv,distance,user_susceptance 2 | pGas,Node1,-1,acsr,2,275,275,120,-1 3 | pSolar,Node1,-1,acsr,2,275,275,50,-1 4 | Node1,Node2,-1,acsr,2,275,275,25,-1 5 | -------------------------------------------------------------------------------- /model_library/test_flow/thermal_unit.csv: -------------------------------------------------------------------------------- 1 | name,node,unit_type,fuel_type,max_capacity,min_capacity,heat_rate,operation_cost,fuel_contract,fixed_cost,startup_cost,ramp_rate,min_uptime,min_downtime,must_take 2 | pGas,Node1,gas_cc,gas,1200,10,7.65,2,gas,1.2,70,287.67,4,4,0 3 | -------------------------------------------------------------------------------- /model_library/test_flow/transmission.csv: -------------------------------------------------------------------------------- 1 | source,sink,user_line_cap,type,n_circuits,source_kv,sink_kv,distance,source_lon,source_lat,sink_lon,sink_lat,user_susceptance 2 | Node1,Node2,-1,acsr,2,275,275,25,42.43715518,-76.48785695,42.44448255,-76.48461395,-1 3 | Node1,Node3,-1,acsr,2,275,275,50,42.43715518,-76.48785695,42.43929509,-76.49504405,-1 4 | 
Node3,Node2,-1,acsr,2,275,275,50,42.43929509,-76.49504405,42.44448255,-76.48461395,-1 5 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61.0.0", "setuptools-scm", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [tool.setuptools.packages.find] 6 | where = ["src"] 7 | 8 | [tool.setuptools.package-data] 9 | pownet = ["database/*.csv"] 10 | 11 | [tool.coverage.run] 12 | disable_warnings = ["no-data-collected"] 13 | source = ["pownet"] 14 | 15 | [project] 16 | name = "pownet" 17 | version = "2.3.0" 18 | description = "A simple production cost model of framework for power systems" 19 | readme = "README.md" 20 | authors = [{ name = "Phumthep Bunnak", email = "pb585@cornell.edu" }] 21 | license = { file = "LICENSE" } 22 | keywords = ["power_system", "optimization", "production_cost_model", "unit_commitment"] 23 | dependencies = [ 24 | "gurobipy >= 11.0.3", 25 | "highspy >= 1.7.2", 26 | "matplotlib >= 3.7.1", 27 | "networkx >= 3.1.0", 28 | "numpy >= 2.2.0", 29 | "pandas >= 2.1.1", 30 | "scipy >= 1.11.3", 31 | "scikit-learn >= 1.5.2", 32 | "statsmodels >= 0.14.4", 33 | "geopandas >= 1.0.1", 34 | "contextily >= 1.6.2", 35 | "coverage[toml] >= 7.8.0", 36 | "flake8 >= 7.2.0", 37 | ] 38 | requires-python = ">=3.10" 39 | 40 | [project.urls] 41 | Homepage = "https://github.com/Critical-Infrastructure-Systems-Lab/PowNet" 42 | Documentation = "https://pownet.readthedocs.io/en/latest/index.html" 43 | -------------------------------------------------------------------------------- /src/pownet/__init__.py: -------------------------------------------------------------------------------- 1 | from .core import ( 2 | Simulator, 3 | OutputProcessor, 4 | SystemRecord, 5 | DataProcessor, 6 | ModelBuilder, 7 | Visualizer, 8 | UserConstraint, 9 | ) 10 | 11 | from .input import SystemInput 12 | 
class ComponentBuilder(ABC):
    """Abstract interface shared by all PowNet component builders.

    A concrete builder owns one family of system components (thermal units,
    hydro units, storage, ...) and knows how to contribute its variables,
    objective terms, and constraints to a shared Gurobi model, as well as how
    to refresh them for each rolling-horizon step.
    """

    def __init__(self, model: gp.Model, inputs: SystemInput):
        """Store the shared model/inputs and derive the hourly timestep range.

        Args:
            model: The Gurobi model shared by all builders.
            inputs: System-wide inputs, including the simulation horizon.
        """
        self.model = model
        self.inputs = inputs
        self.sim_horizon = inputs.sim_horizon
        # Hourly timesteps are 1-indexed: 1..sim_horizon inclusive.
        self.timesteps = range(1, inputs.sim_horizon + 1)

    @abstractmethod
    def add_variables(self, step_k: int) -> None:
        """Create this component's decision variables for step ``step_k``."""

    @abstractmethod
    def get_fixed_objective_terms(self) -> gp.LinExpr:
        """Return objective terms that do not change between steps."""

    @abstractmethod
    def get_variable_objective_terms(self, step_k: int, **kwargs) -> gp.LinExpr:
        """Return objective terms whose coefficients depend on ``step_k``."""

    @abstractmethod
    def add_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        """Add this component's constraints for ``step_k``."""

    @abstractmethod
    def update_variables(self, step_k: int) -> None:
        """Refresh step-dependent variable data for a new ``step_k``."""

    @abstractmethod
    def update_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        """Replace step-dependent constraints for a new ``step_k``."""

    @abstractmethod
    def get_variables(self) -> dict[str, gp.tupledict]:
        """Expose the builder's variable collections keyed by name."""
-------------------------------------------------------------------------------- /src/pownet/core/__init__.py: -------------------------------------------------------------------------------- 1 | """This is the core module.""" 2 | 3 | from .model_builder import ModelBuilder 4 | from .output import OutputProcessor 5 | from .visualizer import Visualizer 6 | from .record import SystemRecord 7 | from .simulation import Simulator 8 | from .data_processor import DataProcessor 9 | from .user_constraint import UserConstraint 10 | 11 | __all__ = [ 12 | "Simulator", 13 | "OutputProcessor", 14 | "SystemRecord", 15 | "DataProcessor", 16 | "ModelBuilder", 17 | "Visualizer", 18 | "UserConstraint", 19 | ] 20 | -------------------------------------------------------------------------------- /src/pownet/core/model_builder.py: -------------------------------------------------------------------------------- 1 | """builder.py: This module contains the ModelBuilder class, which is responsible 2 | for constructing and updating the optimization model for the power system. 
class ModelBuilder:
    """Assemble and roll forward the PowNet power-system optimization model.

    Construction is delegated to five specialized component builders
    (thermal, hydro, non-dispatchable, storage, and system-level). Every
    builder contributes variables, objective terms, and constraints to a
    single shared Gurobi model; fixed objective terms are cached so that
    :meth:`update` only rebuilds the step-dependent parts.
    """

    def __init__(self, inputs: SystemInput) -> None:
        self.inputs = inputs
        self.model: gp.Model = gp.Model(self.inputs.model_id)

        # One specialized builder per component family, all sharing the model.
        self.thermal_builder = ThermalUnitBuilder(self.model, self.inputs)
        self.hydro_builder = HydroUnitBuilder(self.model, self.inputs)
        self.nondispatch_builder = NonDispatchUnitBuilder(self.model, self.inputs)
        self.storage_builder = EnergyStorageUnitBuilder(self.model, self.inputs)
        self.system_builder = SystemBuilder(self.model, self.inputs)

        # Objective terms that never change across rolling-horizon steps.
        self.total_fixed_objective_expr = gp.LinExpr()

    def _all_builders(self) -> tuple:
        """All builders, in the order their contributions are accumulated."""
        return (
            self.thermal_builder,
            self.hydro_builder,
            self.nondispatch_builder,
            self.storage_builder,
            self.system_builder,
        )

    def _coupling_kwargs(self) -> dict:
        """Cross-component variables referenced by system-level constraints."""
        return {
            "spin_vars": self.thermal_builder.spin,
            "vpowerbar_vars": self.thermal_builder.vpowerbar,
            "thermal_status_vars": self.thermal_builder.status,
            "pthermal": self.thermal_builder.pthermal,
            "phydro": self.hydro_builder.phydro,
            "psolar": self.nondispatch_builder.psolar,
            "pwind": self.nondispatch_builder.pwind,
            "pimp": self.nondispatch_builder.pimp,
            "pcharge": self.storage_builder.pcharge,
            "pdischarge": self.storage_builder.pdischarge,
            "charge_state": self.storage_builder.charge_state,
        }

    def build(self, step_k: int, init_conds: dict[str, dict]) -> PowerSystemModel:
        """Build the initial optimization model by delegating to specialized builders."""

        # --- Variables
        for builder in self._all_builders():
            builder.add_variables(step_k=step_k)

        # --- Objective: fixed terms are cached for reuse by update()
        self.total_fixed_objective_expr = gp.LinExpr()
        for builder in self._all_builders():
            self.total_fixed_objective_expr += builder.get_fixed_objective_terms()

        variable_objective_expr = gp.LinExpr()
        for builder in self._all_builders():
            variable_objective_expr += builder.get_variable_objective_terms(
                step_k=step_k
            )

        self.model.setObjective(
            self.total_fixed_objective_expr + variable_objective_expr,
            sense=GRB.MINIMIZE,
        )

        # --- Constraints: component-level first, then the system coupling,
        # which needs variables owned by the other builders.
        for builder in self._all_builders()[:-1]:
            builder.add_constraints(step_k=step_k, init_conds=init_conds)
        self.system_builder.add_constraints(
            step_k=step_k,
            init_conds=init_conds,
            **self._coupling_kwargs(),
        )

        self.model.update()
        return PowerSystemModel(self.model)

    def update(self, step_k: int, init_conds: dict[str, dict]) -> PowerSystemModel:
        """Update the existing model for a new step_k by delegating to specialized builders."""

        # --- Variables
        for builder in self._all_builders():
            builder.update_variables(step_k=step_k)

        # --- Objective: reuse the cached fixed terms, rebuild only the
        # terms/coefficients that change with step_k.
        updated_objective_expr = self.total_fixed_objective_expr.copy()
        for builder in self._all_builders():
            updated_objective_expr += builder.get_variable_objective_terms(
                step_k=step_k
            )
        self.model.setObjective(updated_objective_expr, sense=GRB.MINIMIZE)

        # --- Constraints: builders handle removing old and adding new ones.
        for builder in self._all_builders()[:-1]:
            builder.update_constraints(step_k=step_k, init_conds=init_conds)
        self.system_builder.update_constraints(
            step_k=step_k,
            init_conds=init_conds,
            **self._coupling_kwargs(),
        )

        self.model.update()
        return PowerSystemModel(self.model)

    def get_phydro(self) -> gp.tupledict:
        """Get the hydro power variables, keyed by (unit, hour)."""
        return self.hydro_builder.phydro

    def update_daily_hydropower_capacity(
        self, step_k: int, new_capacity: dict[tuple[str, int], float]
    ) -> PowerSystemModel:
        """Apply reoperated daily hydropower capacities and refresh the model."""
        self.hydro_builder.update_daily_hydropower_capacity(step_k, new_capacity)
        self.model.update()
        return PowerSystemModel(self.model)
class UserConstraint:
    """Registry for user-defined constraints layered on top of the core model.

    Constraint *builder functions* are registered by name and applied to the
    Gurobi model via :meth:`add_constraints_to_model`. Two families exist:
    static constraints (added once) and step-dependent constraints that are
    removed and rebuilt at every rolling-horizon step via
    :meth:`update_constraints`.
    """

    def __init__(
        self,
        model: gp.Model,
    ):
        """
        Args:
            model: The Gurobi model to which user constraints are attached.
        """
        self.model = model

        # Static builders: constr_name -> (builder_fn, kwargs). Added once,
        # never rebuilt between rolling-horizon steps.
        self.constraints: dict[str, tuple[Callable[..., Any], dict[str, Any]]] = {}
        # Step-dependent builders: rebuilt with the current step_k each update.
        self.constrs_with_update: dict[
            str, tuple[Callable[..., Any], dict[str, Any]]
        ] = {}
        # Gurobi constraint objects currently present in the model, by name.
        self.added_constraints: gp.tupledict = gp.tupledict()

    def register_constraint(
        self,
        constraint: Callable[..., Any],
        constr_name: str,
        **kwargs: Any,
    ):
        """Register a static constraint builder (added once, never rebuilt)."""
        self.constraints[constr_name] = (constraint, kwargs)

    def register_constraint_with_update(
        self,
        constraint: Callable[..., Any],
        constr_name: str,
        **kwargs: Any,
    ):
        """Register a step-dependent constraint builder.

        Any ``step_k`` in ``kwargs`` is discarded here; the current step is
        injected when the constraint is (re)built.
        """
        # BUGFIX: pop with a default instead of `del kwargs["step_k"]`, which
        # raised KeyError when the caller omitted step_k even though the
        # value is discarded anyway.
        kwargs.pop("step_k", None)
        self.constrs_with_update[constr_name] = (constraint, kwargs)

    def add_constraints_to_model(self, step_k: int = 1):
        """Add all registered constraints to the model for the given step."""
        # Constraints without update
        for constr_name, (constraint, kwargs) in self.constraints.items():
            self.added_constraints[constr_name] = constraint(
                self.model, constr_name=constr_name, **kwargs
            )
        # Constraints with update
        for constr_name, (constraint, kwargs) in self.constrs_with_update.items():
            self.added_constraints[constr_name] = constraint(
                self.model, step_k=step_k, constr_name=constr_name, **kwargs
            )

        self.model.update()

    def remove_constraints(self, constr_names: list[str]):
        """Remove the named constraints from the model."""
        for constr_name in constr_names:
            self.model.remove(self.added_constraints[constr_name])
        self.model.update()

    def update_constraints(self, step_k: int):
        """Rebuild every step-dependent constraint for a new ``step_k``."""
        # Remove stale constraints
        for constr_name in self.constrs_with_update.keys():
            self.model.remove(self.added_constraints[constr_name])
        # Add refreshed constraints
        for constr_name, (constraint, kwargs) in self.constrs_with_update.items():
            self.added_constraints[constr_name] = constraint(
                self.model, step_k=step_k, constr_name=constr_name, **kwargs
            )

        self.model.update()
class PowerWaterCoupler:
    """Couple the power-system model with the reservoir (water) system.

    After each simulation step, the hydropower dispatched by PowNet is fed
    back to the reservoir model, which proposes updated daily hydropower
    capacities. The power model is re-optimized and the exchange repeats
    until dispatch and proposed capacity agree within a 5% tolerance.
    """

    def __init__(
        self,
        model_builder: ModelBuilder,
        reservoir_manager: ReservoirManager,
        solver: str = "gurobi",
        mip_gap: float = 0.0001,
        timelimit: float = 600,
        log_to_console: bool = False,
    ) -> None:
        """
        Args:
            model_builder (ModelBuilder): ModelBuilder object to build the power system model.
            reservoir_manager (ReservoirManager): ReservoirManager object to manage the water system.
            solver (str): Solver to use for optimization. Default is "gurobi".
            mip_gap (float): MIP gap for optimization. Default is 0.0001.
            timelimit (float): Time limit for optimization in seconds. Default is 600.
            log_to_console (bool): Whether to log to console. Default is False.

        Returns:
            None
        """
        self.model_builder = model_builder
        self.reservoir_manager = reservoir_manager

        self.solver = solver
        self.mipgap = mip_gap
        self.timelimit = timelimit
        self.log_to_console = log_to_console

        # Number of simulated days per rolling-horizon step (horizon is hourly).
        self.num_days_in_step = self.model_builder.inputs.sim_horizon // 24

        # Reoperation iterations recorded per step, and cumulative solver time.
        self.reop_iter = []
        self.reop_opt_time = 0.0

    def get_reop_opt_time(self) -> float:
        """Total solver time (seconds) spent in reoperation re-optimizations."""
        return self.reop_opt_time

    def get_reop_iter(self) -> list:
        """Number of reoperation iterations recorded for each step."""
        return self.reop_iter

    def reoperate(
        self,
        step_k: int,
        max_reop_iter: int = 100,
    ) -> None:
        """Reoperate the reservoirs based on the daily dispatch of the power system model.
        Note that we don't reoperate on the first day of the simulation period.

        Args:
            step_k (int): Current step in the simulation.
            max_reop_iter (int): Maximum iterations before giving up.

        Raises:
            ValueError: If reoperation does not converge within max_reop_iter.

        Returns:
            None
        """

        # Assume optimization is rolling horizon of 24 hours
        days_in_step = range(step_k, step_k + self.num_days_in_step)

        reop_converge = False
        reop_k = 0

        while not reop_converge:
            # --- PowNet returns the hydropower dispatch in hourly resolution
            # across the simulation horizon; aggregate it to daily totals.
            hydropower_dispatch = {
                (unit, day): 0
                for unit in self.reservoir_manager.simulation_order
                for day in days_in_step
            }
            for varname, var in self.model_builder.get_phydro().items():
                unit = varname[0]

                # Hour 24, 48, ... belongs to the day it closes, not the next.
                if varname[1] % 24 == 0:
                    current_day = varname[1] // 24 + step_k - 1
                else:
                    current_day = varname[1] // 24 + step_k

                hydropower_dispatch[unit, current_day] += var.X

            # --- Reoperate the reservoirs
            proposed_capacity = self.reservoir_manager.reoperate(
                daily_dispatch=hydropower_dispatch,
                days_in_step=days_in_step,
            )

            # --- Iterate the reoperation process
            # Compare the new hydropower capacity with the current dispatch
            max_deviation = {
                (unit, day): abs(
                    proposed_capacity[unit, day] - hydropower_dispatch[unit, day]
                )
                for unit in self.reservoir_manager.simulation_order
                for day in days_in_step
            }

            # Set the tolerance for convergence to 5%
            reop_tol = {
                (unit, day): 0.05 * hydropower_dispatch[unit, day]
                for unit, day in max_deviation.keys()
            }

            if all(
                max_deviation[unit, day] <= reop_tol[unit, day]
                for unit in self.reservoir_manager.simulation_order
                for day in days_in_step
            ):
                reop_converge = True
                logger.info(
                    f"PowNet: Day {step_k + 1} - Reservoirs converged at iteration {reop_k}"
                )

            # BUGFIX: the mapping must be passed as a lazy %-style argument.
            # The previous call logger.info("Max deviations:", max_deviation)
            # produced a logging formatting error because the message string
            # has no placeholder for the extra argument.
            logger.info("Max deviations: %s", max_deviation)

            if reop_k > max_reop_iter:
                raise ValueError(
                    f"Reservoirs reoperation did not converge after {max_reop_iter} iterations"
                )

            # To reoptimize PowNet with the new hydropower capacity,
            # update the builder class
            power_system_model = self.model_builder.update_daily_hydropower_capacity(
                step_k=step_k, new_capacity=proposed_capacity
            )
            power_system_model.optimize(
                solver=self.solver,
                mipgap=self.mipgap,
                timelimit=self.timelimit,
                log_to_console=self.log_to_console,
            )

            # Keep track of optimization time and reoperation iterations
            self.reop_opt_time += power_system_model.get_runtime()
            reop_k += 1

        # Record the number of iterations after convergence
        self.reop_iter.append(reop_k)
@dataclasses.dataclass()
class ReservoirParams:
    """
    Data class to hold static parameters and initial timeseries data for a reservoir.

    Attributes:
        name (str): The unique name of the reservoir.
        min_day (int): The day of the year when the target level is typically at its minimum.
        max_day (int): The day of the year when the target level is typically at its maximum.
        min_level (float): The minimum operational water level (meters).
        max_level (float): The maximum operational water level (meters).
        max_head (float): The maximum hydraulic head difference available for generation (meters).
        max_storage (float): The maximum storage capacity of the reservoir (m³).
        max_release (float): The maximum allowable daily release rate (m³/day).
        max_generation (float): The maximum power generation capacity (MW).
        turbine_factor (float): The efficiency factor of the turbine(s).
        inflow_ts (pd.Series): Timeseries of daily natural inflow into the reservoir (m³/day), indexed from 1.
        minflow_ts (pd.Series): Minimum environmental flow (m³/day), indexed from 1.
        upstream_units (list[str]): List of upstream reservoir names that feed into this reservoir.
        downstream_flow_fracs (dict[str, float]): Dictionary mapping downstream reservoir names to their respective flow fractions (0-1).
    """

    name: str
    min_day: int
    max_day: int
    min_level: float
    max_level: float
    max_head: float
    max_storage: float
    max_release: float
    max_generation: float
    turbine_factor: float
    inflow_ts: pd.Series
    minflow_ts: pd.Series
    upstream_units: list[str]
    downstream_flow_fracs: dict[str, float]

    def __post_init__(self):
        """Perform basic validation after initialization."""
        # Flow fractions of downstream units should sum to 1
        if self.downstream_flow_fracs:
            frac_total = sum(self.downstream_flow_fracs.values())
            if not math.isclose(frac_total, 1.0, abs_tol=1e-4):
                raise ValueError(
                    f"Downstream units for {self.name} do not sum to 1: "
                    f"{self.downstream_flow_fracs}"
                )

        # Both timeseries must share the same index
        inflow_index = self.inflow_ts.index
        minflow_index = self.minflow_ts.index
        if not inflow_index.equals(minflow_index):
            raise ValueError(
                f"Inflows and minflows for {self.name} are not indexed the same: "
                f"{self.inflow_ts.index} vs {self.minflow_ts.index}"
            )

        # Indexing starts at 1
        if inflow_index[0] != 1:
            raise ValueError(
                f"Inflows for {self.name} do not start at 1: {self.inflow_ts.index[0]}"
            )
        if minflow_index[0] != 1:
            raise ValueError(
                f"Minflows for {self.name} do not start at 1: {self.minflow_ts.index[0]}"
            )

        # Inflow must be greater than minflow for all days
        meets_minflow = self.inflow_ts >= self.minflow_ts
        if not meets_minflow.all():
            raise ValueError(
                f"Inflows for {self.name} are less than minflows on some days: "
                f"{(self.inflow_ts < self.minflow_ts).sum()} days"
            )
"""folder_utils.py: Folder utility functions for pownet package."""
# NOTE: the module docstring above was previously written as two adjacent
# string literals ("...." ""), an apparent typo for a normal docstring.

import os


def get_pownet_dir() -> str:
    """Return the root directory of the pownet package.

    The root is three levels above this file (src/pownet/folder_utils.py).
    """
    return os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))


def get_home_dir() -> str:
    """Return the home directory of the user.

    This is useful for testing purposes.
    """
    return os.path.expanduser("~")


def get_database_dir() -> str:
    """Return the database directory bundled with the pownet package."""
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), "database")


def get_test_dir() -> str:
    """Return the test directory of the pownet package."""
    return os.path.join(get_pownet_dir(), "src", "test_pownet")
def add_c_link_ess_discharge(
    model: gp.Model,
    pdischarge: gp.tupledict,
    udischarge: gp.tupledict,
    timesteps: range,
    units: list,
    max_discharge: dict[str, float],
) -> gp.tupledict:
    """Tie each continuous discharging variable to its on/off indicator.

    When the binary indicator is off, the discharge is forced to zero; when it
    is on, the discharge may rise up to the unit's rated discharging capacity.

    Args:
        model (gp.Model): The Gurobi model.
        pdischarge (gp.tupledict): Continuous discharging variable.
        udischarge (gp.tupledict): Binary discharging indicator.
        timesteps (range): Range of time steps.
        units (list): List of energy storage units.
        max_discharge (dict[str, float]): Maximum discharging capacity for each unit.

    Returns:
        gp.tupledict: The added constraints.
    """
    return model.addConstrs(
        (
            pdischarge[u, t] <= max_discharge[u] * udischarge[u, t]
            for u in units
            for t in timesteps
        ),
        name="link_ess_discharge",
    )


def add_c_link_ess_state(
    model: gp.Model,
    ucharge: gp.tupledict,
    udischarge: gp.tupledict,
    timesteps: range,
    units: list,
) -> gp.tupledict:
    """Forbid simultaneous charging and discharging.

    For every unit and time step, at most one of the two binary indicators
    (charge / discharge) may be active.

    Args:
        model (gp.Model): The Gurobi model.
        ucharge (gp.tupledict): Binary charging indicator.
        udischarge (gp.tupledict): Binary discharging indicator.
        timesteps (range): Range of time steps.
        units (list): List of energy storage units.

    Returns:
        gp.tupledict: The added constraints.
    """
    return model.addConstrs(
        (
            ucharge[u, t] + udischarge[u, t] <= 1
            for u in units
            for t in timesteps
        ),
        name="link_ess_state",
    )
120 | discharge_efficiency (dict[str, float]): Discharging efficiency for each unit. 121 | self_discharge_rate (dict[str, float]): Self-discharge rate for each unit. 122 | 123 | Returns: 124 | gp.tupledict: The added constraints. 125 | """ 126 | t = 1 127 | return model.addConstrs( 128 | ( 129 | charge_state[unit, t] 130 | == (1 - self_discharge_rate[unit]) * charge_state_init[unit] 131 | + charge_efficiency[unit] * pcharge[unit, t] 132 | - pdischarge[unit, t] / discharge_efficiency[unit] 133 | for unit in units 134 | ), 135 | name="unit_ess_balance_init", 136 | ) 137 | 138 | 139 | def add_c_unit_ess_balance( 140 | model: gp.Model, 141 | pcharge: gp.tupledict, 142 | pdischarge: gp.tupledict, 143 | charge_state: gp.tupledict, 144 | units: list, 145 | sim_horizon: int, 146 | charge_efficiency: dict[str, float], 147 | discharge_efficiency: dict[str, float], 148 | self_discharge_rate: dict[str, float], 149 | ) -> gp.tupledict: 150 | """Balance for energy storage units at the second time step and onwards. 151 | The balance equation ensures that the charge state at time t is equal to the charge state at time t-1, 152 | adjusted for the charging and discharging variables, as well as the self-discharge rate. 153 | 154 | Args: 155 | model (gp.Model): The Gurobi model. 156 | pcharge (gp.tupledict): Continuous charging variable. 157 | pdischarge (gp.tupledict): Continuous discharging variable. 158 | charge_state (gp.tupledict): Charge state variable. 159 | units (list): List of energy storage units. 160 | sim_horizon (int): Simulation horizon. 161 | charge_efficiency (dict[str, float]): Charging efficiency for each unit. 162 | discharge_efficiency (dict[str, float]): Discharging efficiency for each unit. 163 | self_discharge_rate (dict[str, float]): Self-discharge rate for each unit. 164 | 165 | Returns: 166 | gp.tupledict: The added constraints. 
def get_thermal_fixed_coeff(
    timesteps: range,
    thermal_units: list,
    thermal_fixed_cost: dict,
    thermal_rated_capacity: dict,
) -> dict:
    """Fixed-cost coefficients: rated capacity times per-unit fixed cost.

    The coefficient is time-invariant but keyed by (unit, t) for every timestep.
    """
    coeffs = {}
    for t in timesteps:
        for unit in thermal_units:
            coeffs[(unit, t)] = thermal_rated_capacity[unit] * thermal_fixed_cost[unit]
    return coeffs


def get_thermal_opex_coeff(
    step_k: int,
    timesteps: range,
    thermal_units: list,
    thermal_opex: dict,
    fuel_contracts: dict,
    contract_costs: dict,
    thermal_heat_rate: dict,
) -> dict:
    """Variable-cost coefficients from fuel cost, heat rate, and opex.

    Fuel prices are looked up at the absolute hour of the rolling horizon,
    i.e. offset by (step_k - 1) * 24 from the within-step timestep.
    """
    hour_offset = (step_k - 1) * 24
    coeffs = {}
    for t in timesteps:
        for unit in thermal_units:
            fuel_price = contract_costs[(fuel_contracts[unit], t + hour_offset)]
            coeffs[(unit, t)] = fuel_price * thermal_heat_rate[unit] + thermal_opex[unit]
    return coeffs


def get_thermal_startup_coeff(
    timesteps: range,
    thermal_units: list,
    thermal_startup_cost: dict,
    thermal_rated_capacity: dict,
) -> dict:
    """Startup-cost coefficients: rated capacity times per-unit startup cost."""
    coeffs = {}
    for t in timesteps:
        for unit in thermal_units:
            coeffs[(unit, t)] = (
                thermal_rated_capacity[unit] * thermal_startup_cost[unit]
            )
    return coeffs


def get_marginal_cost_coeff(
    step_k: int,
    timesteps: range,
    units: list,
    nondispatch_contracts: dict,
    contract_costs: dict,
) -> dict:
    """
    Generic helper function to calculate coefficients based on marginal cost or a similar attribute for a list of units.

    Args:
        step_k: Current step in the simulation
        timesteps: Range of timesteps for the simulation
        units: List of units to calculate coefficients for
        nondispatch_contracts: Dictionary mapping units to their respective contracts
        contract_costs: Dictionary mapping contracts to their respective costs

    Returns:
        A dictionary mapping (unit, t) tuples to the calculated coefficients
    """
    hour_offset = (step_k - 1) * 24
    coeffs = {}
    for t in timesteps:
        for unit in units:
            coeffs[(unit, t)] = contract_costs[
                (nondispatch_contracts[unit], t + hour_offset)
            ]
    return coeffs
def find_fraction_vars(
    binary_vars: dict,
    atol: float = 1e-5,
) -> dict:
    """Return the subset of variables whose solution values are fractional.

    Args:
        binary_vars (dict): Mapping of variable name to gurobipy.Var.
        atol (float): Absolute tolerance used when testing closeness to 0 or 1.

    Returns:
        dict: Mapping of variable name to gurobipy.Var for every variable whose
            value is neither (approximately) 0 nor 1.
    """
    fractional_vars = {}
    # Iterate items() to avoid a second dict lookup per variable
    for varname, var in binary_vars.items():
        x_value = var.X
        if not (np.isclose(x_value, 0, atol=atol) or np.isclose(x_value, 1, atol=atol)):
            fractional_vars[varname] = var
    return fractional_vars


def round_up(variable: gp.Var) -> None:
    """Fix a variable to 1 by setting both of its bounds."""
    variable.lb = 1
    variable.ub = 1


def round_down(variable: gp.Var) -> None:
    """Fix a variable to 0 by setting both of its bounds."""
    variable.lb = 0
    variable.ub = 0


def slow_rounding(
    fraction_vars: dict,
    threshold: float = 0,
) -> None:
    """Iteratively rounding variables with the largest value at each iteration.
    Values above the threshold are rounded up. Values below the threshold are rounded down.

    Only the variable(s) attaining the maximum fractional value are fixed,
    so repeated calls round one "layer" at a time.
    """
    # Generator avoids materializing an intermediate list just for max()
    max_value = max(v.X for v in fraction_vars.values())
    for var in fraction_vars.values():
        if var.X == max_value:
            if max_value >= threshold:
                round_up(var)
            else:
                round_down(var)


def fast_rounding(fraction_vars: dict, threshold: float = 0) -> None:
    """Round every fractional variable in one pass.

    Values at or above the threshold are rounded up; all others are rounded down.
    """
    for bin_var in fraction_vars.values():
        if bin_var.X >= threshold:
            round_up(bin_var)
        else:
            round_down(bin_var)
def optimize_with_rounding(
    model: gp.Model,
    rounding_strategy: str,
    threshold: float,
    max_rounding_iter: int,
    mipgap: float,
    timelimit: int,
    num_threads: int,
    log_to_console: bool,
) -> tuple[gp.Model, float, int]:
    """
    Optimize a Gurobi model using iterative rounding with a given threshold.

    This function first relaxes the input model and then iteratively rounds
    fractional variables until an integer solution is found or the maximum
    number of iterations is reached.

    Args:
        model (gp.Model): The Gurobi model to optimize.
        rounding_strategy (str): "slow" rounds only the largest fractional
            variable(s) per iteration; anything else rounds all at once.
        threshold (float): The threshold for rounding fractional variables.
        max_rounding_iter (int): The maximum number of rounding iterations.
        mipgap (float): The relative MIP optimality gap.
        timelimit (int): The time limit for the optimization in seconds.
        num_threads (int): The number of threads to use for optimization.
        log_to_console (bool): Whether to log optimization output to the console.

    Returns:
        tuple[gp.Model, float, int]: The optimized model, the time spent in the
            rounding phase, and the number of rounding iterations taken.
            When the heuristic fails (infeasible relaxation or iteration limit),
            the original MIP is solved instead and (model, None, None) is returned.
    """

    # First specify the model parameters
    model.Params.LogToConsole = log_to_console
    model.Params.MIPGap = mipgap
    model.Params.TimeLimit = timelimit
    model.Params.Threads = num_threads

    rounding_model = model.relax()
    rounding_model.Params.LogToConsole = False
    binary_vars = get_variables(rounding_model)

    rounding_optimization_time = 0.0
    for current_iter in range(max_rounding_iter):
        rounding_model.optimize()

        # Keep track of the optimization time
        rounding_optimization_time += rounding_model.runtime

        # Fixing variables can cause infeasibility (INF_OR_UNBD can also arise
        # from presolve in that situation); fall back to solving the MIP.
        if rounding_model.status in (gp.GRB.INFEASIBLE, gp.GRB.INF_OR_UNBD):
            logger.warning("\nPowNet: Rounding is infeasible. Use the MIP method.")
            model.optimize()
            return model, None, None
        # The model should be feasible, but raise an error if not.
        elif rounding_model.status != gp.GRB.OPTIMAL:
            raise ValueError(f"Unrecognized model status: {rounding_model.status}")

        # Round variables and update the model
        fraction_vars = find_fraction_vars(binary_vars)

        # An empty dict means we have an integer solution.
        if len(fraction_vars) == 0:
            return rounding_model, rounding_optimization_time, current_iter

        if rounding_strategy == "slow":
            slow_rounding(fraction_vars=fraction_vars, threshold=threshold)
        else:
            fast_rounding(fraction_vars=fraction_vars, threshold=threshold)

        # Push the new variable bounds into the model
        rounding_model.update()

    # If no integer solution is found after max_rounding_iter
    logger.warning(
        "\nPowNet: The rounding heuristic has terminated before finding an integer solution."
    )
    model.optimize()
    return model, None, None
def add_var_with_variable_ub(
    model: gp.Model,
    varname: str,
    timesteps: range,
    step_k: int,
    units: list,
    capacity_df: pd.DataFrame,
) -> gp.tupledict:
    """Add a variable with a variable upper bound in a day-ahead rolling horizon optimization.

    Args:
        model (gp.Model): The optimization model.
        varname (str): The name of the variable.
        timesteps (range): The range of timesteps.
        step_k (int): The step index.
        units (list): The list of units.
        capacity_df (pd.DataFrame): The dataframe of capacities.

    Returns:
        gp.tupledict: The variable with a variable upper bound.

    """

    return model.addVars(
        units,
        timesteps,
        lb=0,
        # Each (unit, t) gets its own upper bound from the capacity timeseries
        ub={
            (unit, t): get_capacity_value(t, unit, step_k, capacity_df)
            for t in timesteps
            for unit in units
        },
        vtype=GRB.CONTINUOUS,
        name=varname,
    )


def update_var_with_variable_ub(
    variables: gp.tupledict,
    step_k: int,
    capacity_df: pd.DataFrame,
) -> None:
    """Update the time-dependent upper bound of the variable.

    Called when the rolling horizon advances to a new step so existing
    variables pick up the capacities of the new window.

    Args:
        variables (gp.tupledict): The variable with a variable upper bound,
            keyed by (unit, t).
        step_k (int): The step index.
        capacity_df (pd.DataFrame): The dataframe of capacities.

    Returns:
        None
    """
    for (unit, t), v in variables.items():
        capacity_value = get_capacity_value(t, unit, step_k, capacity_df)
        v.ub = capacity_value
    return


def update_flow_vars(
    flow_variables: gp.tupledict,
    step_k: int,
    capacity_df: pd.DataFrame,
    line_capacity_factor: float,
) -> None:
    """Update the upper bounds of the flow variables based on the capacity dataframes.

    Args:
        flow_variables (gp.tupledict): Flow variables keyed by (node1, node2, t).
        step_k (int): The step index of the rolling horizon.
        capacity_df (pd.DataFrame): Line capacities indexed by absolute hour,
            one column per edge (node1, node2).
        line_capacity_factor (float): Multiplier applied to the nominal capacity.

    Returns:
        None
    """
    # NOTE(review): only the upper bound is set here; the lower bound is left
    # untouched — confirm that negative/reverse flow bounds are handled elsewhere.
    hours_per_step = 24
    for (node1, node2, t), flow_variable in flow_variables.items():
        edge = (node1, node2)
        # Map the within-step hour t to the absolute hour of the full horizon
        line_capacity = capacity_df.loc[t + (step_k - 1) * hours_per_step, edge]
        flow_variable.ub = line_capacity * line_capacity_factor
class ReservoirManager:
    """Manage a network of reservoirs: loading from CSV, topological ordering,
    simulation, and reoperation against a power-system dispatch."""

    def __init__(self):
        # Reservoir objects keyed by unit name
        self.reservoirs: dict[str, Reservoir] = {}
        # Upstream-to-downstream processing order of reservoir names
        self.simulation_order: list[str] = []

    def load_reservoirs_from_csv(self, input_folder: str) -> None:
        """Load Basin information from a CSV file.

        Expects reservoir_unit.csv, flow_path.csv, inflow.csv, and
        minimum_flow.csv inside `input_folder`.

        Raises:
            FileNotFoundError: If flow_path.csv is missing.
        """

        ###############################################################################
        # Read CSV files
        ###############################################################################

        # Reservoir units
        reservoir_data = pd.read_csv(
            os.path.join(input_folder, "reservoir_unit.csv"), header=0
        )

        # Flow paths. Previously a missing file left `flow_paths` unbound and
        # caused a NameError further below; fail early with a clear message.
        filepath = os.path.join(input_folder, "flow_path.csv")
        if not os.path.exists(filepath):
            raise FileNotFoundError(f"Flow path file not found: {filepath}")
        flow_paths = pd.read_csv(filepath, header=0)

        # Inflow and minimum flow time series
        inflow_ts = pd.read_csv(os.path.join(input_folder, "inflow.csv"), header=0)
        inflow_ts.index += 1

        minflow_ts = pd.read_csv(
            os.path.join(input_folder, "minimum_flow.csv"), header=0
        )
        minflow_ts.index += 1  # Time series indexing starts at 1

        ################################################################################
        # Instantiate Reservoir objects
        ################################################################################
        for _, row in reservoir_data.iterrows():
            unit_name = row["name"]

            upstream_units = find_upstream_units(flow_paths, unit_name)
            downstream_flow_fracs = find_downstream_flow_fractions(
                flow_paths, unit_name
            )

            params = ReservoirParams(
                name=unit_name,
                min_day=int(row["min_day"]),
                max_day=int(row["max_day"]),
                min_level=float(row["min_level"]),
                max_level=float(row["max_level"]),
                max_head=float(row["max_head"]),
                max_storage=float(row["max_storage"]),
                max_release=float(row["max_release"]),
                max_generation=float(row["max_generation"]),
                turbine_factor=float(row["turbine_factor"]),
                inflow_ts=inflow_ts[row["name"]],
                minflow_ts=minflow_ts[row["name"]],
                upstream_units=upstream_units,
                downstream_flow_fracs=downstream_flow_fracs,
            )
            # Create a new Reservoir object and register it by name
            self.reservoirs[unit_name] = Reservoir(params)

        #############################################################################
        # Process the network topology
        ##############################################################################
        self.simulation_order = find_simulation_order(
            reservoir_names=self.reservoirs.keys(), flow_paths=flow_paths
        )

    def simulate(self) -> None:
        """Simulate the reservoir operations to get hydropower time series."""
        # Upstream-first order guarantees upstream releases exist when needed
        for unit_name in self.simulation_order:
            reservoir = self.reservoirs[unit_name]
            total_upstream_flow = find_upstream_flow(reservoir, self.reservoirs)
            reservoir.set_upstream_flow(total_upstream_flow)
            reservoir.simulate()

    def get_hydropower_ts(
        self, unit_node_mapping: dict[str, str] = None
    ) -> pd.DataFrame:
        """Get the hydropower time series for all reservoirs.

        Args:
            unit_node_mapping (dict[str, str], optional): If provided, columns
                become a (unit, node) MultiIndex.

        Returns:
            pd.DataFrame: One column per reservoir (in simulation order),
                indexed from 1.
        """
        # Build all columns at once instead of repeated pd.concat in a loop
        df = pd.DataFrame(
            {
                unit_name: self.reservoirs[unit_name].daily_hydropower.values
                for unit_name in self.simulation_order
            }
        )
        df.index = range(1, len(df) + 1)
        # Create multi-level column index if unit_node_mapping is provided
        if unit_node_mapping:
            df.columns = pd.MultiIndex.from_tuples(
                [(unit, unit_node_mapping[unit]) for unit in df.columns]
            )
        return df

    def write_hydropower_to_csv(
        self, output_filepath: str, unit_node_mapping: dict[str, str] = None
    ) -> None:
        """Write the hydropower time series to CSV files."""
        hydropower_df = self.get_hydropower_ts(unit_node_mapping)
        hydropower_df.to_csv(output_filepath, index=False)

    def reoperate(
        self, daily_dispatch: dict[tuple[str, int], float], days_in_step: range
    ) -> dict[tuple[str, int], float]:
        """Reoperate the reservoirs based on the daily dispatch of the power system model.
        Note that we don't reoperate on the first day of the simulation period.

        Args:
            daily_dispatch: Daily hydropower dispatch keyed by (unit_name, day).
            days_in_step: Days covered by the current simulation step.

        Returns:
            Proposed daily hydropower capacity keyed by (unit_name, day).
        """
        proposed_capacity = {k: 0 for k in daily_dispatch.keys()}

        for unit_name in self.simulation_order:
            reservoir = self.reservoirs[unit_name]
            # Upstream reservoirs are finalized before this unit is processed
            # and are not modified within the day loop, so the upstream flow is
            # computed once per unit rather than once per (unit, day).
            total_upstream_flow = find_upstream_flow(reservoir, self.reservoirs)
            reservoir.set_upstream_flow(total_upstream_flow)
            for day in days_in_step:
                proposed_capacity[unit_name, day] = reservoir.reoperate(
                    day=day,
                    daily_dispatch=daily_dispatch[unit_name, day],
                    upstream_flow_t=total_upstream_flow.loc[day],
                )

        return proposed_capacity
"""demand.py: Model for electricity demand time series data."""

import numpy as np
import pandas as pd
from pmdarima import auto_arima
from statsmodels.regression.linear_model import OLS
from statsmodels.tsa.statespace.sarimax import SARIMAX
from statsmodels.tsa.seasonal import STL, DecomposeResult

from .timeseries_model import TimeSeriesModel

import logging

logger = logging.getLogger(__name__)


class DemandTSModel(TimeSeriesModel):
    """Monthly SARIMAX model for hourly electricity demand.

    Per calendar month the pipeline is: OLS regression on exogenous
    variables -> STL decomposition of the regression residuals -> SARIMAX
    on the deseasonalized and detrended remainder.
    """

    def __init__(
        self,
    ) -> None:
        super().__init__()
        self._monthly_models: dict[int, SARIMAX] = {}
        # Explicit float dtype avoids the deprecated object-dtype default of
        # an empty Series; these are only inspected via `.empty` before use.
        self._predictions: pd.Series = pd.Series(dtype=float)
        self._pred_residuals: pd.Series = pd.Series(dtype=float)

        # Specific model parameters
        self.monthly_reg_models: dict[int, OLS] = {}
        self.stl_seasonal_value = 24 * 7 - 1  # Must be an odd number
        self.stl_period_value = 24
        self.monthly_stl_results: dict[int, DecomposeResult] = {}

    @property
    def monthly_models(self) -> dict:
        if not self._is_fitted:
            raise ValueError("Model must be fitted first!")
        return self._monthly_models

    @property
    def predictions(self) -> pd.Series:
        if not self._is_fitted:
            raise ValueError("Model must be fitted first!")
        return self._predictions

    @property
    def pred_residuals(self) -> pd.Series:
        if not self._is_fitted:
            raise ValueError("Model must be fitted first!")
        return self._pred_residuals

    def _fit(
        self,
        target_column: str,
        exog_vars: list[str],
        arima_order: tuple[int, int, int],
        seasonal_order: tuple[int, int, int, int] = None,
    ) -> None:
        """Fit one regression + STL + SARIMAX chain per month.

        Steps include
        1) Fit a regression to get yt
        2) Fit an STL to the regression residuals to get ytt
        3) Fit a SARIMAX model to ytt

        Note that the user must at least use temperature as a predictor.
        """
        if seasonal_order is None:
            # SARIMAX expects a 4-tuple; passing None through would fail
            # inside statsmodels.
            seasonal_order = (0, 0, 0, 0)

        self._pred_residuals = pd.Series(dtype=float)
        for month in self.months:
            logger.info(f"Fitting SARIMAX model for month {month}")
            # Subset the data for the month
            monthly_y = self.data.loc[self.data.index.month == month, target_column]
            monthly_exog = self.data.loc[
                self.data.index.month == month, exog_vars
            ].astype(float)

            # Regression model
            reg_model = OLS(monthly_y, monthly_exog).fit()
            monthly_yt = reg_model.resid

            # STL model
            stl_model = STL(
                monthly_yt,
                seasonal=self.stl_seasonal_value,
                period=self.stl_period_value,
            )
            stl_result = stl_model.fit()

            # SARIMAX model on the remainder after removing STL seasonal/trend
            monthly_ytt = monthly_yt - stl_result.seasonal - stl_result.trend
            sarimax_model = SARIMAX(
                monthly_ytt,
                order=arima_order,
                seasonal_order=seasonal_order,
            ).fit(disp=True)

            # Store the models, and residuals
            self.monthly_reg_models[month] = reg_model
            self._monthly_models[month] = sarimax_model
            self.monthly_stl_results[month] = stl_result

            if self._pred_residuals.empty:
                self._pred_residuals = sarimax_model.resid
            else:
                self._pred_residuals = pd.concat(
                    [self._pred_residuals, sarimax_model.resid]
                )
        self._pred_residuals.name = "value"

    def _predict(self) -> pd.Series:
        """Reassemble in-sample demand predictions month by month."""
        self._predictions = pd.Series(dtype=float)
        for month in self.months:
            monthly_exog = self.data.loc[
                self.data.index.month == month, self.exog_vars
            ].astype(float)
            sarimax_model = self._monthly_models[month]
            # SARIMAX models ytt (remainder after STL removal in _fit)
            monthly_ytt_pred = sarimax_model.predict()
            # NOTE(review): _fit subtracts both the STL seasonal AND trend
            # components, but only the seasonal is added back here (cf.
            # _get_synthetic, which adds both). Confirm the trend omission
            # is intentional.
            monthly_yt_pred = (
                monthly_ytt_pred + self.monthly_stl_results[month].seasonal
            )

            # Regression model to get y
            reg_pred = self.monthly_reg_models[month].predict(monthly_exog)
            monthly_y_pred = monthly_yt_pred + reg_pred

            monthly_y_pred.name = "value"
            if self._predictions.empty:
                self._predictions = monthly_y_pred
            else:
                self._predictions = pd.concat([self._predictions, monthly_y_pred])
        return self._predictions
self._predictions 129 | 130 | def _get_synthetic(self, exog_data: pd.DataFrame, seed: int) -> pd.Series: 131 | synthetic_y = pd.Series() 132 | for month in self.months: 133 | # Models are fitted for each month 134 | stl_result = self.monthly_stl_results[month] 135 | sarimax_model = self._monthly_models[month] 136 | 137 | monthly_exog = exog_data.loc[exog_data.index.month == month] 138 | 139 | # Create a synthetic time series for each day 140 | for day in monthly_exog.index.day.unique(): 141 | # Regression model 142 | start_time = monthly_exog.loc[monthly_exog.index.day == day].index[0] 143 | end_time = start_time + pd.Timedelta(hours=23) 144 | daily_exog = monthly_exog.loc[start_time:end_time] 145 | reg_pred = self.monthly_reg_models[month].predict(exog=daily_exog) 146 | reg_pred = pd.Series(reg_pred, index=daily_exog.index) 147 | 148 | # Bootstrap the SARIMAX predictions within 95% confidence interval 149 | sarimax_pred = sarimax_model.predict() 150 | sarimax_pred_ci = np.percentile(sarimax_pred, [2.5, 97.5]) 151 | sarimax_bootstrap = np.random.choice( 152 | sarimax_pred_ci, size=len(daily_exog) 153 | ) 154 | sarimax_bootstrap = pd.Series(sarimax_bootstrap, index=daily_exog.index) 155 | 156 | # Recover electricity demand by adding predictions from the regression model, 157 | # predictions from SARIMAX, and also the STL's seasonal and trend components 158 | daily_stl_seasonal = stl_result.seasonal.loc[start_time:end_time] 159 | daily_stl_trend = stl_result.trend.loc[start_time:end_time] 160 | daily_syn_y = ( 161 | sarimax_bootstrap + reg_pred + daily_stl_seasonal + daily_stl_trend 162 | ) 163 | daily_syn_y.name = "value" 164 | if synthetic_y.empty: 165 | synthetic_y = daily_syn_y 166 | else: 167 | synthetic_y = pd.concat([synthetic_y, daily_syn_y]) 168 | return synthetic_y 169 | 170 | def _find_best_model( 171 | self, 172 | target_column: str, 173 | exog_vars: list[str], 174 | month_to_use: int, 175 | seed: int, 176 | suppress_warnings: bool, 177 | ) -> 
tuple[tuple[int, int, int], tuple[int, int, int, int]]: 178 | monthly_y = self.data.loc[self.data.index.month == month_to_use, target_column] 179 | monthly_exog = self.data.loc[ 180 | self.data.index.month == month_to_use, exog_vars 181 | ].astype(float) 182 | 183 | # Regression model 184 | reg_model = OLS(monthly_y, monthly_exog).fit() 185 | monthly_yt = reg_model.resid 186 | 187 | # STL model 188 | stl_model = STL( 189 | monthly_yt, 190 | seasonal=self.stl_seasonal_value, 191 | period=self.stl_period_value, 192 | ) 193 | stl_result = stl_model.fit() 194 | 195 | # SARIMAX model 196 | monthly_ytt = monthly_yt - stl_result.seasonal - stl_result.trend 197 | best_model = auto_arima( 198 | monthly_ytt, 199 | X=monthly_exog, 200 | start_p=0, 201 | start_q=0, 202 | max_p=2, 203 | max_d=2, 204 | max_q=2, 205 | seasonal=False, 206 | information_criterion="aic", 207 | stepwise=True, 208 | suppress_warnings=suppress_warnings, 209 | error_action="warn", 210 | random_state=seed, 211 | ) 212 | return best_model.order, best_model.seasonal_order 213 | -------------------------------------------------------------------------------- /src/pownet/stochastic/kirsch_nowak.py: -------------------------------------------------------------------------------- 1 | """ kirsch_nowak.py 2 | """ 3 | 4 | import numpy as np 5 | from scipy.linalg import cholesky, eig 6 | 7 | 8 | def KNN_identification(Z, Qtotals, month, k=None): 9 | """ 10 | Identifies K-nearest neighbors of Z in the historical annual data 11 | and computes the associated weights W. 
"""kirsch_nowak.py: Kirsch-Nowak synthetic streamflow helpers."""

import numpy as np
from scipy.linalg import cholesky, eig


def KNN_identification(Z, Qtotals, month, k=None):
    """
    Identifies K-nearest neighbors of Z in the historical annual data
    and computes the associated weights W.

    Args:
        Z: Synthetic datum (indexed as Z[0, 0, j] per site j)
        Qtotals: Total monthly flows at all sites for all historical months
            within +/- 7 days of the month being disaggregated
        month: Month being disaggregated
        k: Number of nearest neighbors (by default k=n_year^0.5
            according to Lall and Sharma (1996))

    Returns:
        KNN_id: 1-based indices of the first K-nearest neighbors of Z in
            the historical annual data
        W: Nearest neighbors weights, according to Lall and Sharma (1996):
            W(i) = (1/i) / (sum(1/i))
    """
    # Ntotals is the number of historical monthly patterns used for disaggregation.
    Ntotals = Qtotals[month].shape[0]

    if k is None:
        K = round(np.sqrt(Ntotals))
    else:
        K = k

    # Squared Euclidean distance between Z and each historical pattern,
    # vectorized over sites (equivalent to the original per-element loop).
    Nsites = Qtotals[month].shape[1]
    delta = np.sum((Qtotals[month] - Z[0, 0, :Nsites]) ** 2, axis=1)

    # 1-based indices of the K nearest patterns (MATLAB heritage).
    KNN_id = (np.argsort(delta)[:K] + 1).astype(int)

    # Computation of the kernel weights
    f1 = 1.0 / np.arange(1, K + 1)
    W = f1 / np.sum(f1)

    return KNN_id, W


def chol_corr(Z):
    """
    Computes the Cholesky decomposition (upper factor) of the correlation
    matrix of the columns of Z. Repairs non-positive-definite matrices by
    inflating the diagonal until the decomposition succeeds.

    Bug fix: unlike MATLAB's `chol`, scipy.linalg.cholesky does NOT return
    a (U, p) pair -- it raises LinAlgError on a non-positive-definite
    input. The original `U, p = cholesky(...)` unpacking therefore failed
    even for valid inputs; the repair loop is now driven by the exception.
    """
    R = np.corrcoef(Z, rowvar=False)  # Correlation matrix of the columns
    while True:
        try:
            return cholesky(R, lower=False)
        except np.linalg.LinAlgError:
            # Shift the diagonal by the most-negative eigenvalue (or a tiny
            # epsilon), then rescale to recover unit diagonal entries.
            k = min(np.min(np.real(eig(R)[0])) - 1e-15, -1e-15)
            R = R - k * np.eye(R.shape[0])
            R = R / R[0, 0]
self.data["datetime"].dt.month.unique().tolist() 46 | # Downstream code expects months to be sorted 47 | self.months.sort() 48 | # Time series modeling requires datetime to be the index 49 | self.data = self.data.set_index("datetime") 50 | self.data.index.freq = "h" # Requires hourly time series 51 | self._is_loaded = True 52 | 53 | def fit( 54 | self, 55 | target_column: str, 56 | arima_order: tuple[int, int, int], 57 | seasonal_order: tuple[int, int, int, int] = None, 58 | exog_vars: list[str] = None, 59 | ) -> None: 60 | """Fit the model to the data 61 | 62 | Args: 63 | target_column (str): Target column to fit the model 64 | arima_order (tuple[int, int, int]): ARIMA order 65 | seasonal_order (tuple[int, int, int, int]): Seasonal order 66 | exog_vars (list[str], optional): List of exogenous variables. Defaults to None. 67 | """ 68 | self._fit( 69 | target_column=target_column, 70 | arima_order=arima_order, 71 | seasonal_order=seasonal_order, 72 | exog_vars=exog_vars, 73 | ) 74 | self._is_fitted = True 75 | self.exog_vars = exog_vars 76 | 77 | def predict(self) -> pd.Series: 78 | """Return the predictions""" 79 | if not self._is_fitted: 80 | raise ValueError("Model must be fitted before making predictions.") 81 | return self._predict() 82 | 83 | def get_synthetic( 84 | self, 85 | exog_data: pd.DataFrame = None, 86 | seed: int = None, 87 | ) -> pd.Series: 88 | """ 89 | Create synthetic time series. 90 | 91 | Args: 92 | exog_data (pd.DataFrame, optional): Exogenous variables. Defaults to None. 93 | seed (int, optional): Random seed. Defaults to None. 94 | 95 | Returns: 96 | pd.Series: Synthetic time series data. 97 | 98 | Raises: 99 | ValueError: If the model is not fitted. 100 | ValueError: If exogenous variables are not in the data. 101 | ValueError: If exogenous data index is not equal to the time series data index. 
102 | """ 103 | if not self._is_fitted: 104 | raise ValueError("Model must be fitted before creating synthetic data.") 105 | if exog_data is not None: 106 | if not all([var in exog_data.columns for var in self.exog_vars]): 107 | raise ValueError("Exogenous variables should be in the data.") 108 | if not exog_data.index.equals(self.data.index): 109 | raise ValueError( 110 | "Exogenous data should have the same index as the time series data." 111 | ) 112 | 113 | return self._get_synthetic(exog_data=exog_data, seed=seed) 114 | 115 | def find_best_model( 116 | self, 117 | target_column: str, 118 | exog_vars: list[str] = None, 119 | month_to_use: int = 1, 120 | seed: int = None, 121 | suppress_warnings: bool = False, 122 | ) -> tuple[tuple[int, int, int], tuple[int, int, int, int]]: 123 | """Find the best model for the time series data 124 | 125 | Args: 126 | target_column (str): Target column to fit the model 127 | exog_vars (list[str], optional): List of exogenous variables. Defaults to None. 128 | month_to_use (int, optional): Month to use for finding the best model. Defaults to 1. 129 | seed (int, optional): Random seed. Defaults to 112. 130 | suppress_warnings (bool, optional): Suppress warnings. Defaults to False. 
131 | 132 | Returns: 133 | tuple[tuple[int, int, int], tuple[int, int, int, int]]: Best model SARIMA parameters 134 | 135 | Raises: 136 | ValueError: If data is not loaded 137 | """ 138 | if not self._is_loaded: 139 | raise ValueError("Data must be loaded first.") 140 | return self._find_best_model( 141 | target_column=target_column, 142 | exog_vars=exog_vars, 143 | month_to_use=month_to_use, 144 | seed=seed, 145 | suppress_warnings=suppress_warnings, 146 | ) 147 | 148 | @abstractmethod 149 | def _fit( 150 | self, 151 | target_column: str, 152 | arima_order: tuple[int, int, int], 153 | seasonal_order: tuple[int, int, int, int], 154 | exog_vars: list[str], 155 | ) -> None: 156 | pass 157 | 158 | @abstractmethod 159 | def _predict(self) -> pd.Series: 160 | pass 161 | 162 | @abstractmethod 163 | def _get_synthetic(self, exog_data: pd.DataFrame, seed: int) -> pd.Series: 164 | pass 165 | 166 | @abstractmethod 167 | def _find_best_model( 168 | self, 169 | target_column: str, 170 | exog_vars: list[str], 171 | month_to_use: int, 172 | seed: int, 173 | suppress_warnings: bool, 174 | ) -> tuple[tuple[int, int, int], tuple[int, int, int, int]]: 175 | pass 176 | 177 | def plot_residuals(self, bins: int, figure_file: str = None) -> None: 178 | _, ax = plt.subplots(3, 2, figsize=(12, 12)) 179 | # --- Plot 1: Residuals over time --- 180 | self.pred_residuals.plot(ax=ax[0, 0]) 181 | ax[0, 0].set_title("Residuals Over Time") 182 | ax[0, 0].set_xlabel("") 183 | ax[0, 0].set_ylabel("Residual Value") 184 | ax[0, 0].grid(True) # Add grid for readability 185 | 186 | # --- Plot 2: Histogram of Residuals --- 187 | self.pred_residuals.hist(bins=bins, density=False, alpha=0.7, ax=ax[0, 1]) 188 | ax[0, 1].set_title("Histogram of Residuals") 189 | ax[0, 1].set_xlabel("Residual Value") 190 | ax[0, 1].set_ylabel("Frequency") 191 | ax[0, 1].grid(True, axis="y", alpha=0.5) # Add horizontal grid 192 | 193 | # --- Plot 3: ACF of Residuals --- 194 | # sm.graphics.tsa.plot_acf returns a matplotlib 
"""timeseries_utils.py: Utility functions for timeseries data."""

import numpy as np
import pandas as pd
from sklearn.preprocessing import PowerTransformer


def transform_log1p(y: pd.Series, B1_y: pd.Series):
    """Difference the series against B1_y, then apply log1p."""
    return np.log1p(y - B1_y)


def inverse_log1p(y_pred: pd.Series, B1_y: pd.Series) -> pd.Series:
    """Invert the log1p transform, clip negatives to zero, and undo differencing."""
    restored = np.expm1(y_pred)
    restored = np.maximum(restored, 0)
    return restored + B1_y


def transform_yeojohnson(
    y: pd.Series, B1_y: pd.Series
) -> tuple[pd.Series, PowerTransformer]:
    """Differencing the data and apply the Yeo-Johnson transformation to the data"""
    # Difference and drop the first (NaN) observation
    diffed = (y - B1_y).dropna()
    # Fit and apply the power transform on a column vector
    transformer = PowerTransformer(method="yeo-johnson")
    column = diffed.values.reshape(-1, 1)
    transformer.fit(column)
    return transformer.transform(column), transformer


def inverse_yeojohnson(
    y_pred: np.array,
    B1_y: pd.Series,
    initial_value: float,
    power_transformer: PowerTransformer,
) -> pd.Series:
    """Invert the Yeo-Johnson transformation"""
    # Undo the power transform, then align to B1_y's index
    undone = power_transformer.inverse_transform(y_pred.reshape(-1, 1))
    series = pd.Series(undone.flatten(), index=B1_y.index)
    # Undo differencing by adding back the previous-timestep values
    series = series + B1_y
    # Re-insert the first observation that was lost during differencing
    head = pd.Series([initial_value], index=[B1_y.index[0] - 1])
    return pd.concat([head, series])


def post_process_solar(
    solar_data: pd.DataFrame, value_cols: list[str], sunrise_time: str, sunset_time: str
):
    """Zero out nighttime values and clip negatives (in place)."""
    sunrise = pd.to_datetime(sunrise_time).time()
    sunset = pd.to_datetime(sunset_time).time()
    time_of_day = solar_data["datetime"].dt.time
    night = (time_of_day < sunrise) | (time_of_day > sunset)
    solar_data.loc[night, value_cols] = 0

    # ARIMA predictions can go negative; clamp them to zero
    solar_data[value_cols] = solar_data[value_cols].clip(lower=0)
# src/test_pownet/__init__.py and src/test_pownet/test_builder/__init__.py
# are empty package markers.

# --------------------------------------------------------------------------
# src/test_pownet/test_builder/test_basebuilder.py
# --------------------------------------------------------------------------
"""test_basebuilder.py: Unit tests for the ComponentBuilder abstract base class."""

import unittest
from unittest.mock import MagicMock, patch

# Import ComponentBuilder from its actual location
from pownet.builder.basebuilder import ComponentBuilder

# PATCH_BASE should be the module where ComponentBuilder is defined,
# as this is the context where its internal imports (like gp and SystemInput) are resolved.
PATCH_BASE = "pownet.builder.basebuilder"

# We need ABC for creating test subclasses
from abc import ABC, abstractmethod  # ABC is implicitly used by ComponentBuilder


# Minimal concrete implementation for testing ComponentBuilder
class MinimalConcreteBuilder(ComponentBuilder):
    """A minimal concrete subclass for testing ComponentBuilder."""

    def add_variables(self, step_k: int) -> None:
        """Mock implementation."""
        pass

    def get_fixed_objective_terms(self) -> MagicMock:  # Actual type is gp.LinExpr
        """Mock implementation."""
        return MagicMock(name="MockLinExpr_fixed")

    def get_variable_objective_terms(
        self, step_k: int, **kwargs
    ) -> MagicMock:  # Actual type is gp.LinExpr
        """Mock implementation."""
        return MagicMock(name="MockLinExpr_variable")

    def add_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        """Mock implementation."""
        pass

    def update_variables(self, step_k: int) -> None:
        """Mock implementation."""
        pass

    def update_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        """Mock implementation."""
        pass

    def get_variables(
        self,
    ) -> dict[str, MagicMock]:  # Actual type is dict[str, gp.tupledict]
        """Mock implementation."""
        return {"mock_var": MagicMock(name="MockTupleDict")}


# An incomplete subclass for testing abstract method enforcement
class IncompleteBuilder(ComponentBuilder):
    """An incomplete subclass that misses some abstract methods."""

    def add_variables(self, step_k: int) -> None:
        pass

    def get_fixed_objective_terms(self) -> MagicMock:
        return MagicMock(name="MockLinExpr_fixed_incomplete")

    # Missing: get_variable_objective_terms, add_constraints, etc.
    def get_variables(self) -> dict[str, MagicMock]:
        return {"mock_var_incomplete": MagicMock(name="MockTupleDict_incomplete")}

    # To make it instantiable for other tests, we would need to implement all other abstract methods.
    # For this test, we want it to remain abstract.
    @abstractmethod  # Explicitly mark remaining methods as abstract if not implemented
    def get_variable_objective_terms(self, step_k: int, **kwargs) -> MagicMock:
        pass

    @abstractmethod
    def add_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        pass

    @abstractmethod
    def update_variables(self, step_k: int) -> None:
        pass

    @abstractmethod
    def update_constraints(self, step_k: int, init_conds: dict, **kwargs) -> None:
        pass


@patch(f"{PATCH_BASE}.SystemInput", autospec=True)
@patch(f"{PATCH_BASE}.gp")  # Patch the 'gp' alias used in basebuilder.py
class TestComponentBuilder(unittest.TestCase):

    def _configure_mock_gp_alias(self, mock_gp_alias):
        """Helper to configure the mocked 'gp' alias and its attributes."""
        mock_gp_alias.Model = MagicMock(name="MockGPModelClass")
        mock_gp_alias.LinExpr = MagicMock(name="MockGPLinExprClass")
        mock_gp_alias.tupledict = MagicMock(name="MockGPTupleDictClass")
        return mock_gp_alias.Model.return_value  # Return a mock model instance

    def test_component_builder_is_abc_and_cannot_be_instantiated(
        self, mock_gp_alias: MagicMock, mock_system_input_class: MagicMock
    ):
        """Test that ComponentBuilder cannot be instantiated directly."""
        mock_model_instance = self._configure_mock_gp_alias(mock_gp_alias)
        # Ensure all required arguments for SystemInput are provided
        mock_inputs_instance = mock_system_input_class(
            input_folder="dummy",
            model_name="test_model",  # Assuming model_name is a required arg for SystemInput
            year=2023,  # Assuming year is a required arg
            sim_horizon=24,  # Assuming sim_horizon is a required arg for constructor
        )
        # ComponentBuilder uses inputs.sim_horizon, so ensure it's set on the mock if not by constructor
        mock_inputs_instance.sim_horizon = 10  # This is what ComponentBuilder will use

        with self.assertRaisesRegex(
            TypeError,
            # Simplified regex to match the beginning of the actual error message
            r"Can't instantiate abstract class ComponentBuilder",
        ):
            ComponentBuilder(mock_model_instance, mock_inputs_instance)

    def test_concrete_subclass_instantiation_and_init_attributes(
        self, mock_gp_alias: MagicMock, mock_system_input_class: MagicMock
    ):
        """Test instantiation of a concrete subclass and __init__ attributes."""
        mock_model_instance = self._configure_mock_gp_alias(mock_gp_alias)

        mock_inputs_instance = mock_system_input_class(
            input_folder="dummy_concrete_path",
            model_name="test_model_concrete",
            year=2023,
            sim_horizon=24,  # Initial value for SystemInput constructor
        )
        # ComponentBuilder's __init__ uses inputs.sim_horizon.
        # We are testing that ComponentBuilder correctly picks up this value.
        # So, the value set here is what we expect ComponentBuilder to use.
        mock_inputs_instance.sim_horizon = 5

        builder = MinimalConcreteBuilder(
            model=mock_model_instance, inputs=mock_inputs_instance
        )

        self.assertIsInstance(builder, MinimalConcreteBuilder)
        self.assertIsInstance(builder, ComponentBuilder)
        self.assertEqual(builder.model, mock_model_instance)
        self.assertEqual(builder.inputs, mock_inputs_instance)
        # Test against the value that ComponentBuilder's __init__ should have used
        self.assertEqual(builder.sim_horizon, 5)
        self.assertEqual(list(builder.timesteps), list(range(1, 5 + 1)))

    def test_incomplete_subclass_cannot_be_instantiated(
        self, mock_gp_alias: MagicMock, mock_system_input_class: MagicMock
    ):
        """Test that a subclass missing abstract methods cannot be instantiated."""
        mock_model_instance = self._configure_mock_gp_alias(mock_gp_alias)
        mock_inputs_instance = mock_system_input_class(
            input_folder="dummy_incomplete_path",
            model_name="test_model_incomplete",
            year=2023,
            sim_horizon=24,
        )
        mock_inputs_instance.sim_horizon = 3

        with self.assertRaisesRegex(
            TypeError,
            # Simplified regex to match the beginning of the actual error message
            r"Can't instantiate abstract class IncompleteBuilder",
        ):
            IncompleteBuilder(mock_model_instance, mock_inputs_instance)

    def test_abstract_methods_exist(
        self, mock_gp_alias: MagicMock, mock_system_input_class: MagicMock
    ):
        """Check that all declared abstract methods are indeed marked as abstract."""
        expected_abstract_methods = frozenset(
            {  # Use frozenset for direct comparison
                "add_variables",
                "get_fixed_objective_terms",
                "get_variable_objective_terms",
                "add_constraints",
                "update_variables",
                "update_constraints",
                "get_variables",
            }
        )
        self.assertEqual(
            ComponentBuilder.__abstractmethods__, expected_abstract_methods
        )


if __name__ == "__main__":
    unittest.main(argv=["first-arg-is-ignored"], exit=False)

# --------------------------------------------------------------------------
# src/test_pownet/test_core/__init__.py is an empty package marker.
# src/test_pownet/test_core/test_data_processor.py
# --------------------------------------------------------------------------
"""test_data_processor.py"""

import os
import unittest
from pownet.core.data_processor import (
    DataProcessor,
)


class TestDataProcessor(unittest.TestCase):
    def test_initialization(self):
        # Arrange
        test_model_library_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "..", "test_model_library")
        )
        model_name = "dummy"
        year = 2024
        frequency = 50
        # Act
        processor = DataProcessor(
            input_folder=test_model_library_path,
            model_name=model_name,
            year=year,
            frequency=frequency,
        )

        processor.execute_data_pipeline()

        # Assert
        self.assertEqual(processor.model_name, model_name)
        self.assertEqual(processor.year, year)
        self.assertEqual(processor.frequency, frequency)
        self.assertEqual(processor.wavelength, 6000)
        self.assertEqual(
            processor.model_folder,
            os.path.join(test_model_library_path, model_name),
        )
        # Timeseries should have 8760 rows
        self.assertEqual(processor.cycle_map, {})
        self.assertEqual(processor.thermal_derate_factors.shape[0], 8760)


if __name__ == "__main__":
    unittest.main()
# --------------------------------------------------------------------------
# src/test_pownet/test_data_model/__init__.py is an empty package marker.
# src/test_pownet/test_data_model/test_reservoir.py
# --------------------------------------------------------------------------
import unittest
import pandas as pd

from pownet.data_model import ReservoirParams


class TestReservoirParams(unittest.TestCase):

    def setUp(self):
        """Set up common data for tests."""
        # Daily time series indexed from day 1 (ReservoirParams requirement)
        self.default_inflow_ts = pd.Series([10.0, 12.0, 15.0], index=[1, 2, 3])
        self.default_minflow_ts = pd.Series([1.0, 1.0, 1.0], index=[1, 2, 3])
        self.valid_params = {
            "name": "TestReservoir",
            "min_day": 150,
            "max_day": 270,
            "min_level": 100.0,
            "max_level": 150.0,
            "max_head": 50.0,
            "max_storage": 1000000.0,
            "max_release": 500.0,
            "max_generation": 100.0,
            "turbine_factor": 0.85,
            "inflow_ts": self.default_inflow_ts,
            "minflow_ts": self.default_minflow_ts,
            "upstream_units": [],
            "downstream_flow_fracs": {"Downstream1": 0.6, "Downstream2": 0.4},
        }

    def test_valid_params_creation(self):
        """Test successful creation with valid parameters."""
        try:
            ReservoirParams(**self.valid_params)
        except ValueError:
            self.fail("ReservoirParams raised ValueError unexpectedly for valid data.")

    def test_downstream_flow_fracs_sum_not_one(self):
        """Test ValueError if downstream flow fractions do not sum to 1."""
        params = self.valid_params.copy()
        params["downstream_flow_fracs"] = {
            "Downstream1": 0.5,
            "Downstream2": 0.4,
        }  # Sums to 0.9
        with self.assertRaisesRegex(
            ValueError, "Downstream units for TestReservoir do not sum to 1"
        ):
            ReservoirParams(**params)

        params["downstream_flow_fracs"] = {
            "Downstream1": 0.7,
            "Downstream2": 0.4,
        }  # Sums to 1.1
        with self.assertRaisesRegex(
            ValueError, "Downstream units for TestReservoir do not sum to 1"
        ):
            ReservoirParams(**params)

    def test_downstream_flow_fracs_sum_is_one_edge_cases(self):
        """Test successful creation when downstream flow fractions sum is close to 1."""
        params = self.valid_params.copy()
        params["downstream_flow_fracs"] = {
            "D1": 0.999
        }  # Test lower bound (assuming only one downstream)
        # To make this test pass, let's adjust for multiple units
        params["downstream_flow_fracs"] = {"D1": 0.5, "D2": 0.49999}
        try:
            ReservoirParams(**params)
        except ValueError:
            self.fail(
                "ReservoirParams raised ValueError for sum slightly less than 1 but within tolerance."
            )

        params["downstream_flow_fracs"] = {"D1": 0.5, "D2": 0.50001}
        try:
            ReservoirParams(**params)
        except ValueError:
            self.fail(
                "ReservoirParams raised ValueError for sum slightly more than 1 but within tolerance."
79 | ) 80 | 81 | def test_mismatched_timeseries_indices(self): 82 | """Test ValueError if inflow_ts and minflow_ts have different indices.""" 83 | params = self.valid_params.copy() 84 | params["minflow_ts"] = pd.Series( 85 | [1.0, 1.0, 1.0], index=[1, 2, 4] 86 | ) # Mismatched index 87 | with self.assertRaisesRegex( 88 | ValueError, 89 | "Inflows and minflows for TestReservoir are not indexed the same", 90 | ): 91 | ReservoirParams(**params) 92 | 93 | def test_timeseries_index_not_starting_at_one(self): 94 | """Test ValueError if timeseries indices do not start at 1.""" 95 | params_inflow = self.valid_params.copy() 96 | params_inflow["inflow_ts"] = pd.Series([10.0, 12.0], index=[0, 1]) 97 | params_inflow["minflow_ts"] = pd.Series( 98 | [1.0, 1.0], index=[0, 1] 99 | ) # Keep minflow consistent for this test focus 100 | with self.assertRaises(ValueError): 101 | ReservoirParams(**params_inflow) 102 | 103 | params_minflow = self.valid_params.copy() 104 | # Correct inflow to isolate minflow test 105 | params_minflow["inflow_ts"] = pd.Series([10.0, 12.0], index=[1, 2]) 106 | params_minflow["minflow_ts"] = pd.Series([1.0, 1.0], index=[0, 1]) 107 | with self.assertRaises(ValueError): 108 | ReservoirParams(**params_minflow) 109 | 110 | def test_inflow_less_than_minflow(self): 111 | """Test ValueError if inflow_ts is less than minflow_ts on any day.""" 112 | params = self.valid_params.copy() 113 | params["inflow_ts"] = pd.Series( 114 | [10.0, 0.5, 15.0], index=[1, 2, 3] 115 | ) # Day 2 inflow < minflow 116 | params["minflow_ts"] = pd.Series([1.0, 1.0, 1.0], index=[1, 2, 3]) 117 | with self.assertRaises(ValueError): 118 | ReservoirParams(**params) 119 | 120 | def test_empty_downstream_flow_fracs(self): 121 | """Test creation with empty downstream_flow_fracs.""" 122 | params = self.valid_params.copy() 123 | params["downstream_flow_fracs"] = {} 124 | try: 125 | ReservoirParams(**params) 126 | except ValueError: 127 | self.fail( 128 | "ReservoirParams raised ValueError 
unexpectedly for empty downstream_flow_fracs." 129 | ) 130 | 131 | def test_inflow_equal_to_minflow(self): 132 | """Test successful creation if inflow_ts is equal to minflow_ts.""" 133 | params = self.valid_params.copy() 134 | params["inflow_ts"] = pd.Series([1.0, 2.0, 3.0], index=[1, 2, 3]) 135 | params["minflow_ts"] = pd.Series([1.0, 2.0, 3.0], index=[1, 2, 3]) 136 | try: 137 | ReservoirParams(**params) 138 | except ValueError: 139 | self.fail( 140 | "ReservoirParams raised ValueError when inflow_ts equals minflow_ts." 141 | ) 142 | 143 | 144 | if __name__ == "__main__": 145 | unittest.main(argv=["first-arg-is-ignored"], exit=False) 146 | -------------------------------------------------------------------------------- /src/test_pownet/test_data_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import pandas as pd 3 | 4 | from pownet.data_utils import calc_remaining_on_duration, calc_remaining_off_duration 5 | 6 | 7 | class TestCalcMinOnlineDuration1(unittest.TestCase): 8 | 9 | def test_startup_within_horizon(self): 10 | """Test case where a unit has a startup within the simulation horizon. 11 | - Simulation horizon of 10 hours. 12 | - A minimum online duration (TU) of 3 hours 13 | - Latest start at hour 8. 14 | - The remaining minimum online duration is 0. 
def _build_solution(unit1_values, unit2_values, vartypes):
    """Assemble the long-format solution frame the duration calculators read.

    Columns: node, timestep (1..10 per unit), value (binary indicator),
    and vartype ('startup'/'shutdown'/other).
    """
    hours = list(range(1, 11))
    return pd.DataFrame(
        {
            "node": ["Unit1"] * 10 + ["Unit2"] * 10,
            "timestep": hours + hours,
            "value": list(unit1_values) + list(unit2_values),
            "vartype": list(vartypes),
        }
    )


class TestCalcMinOnlineDuration1(unittest.TestCase):
    """Remaining minimum *online* duration carried past the horizon."""

    def test_startup_within_horizon(self):
        """TU=3 with latest startup at hour 8 of 10 leaves 0 hours remaining."""
        solution = _build_solution(
            [0, 0, 0, 1, 1, 0, 0, 1, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            ["startup"] * 10 + ["shutdown"] * 10,
        )
        result = calc_remaining_on_duration(
            solution, 10, ["Unit1"], {"Unit1": 3}
        )
        self.assertEqual(result, {"Unit1": 0})

    def test_startup_outside_horizon(self):
        """Startups near the horizon end carry over: TU=5 at hour 8 -> 2 left;
        TU=2 at hour 10 -> 1 left."""
        solution = _build_solution(
            [0, 0, 0, 1, 1, 0, 0, 1, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
            ["startup"] * 20,
        )
        result = calc_remaining_on_duration(
            solution, 10, ["Unit1", "Unit2"], {"Unit1": 5, "Unit2": 2}
        )
        self.assertEqual(result, {"Unit1": 2, "Unit2": 1})


class TestCalcMinOnlineDuration2(unittest.TestCase):
    """Remaining minimum *offline* duration carried past the horizon."""

    def test_shutdown_within_horizon(self):
        """TD=1 with latest shutdown at hour 10 leaves 0 hours remaining."""
        solution = _build_solution(
            [0, 0, 0, 1, 1, 0, 0, 1, 1, 1],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            ["dummy_var"] * 10 + ["shutdown"] * 10,
        )
        # Only Unit2's shutdown rows are relevant; Unit1 carries a non-shutdown
        # vartype so it must be ignored by the calculator.
        result = calc_remaining_off_duration(
            solution, 10, ["Unit2"], {"Unit2": 1}
        )
        self.assertEqual(result, {"Unit2": 0})

    def test_shutdown_outside_horizon(self):
        """TD=5 at hour 6 is fully served (0 left); TD=2 at hour 10 -> 1 left."""
        solution = _build_solution(
            [0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
            [0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
            ["shutdown"] * 20,
        )
        result = calc_remaining_off_duration(
            solution, 10, ["Unit1", "Unit2"], {"Unit1": 5, "Unit2": 2}
        )
        self.assertEqual(result, {"Unit1": 0, "Unit2": 1})
"""test_folder_utils.py"""

import unittest
import pathlib
from unittest import mock

from pownet import folder_utils


class TestFolderUtils(unittest.TestCase):
    """Path-resolution helpers exposed by pownet.folder_utils."""

    def test_get_pownet_dir(self):
        """get_pownet_dir() resolves three levels above folder_utils.py."""
        module_file = pathlib.Path(folder_utils.__file__).resolve()
        # .../pownet/folder_utils.py -> pownet -> src -> project root
        expected_root = module_file.parent.parent.parent
        self.assertEqual(pathlib.Path(folder_utils.get_pownet_dir()), expected_root)
        self.assertTrue(expected_root.is_dir())

    def test_get_home_dir(self):
        """get_home_dir() delegates to os.path.expanduser('~')."""
        with mock.patch(
            "os.path.expanduser", return_value="/mocked/home/user"
        ) as fake_expanduser:
            self.assertEqual(folder_utils.get_home_dir(), "/mocked/home/user")
            fake_expanduser.assert_called_once_with("~")

    def test_get_database_dir(self):
        """get_database_dir() points at the package's bundled database folder."""
        module_file = pathlib.Path(folder_utils.__file__).resolve()
        expected_db = module_file.parent / "database"
        self.assertEqual(pathlib.Path(folder_utils.get_database_dir()), expected_db)
        self.assertTrue(expected_db.is_dir())

    def test_get_test_dir(self):
        """get_test_dir() is <pownet_dir>/src/test_pownet."""
        # Scenario 1: exercise against the real get_pownet_dir().
        project_root = pathlib.Path(folder_utils.get_pownet_dir())
        self.assertEqual(
            pathlib.Path(folder_utils.get_test_dir()),
            project_root / "src" / "test_pownet",
        )

        # Scenario 2: mock get_pownet_dir to isolate the join logic.
        with mock.patch(
            "pownet.folder_utils.get_pownet_dir",
            return_value="/fake/pownet_root_dir",
        ):
            self.assertEqual(
                pathlib.Path(folder_utils.get_test_dir()),
                pathlib.Path("/fake/pownet_root_dir/src/test_pownet"),
            )


if __name__ == "__main__":
    unittest.main()
class TestPowerSystemModel(unittest.TestCase):
    """Solver-facade tests for PowerSystemModel (Gurobi and HiGHS backends)."""

    def setUp(self):
        """Create a simple optimization problem of two variables and a constraint
            min -x - 2y
            s.t. x + y <= 1
                 x, y >= 0

            solution: x = 0, y = 1, objval = -2

        BUGFIX: the docstring previously stated ``x + y >= 1``, which
        contradicts the constraint actually added below (and would make the
        minimization unbounded).
        """
        # Feasible instance with known optimum.
        self.gurobi_instance = gp.Model()
        x = self.gurobi_instance.addVar(name="x", lb=0)
        y = self.gurobi_instance.addVar(name="y", lb=0)
        self.gurobi_instance.setObjective(-x - 2 * y, gp.GRB.MINIMIZE)
        self.gurobi_instance.addConstr(x + y <= 1)
        self.gurobi_instance.update()
        self.psm = PowerSystemModel(self.gurobi_instance)

        # Infeasible instance: x + y <= -1 with x, y >= 0 has no solution.
        self.infeasible_gurobi_instance = gp.Model()
        x = self.infeasible_gurobi_instance.addVar(name="x", lb=0)
        y = self.infeasible_gurobi_instance.addVar(name="y", lb=0)
        self.infeasible_gurobi_instance.setObjective(x + y, gp.GRB.MINIMIZE)
        self.infeasible_gurobi_instance.addConstr(x + y <= -1)
        self.infeasible_gurobi_instance.update()
        self.infeasible_psm = PowerSystemModel(self.infeasible_gurobi_instance)

        self.expected_solution = pd.DataFrame(
            {"varname": ["x", "y"], "value": [0.0, 1.0]}
        )

    def test_optimize_gurobi(self):
        """The Gurobi backend finds the known optimum of -2."""
        self.psm.optimize(solver="gurobi", log_to_console=False)
        self.psm.check_feasible()
        self.assertEqual(self.psm.get_objval(), -2)

    def test_optimize_highs(self):
        """The HiGHS backend finds the same optimum."""
        self.psm.optimize(solver="highs", log_to_console=False)
        info = self.psm.model.getInfo()
        self.psm.check_feasible()
        self.assertEqual(info.objective_function_value, -2)

    def test_optimize_invalid_solver(self):
        """An unknown solver name raises ValueError."""
        with self.assertRaises(ValueError):
            self.psm.optimize(solver="invalid_solver")

    def test_check_infeasible_gurobi(self):
        """check_feasible() reports False for the infeasible instance."""
        self.infeasible_psm.optimize(solver="gurobi", log_to_console=False)
        self.assertFalse(self.infeasible_psm.check_feasible())

    def test_get_solution_gurobi(self):
        """get_solution() returns the optimal variable values (Gurobi)."""
        self.psm.optimize(solver="gurobi", log_to_console=False)
        pd.testing.assert_frame_equal(
            pd.DataFrame(self.psm.get_solution()), self.expected_solution
        )

    def test_get_solution_highs(self):
        """get_solution() returns the optimal variable values (HiGHS)."""
        self.psm.optimize(solver="highs", log_to_console=False)
        pd.testing.assert_frame_equal(
            pd.DataFrame(self.psm.get_solution()), self.expected_solution
        )


if __name__ == "__main__":
    unittest.main()
name,node,unit_type,fuel_type,max_capacity,min_capacity,heat_rate,operation_cost,fuel_contract,fixed_cost,startup_cost,ramp_rate,min_uptime,min_downtime,must_take 2 | pGas,pGas,gas_cc,gas,1200,10,7.65,2,gas,1.2,70,287.67,4,4,0 3 | pOil,pOil,oil_st,oil,35,1,10.19,3.17,oil,1.5,50,6,1,1,0 4 | pBiomass,pBiomass,biomass_st,biomass,9.9,2,14.1,26,biomass,1,100,9.9,2,2,1 5 | -------------------------------------------------------------------------------- /src/test_pownet/test_model_library/dummy/transmission.csv: -------------------------------------------------------------------------------- 1 | source,sink,user_line_cap,type,n_circuits,source_kv,sink_kv,distance,source_lon,source_lat,sink_lon,sink_lat,user_susceptance 2 | pGas,Node3,-1,acsr,1,765,275,55,-76.47351909,42.4391233,-76.47127586,42.44726029,-1 3 | Node3,Node1,-1,acsr,2,275,275,175,-76.47127586,42.44726029,-76.48522899,42.44206975,-1 4 | Node1,Node2,-1,acsr,2,275,275,350,-76.48522899,42.44206975,-76.51073276,42.42804445,-1 5 | pHydro,Node2,-1,acsr,2,275,275,300,-76.51223575,42.44692185,-76.51073276,42.42804445,-1 6 | pOil,Node3,-1,acsr,2,275,275,40,-76.47899854,42.45793602,-76.47127586,42.44726029,-1 7 | pBiomass,Node2,-1,acsr,2,275,275,150,-76.49899651,42.42885897,-76.51073276,42.42804445,-1 8 | Buyer,pHydro,-1,acsr,2,275,275,400,-76.51909681,42.44094852,-76.51223575,42.44692185,-1 9 | Supplier,Node1,-1,acsr,2,275,275,350,-76.49770115,42.45325699,-76.48522899,42.44206975,-1 10 | -------------------------------------------------------------------------------- /src/test_pownet/test_optim_model/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Critical-Infrastructure-Systems-Lab/PowNet/c5cb0437ee3af0f21683b81fdb8f4b67f1a6e9f5/src/test_pownet/test_optim_model/__init__.py -------------------------------------------------------------------------------- /src/test_pownet/test_optim_model/test_constraints/__init__.py: 
class ConcreteTimeSeriesModel(timeseries_model.TimeSeriesModel):
    """Minimal concrete subclass used to exercise the abstract base class.

    Every abstract hook is stubbed with a deterministic dummy so the tests
    can focus on the template-method logic of TimeSeriesModel itself.
    """

    def __init__(self):
        super().__init__()
        # Internal state backing the read-only properties below.
        self._predictions = pd.Series(dtype=float)
        self._residuals = pd.Series(dtype=float)
        self._monthly_models = {}

    @property
    def monthly_models(self) -> dict:
        return self._monthly_models

    @property
    def predictions(self) -> pd.Series:
        return self._predictions

    @property
    def pred_residuals(self) -> pd.Series:
        return self._residuals

    def _fit(
        self,
        target_column: str,
        arima_order: tuple[int, int, int],
        seasonal_order: tuple[int, int, int, int],
        exog_vars: list[str],
    ) -> None:
        """Record a placeholder model for January instead of fitting anything."""
        self._monthly_models[1] = "dummy_model_for_month_1"

    def _predict(self) -> pd.Series:
        """Return a constant 1.0 series aligned with the loaded data."""
        if self.data.empty:
            return pd.Series(dtype=float)
        return pd.Series(1.0, index=self.data.index, name="predictions")

    def _get_synthetic(
        self, exog_data: pd.DataFrame = None, seed: int = None
    ) -> pd.Series:
        """Return a constant 0.5 series; the dummy ignores exog_data and seed."""
        if self.data.empty:
            return pd.Series(dtype=float)
        return pd.Series(0.5, index=self.data.index, name="synthetic")

    def _find_best_model(
        self,
        target_column: str,
        exog_vars: list[str],
        month_to_use: int,
        seed: int,
        suppress_warnings: bool,
    ) -> tuple[tuple[int, int, int], tuple[int, int, int, int]]:
        """Always report AR(1) with no seasonal component."""
        return ((1, 0, 0), (0, 0, 0, 0))
class TestTimeSeriesModel(unittest.TestCase):
    """Behavioral tests for the TimeSeriesModel template methods."""

    def setUp(self):
        self.model = ConcreteTimeSeriesModel()
        # Four consecutive hourly observations on 2023-01-01.
        self.sample_data = pd.DataFrame(
            {
                "datetime": pd.date_range("2023-01-01 00:00:00", periods=4, freq="h"),
                "value": [10, 12, 15, 11],
                "exog1": [1, 2, 3, 4],
            }
        )
        self.target_column = "value"

    def _load_sample(self):
        """Load a fresh copy of the sample frame into the model."""
        self.model.load_data(self.sample_data.copy())

    def test_initialization(self):
        """A fresh model is neither loaded nor fitted and holds no data."""
        self.assertFalse(self.model._is_fitted)
        self.assertFalse(self.model._is_loaded)
        self.assertTrue(self.model.data.empty)
        self.assertEqual(self.model.months, [])
        self.assertIsNone(self.model.exog_vars)

    def test_load_data_success(self):
        """Loading indexes by datetime, infers hourly frequency, records months."""
        self._load_sample()
        self.assertTrue(self.model._is_loaded)
        self.assertFalse(self.model.data.empty)
        self.assertIn(pd.Timestamp("2023-01-01 00:00:00"), self.model.data.index)
        self.assertEqual(self.model.data.index.freqstr, "h")
        self.assertEqual(self.model.months, [1])  # January only, sorted
        self.assertIsInstance(self.model.data.index, pd.DatetimeIndex)

    def test_load_data_missing_datetime_column(self):
        """A frame lacking a 'datetime' column is rejected."""
        bad_frame = pd.DataFrame({"val": [1, 2]})
        with self.assertRaisesRegex(ValueError, "Data should have columns 'datetime'"):
            self.model.load_data(bad_frame)

    def test_fit_success(self):
        """fit() flips the fitted flag, stores exog_vars, and delegates to _fit."""
        self._load_sample()
        self.model.fit(
            target_column=self.target_column, arima_order=(1, 0, 0), exog_vars=["exog1"]
        )
        self.assertTrue(self.model._is_fitted)
        self.assertEqual(self.model.exog_vars, ["exog1"])
        # The dummy _fit registers a model for month 1.
        self.assertIn(1, self.model.monthly_models)

    def test_predict_not_fitted(self):
        """predict() before fit() raises."""
        with self.assertRaisesRegex(
            ValueError, "Model must be fitted before making predictions."
        ):
            self.model.predict()

    def test_predict_success(self):
        """predict() after fit() yields one prediction per observation."""
        self._load_sample()
        self.model.fit(target_column=self.target_column, arima_order=(1, 0, 0))
        forecast = self.model.predict()
        self.assertIsInstance(forecast, pd.Series)
        self.assertEqual(len(forecast), len(self.model.data))

    def test_get_synthetic_not_fitted(self):
        """get_synthetic() before fit() raises even when data is loaded."""
        self._load_sample()
        with self.assertRaisesRegex(
            ValueError, "Model must be fitted before creating synthetic data."
        ):
            self.model.get_synthetic()

    def test_get_synthetic_exog_vars_mismatch(self):
        """Exogenous columns absent from the supplied frame are rejected."""
        self._load_sample()
        self.model.fit(
            target_column=self.target_column,
            arima_order=(1, 0, 0),
            exog_vars=["exog_missing"],
        )
        exog_frame = pd.DataFrame(
            {"exog_other": [1, 1, 1, 1]}, index=self.model.data.index
        )
        with self.assertRaisesRegex(
            ValueError, "Exogenous variables should be in the data."
        ):
            self.model.get_synthetic(exog_data=exog_frame)

    def test_get_synthetic_exog_index_mismatch(self):
        """Exogenous data must share the loaded data's index."""
        self._load_sample()
        self.model.fit(
            target_column=self.target_column, arima_order=(1, 0, 0), exog_vars=["exog1"]
        )
        other_index = pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"])
        exog_frame = pd.DataFrame({"exog1": [1, 1, 1]}, index=other_index)
        with self.assertRaisesRegex(
            ValueError,
            "Exogenous data should have the same index as the time series data.",
        ):
            self.model.get_synthetic(exog_data=exog_frame)

    def test_get_synthetic_success(self):
        """get_synthetic() after fit() yields one value per observation."""
        self._load_sample()
        self.model.fit(target_column=self.target_column, arima_order=(1, 0, 0))
        synthetic = self.model.get_synthetic()
        self.assertIsInstance(synthetic, pd.Series)
        self.assertEqual(len(synthetic), len(self.model.data))

    def test_find_best_model_not_loaded(self):
        """find_best_model() requires data to be loaded first."""
        with self.assertRaisesRegex(ValueError, "Data must be loaded first."):
            self.model.find_best_model(target_column=self.target_column)

    def test_find_best_model_success(self):
        """find_best_model() returns the dummy AR(1) / no-seasonality orders."""
        self._load_sample()
        best_order, best_seasonal = self.model.find_best_model(
            target_column=self.target_column
        )
        self.assertEqual(best_order, (1, 0, 0))
        self.assertEqual(best_seasonal, (0, 0, 0, 0))


if __name__ == "__main__":
    unittest.main()