├── .github └── workflows │ └── ci.yaml ├── .gitignore ├── .readthedocs.yaml ├── LICENSE ├── MANIFEST.in ├── README.rst ├── doc ├── Makefile ├── about.rst ├── api.rst ├── api │ ├── appl.rst │ ├── etrago.analyze.rst │ ├── etrago.cluster.rst │ ├── etrago.disaggregate.rst │ ├── etrago.tools.rst │ └── network.rst ├── conf.py ├── developer_notes.rst ├── eTraGo_tutorial_release0.9.ipynb ├── getting_started.rst ├── howToUse.rst ├── images │ ├── ego_tools.svg │ ├── etrago_logo.png │ └── modelling_concept.png ├── index.rst ├── installation.rst ├── make.bat ├── theoretical_background.rst ├── whatsnew.rst └── whatsnew │ ├── v0_1.rst │ ├── v0_2.rst │ ├── v0_3.rst │ ├── v0_4.rst │ ├── v0_5.rst │ ├── v0_5_1.rst │ ├── v0_6.rst │ ├── v0_6_1.rst │ ├── v0_7_0.rst │ ├── v0_7_1.rst │ ├── v0_7_2.rst │ ├── v0_8_0.rst │ └── v0_9_0.rst ├── etrago ├── __init__.py ├── analyze │ ├── __init__.py │ ├── calc_results.py │ └── plot.py ├── appl.py ├── args.json ├── cluster │ ├── __init__.py │ ├── electrical.py │ ├── gas.py │ ├── spatial.py │ └── temporal.py ├── data │ └── unit_commitment.csv ├── disaggregate │ ├── __init__.py │ ├── spatial.py │ └── temporal.py ├── execute │ ├── __init__.py │ ├── grid_optimization.py │ ├── market_optimization.py │ └── sclopf.py ├── network.py └── tools │ ├── __init__.py │ ├── constraints.py │ ├── db.py │ ├── extendable.py │ ├── io.py │ ├── sql_scripts │ └── results_md2grid.sql │ └── utilities.py ├── noxfile.py ├── pyproject.toml ├── requirements-doc.txt └── setup.py /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: "CI" 2 | 3 | on: {push: {branches: ['**']}, pull_request: {branches: [dev, master]}} 4 | 5 | jobs: 6 | build: 7 | name: ${{ matrix.os.name }} 8 | runs-on: ${{ matrix.os.image }} 9 | 10 | strategy: 11 | matrix: 12 | os: 13 | - {image: ubuntu-latest, name: Linux} 14 | - {image: windows-latest, name: Windows} 15 | - {image: macos-latest, name: macOS} 16 | max-parallel: 4 17 | fail-fast: false 18 | 19 | steps: 20 | - uses: "actions/checkout@main" 21 | - uses: "actions/setup-python@main" 22 | with: 23 | python-version: | 24 | 3 25 | 3.9 26 | 3.10 27 | 3.11 28 | - name: "Install dependencies" 29 | run: | 30 | python -mpip install --progress-bar=off nox 31 | python --version 32 | pip --version 33 | nox --version 34 | - name: "Run custom checks" 35 | run: "python -m nox -s check" 36 | env: 37 | PLATFORM: ${{ matrix.os.image }} 38 | - name: "Check with `black`" 39 | run: "python -m nox -s black" 40 | env: 41 | PLATFORM: ${{ matrix.os.image }} 42 | - name: "Check with `flake8`" 43 | run: "python -m nox -s flake8" 44 | env: 45 | PLATFORM: ${{ matrix.os.image }} 46 | - name: "Check with `isort`" 47 | run: "python -m nox -s isort" 48 | env: 49 | PLATFORM: ${{ matrix.os.image }} 50 | - name: "Build and check for packaging errors" 51 | run: "python -m nox -s build" 52 | env: 53 | PLATFORM: ${{ matrix.os.image }} 54 | - name: "Install the package" 55 | run: "python -m nox -s install" 56 | env: 57 | PLATFORM: ${{ matrix.os.image }} 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/__pycache__/* 2 | build/* 3 | dist/* 4 | eTraGo.egg-info/* 5 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See 
https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 | 
5 | # Required
6 | version: 2
7 | 
8 | # Set the version of Python and other tools you might need
9 | build:
10 |   os: ubuntu-22.04
11 |   tools:
12 |     python: "3.8"
13 | 
14 | # Build documentation in the docs/ directory with Sphinx
15 | sphinx:
16 |   configuration: doc/conf.py
17 | 
18 | # We recommend specifying your dependencies to enable reproducible builds:
19 | # https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
20 | python:
21 |   install:
22 |     - requirements: requirements-doc.txt
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst
2 | include LICENSE
3 | include MANIFEST.in
4 | include etrago/tools/*.json
5 | include *.txt
6 | include etrago/*.json
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | |ci| |docs|
2 | 
3 | .. |ci| image::
4 |     https://img.shields.io/github/actions/workflow/status
5 |     /openego/eTraGo/ci.yaml?branch=dev&event=push&label=ci
6 |     :alt: Continuous Integration Workflow Status
7 |     :target: https://github.com/openego/eTraGo/actions/workflows/ci.yaml
8 | 
9 | .. |docs| image::
10 |     https://readthedocs.org/projects/etrago/badge/?version=latest
11 |     :alt: Documentation Status
12 |     :target: http://etrago.readthedocs.io/en/latest/?badge=latest
13 | 
14 | .. end-header
15 | 
16 | eTraGo
17 | ======
18 | 
19 | Optimization of flexibility options for transmission grids based on PyPSA
20 | 
21 | A special feature in this context is that transmission grids are described by
22 | the 380, 220 and 110 kV voltage levels in Germany. Conventionally, the 110 kV
23 | grid is part of the distribution grid. The integration of the transmission and
24 | 'upper' distribution grid is part of eTraGo.
25 | 
26 | The focus of the optimization is on flexibility options, with a special focus
27 | on energy storage. Grid expansion measures are not part of this tool and are
28 | instead part of 'eGo': https://github.com/openego/eGo
29 | 
30 | eTraGo is documented on `readthedocs `_.
31 | 
32 | 
33 | Input data
34 | ==========
35 | The grid model data for eTraGo was created with the open source tool
36 | `eGon-data `_. The resulting data will
37 | be published on the `OpenEnergyPlatform `_.
38 | As long as the data is not published there, a local database is needed.
39 | We published a backup of the required tables and instructions on how to use it
40 | on zenodo:
41 | 
42 | .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.8376714.svg
43 |   :target: https://doi.org/10.5281/zenodo.8376714
44 | 
45 | 
46 | Installation
47 | ============
48 | eTraGo is designed as a Python package; therefore, it is mandatory to have
49 | `Python 3 `_ installed. If you have a
50 | working Python 3 environment, use pypi to install the latest eTraGo version.
51 | We highly recommend using a virtual environment. Use the following pip
52 | command in order to install eTraGo.
53 | 
54 | .. code-block:: bash
55 | 
56 |   $ pip3 install eTraGo
57 | 
58 | Installation for Developers
59 | ===========================
60 | 
61 | Clone the source code from GitHub:
62 | 
63 | .. code-block::
64 | 
65 |   $ git clone https://github.com/openego/eTraGo
66 | 
67 | You can check out the dev branch and create new feature branches.
68 | For the correct workflow, please mind the
69 | `Driessen branching model `_.
70 | 
71 | Use pip with the ``-e`` flag to install eTraGo directly from the cloned
72 | repository:
73 | 
74 | .. code-block::
75 | 
76 |   $ pip3 install -e /path/to/eTraGo/
77 | 
78 | If you want to draw geographical features in the background of network plots,
79 | please install cartopy:
80 | 
81 | .. code-block::
82 | 
83 |   $ pip3 install cartopy
84 | 
85 | If you run into problems when using cartopy, try to install shapely without
86 | binaries:
87 | 
88 | .. code-block::
89 | 
90 |   $ pip3 install shapely --no-binary shapely
91 | 
92 | Using a virtual environment
93 | ===========================
94 | 
95 | Before installing eTraGo,
96 | create a virtual environment (wherever you like) and activate it:
97 | 
98 | .. code-block:: bash
99 | 
100 |   $ virtualenv venv --clear -p python3.10
101 |   $ source venv/bin/activate
102 |   $ cd venv
103 | 
104 | Inside your activated virtual environment you can
105 | install eTraGo with the pip command, as previously explained.
106 | 
107 | 
108 | Copyleft
109 | =========================
110 | 
111 | Code licensed under "GNU Affero General Public License Version 3 (AGPL-3.0)".
112 | It is a collaborative work with several copyright owners:
113 | Cite as "eTraGo" © Flensburg University of Applied Sciences, Centre for
114 | Sustainable Energy Systems © Europa-Universität Flensburg, Centre for
115 | Sustainable Energy Systems © DLR Institute for Networked Energy Systems
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 | 
4 | # You can set these variables from the command line.
5 | SPHINXOPTS    =
6 | SPHINXBUILD   = sphinx-build
7 | PAPER         =
8 | BUILDDIR      = _build
9 | 
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 | 
15 | # Internal variables.
16 | PAPEROPT_a4     = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " applehelp to make an Apple Help Book" 34 | @echo " devhelp to make HTML files and a Devhelp project" 35 | @echo " epub to make an epub" 36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 37 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 39 | @echo " text to make text files" 40 | @echo " man to make manual pages" 41 | @echo " texinfo to make Texinfo files" 42 | @echo " info to make Texinfo files and run them through makeinfo" 43 | @echo " gettext to make PO message catalogs" 44 | @echo " changes to make an overview of all changed/added/deprecated items" 45 | @echo " xml to make Docutils-native XML files" 46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 47 | @echo " linkcheck to check all external links for integrity" 48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 49 | @echo " coverage to run coverage check of the documentation (if enabled)" 50 | 51 | clean: 52 | rm -rf $(BUILDDIR)/* 53 | 54 | html: 55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 56 | @echo 57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 58 | 59 | dirhtml: 60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 61 | @echo 62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 63 | 64 | singlehtml: 65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 66 | @echo 67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 68 | 69 | pickle: 70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 71 | @echo 72 | @echo "Build finished; now you can process the pickle files." 73 | 74 | json: 75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 76 | @echo 77 | @echo "Build finished; now you can process the JSON files." 78 | 79 | htmlhelp: 80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 81 | @echo 82 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 83 | ".hhp project file in $(BUILDDIR)/htmlhelp." 84 | 85 | qthelp: 86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 87 | @echo 88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ding0.qhcp" 91 | @echo "To view the help file:" 92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ding0.qhc" 93 | 94 | applehelp: 95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp 96 | @echo 97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." 98 | @echo "N.B. You won't be able to view it unless you put it in" \ 99 | "~/Library/Documentation/Help or install it in your application" \ 100 | "bundle." 
101 | 102 | devhelp: 103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 104 | @echo 105 | @echo "Build finished." 106 | @echo "To view the help file:" 107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/ding0" 108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ding0" 109 | @echo "# devhelp" 110 | 111 | epub: 112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 113 | @echo 114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 115 | 116 | latex: 117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 118 | @echo 119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 121 | "(use \`make latexpdf' here to do that automatically)." 122 | 123 | latexpdf: 124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 125 | @echo "Running LaTeX files through pdflatex..." 126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 128 | 129 | latexpdfja: 130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 131 | @echo "Running LaTeX files through platex and dvipdfmx..." 132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 134 | 135 | text: 136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 137 | @echo 138 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 139 | 140 | man: 141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 142 | @echo 143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 144 | 145 | texinfo: 146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 147 | @echo 148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 149 | @echo "Run \`make' in that directory to run these through makeinfo" \ 150 | "(use \`make info' here to do that automatically)." 151 | 152 | info: 153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 154 | @echo "Running Texinfo files through makeinfo..." 155 | make -C $(BUILDDIR)/texinfo info 156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 157 | 158 | gettext: 159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 160 | @echo 161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 162 | 163 | changes: 164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 165 | @echo 166 | @echo "The overview file is in $(BUILDDIR)/changes." 167 | 168 | linkcheck: 169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 170 | @echo 171 | @echo "Link check complete; look for any errors in the above output " \ 172 | "or in $(BUILDDIR)/linkcheck/output.txt." 173 | 174 | doctest: 175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 176 | @echo "Testing of doctests in the sources finished, look at the " \ 177 | "results in $(BUILDDIR)/doctest/output.txt." 178 | 179 | coverage: 180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage 181 | @echo "Testing of coverage in the sources finished, look at the " \ 182 | "results in $(BUILDDIR)/coverage/python.txt." 183 | 184 | xml: 185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 186 | @echo 187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 188 | 189 | pseudoxml: 190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 191 | @echo 192 | @echo "Build finished. 
The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
193 | 
--------------------------------------------------------------------------------
/doc/about.rst:
--------------------------------------------------------------------------------
1 | ============
2 | About eTraGo
3 | ============
4 | 
5 | eTraGo stands for **e**\lectric **Tra**\nsmission **G**\rid **o**\ptimization.
6 | 
7 | The python package eTraGo provides optimization strategies for flexibility options
8 | in transmission grids based on PyPSA. A peculiarity in this context is that
9 | the German transmission grid is described by the 380, 220 and 110 kV voltage levels.
10 | Conventionally, the 110 kV grid is part of the distribution grid. The integration of
11 | the transmission and 'upper' distribution grid is part of eTraGo.
12 | 
13 | The focus of the optimization is on flexibility options, with special attention
14 | to energy storage and grid expansion measures.
15 | 
16 | 
17 | 
18 | Research projects
19 | ====================
20 | This software project was initially developed in the research project
21 | `open_eGo `_.
22 | It is constantly being further developed in different research projects,
23 | e.g. `eGon `_ and `PoWerD `_.
24 | 
25 | 
26 | The OpenEnergy Platform
27 | =======================
28 | Within the open_eGo project we developed the OpenEnergy Platform, which this software
29 | uses to retrieve and store its input and output data. Before you start to
30 | calculate, a registration on the platform is needed. For more information, see
31 | `openenergy-platform `_ and log in.
32 | 
33 | The OpenEnergy Platform mainly addresses students, researchers and scientists in
34 | the field of energy modelling and analytics, as well as other interested persons in
35 | those fields. The platform provides great tools to make your energy system
36 | modelling process transparent. All data of the open_eGo project are stored on
37 | this platform.
38 | `Learn more about the database access `_.
39 | 
40 | 
41 | 
42 | 
43 | 
44 | Tool overview
45 | =============
46 | 
47 | 
48 | 
49 | .. figure:: images/ego_tools.svg
50 |    :align: center
51 |    :scale: 75%
52 | 
53 | 
54 | eDisGo
55 | ======
56 | The python package eDisGo provides a toolbox for the analysis and optimization
57 | of distribution grids. It is closely related to the python project Ding0, as that
58 | project is currently the single data source for eDisGo, providing synthetic
59 | grid data for the whole of Germany. `Learn more here `_.
60 | 
61 | 
62 | eGo
63 | ===
64 | 
65 | The python package eGo is a toolbox and application which connects the tools eTraGo
66 | (optimization of flexibility options at transmission grid level)
67 | and eDisGo (optimization of distribution grids). All those python
68 | packages were initially developed in the research project
69 | `open_eGo `_.
70 | `Learn more here `_.
71 | 
72 | 
73 | Data model creation
74 | ===================
75 | For the eGon project, the python tool `eGon-data `_ was implemented, which creates input data for the optimization tools `eTraGo `_, `ding0 `_ and `eDisGo `_ and delivers, for example, data on grid topologies, demands/demand curves and generation capacities in a high spatial resolution. The outputs of eGon-data are published under open source and open data licenses.
76 | 
77 | eGon-data is a further development of the `Data processing `_ developed in the former research project `open_eGo `_. It aims for an extension of the data models as well as for better replicability and manageability of the data preparation and processing.
78 | 
79 | 
80 | Dingo
81 | =====
82 | 
83 | The DIstribution Network GeneratOr (Ding0) is a tool to generate synthetic
84 | medium and low voltage power distribution grids based on open
85 | (or at least accessible) data.
86 | `Learn more here `_.
87 | 
88 | 
89 | 
90 | 
91 | LICENSE
92 | =======
93 | 
94 | © Copyright 2015-2023
95 | Flensburg University of Applied Sciences,
96 | Europa-Universität Flensburg,
97 | Centre for Sustainable Energy Systems and
98 | DLR-Institute for Networked Energy Systems
99 | 
100 | 
101 | 
102 | This program is free software: you can redistribute it and/or modify it under
103 | the terms of the GNU Affero General Public License as published by the Free
104 | Software Foundation, either version 3 of the License, or (at your option)
105 | any later version.
106 | 
107 | This program is distributed in the hope that it will be useful, but WITHOUT
108 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
109 | FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
110 | more details.
111 | 
112 | You should have received a copy of the GNU Affero General Public License along
113 | with this program.
114 | If not, see `www.gnu.org/licenses `_.
--------------------------------------------------------------------------------
/doc/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ====
3 | 
4 | .. toctree::
5 |    :maxdepth: 7
6 |    :glob:
7 |    :titlesonly:
8 | 
9 |    api/etrago.analyze
10 |    api/etrago.cluster
11 |    api/etrago.disaggregate
12 |    api/etrago.execute
13 |    api/etrago.tools
14 |    api/appl.rst
15 |    api/network.rst
--------------------------------------------------------------------------------
/doc/api/appl.rst:
--------------------------------------------------------------------------------
1 | etrago.appl module
2 | -------------------
3 | 
4 | .. automodule:: etrago.appl
5 |     :members:
6 |     :undoc-members:
7 |     :show-inheritance:
--------------------------------------------------------------------------------
/doc/api/etrago.analyze.rst:
--------------------------------------------------------------------------------
1 | etrago.analyze package
2 | ======================
3 | 
4 | etrago.analyze.calc\_results module
5 | -----------------------------------
6 | 
7 | .. automodule:: etrago.analyze.calc_results
8 |     :members:
9 |     :undoc-members:
10 |     :show-inheritance:
11 | 
12 | etrago.analyze.plot module
13 | ---------------------------
14 | 
15 | .. automodule:: etrago.analyze.plot
16 |     :members:
17 |     :undoc-members:
18 |     :show-inheritance:
19 | 
20 | 
--------------------------------------------------------------------------------
/doc/api/etrago.cluster.rst:
--------------------------------------------------------------------------------
1 | etrago.cluster package
2 | =======================
3 | 
4 | 
5 | etrago.cluster.electrical module
6 | -----------------------------------------
7 | 
8 | .. automodule:: etrago.cluster.electrical
9 |     :members:
10 |     :undoc-members:
11 |     :show-inheritance:
12 | 
13 | etrago.cluster.gas module
14 | -----------------------------------------
15 | 
16 | .. automodule:: etrago.cluster.gas
17 |     :members:
18 |     :undoc-members:
19 |     :show-inheritance:
20 | 
21 | etrago.cluster.temporal module
22 | --------------------------------
23 | 
24 | .. automodule:: etrago.cluster.temporal
25 |     :members:
26 |     :undoc-members:
27 |     :show-inheritance:
28 | 
29 | etrago.cluster.spatial module
30 | --------------------------------
31 | 
32 | .. automodule:: etrago.cluster.spatial
33 |     :members:
34 |     :undoc-members:
35 |     :show-inheritance:
36 | 
--------------------------------------------------------------------------------
/doc/api/etrago.disaggregate.rst:
--------------------------------------------------------------------------------
1 | etrago.disaggregate package
2 | ===========================
3 | 
4 | etrago.disaggregate.spatial module
5 | ----------------------------------
6 | 
7 | .. automodule:: etrago.disaggregate.spatial
8 |     :members:
9 |     :undoc-members:
10 |     :show-inheritance:
11 | 
12 | etrago.disaggregate.temporal module
13 | -----------------------------------
14 | 
15 | .. automodule:: etrago.disaggregate.temporal
16 |     :members:
17 |     :undoc-members:
18 |     :show-inheritance:
19 | 
20 | 
--------------------------------------------------------------------------------
/doc/api/etrago.tools.rst:
--------------------------------------------------------------------------------
1 | etrago.tools package
2 | =====================
3 | 
4 | etrago.tools.calc\_results module
5 | ----------------------------------
6 | 
7 | .. automodule:: etrago.tools.calc_results
8 |     :members:
9 |     :undoc-members:
10 |     :show-inheritance:
11 | 
12 | etrago.tools.constraints module
13 | ----------------------------------
14 | 
15 | .. automodule:: etrago.tools.constraints
16 |     :members:
17 |     :undoc-members:
18 |     :show-inheritance:
19 | 
20 | etrago.tools.execute module
21 | ----------------------------------
22 | 
23 | .. automodule:: etrago.tools.execute
24 |     :members:
25 |     :undoc-members:
26 |     :show-inheritance:
27 | 
28 | etrago.tools.extendable module
29 | ----------------------------------
30 | 
31 | .. automodule:: etrago.tools.extendable
32 |     :members:
33 |     :undoc-members:
34 |     :show-inheritance:
35 | 
36 | etrago.tools.io module
37 | ------------------------
38 | 
39 | .. automodule:: etrago.tools.io
40 |     :members:
41 |     :undoc-members:
42 |     :show-inheritance:
43 | 
44 | etrago.tools.network module
45 | -----------------------------
46 | 
47 | .. automodule:: etrago.tools.network
48 |     :members:
49 |     :undoc-members:
50 |     :show-inheritance:
51 | 
52 | etrago.tools.plot module
53 | ---------------------------
54 | 
55 | .. automodule:: etrago.tools.plot
56 |     :members:
57 |     :undoc-members:
58 |     :show-inheritance:
59 | 
60 | etrago.tools.utilities module
61 | -------------------------------
62 | 
63 | .. automodule:: etrago.tools.utilities
64 |     :members:
65 |     :undoc-members:
66 |     :show-inheritance:
--------------------------------------------------------------------------------
/doc/api/network.rst:
--------------------------------------------------------------------------------
1 | etrago.network module
2 | ---------------------
3 | 
4 | .. 
automodule:: etrago.network 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /doc/conf.py: -------------------------------------------------------------------------------- 1 | """This file is part of eTraGO 2 | 3 | It is developed in the project open_eGo: https://openegoproject.wordpress.com 4 | 5 | eTraGo lives at github: https://github.com/openego/etrago/ 6 | The documentation is available on RTD: https://etrago.readthedocs.io""" 7 | 8 | 9 | __copyright__ = "Flensburg University of Applied Sciences, Europa-Universität Flensburg, Centre for Sustainable Energy Systems, DLR-Institute for Networked Energy Systems" 10 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 11 | __author__ = "wolf_bunke" 12 | 13 | 14 | # -*- coding: utf-8 -*- 15 | # 16 | # eTraGo documentation build configuration file, created by 17 | # sphinx-quickstart on Fri Sep 29 10:55:47 2017. 18 | # 19 | # This file is execfile()d with the current directory set to its 20 | # containing dir. 21 | # 22 | # Note that not all possible configuration values are present in this 23 | # autogenerated file. 24 | # 25 | # All configuration values have a default; values that are commented out 26 | # serve to show the default. 27 | 28 | import sys 29 | import os 30 | import shlex 31 | from unittest.mock import MagicMock 32 | #from mock import Mock as MagicMock 33 | 34 | # If extensions (or modules to document with autodoc) are in another directory, 35 | # add these directories to sys.path here. If the directory is relative to the 36 | # documentation root, use os.path.abspath to make it absolute, like shown here. 37 | #sys.path.insert(0, os.path.abspath('.')) 38 | sys.path.insert(0, os.path.abspath('../')) 39 | 40 | # -- General configuration ------------------------------------------------ 41 | 42 | # If your documentation needs a minimal Sphinx version, state it here. 43 | #needs_sphinx = '1.0' 44 | 45 | # Add any Sphinx extension module names here, as strings. They can be 46 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 47 | # ones. 
48 | extensions = [
49 |     'sphinx.ext.autodoc',
50 |     'sphinx.ext.intersphinx',
51 |     'sphinx.ext.todo',
52 |     'sphinx.ext.coverage',
53 |     'sphinx.ext.imgmath',
54 |     'sphinx.ext.viewcode',
55 | #    'sphinx.ext.autosummary',
56 | #    'sphinxcontrib.napoleon',  # enable Napoleon interpreter of docstrings for Sphinx <= 1.2
57 |     'sphinx.ext.napoleon',  # enable Napoleon for Sphinx > 1.3
58 | #    'sphinx_paramlinks',  # to have links to the types of the parameters of the functions
59 |     'numpydoc',
60 |     'sphinx.ext.extlinks',  # enables external links with a key
61 |     'nbsphinx',  # include notebooks
62 | ]
63 | 
64 | # Napoleon settings
65 | napoleon_google_docstring = True
66 | napoleon_numpy_docstring = True
67 | napoleon_include_init_with_doc = False
68 | napoleon_include_private_with_doc = False
69 | napoleon_include_special_with_doc = False
70 | napoleon_use_admonition_for_examples = False
71 | napoleon_use_admonition_for_notes = False
72 | napoleon_use_admonition_for_references = False
73 | napoleon_use_ivar = False
74 | napoleon_use_param = True
75 | napoleon_use_rtype = True
76 | napoleon_use_keyword = False
77 | 
78 | 
79 | # Dictionary of external links
80 | extlinks = {'pandas': ('http://pandas.pydata.org/pandas-docs/stable/api.html#%s',
81 |                        'pandas.'),
82 |             'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/orm/session_basics.html%s',
83 |                            'SQLAlchemy session object'),
84 |             'shapely': ('http://toblerity.org/shapely/manual.html#%s',
85 |                         'Shapely object')
86 |             }
87 | 
88 | 
89 | 
90 | 
91 | 
92 | # Add any paths that contain templates here, relative to this directory.
93 | templates_path = ['_templates']
94 | 
95 | # The suffix(es) of source filenames.
96 | # You can specify multiple suffix as a list of string:
97 | # source_suffix = ['.rst', '.md']
98 | source_suffix = '.rst'
99 | 
100 | # The encoding of source files.
101 | #source_encoding = 'utf-8-sig'
102 | 
103 | # The master toctree document.
104 | master_doc = 'index'
105 | 
106 | # General information about the project.
107 | project = u'eTraGo'
108 | copyright = u'2015-2023, Flensburg University of Applied Sciences, Europa-Universität Flensburg, Centre for Sustainable Energy Systems, DLR-Institute for Networked Energy Systems'
109 | author = u'ulfmueller, lukasol, wolfbunke, mariusves, s3pp'
110 | 
111 | # The version info for the project you're documenting, acts as replacement for
112 | # |version| and |release|, also used in various other places throughout the
113 | # built documents.
114 | #
115 | # The short X.Y version.
116 | version = '0.9'
117 | # The full version, including alpha/beta/rc tags.
118 | release = '0.9.0'
119 | 
120 | # The language for content autogenerated by Sphinx. Refer to documentation
121 | # for a list of supported languages.
122 | #
123 | # This is also used if you do content translation via gettext catalogs.
124 | # Usually you set "language" from the command line for these cases.
125 | language = "en"
126 | 
127 | # There are two options for replacing |today|: either, you set today to some
128 | # non-false value, then it is used:
129 | #today = ''
130 | # Else, today_fmt is used as the format for a strftime call.
131 | #today_fmt = '%B %d, %Y'
132 | 
133 | # List of patterns, relative to source directory, that match files and
134 | # directories to ignore when looking for source files.
135 | exclude_patterns = ['_build', 'whatsnew', 'eTraGo_tutorial_release0.9.ipynb']
136 | 
137 | # The reST default role (used for this markup: `text`) to use for all
138 | # documents.
139 | #default_role = None 140 | 141 | # If true, '()' will be appended to :func: etc. cross-reference text. 142 | #add_function_parentheses = True 143 | 144 | # If true, the current module name will be prepended to all description 145 | # unit titles (such as .. function::). 146 | #add_module_names = True 147 | 148 | # If true, sectionauthor and moduleauthor directives will be shown in the 149 | # output. They are ignored by default. 150 | #show_authors = False 151 | 152 | # The name of the Pygments (syntax highlighting) style to use. 153 | pygments_style = 'sphinx' 154 | 155 | # A list of ignored prefixes for module index sorting. 156 | #modindex_common_prefix = [] 157 | 158 | # If true, keep warnings as "system message" paragraphs in the built documents. 159 | #keep_warnings = False 160 | 161 | # If true, `todo` and `todoList` produce output, else they produce nothing. 162 | todo_include_todos = True 163 | 164 | 165 | # Fix import error of modules which depend on C modules (mock out the imports for these modules) 166 | # see http://read-the-docs.readthedocs.io/en/latest/faq.html#i-get-import-errors-on-libraries-that-depend-on-c-modules 167 | 168 | 169 | if 'READTHEDOCS' in os.environ: 170 | class Mock(MagicMock): 171 | @classmethod 172 | def __getattr__(cls, name): 173 | return MagicMock() 174 | 175 | MOCK_MODULES = ['ding0', 'ding0.results', 'shapely'] 176 | sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) 177 | 178 | MOCK_MODULES = ['libgeos', 'geos', 'libgeos_c', 'geos_c','libgeos_c.so.1', 179 | 'libgeos_c.so', 'shapely', 'geoalchemy2', 'geoalchemy2.shape '] 180 | 181 | 182 | 183 | # -- Options for HTML output ---------------------------------------------- 184 | 185 | # The theme to use for HTML and HTML Help pages. See the documentation for 186 | # a list of builtin themes. 187 | # html_theme = 'alabaster' 188 | 189 | import sphinx_rtd_theme 190 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 191 | html_theme = 'sphinx_rtd_theme' 192 | 193 | # Theme options are theme-specific and customize the look and feel of a theme 194 | # further. For a list of options available for each theme, see the 195 | # documentation. 196 | #html_theme_options = {} 197 | 198 | # Add any paths that contain custom themes here, relative to this directory. 199 | #html_theme_path = [] 200 | 201 | # The name for this set of Sphinx documents. If None, it defaults to 202 | # " v documentation". 203 | #html_title = None 204 | 205 | # A shorter title for the navigation bar. Default is the same as html_title. 206 | #html_short_title = None 207 | 208 | # The name of an image file (relative to this directory) to place at the top 209 | # of the sidebar. 210 | #html_logo = None 211 | 212 | # The name of an image file (within the static path) to use as favicon of the 213 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 214 | # pixels large. 215 | #html_favicon = None 216 | 217 | # Add any paths that contain custom static files (such as style sheets) here, 218 | # relative to this directory. They are copied after the builtin static files, 219 | # so a file named "default.css" will overwrite the builtin "default.css". 220 | # html_static_path = ['_static'] 221 | 222 | # Add any extra paths that contain custom files (such as robots.txt or 223 | # .htaccess) here, relative to this directory. These files are copied 224 | # directly to the root of the documentation. 
225 | #html_extra_path = [] 226 | 227 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 228 | # using the given strftime format. 229 | #html_last_updated_fmt = '%b %d, %Y' 230 | 231 | # If true, SmartyPants will be used to convert quotes and dashes to 232 | # typographically correct entities. 233 | #html_use_smartypants = True 234 | 235 | # Custom sidebar templates, maps document names to template names. 236 | #html_sidebars = {} 237 | 238 | # Additional templates that should be rendered to pages, maps page names to 239 | # template names. 240 | #html_additional_pages = {} 241 | 242 | # If false, no module index is generated. 243 | #html_domain_indices = True 244 | 245 | # If false, no index is generated. 246 | #html_use_index = True 247 | 248 | # If true, the index is split into individual pages for each letter. 249 | #html_split_index = False 250 | 251 | # If true, links to the reST sources are added to the pages. 252 | #html_show_sourcelink = True 253 | 254 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 255 | #html_show_sphinx = True 256 | 257 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 258 | #html_show_copyright = True 259 | 260 | # If true, an OpenSearch description file will be output, and all pages will 261 | # contain a tag referring to it. The value of this option must be the 262 | # base URL from which the finished HTML is served. 263 | #html_use_opensearch = '' 264 | 265 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 266 | #html_file_suffix = None 267 | 268 | # Language to be used for generating the HTML full-text search index. 269 | # Sphinx supports the following languages: 270 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' 271 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' 272 | #html_search_language = 'en' 273 | 274 | # A dictionary with options for the search language support, empty by default. 275 | # Now only 'ja' uses this config value 276 | #html_search_options = {'type': 'default'} 277 | 278 | # The name of a javascript file (relative to the configuration directory) that 279 | # implements a search results scorer. If empty, the default will be used. 280 | #html_search_scorer = 'scorer.js' 281 | 282 | # Output file base name for HTML help builder. 283 | htmlhelp_basename = 'eTraGodoc' 284 | 285 | # -- Options for LaTeX output --------------------------------------------- 286 | 287 | latex_elements = { 288 | # The paper size ('letterpaper' or 'a4paper'). 289 | #'papersize': 'letterpaper', 290 | 291 | # The font size ('10pt', '11pt' or '12pt'). 292 | #'pointsize': '10pt', 293 | 294 | # Additional stuff for the LaTeX preamble. 295 | #'preamble': '', 296 | 297 | # Latex figure (float) alignment 298 | #'figure_align': 'htbp', 299 | } 300 | 301 | # Grouping the document tree into LaTeX files. List of tuples 302 | # (source start file, target name, title, 303 | # author, documentclass [howto, manual, or own class]). 304 | latex_documents = [ 305 | (master_doc, 'etrago.tex', u'eTraGo Documentation', 306 | u'open_eGo-Team', 'manual'), 307 | ] 308 | 309 | # The name of an image file (relative to this directory) to place at the top of 310 | # the title page. 311 | #latex_logo = None 312 | 313 | # For "manual" documents, if this is true, then toplevel headings are parts, 314 | # not chapters. 315 | #latex_use_parts = False 316 | 317 | # If true, show page references after internal links. 
318 | #latex_show_pagerefs = False
319 | 
320 | # If true, show URL addresses after external links.
321 | #latex_show_urls = False
322 | 
323 | # Documents to append as an appendix to all manuals.
324 | #latex_appendices = []
325 | 
326 | # If false, no module index is generated.
327 | #latex_domain_indices = True
328 | 
329 | 
330 | # -- Options for manual page output ---------------------------------------
331 | 
332 | # One entry per manual page. List of tuples
333 | # (source start file, name, description, authors, manual section).
334 | man_pages = [
335 |     (master_doc, 'eTraGo', u'eTraGo Documentation',
336 |      [author], 1)
337 | ]
338 | 
339 | # If true, show URL addresses after external links.
340 | #man_show_urls = False
341 | 
342 | 
343 | # -- Options for Texinfo output -------------------------------------------
344 | 
345 | # Grouping the document tree into Texinfo files. List of tuples
346 | # (source start file, target name, title, author,
347 | #  dir menu entry, description, category)
348 | texinfo_documents = [
349 |     (master_doc, 'eTraGo', u'eTraGo Documentation',
350 |      author, 'eTraGo', 'electrical Transmission Grid Optimization of flexibility options for transmission grids based on PyPSA',
351 |      'Miscellaneous'),
352 | ]
353 | 
354 | # Documents to append as an appendix to all manuals.
355 | #texinfo_appendices = []
356 | 
357 | # If false, no module index is generated.
358 | #texinfo_domain_indices = True
359 | 
360 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
361 | #texinfo_show_urls = 'footnote'
362 | 
363 | # If true, do not generate a @detailmenu in the "Top" node's menu.
364 | #texinfo_no_detailmenu = False
365 | 
366 | 
367 | # Example configuration for intersphinx: refer to the Python standard library.
368 | intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
369 | 
370 | # Numbered figures
371 | numfig = True
372 | 
373 | autodoc_member_order = 'bysource'
--------------------------------------------------------------------------------
/doc/developer_notes.rst:
--------------------------------------------------------------------------------
1 | 
2 | ===============
3 | Developer notes
4 | ===============
5 | 
6 | 
7 | Installation for Developers
8 | ===========================
9 | 
10 | 
11 | .. note::
12 |     Installation is primarily tested on (Ubuntu-like) Linux OS.
13 | 
14 | 1. If you like, create a virtual environment (wherever you like) and activate it (if you do not use a venv, start with 2.):
15 | 
16 | .. code-block:: bash
17 | 
18 |     $ virtualenv --clear -p python3.10 etrago
19 |     $ cd etrago/
20 |     $ source bin/activate
21 | 
22 | 2. Clone the source code from GitHub:
23 | 
24 | .. code-block:: bash
25 | 
26 |     $ git clone https://github.com/openego/eTraGo
27 | 
28 | You can check out the dev branch and create new feature branches.
29 | For the correct workflow, please mind the
30 | `Driessen branching model `_.
31 | 
32 | 3. Use pip with the ``-e`` flag to install eTraGo directly from the cloned repository:
33 | 
34 | .. code-block:: bash
35 | 
36 |     $ pip3 install -e /path/to/eTraGo/
37 | 
--------------------------------------------------------------------------------
/doc/getting_started.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 | 
4 | 
5 | Run:
6 | 
7 | .. code-block:: bash
8 | 
9 |    git clone https://github.com/openego/eTraGo
10 | 
11 | Create a virtual environment (wherever you like) and activate it:
12 | 
13 | .. code-block:: bash
14 | 
15 |    virtualenv -p python3 venv
16 |    source venv/bin/activate
17 | 
18 | With your activated environment ``cd`` to the cloned directory and run:
19 | 
20 | .. code-block:: bash
21 | 
22 |    pip install -e eTraGo
23 | 
24 | This will install all needed packages into your environment. Now you should be ready to go.
25 | 
26 | Installation for windows users
27 | -----------------------------------
28 | 
29 | - install Anaconda including Python 3 (https://www.anaconda.com/download/)
30 | 
31 | - open an Anaconda prompt as administrator and run:
32 | 
33 |   .. code-block:: bash
34 | 
35 |      conda install pip
36 |      conda config --add channels conda-forge
37 |      conda install shapely
38 | 
39 | - download and install GitHub Desktop (https://desktop.github.com)
40 | 
41 | - open GitHub Desktop and clone eTraGo from open_eGo
42 | 
43 | - open an Anaconda prompt as administrator and run:
44 | 
45 |   .. code-block:: bash
46 | 
47 |      pip install -e path/to/Github/Folder/eTraGo
48 |      pip install pandas==0.20.3  # version 0.21 is not working!
49 | 
50 | - to check if everything is installed run:
51 | 
52 |   .. code-block:: bash
53 | 
54 |      pip freeze
55 | 
56 | For using the session maker in eTraGo you need oemof.db:
57 | 
58 | - open an Anaconda prompt as administrator and run:
59 | 
60 |   .. code-block:: bash
61 | 
62 |      pip install oemof.db
63 |      cd C:/Users/YourUserName
64 |      md .oemof
65 | 
66 | - open the new folder .oemof and use an editor to create a file "config.ini"
67 |   and insert the following lines (if you only calculate locally or on the
68 |   oedb, you only need the corresponding section):
69 | 
70 |   ::
71 | 
72 |     [oedb]
73 | 
74 |     username = YourOEDBUserName
75 | 
76 |     database = oedb
77 | 
78 |     host = oe2.iws.cs.ovgu.de
79 | 
80 |     port = 5432
81 | 
82 |     pw = YourOEDBPassword
83 | 
84 |     [local]
85 | 
86 |     username = YourLocalUserName
87 | 
88 |     database = YourLocalDatabaseName
89 | 
90 |     host = 127.0.0.1
91 | 
92 |     port = 5432
93 | 
94 |     pw = YourLocalPassword
--------------------------------------------------------------------------------
/doc/howToUse.rst:
--------------------------------------------------------------------------------
1 | .. _HowToUse:
2 | ==================
3 | How to use eTraGo?
4 | ==================
5 | 
6 | After you have installed eTraGo, you would typically start optimization runs by
7 | executing the 'appl.py', which is situated in
8 | ``./eTraGo/etrago/`` (e.g. by ``python3 appl.py`` from the terminal).
9 | 
10 | eTraGo doesn't have a graphical user interface;
11 | the 'appl.py' is used as a simple user interface, which can be edited with
12 | your preferred Python editor.
13 | Here, parameters, calculation methods and scenario settings are set in a Python
14 | dictionary called 'args'.
15 | To run the desired calculation, it is crucial to understand these parameters.
16 | In addition, some of them contradict the usage of others.
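17 | 
18 | As a first orientation, starting a run from Python is sketched below. This is
19 | only an illustration: the shown keys are a small, hypothetical excerpt of the
20 | full 'args' dictionary (which is documented right below), and the signature of
21 | :func:`etrago.appl.run_etrago` is assumed as shown.
22 | 
23 | .. code-block:: python
24 | 
25 |     from etrago.appl import run_etrago
26 | 
27 |     # Illustrative excerpt of the 'args' dictionary; appl.py defines
28 |     # the complete set of keys.
29 |     args = {
30 |         "db": "egon-data",       # database connection name from config.ini
31 |         "scn_name": "eGon2035",  # scenario to be optimized
32 |         "start_snapshot": 1,
33 |         "end_snapshot": 24,      # first day of the year in hourly resolution
34 |     }
35 | 
36 |     etrago = run_etrago(args, json_path=None)  # returns an Etrago object
37 |     etrago.calc_results()
38 |     print(etrago.results)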
39 | You find the documentation of all defined parameters of the 'args' here:
40 | :func:`etrago.appl.run_etrago`.
41 | 
42 | Alternatively, the 'args' dictionary can be edited in a json-file.
43 | Then the path to the json-file has to be set in the initialization of the
44 | Etrago-object (:class:`etrago.tools.network.Etrago`). Once a path is given,
45 | the 'args' dictionary within the 'appl.py' is ignored
46 | and replaced by the 'args' of the json-file.
47 | 
48 | The appl.py contains the :func:`etrago.appl.run_etrago` function which uses the
49 | defined 'args' dictionary to start the desired calculation.
50 | 
51 | To improve the performance of the optimization with the selected solver,
52 | you might want to use solver options (part of 'args'). For gurobi,
53 | the most used ones are described
54 | `here `_.
55 | 
56 | For more specific or extensive changes you are highly invited
57 | to write code and add new functionalities.
58 | 
59 | Once the calculation has finished, the PyPSA network of the Etrago-object will
60 | contain all results. Some main results (e.g. annual system costs) are calculated
61 | by :meth:`etrago.calc_results` and can be accessed via 'etrago.results'.
62 | You can use several plotting functions from :mod:`etrago.tools.plot` in order
63 | to visualize the results. For example,
64 | :meth:`etrago.tools.plot.plot_grid` can be used to plot the relative line loading
65 | in % or the optimized expansion of all AC lines and DC links of the network.
66 | 
67 | To save the results, you can write them to csv files. These functionalities can
68 | also be specified in the 'args' dictionary.
69 | 
70 | 
71 | .. _Examples:
72 | Examples and tutorial notebooks
73 | ===============================
74 | 
75 | 
76 | 
77 | **eTraGo version 0.5.1:**
78 | `etrago_OpenMod_Zuerich18 `_.
79 | 
80 | **eTraGo version 0.9:**
81 | `eTraGo_eGon_final_workshop `_.
--------------------------------------------------------------------------------
/doc/images/etrago_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eTraGo/969a8f90dd2fe5db437698120b08cd83fda2bf89/doc/images/etrago_logo.png
--------------------------------------------------------------------------------
/doc/images/modelling_concept.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eTraGo/969a8f90dd2fe5db437698120b08cd83fda2bf89/doc/images/modelling_concept.png
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | .. eTraGo documentation master file, created by
2 |    sphinx-quickstart on Fri Sep 29 10:55:47 2017.
3 |    You can adapt this file completely to your liking, but it should at least
4 |    contain the root `toctree` directive.
5 | 
6 | Welcome to eTraGo's documentation!
7 | ==================================
8 | 
9 | 
10 | .. figure:: images/etrago_logo.png
11 |    :align: right
12 |    :scale: 80%
13 | 
14 | .. warning:: Note, eTraGo and its documentation are in
15 |    continuous development.
16 | 
17 | .. toctree::
18 |    :maxdepth: 2
19 | 
20 |    about
21 |    installation
22 |    howToUse
23 |    theoretical_background
24 |    developer_notes
25 |    whatsnew
26 |    api
27 | 
28 | 
29 | Indices and tables
30 | ==================
31 | 
32 | * :ref:`genindex`
33 | * :ref:`modindex`
34 | * :ref:`search`
--------------------------------------------------------------------------------
/doc/installation.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 | eTraGo is designed as a Python package; therefore, it is mandatory to have
5 | `Python 3 `_ installed. If you have a
6 | working Python 3 environment, use pypi to install the latest eTraGo version.
7 | We highly recommend using a virtual environment. Use the following pip
8 | command in order to install eTraGo.
9 | 
10 | .. code-block:: bash
11 | 
12 |    $ pip3 install eTraGo
13 | 
14 | Using a virtual environment
15 | ===========================
16 | 
17 | 
18 | Before installing eTraGo,
19 | create a virtual environment (wherever you like) and activate it:
20 | 
21 | .. code-block:: bash
22 | 
23 |    $ virtualenv venv --clear -p python3.10
24 |    $ source venv/bin/activate
25 |    $ cd venv
26 | 
27 | Inside your activated virtual environment you can
28 | install eTraGo with the pip command, as previously explained.
29 | 
30 | Linux and Ubuntu
31 | ================
32 | 
33 | The package eTraGo is tested with Ubuntu 16.04, 18.04, 20.04 and 22.04 inside the virtual
34 | environments of `virtualenv `_.
35 | The installation is shown above.
36 | 
37 | 
38 | 
39 | Windows or Mac OSX users
40 | ========================
41 | 
42 | For Windows and/or Mac OSX users we highly recommend installing and using Anaconda
43 | for your Python 3 installation. First install Conda including Python 3.10 or
44 | a higher version from https://www.anaconda.com/download/, open an Anaconda
45 | prompt as administrator and run:
46 | 
47 | .. code-block:: bash
48 | 
49 |    $ conda config --add channels conda-forge
50 |    $ conda create -n etrago_env python=3.10
51 |    $ conda activate etrago_env
52 |    $ pip install eTraGo
53 | 
54 | 
55 | The full documentation can be found
56 | `on this page `_. We use Anaconda
57 | with a separate environment in order to reduce problems with packages and different
58 | versions on our system. Learn more about
59 | `Anaconda `_
60 | environments.
61 | 
62 | 
63 | 
64 | Setup database connection
65 | =========================
66 | The eTraGo module `db `_
67 | gives you a python SQL-Alchemy representation of
68 | the `OpenEnergy-Database(oedb) `_
69 | and access to it by using the
70 | `oedialect `_, which is a SQL-Alchemy binding
71 | Python package for the REST-API used by the OpenEnergy Platform (OEP).
72 | 
73 | In order to connect eTraGo via the oedialect with the oedb, you
74 | have to create an account at
75 | `openenergy-platform.org/login `_.
76 | You can name the `'db' `_
77 | argument of the 'args' of the :func:`etrago.appl.run_etrago`
78 | as you wish. Once the :func:`etrago.appl.run_etrago` is executed, you will be asked
79 | how you want to connect to which database. If you want to use
80 | the oedialect, enter the following connection parameters. For username and
81 | password, you have to use the credentials which you obtained by registering
82 | at `openenergy-platform.org/login `_.
83 | 
84 | Your API access / login data will be saved in the folder ``.etrago_database`` in the file
85 | ``config.ini``. Consequently, in the config.ini you can also change
86 | your connection parameters or add new ones.
87 | In the following you can see how the config.ini looks like when you use the 88 | oedialect, a local postgresql database or the old psycopg2 developer connection. 89 | 90 | Once you have created a connection (which is saved in the config.ini) you do not have 91 | to enter the connection parameter again. The software will take the connection parameter 92 | which corresponds to the entry at the `'db' `_ argument. 93 | 94 | 95 | oedialect connection 96 | -------------------- 97 | 98 | .. code-block:: desktop 99 | 100 | [oedb] 101 | dialect = oedialect 102 | username = 103 | database = oedb 104 | host = openenergy-platform.org 105 | port = 80 106 | password = 107 | 108 | 109 | Local database connection 110 | ------------------------- 111 | 112 | .. code-block:: desktop 113 | 114 | [local] 115 | username = YourOEDBUserName 116 | database = YourLocalDatabaseName 117 | host = localhost or 127.0.0.1 118 | port = 5433 119 | pw = YourLocalPassword 120 | 121 | 122 | 123 | Old developer connection 124 | ------------------------- 125 | 126 | .. code-block:: desktop 127 | 128 | [oedb] 129 | username = YourOEDBUserName 130 | database = oedb 131 | host = oe2.iws.cs.ovgu.de 132 | port = 5432 133 | pw = YourOEDBPassword 134 | -------------------------------------------------------------------------------- /doc/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | REM Command file for Sphinx documentation 4 | 5 | if "%SPHINXBUILD%" == "" ( 6 | set SPHINXBUILD=sphinx-build 7 | ) 8 | set BUILDDIR=_build 9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . 10 | set I18NSPHINXOPTS=%SPHINXOPTS% . 11 | if NOT "%PAPER%" == "" ( 12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% 13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% 14 | ) 15 | 16 | if "%1" == "" goto help 17 | 18 | if "%1" == "help" ( 19 | :help 20 | echo.Please use `make ^` where ^ is one of 21 | echo. html to make standalone HTML files 22 | echo. dirhtml to make HTML files named index.html in directories 23 | echo. singlehtml to make a single large HTML file 24 | echo. pickle to make pickle files 25 | echo. json to make JSON files 26 | echo. htmlhelp to make HTML files and a HTML help project 27 | echo. qthelp to make HTML files and a qthelp project 28 | echo. devhelp to make HTML files and a Devhelp project 29 | echo. epub to make an epub 30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter 31 | echo. text to make text files 32 | echo. man to make manual pages 33 | echo. texinfo to make Texinfo files 34 | echo. gettext to make PO message catalogs 35 | echo. changes to make an overview over all changed/added/deprecated items 36 | echo. xml to make Docutils-native XML files 37 | echo. pseudoxml to make pseudoxml-XML files for display purposes 38 | echo. linkcheck to check all external links for integrity 39 | echo. doctest to run all doctests embedded in the documentation if enabled 40 | echo. coverage to run coverage check of the documentation if enabled 41 | goto end 42 | ) 43 | 44 | if "%1" == "clean" ( 45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 46 | del /q /s %BUILDDIR%\* 47 | goto end 48 | ) 49 | 50 | 51 | REM Check if sphinx-build is available and fallback to Python version if any 52 | %SPHINXBUILD% 2> nul 53 | if errorlevel 9009 goto sphinx_python 54 | goto sphinx_ok 55 | 56 | :sphinx_python 57 | 58 | set SPHINXBUILD=python -m sphinx.__init__ 59 | %SPHINXBUILD% 2> nul 60 | if errorlevel 9009 ( 61 | echo. 
62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 63 | echo.installed, then set the SPHINXBUILD environment variable to point 64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 65 | echo.may add the Sphinx directory to PATH. 66 | echo. 67 | echo.If you don't have Sphinx installed, grab it from 68 | echo.http://sphinx-doc.org/ 69 | exit /b 1 70 | ) 71 | 72 | :sphinx_ok 73 | 74 | 75 | if "%1" == "html" ( 76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 77 | if errorlevel 1 exit /b 1 78 | echo. 79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 80 | goto end 81 | ) 82 | 83 | if "%1" == "dirhtml" ( 84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 85 | if errorlevel 1 exit /b 1 86 | echo. 87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 88 | goto end 89 | ) 90 | 91 | if "%1" == "singlehtml" ( 92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 93 | if errorlevel 1 exit /b 1 94 | echo. 95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 96 | goto end 97 | ) 98 | 99 | if "%1" == "pickle" ( 100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 101 | if errorlevel 1 exit /b 1 102 | echo. 103 | echo.Build finished; now you can process the pickle files. 104 | goto end 105 | ) 106 | 107 | if "%1" == "json" ( 108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 109 | if errorlevel 1 exit /b 1 110 | echo. 111 | echo.Build finished; now you can process the JSON files. 112 | goto end 113 | ) 114 | 115 | if "%1" == "htmlhelp" ( 116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 117 | if errorlevel 1 exit /b 1 118 | echo. 119 | echo.Build finished; now you can run HTML Help Workshop with the ^ 120 | .hhp project file in %BUILDDIR%/htmlhelp. 121 | goto end 122 | ) 123 | 124 | if "%1" == "qthelp" ( 125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 129 | .qhcp project file in %BUILDDIR%/qthelp, like this: 130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\ding0.qhcp 131 | echo.To view the help file: 132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\ding0.ghc 133 | goto end 134 | ) 135 | 136 | if "%1" == "devhelp" ( 137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 138 | if errorlevel 1 exit /b 1 139 | echo. 140 | echo.Build finished. 141 | goto end 142 | ) 143 | 144 | if "%1" == "epub" ( 145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 146 | if errorlevel 1 exit /b 1 147 | echo. 148 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 149 | goto end 150 | ) 151 | 152 | if "%1" == "latex" ( 153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 154 | if errorlevel 1 exit /b 1 155 | echo. 156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 157 | goto end 158 | ) 159 | 160 | if "%1" == "latexpdf" ( 161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 162 | cd %BUILDDIR%/latex 163 | make all-pdf 164 | cd %~dp0 165 | echo. 166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 167 | goto end 168 | ) 169 | 170 | if "%1" == "latexpdfja" ( 171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 172 | cd %BUILDDIR%/latex 173 | make all-pdf-ja 174 | cd %~dp0 175 | echo. 176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
177 |     goto end
178 | )
179 | 
180 | if "%1" == "text" (
181 |     %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
182 |     if errorlevel 1 exit /b 1
183 |     echo.
184 |     echo.Build finished. The text files are in %BUILDDIR%/text.
185 |     goto end
186 | )
187 | 
188 | if "%1" == "man" (
189 |     %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
190 |     if errorlevel 1 exit /b 1
191 |     echo.
192 |     echo.Build finished. The manual pages are in %BUILDDIR%/man.
193 |     goto end
194 | )
195 | 
196 | if "%1" == "texinfo" (
197 |     %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
198 |     if errorlevel 1 exit /b 1
199 |     echo.
200 |     echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
201 |     goto end
202 | )
203 | 
204 | if "%1" == "gettext" (
205 |     %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
206 |     if errorlevel 1 exit /b 1
207 |     echo.
208 |     echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
209 |     goto end
210 | )
211 | 
212 | if "%1" == "changes" (
213 |     %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
214 |     if errorlevel 1 exit /b 1
215 |     echo.
216 |     echo.The overview file is in %BUILDDIR%/changes.
217 |     goto end
218 | )
219 | 
220 | if "%1" == "linkcheck" (
221 |     %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
222 |     if errorlevel 1 exit /b 1
223 |     echo.
224 |     echo.Link check complete; look for any errors in the above output ^
225 | or in %BUILDDIR%/linkcheck/output.txt.
226 |     goto end
227 | )
228 | 
229 | if "%1" == "doctest" (
230 |     %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
231 |     if errorlevel 1 exit /b 1
232 |     echo.
233 |     echo.Testing of doctests in the sources finished, look at the ^
234 | results in %BUILDDIR%/doctest/output.txt.
235 |     goto end
236 | )
237 | 
238 | if "%1" == "coverage" (
239 |     %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
240 |     if errorlevel 1 exit /b 1
241 |     echo.
242 |     echo.Testing of coverage in the sources finished, look at the ^
243 | results in %BUILDDIR%/coverage/python.txt.
244 |     goto end
245 | )
246 | 
247 | if "%1" == "xml" (
248 |     %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
249 |     if errorlevel 1 exit /b 1
250 |     echo.
251 |     echo.Build finished. The XML files are in %BUILDDIR%/xml.
252 |     goto end
253 | )
254 | 
255 | if "%1" == "pseudoxml" (
256 |     %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
257 |     if errorlevel 1 exit /b 1
258 |     echo.
259 |     echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
260 |     goto end
261 | )
262 | 
263 | :end
--------------------------------------------------------------------------------
/doc/theoretical_background.rst:
--------------------------------------------------------------------------------
1 | ======================
2 | Theoretical Background
3 | ======================
4 | 
5 | 
6 | 
7 | Definitions and Units
8 | =====================
9 | 
10 | eTraGo is based on the open source tool `PyPSA `_ and uses its definitions and units.
11 | 
12 | 
13 | Assumptions on Data
14 | ===================
15 | 
16 | eTraGo fetches the input data from the `OpenEnergy Platform `_. The data includes electricity and gas grid topology as well as data on energy supply and load for the considered sectors (electricity, gas, heat and e-mobility), plus data on the flexibility potential deriving from those sectors, e.g. Dynamic Line Rating, Demand Side Management and flexibility arising from e-mobility. More details on the data model can be found in the documentation of `eGon-data `_.
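As a rough sketch of how this data model is typically loaded (a hypothetical minimal snippet; the exact calls are those used in appl.py and may differ between versions):

.. code-block:: python

    from etrago import Etrago

    # args holds the calculation settings, e.g. the 'db' connection entry
    etrago = Etrago(args, json_path=None)
    etrago.build_network_from_db()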
17 | 
18 | At the moment, there are two scenarios available, based on scenario C2035 of the network expansion plan ([NEP]_), version 2021. The base one is called eGon2035. To analyse the effect of flexibility options, there is an eGon2035_lowflex scenario available which depicts a lower penetration of flexibilities. More scenarios are being developed. The eGon100RE scenario, which is characterised by a 100% renewable generation, is being implemented. Analogous to the scenarios above, an eGon100RE_lowflex scenario will be available.
19 | 
20 | You can see the modelling concepts of the scenarios in the figure below. The components marked in green have exogenous capacity and endogenous dispatch, whereas the components marked in red are optimised endogenously in capacity and dispatch.
21 | 
22 | .. figure:: images/modelling_concept.png
23 |    :align: center
24 |    :scale: 75%
25 | 
26 | 
27 | Methods
28 | =======
29 | 
30 | 
31 | Optimisation with PyPSA
32 | -----------------------
33 | 
34 | Within eTraGo, the fetched data model is translated into a `PyPSA `_ network. The optimisation is performed with a linear approximation, assuming that eTraGo fulfils the conditions to perform a LOPF (small voltage angle differences, branch resistances negligible compared to their reactances, voltage magnitudes that can be kept at nominal values), since it focuses on the extra-high and high voltage levels. The objective value of the optimisation is the overall system costs.
35 | 
36 | With the argument 'pf_post_lopf', a non-linear power flow simulation can be conducted after the LOPF.
37 | 
38 | 
39 | Complexity Reduction
40 | ---------------------
41 | 
42 | The data model is characterised by a high spatial (about 8,000 electrical and 600 gas nodes) and temporal resolution (8,760 timesteps). To reduce the complexity of the resulting optimisation problem, several methods can be applied.
43 | 
44 | 
45 | Reduction in spatial dimension:
46 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
47 | 
48 | The **ehv clustering** maps all electrical nodes with a voltage level below the extra-high voltage level to their nearest neighboring node in the extra-high voltage level using Dijkstra's algorithm (110 kV —> 220 / 380 kV).
49 | 
50 | The **k-means Clustering** reduces the electrical or gas network to an adjustable number of nodes by considering the geographical position of the respective nodes. This method has been implemented within PyPSA by [Hoersch]_.
51 | 
52 | The **k-medoids Dijkstra Clustering** aggregates nodes considering the network topology. First, a k-medoids clustering divides the original nodes of the network into groups by their geographical positions while identifying the geographical medoid node per cluster. Afterwards, the original nodes are assigned to the previously identified medoids by applying Dijkstra's algorithm on the original network's topology, considering the line lengths. Finally, the original nodes are represented by one aggregated node per cluster at the position of the respective medoid node.
53 | 
54 | In general, the clustering of the **sector-coupled system** is divided into two steps:
55 | First, the electrical and gas grids are clustered independently using one of the methods described above. Afterwards, nodes of the other sectors (hydrogen, heat, e-mobility and DSM nodes) are mapped according to their connection to electricity or gas buses and aggregated to one node per carrier, as sketched below.
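The following minimal sketch illustrates this second step with plain pandas and made-up bus names; it is a simplified illustration of the mapping idea, not the actual eTraGo implementation:

.. code-block:: python

    import pandas as pd

    # busmap from the first step: original AC bus -> clustered AC bus
    ac_busmap = pd.Series({"ac1": "c0", "ac2": "c0", "ac3": "c1"})

    # links attaching heat buses to AC buses (e.g. heat pumps)
    heat_links = pd.DataFrame(
        {"bus0": ["ac1", "ac2", "ac3"], "bus1": ["h1", "h2", "h3"]}
    )

    # every heat bus inherits the cluster of its AC bus, so that one
    # aggregated heat node per cluster remains
    heat_busmap = pd.Series(
        ac_busmap[heat_links.bus0].values + "_heat", index=heat_links.bus1
    )
    print(heat_busmap.to_dict())
    # {'h1': 'c0_heat', 'h2': 'c0_heat', 'h3': 'c1_heat'}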
56 | 
57 | After optimising the spatially reduced network, a **spatial disaggregation** can be conducted.
58 | 
59 | 
60 | Reduction in temporal dimension:
61 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
62 | 
63 | The method **Skip Snapshots** implies a downsampling to every nth time step. The considered snapshots are weighted accordingly to account for the analysis of one whole year.
64 | 
65 | By using the method called **Segmentation**, a hierarchical clustering of consecutive timesteps to segments with variable lengths is applied [Pineda]_.
66 | 
67 | The **Snapshot Clustering on Typical Periods** implies a hierarchical clustering of time periods with a predefined length (e.g. days or weeks) to typical periods. Those typical periods are weighted according to the number of periods in their cluster. This method optionally includes the linkage of the typical periods in a second time layer to account for the intertemporal dependencies, following [Kotzur]_.
68 | 
69 | By applying a 2-level-approach, a **temporal disaggregation** can be conducted. This means optimising dispatch using the temporally fully resolved time series in the second step, after having optimised grid and storage expansion using the complexity-reduced time series in the first step.
70 | 
71 | 
72 | Grid and Storage / Store expansion
73 | -----------------------------------
74 | 
75 | The grid expansion is realized by extending the capacities of existing lines and substations. These capacities are considered as part of the optimisation problem, whereby the possible extension is unlimited. With respect to the different voltage levels and lengths, MVA-specific costs are considered in the optimisation.
76 | 
77 | As shown in the figure above, several options to store energy are part of the modelling concept. Extendable batteries (modelled as storage units) are assigned to every node in the electrical grid. A minimum installed capacity is considered to account for home batteries ([NEP]_). Their expansion and operation are part of the optimisation. Furthermore, two types of hydrogen stores (modelled as stores) are available. Overground stores are optimised in operation and dispatch without limitations, whereas underground stores depicting salt caverns are limited by geographical conditions ([BGR]_). Additionally, heat stores are part of the optimisation in terms of power and energy without upper limits.
78 | 
79 | 
80 | Miscellaneous Features
81 | ----------------------
82 | 
83 | Several features were developed to enhance the functionality of eTraGo.
84 | 
85 | To customize computation settings, 'solver_options' and 'generator_noise' should be adapted. The latter adds a reproducible small random noise to the marginal costs of each generator in order to prevent a plateau of equivalent optima. The specific solver options depend on the applied solver (e.g. Gurobi, CPLEX or GLPK).
86 | 
87 | In 'extendable' you can adapt the type of components you want to be optimised in capacity and set upper limits for grid expansion inside Germany and for lines to foreign countries.
88 | 
89 | The 'extra_functionality' argument allows considering extra constraints like limits for energy import and export or minimal renewable shares in generation.
90 | 
91 | 'branch_capacity_factor' adds a factor to adapt all line capacities in order to consider (n-1) security. Because the average number of HV systems is much smaller than that of eHV lines, you can choose factors for 'HV' and 'eHV' separately.
92 | 
93 | The 'load_shedding' argument is used for debugging complex grids in order to avoid infeasibilities. It introduces a very expensive generator at each bus to meet the demand. When optimising storage units and grid expansion without limiting constraints, there should be no need for load shedding.
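For orientation, the corresponding entries of the args dictionary could look like the following snippet. The values are taken from the shipped args.json (with 'load_shedding' switched on for illustration); treat it as an example rather than a recommended configuration:

.. code-block:: python

    args = {
        "generator_noise": 789456,  # seed for reproducible random noise
        "branch_capacity_factor": {"HV": 0.5, "eHV": 0.7},
        "load_shedding": True,      # only sensible for debugging runs
        "extra_functionality": {},  # optional additional constraints
    }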
94 | 
95 | With 'foreign_lines' you can choose to model the foreign lines as DC links (e.g. to avoid loop flows).
96 | 
97 | 
98 | References
99 | ==========
100 | 
101 | .. [NEP] Übertragungsnetzbetreiber Deutschland (2021):
102 |     *Netzentwicklungsplan Strom 2035*, Version 2021, 1. Entwurf. 2021.
103 | 
104 | .. [Hoersch] Jonas Hoersch et al. (2017):
105 |     *The role of spatial scale in joint optimisations of generation and transmission for European highly renewable scenarios*. 2017.
106 |     ``_
107 | 
108 | .. [Pineda] Salvador Pineda et al. (2018):
109 |     *Chronological Time-Period Clustering for Optimal Capacity Expansion Planning With Storage*. 2018.
110 |     ``_
111 | 
112 | .. [Kotzur] Leander Kotzur et al. (2018):
113 |     *Time series aggregation for energy system design: Modeling seasonal storage*. 2018.
114 |     `<https://www.sciencedirect.com/science/article/pii/S0306261918300242>`_
115 | 
116 | .. [BGR] Bundesanstalt fuer Geowissenschaften und Rohstoffe et al. (2020):
117 |     *nSpEE-DS - Teilprojekt Bewertungskriterien und Potenzialabschätzung*. 2020.
118 |     ``_
--------------------------------------------------------------------------------
/doc/whatsnew.rst:
--------------------------------------------------------------------------------
1 | What's New
2 | ~~~~~~~~~~
3 | 
4 | These are new features and improvements of note in each release.
5 | 
6 | .. contents:: `Releases`
7 |    :depth: 1
8 |    :local:
9 |    :backlinks: top
10 | 
11 | .. include:: whatsnew/v0_9_0.rst
12 | .. include:: whatsnew/v0_8_0.rst
13 | .. include:: whatsnew/v0_7_2.rst
14 | .. include:: whatsnew/v0_7_1.rst
15 | .. include:: whatsnew/v0_7_0.rst
16 | .. include:: whatsnew/v0_6_1.rst
17 | .. include:: whatsnew/v0_6.rst
18 | .. include:: whatsnew/v0_5_1.rst
19 | .. include:: whatsnew/v0_5.rst
20 | .. include:: whatsnew/v0_4.rst
21 | .. include:: whatsnew/v0_3.rst
22 | .. include:: whatsnew/v0_2.rst
23 | .. include:: whatsnew/v0_1.rst
--------------------------------------------------------------------------------
/doc/whatsnew/v0_1.rst:
--------------------------------------------------------------------------------
1 | Release 0.1 (June 30, 2017)
2 | +++++++++++++++++++++++++++
3 | 
4 | **First release of eTraGo**
--------------------------------------------------------------------------------
/doc/whatsnew/v0_2.rst:
--------------------------------------------------------------------------------
1 | Release 0.2 (July 20, 2017)
2 | +++++++++++++++++++++++++++
3 | 
4 | **This is the version 0.2 of eTraGo.**
5 | 
6 | 
7 | This new version can be installed via pip using a setup.py.
8 | Moreover, there have been minor adjustments such as the creation of a
9 | callable etrago function.
10 | 
11 | Added features
12 | --------------
13 | * install via pip and a setup.py
14 | * callable etrago function
--------------------------------------------------------------------------------
/doc/whatsnew/v0_3.rst:
--------------------------------------------------------------------------------
1 | 
2 | Release 0.3 (September 8, 2017)
3 | +++++++++++++++++++++++++++++++
4 | 
5 | **Release introducing k-means clustering and several additional functionalities**
6 | 
7 | 
8 | Added features
9 | --------------
10 | * k-means clustering
--------------------------------------------------------------------------------
/doc/whatsnew/v0_4.rst:
--------------------------------------------------------------------------------
1 | Release 0.4 (October 12, 2017)
2 | ++++++++++++++++++++++++++++++
3 | **eTraGo integrates ego.powerflow functionalities**
4 | 
5 | 
6 | Release 0.4 is mainly the merging of ego.powerflow into eTraGo.
7 | Additionally, some restructuring has been carried out, plotting functions
8 | have been updated and a first approach to the documentation was set up.
9 | 
10 | Other changes
11 | -------------
12 | * merging of ego.powerflow into eTraGo
--------------------------------------------------------------------------------
/doc/whatsnew/v0_5.rst:
--------------------------------------------------------------------------------
1 | Release 0.5 (December 08, 2017)
2 | ++++++++++++++++++++++++++++++++
3 | eTraGo works with PyPSA 0.11.0.
4 | 
5 | 
6 | Added features
7 | --------------
8 | * Readthedocs documentation
9 | * Result export to a postgresql database, in particular to the open energy database (oedb)
10 | * The parallelisation function hands over the SOC of storages to the following problem.
11 | * New plot for displaying voltage deviations at network buses.
12 | * Line loading plot displays the direction of power flows.
13 | 
14 | 
15 | Bug fixes
16 | ---------
17 | * k-means clustering got a more suitable scaling factor concerning calculations on the 110kV grid.
18 | * k-means weighting of the buses is a bit more robust to changes in the data structure.
19 | * The list of carriers is imported again into the PyPSA network from the oedb data model.
20 | 
21 | 
22 | Other changes
23 | -------------
24 | * update PyPSA version from 0.8.0 to PyPSA 0.11.0
25 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_5_1.rst:
--------------------------------------------------------------------------------
1 | Release 0.5.1 (February 01, 2018)
2 | ++++++++++++++++++++++++++++++++++
3 | eTraGo works with ego.io 0.3.0
4 | 
5 | 
6 | Added features
7 | --------------
8 | * The result export to the oedb was improved. Now, a safe tag can be set in order to state that the result set shall be versioned and moved to the schema 'grid'.
9 | * The new database sessionmaker of ego.io 0.3.0 is enabled, which gets rid of the dependency on oemof.db.
10 | * It is possible to skip snapshots in order to simplify the problem in a straightforward way.
11 | 
12 | 
13 | Other news
14 | -------------
15 | * eTraGo will be available on PyPI
16 | * eTraGo 0.5.1 will be used for eGo 0.0.1
17 | 
18 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_6.rst:
--------------------------------------------------------------------------------
1 | Release 0.6 (June 27, 2018)
2 | ++++++++++++++++++++++++++++
3 | eTraGo now enables combined grid and storage expansion, snapshot clustering and the consideration of exogenous grid expansion.
4 | 
5 | Added features
6 | --------------
7 | * Apart from optimizing the investment in storages, it is now also possible to optimize grid expansion investments. In this context we added an argument 'extendable' which expects an array of the different component types you want to optimize. This argument corresponds to functions in the new extendable.py file. It is possible to choose from expansion strategies which are defined within that sub-package. Capital costs for new grid components can be defined and are annualized by means of interest rate, component lifetime and operation period.
8 | * The k-means network clustering ('network_clustering_kmeans') has been refactored. It is now possible to reproduce busmaps by csv-importing already defined busmaps. Consequently, it is possible to write busmaps. Here the argument 'load_cluster' was introduced. Moreover, it is possible to read and write bus weightings. This helps, e.g., to run a future scenario using a bus weighting of the status quo. Moreover, the remove_stubs function from PyPSA is now easily usable in eTraGo.
9 | * The snapshot_clustering can now be used in order to reduce the temporal complexity. Typical days are taken in order to represent the entire year. Here the package tsam (developed by Leander Kotzur) is used. By default, a hierarchical clustering method is used, as described e.g. by Nahmacher et al. (see: https://www.sciencedirect.com/science/article/pii/S0360544216308556).
10 | * Scenario variations from the oedb can be introduced. The argument 'scn_extension' will activate an extension scenario which adds components such as lines or generators to the base scenario. The 'scn_decommissioning' argument states whether you want to remove existing components. Right now, two scenarios are accessible in the oedb which represent the grid expansion (and the corresponding removal of existing equipment) planned by the German network development plan.
11 | * Our data model at the oedb represents Germany and the electrical neighbors. If you consider planned grid expansion to Norway and Belgium, you would most probably want to also include these countries as electrical neighbors with their aggregated generation and demand characteristics. The argument 'add_Belgium_Norway' was therefore introduced. Once activated, it will add the countries to the model.
12 | * DC links are now also modelled as PyPSA DC links. Consequently, Sweden is now connected by a DC link.
13 | 
14 | Other changes
15 | -------------
16 | * The plotting sub-package was amplified and enhanced by new plotting functions and improvements of existing ones (e.g. a legend was introduced for the plotting function storage_expansion())
17 | * The code now mostly complies with the PEP 8 standard
18 | * Documentation was improved (ongoing work in progress) considering doc strings for functions and the rtd documentation web site
19 | * The io was slightly reformatted and restructured.
--------------------------------------------------------------------------------
/doc/whatsnew/v0_6_1.rst:
--------------------------------------------------------------------------------
1 | Release 0.6.1 (July 18, 2018)
2 | ++++++++++++++++++++++++++++++
3 | eTraGo works with PyPI and is suitable for eGo 0.2.0
4 | 
5 | Added features
6 | --------------
7 | * An installation issue when installing from PyPI was fixed.
8 | * The random noise function was improved. Now you set a (reproducible) random seed.
9 | * snapshot.weightings are used within the plotting functions
10 | * Bug fix for the k-means clustering with respect to the aggregation of p_max_pu values of variable generators. They are now weighted by their p_nom.
11 | 
12 | 
13 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_7_0.rst:
--------------------------------------------------------------------------------
1 | Release 0.7.0 (September 6, 2018)
2 | ++++++++++++++++++++++++++++++++++
3 | eTraGo is able to produce feasible non-linear power flows based on optimization results and allows the disaggregation of clustered results to the original spatial complexity.
4 | 
5 | Added features
6 | ---------------
7 | 
8 | * The pf_post_lopf function was improved. Due to changes in the data set, the non-linear power flow (pf) now creates feasible solutions. If network optimization is turned on, a second lopf, which regards the updated reactances and optimizes only dispatch, is performed before the pf is executed.
9 | * The disaggregation method was included. When using a network clustering method to reduce the spatial complexity of the given network, a disaggregation method can be used afterwards to distribute the nodal results (generation and storage timeseries) to the original complexity. The method 'disaggregation': 'uniform' can be used as an interface functionality for distribution grid planning tools like eDisGo.
10 | * For the network expansion it is now additionally possible to optimize only the German power lines or only the crossborder lines. Moreover, one can choose to optimize only a predefined set of power lines which are identified by a worst-case analysis beforehand.
11 | * Intertemporal constraints can be applied to certain power plants. For different technologies, certain parameters, i.e. 'start_up_cost', 'start_up_fuel', 'min_up_time' and 'min_down_time', are defined in the ramp_limits function.
12 | * Crossborder lines can now easily be modelled as 'DC' links. Moreover, the capacities of these lines can be adjusted with respect to an ACER report on thermal as well as net transfer capacities.
13 | * Thanks to @jankaeh, the grid topology within the cities of Stuttgart, Munich and Hannover was manually improved. Prospectively, this function should become obsolete once openstreetmap and/or osmTGmod achieve better data coverage.
14 | * As an alternative to the normal editing of the calculation settings (args) within appl.py, it is now possible to load an args.json file.
15 | 
16 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_7_1.rst:
--------------------------------------------------------------------------------
1 | Release 0.7.1 (October 25, 2018)
2 | ++++++++++++++++++++++++++++++++
3 | A minor release adding new options for additional constraints, modelling assumptions and plotting.
4 | 
5 | Added features
6 | --------------
7 | 
8 | * Two extra functionalities were introduced in order to apply constraints concerning a minimal share of renewable energy and a global upper bound for grid expansion. You can activate these functions in the 'args' of the etrago() function.
9 | * The branch_capacity_factor can now be defined separately for the high and extra high voltage level in order to address the (n-1) criteria more accurately.
10 | * There are some more plotting functions, e.g. for plotting the state-of-charge and dispatch of storage units.
11 | * Storage capacities in foreign countries can easily be optimized.
12 | * By default, the maximum expansion of each line and transformer is set to four times its original capacity. Being an argument of the extendable() function, it can easily be adjusted.
13 | * k-means clustered results can now also be exported to the oedb.
14 | 
15 | 
16 | 
17 | 
18 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_7_2.rst:
--------------------------------------------------------------------------------
1 | Release 0.7.2 (June 15, 2020)
2 | ++++++++++++++++++++++++++++++
3 | A minor release adding the following features.
4 | 
5 | Added features
6 | --------------
7 | 
8 | * For single use of eTraGo (not as a sub-module of eGo), we recommend using the newest minor data release 'gridversion': 'v0.4.6'. This data release includes some minor bug fixes, but it is not consistent with the data on the MV and LV levels. Hence, the modelling results are only adequate for the HV and EHV levels when applying solely the tool eTraGo.
9 | * Snapshot clustering now includes an approach to model seasonal storage as in Kotzur et al., 2018 (https://www.sciencedirect.com/science/article/pii/S0306261918300242). Moreover, the method may include extreme periods using an option of the tsam package.
10 | * OSM maps can now be used for background plotting.
11 | * The iterate_lopf function enables adequately modelling the reactances when expanding the grid.
12 | * Important bug fix for the adjustment of reactances when harmonizing the voltage level in the k-means network clustering.
13 | * Multiple extra_functionalities can now easily be called at once.
14 | * Various minor changes, such as specifying the installation requirements for flawless performance.
15 | 
16 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_8_0.rst:
--------------------------------------------------------------------------------
1 | Release 0.8.0 (April 8, 2021)
2 | ++++++++++++++++++++++++++++++
3 | eTraGo now has a more object-oriented programming design.
4 | 
5 | Added features
6 | --------------
7 | 
8 | * eTraGo uses PyPSA version 0.17.1 directly; the fork is not needed anymore. The updated PyPSA version includes various features, e.g. running a lopf without using pyomo, which is faster and needs less memory.
9 | * (n-1)-security factors are set as line/transformer parameters s_max_pu instead of adding the additional argument s_nom_original
10 | * There is now one central plotting function for all grid topology plots, which also allows combining different results (e.g. plotting storage expansion and line expansion at once)
11 | * eTraGo is now compatible with Python 3.7
12 | * A bug in setting the line_length_factor in the k-means clustering was fixed.
13 | 
14 | 
15 | 
--------------------------------------------------------------------------------
/doc/whatsnew/v0_9_0.rst:
--------------------------------------------------------------------------------
1 | Release 0.9.0 (November 21, 2023)
2 | +++++++++++++++++++++++++++++++++
3 | 
4 | Added features
5 | --------------
6 | 
7 | * eTraGo is now compatible with Python 3.8
8 | * eTraGo can now import and optimize networks that include other energy sectors such as gas, heating and mobility
9 | * Various flexibility options from different energy sectors can be considered in the optimization:
10 |   - Weather dependent capacity of transmission lines (Dynamic Line Rating)
11 |   - Demand Side Management
12 |   - Flexible charging of electric vehicles
13 |   - Heat and hydrogen stores
14 |   - Power2Hydrogen, Hydrogen2Power
15 |   - Methanation and Steam Methane Reforming
16 | * eTraGo arguments can now be partially provided and updated
17 | * eTraGo can now import data models from databases without using ego.io
18 | * Existing clustering methods were adapted to be able to reduce the complexity of the non-electrical sectors
19 | * Improvement of the ehv clustering (much faster now)
20 | * A new clustering method named "k-medoids Dijkstra Clustering" (can be called by "kmedoids-dijkstra") was implemented. This method considers the electrical distance between the buses in the network. It is also available for the methane grid.
21 | * It is possible to select whether foreign buses are considered during the clustering process.
22 | * The number of CPUs used to perform the clustering can be provided by the user.
23 | * Some more options are available to conduct a reduction in the temporal dimension:
24 |   - segmentation: clustering of adjacent hours to segments of variable length
25 |   - clustering to typical periods extended to cluster on weeks and months
26 | * A temporal disaggregation is available through a 2-level-approach including a dispatch optimization on the temporally fully resolved model. To limit the RAM usage, you can optionally divide the optimisation problem into a chosen number of slices.
27 | * New plotting functions to visualize the optimization results from all the included energy sectors were implemented 28 | * Functions to analyze results were updated to consider new sectors 29 | -------------------------------------------------------------------------------- /etrago/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | eTraGo 3 | ****** 4 | - Optimization of flexibility options for transmission grids based on PyPSA 5 | 6 | 7 | """ 8 | 9 | 10 | __copyright__ = "Flensburg University of Applied Sciences, Europa-Universität Flensburg, Centre for Sustainable Energy Systems, DLR-Institute for Networked Energy Systems" 11 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 12 | __author__ = "ulfmueller, lukasol, wolfbunke, mariusves, s3pp" 13 | __version__ = "0.4" 14 | 15 | from etrago.network import Etrago 16 | -------------------------------------------------------------------------------- /etrago/analyze/__init__.py: -------------------------------------------------------------------------------- 1 | """ """ 2 | 3 | __copyright__ = "tba" 4 | __license__ = "tba" 5 | __author__ = "tba" 6 | -------------------------------------------------------------------------------- /etrago/args.json: -------------------------------------------------------------------------------- 1 | { 2 | "db": "egon-data", 3 | "gridversion": null, 4 | "method": { 5 | "type": "lopf", 6 | "n_iter": 4, 7 | "formulation": "linopy", 8 | "market_optimization": 9 | { 10 | "active": true, 11 | "market_zones": "status_quo", 12 | "rolling_horizon": { 13 | "planning_horizon": 168, 14 | "overlap": 120 15 | }, 16 | "redispatch": true 17 | } 18 | }, 19 | "pf_post_lopf": { 20 | "active": false, 21 | "add_foreign_lopf": true, 22 | "q_allocation": "p_nom" 23 | }, 24 | "start_snapshot": 1, 25 | "end_snapshot": 2, 26 | "solver": "gurobi", 27 | "solver_options": {}, 28 | "model_formulation": "kirchhoff", 29 | "scn_name": "eGon2035", 30 | "scn_extension": null, 31 | "scn_decommissioning": null, 32 | "lpfile": false, 33 | "csv_export": "results", 34 | "extendable": { 35 | "extendable_components": [ 36 | "as_in_db" 37 | ], 38 | "upper_bounds_grid": { 39 | "grid_max_D": null, 40 | "grid_max_abs_D": { 41 | "380": { 42 | "i": 1020, 43 | "wires": 4, 44 | "circuits": 4 45 | }, 46 | "220": { 47 | "i": 1020, 48 | "wires": 4, 49 | "circuits": 4 50 | }, 51 | "110": { 52 | "i": 1020, 53 | "wires": 4, 54 | "circuits": 2 55 | }, 56 | "dc": 0 57 | }, 58 | "grid_max_foreign": 4, 59 | "grid_max_abs_foreign": null 60 | } 61 | }, 62 | "generator_noise": 789456, 63 | "extra_functionality": {}, 64 | "delete_dispensable_ac_buses": true, 65 | "network_clustering_ehv": { 66 | "active": false, 67 | "busmap": false 68 | }, 69 | "network_clustering": { 70 | "active": true, 71 | "method": "kmedoids-dijkstra", 72 | "n_clusters_AC": 30, 73 | "cluster_foreign_AC": false, 74 | "method_gas": "kmedoids-dijkstra", 75 | "n_clusters_gas": 15, 76 | "n_clusters_h2": 15, 77 | "cluster_foreign_gas": false, 78 | "k_elec_busmap": false, 79 | "k_gas_busmap": false, 80 | "bus_weight_tocsv": null, 81 | "bus_weight_fromcsv": null, 82 | "gas_weight_tocsv": null, 83 | "gas_weight_fromcsv": null, 84 | "line_length_factor": 1, 85 | "remove_stubs": false, 86 | "use_reduced_coordinates": false, 87 | "random_state": 42, 88 | "n_init": 10, 89 | "max_iter": 100, 90 | "tol": 1e-6, 91 | "CPU_cores": 4 92 | }, 93 | "sector_coupled_clustering": { 94 | "active": true, 95 | "carrier_data": { 96 | 
"central_heat": { 97 | "base": [ 98 | "CH4", 99 | "AC" 100 | ], 101 | "strategy": "simultaneous" 102 | } 103 | } 104 | }, 105 | "spatial_disaggregation": null, 106 | "snapshot_clustering": { 107 | "active": false, 108 | "method": "segmentation", 109 | "extreme_periods": null, 110 | "how": "daily", 111 | "storage_constraints": "soc_constraints", 112 | "n_clusters": 5, 113 | "n_segments": 5 114 | }, 115 | "skip_snapshots": 5, 116 | "temporal_disaggregation": { 117 | "active": false, 118 | "no_slices": 8 119 | }, 120 | "branch_capacity_factor": { 121 | "HV": 0.5, 122 | "eHV": 0.7 123 | }, 124 | "load_shedding": false, 125 | "foreign_lines": { 126 | "carrier": "AC", 127 | "capacity": "osmTGmod" 128 | }, 129 | "comments": null 130 | } 131 | -------------------------------------------------------------------------------- /etrago/cluster/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | 3 | """ 4 | 5 | __copyright__ = "tba" 6 | __license__ = "tba" 7 | __author__ = "tba" 8 | -------------------------------------------------------------------------------- /etrago/cluster/spatial.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2016-2023 Flensburg University of Applied Sciences, 3 | # Europa-Universität Flensburg, 4 | # Centre for Sustainable Energy Systems, 5 | # DLR-Institute for Networked Energy Systems 6 | 7 | # This program is free software; you can redistribute it and/or 8 | # modify it under the terms of the GNU Affero General Public License as 9 | # published by the Free Software Foundation; either version 3 of the 10 | # License, or (at your option) any later version. 11 | 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU Affero General Public License for more details. 16 | 17 | # You should have received a copy of the GNU General Public License 18 | # along with this program. If not, see . 
19 | 20 | # File description for read-the-docs 21 | """spatial.py defines the methods to run spatial clustering on networks.""" 22 | 23 | import os 24 | 25 | if "READTHEDOCS" not in os.environ: 26 | from itertools import product 27 | from math import ceil 28 | import logging 29 | import multiprocessing as mp 30 | 31 | from networkx import NetworkXNoPath 32 | from pypsa.clustering.spatial import ( 33 | busmap_by_kmeans, 34 | busmap_by_stubs, 35 | flatten_multiindex, 36 | get_clustering_from_busmap, 37 | ) 38 | from sklearn.cluster import KMeans 39 | from threadpoolctl import threadpool_limits 40 | import networkx as nx 41 | import numpy as np 42 | import pandas as pd 43 | import pypsa 44 | 45 | from etrago.tools.utilities import ( 46 | buses_grid_linked, 47 | buses_of_vlvl, 48 | connected_grid_lines, 49 | connected_transformer, 50 | ) 51 | 52 | logger = logging.getLogger(__name__) 53 | 54 | __copyright__ = ( 55 | "Flensburg University of Applied Sciences, " 56 | "Europa-Universität Flensburg, " 57 | "Centre for Sustainable Energy Systems, " 58 | "DLR-Institute for Networked Energy Systems" 59 | ) 60 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 61 | __author__ = ( 62 | "MGlauer, MarlonSchlemminger, mariusves, BartelsJ, gnn, lukasoldi, " 63 | "ulfmueller, lukasol, ClaraBuettner, CarlosEpia, KathiEsterl, " 64 | "pieterhexen, fwitte, AmeliaNadal, cjbernal071421" 65 | ) 66 | 67 | # TODO: Workaround because of agg 68 | 69 | 70 | def _make_consense_links(x): 71 | """ 72 | Ensure that all elements in the input Series `x` are identical, or that 73 | they are all NaN. 74 | 75 | Parameters 76 | ---------- 77 | x : pandas.Series 78 | A Series containing the values to be checked for consensus. 79 | 80 | Returns 81 | ------- 82 | object 83 | The value of the first element in the Series `x`. 
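    Examples
    --------
    A minimal doctest with made-up values:

    >>> import pandas as pd
    >>> _make_consense_links(pd.Series(["DC", "DC"], name="carrier"))
    'DC'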
84 | """ 85 | 86 | v = x.iat[0] 87 | assert ( 88 | x == v 89 | ).all() or x.isnull().all(), ( 90 | f"No consense in table links column {x.name}: \n {x}" 91 | ) 92 | return v 93 | 94 | 95 | def nan_links(x): 96 | return np.nan 97 | 98 | 99 | def ext_storage(x): 100 | v = any(x[x]) 101 | return v 102 | 103 | 104 | def sum_with_inf(x): 105 | if (x == np.inf).any(): 106 | return np.inf 107 | else: 108 | return x.sum() 109 | 110 | 111 | def strategies_buses(): 112 | return {"geom": nan_links, "country": "first"} 113 | 114 | 115 | def strategies_lines(): 116 | return {"geom": nan_links, "country": "first"} 117 | 118 | 119 | def strategies_one_ports(): 120 | return { 121 | "StorageUnit": { 122 | "marginal_cost": "mean", 123 | "capital_cost": "mean", 124 | "efficiency_dispatch": "mean", 125 | "standing_loss": "mean", 126 | "efficiency_store": "mean", 127 | "p_min_pu": "min", 128 | "p_nom_extendable": ext_storage, 129 | "p_nom_max": sum_with_inf, 130 | "scn_name": "first", 131 | }, 132 | "Store": { 133 | "marginal_cost": "mean", 134 | "capital_cost": "mean", 135 | "standing_loss": "mean", 136 | "e_nom": "sum", 137 | "e_nom_min": "sum", 138 | "e_nom_max": sum_with_inf, 139 | "e_initial": "sum", 140 | "e_min_pu": "mean", 141 | "e_max_pu": "mean", 142 | }, 143 | } 144 | 145 | 146 | def strategies_generators(): 147 | return { 148 | "p_nom_min": "min", 149 | "p_nom_max": sum_with_inf, 150 | "weight": "sum", 151 | "p_nom": "sum", 152 | "p_nom_opt": "sum", 153 | "marginal_cost": "mean", 154 | "capital_cost": "mean", 155 | "e_nom_max": sum_with_inf, 156 | "up_time_before": "mean", 157 | } 158 | 159 | 160 | def strategies_links(): 161 | return { 162 | "scn_name": "first", 163 | "bus0": _make_consense_links, 164 | "bus1": _make_consense_links, 165 | "carrier": _make_consense_links, 166 | "p_nom": "sum", 167 | "p_nom_extendable": "any", 168 | "p_nom_max": sum_with_inf, 169 | "capital_cost": "mean", 170 | "length": "mean", 171 | "geom": nan_links, 172 | "topo": nan_links, 173 | "type": nan_links, 174 | "efficiency": "mean", 175 | "p_nom_min": "sum", 176 | "p_set": "mean", 177 | "p_min_pu": "mean", 178 | "p_max_pu": "mean", 179 | "marginal_cost": "mean", 180 | "terrain_factor": _make_consense_links, 181 | "p_nom_opt": "mean", 182 | "country": nan_links, 183 | "build_year": "mean", 184 | "lifetime": "mean", 185 | "min_up_time": "mean", 186 | "min_down_time": "mean", 187 | "up_time_before": "mean", 188 | "down_time_before": "mean", 189 | "committable": "all", 190 | } 191 | 192 | 193 | def group_links(network, with_time=True, carriers=None, cus_strateg=dict()): 194 | """ 195 | Aggregate network.links and network.links_t after any kind of clustering 196 | 197 | Parameters 198 | ---------- 199 | network : pypsa.Network object 200 | Container for all network components. 201 | with_time : bool 202 | says if the network object contains timedependent series. 203 | carriers : list of strings 204 | Describe which type of carriers should be aggregated. The default is 205 | None. 
206 | strategies : dictionary 207 | custom strategies to perform the aggregation 208 | 209 | Returns 210 | ------- 211 | new_df : 212 | links aggregated based on bus0, bus1 and carrier 213 | new_pnl : 214 | links time series aggregated 215 | """ 216 | 217 | def normed_or_uniform(x): 218 | return ( 219 | x / x.sum() 220 | if x.sum(skipna=False) > 0 221 | else pd.Series(1.0 / len(x), x.index) 222 | ) 223 | 224 | def arrange_dc_bus0_bus1(network): 225 | dc_links = network.links[network.links.carrier == "DC"].copy() 226 | dc_links["n0"] = dc_links.apply( 227 | lambda x: x.bus0 if x.bus0 < x.bus1 else x.bus1, axis=1 228 | ) 229 | dc_links["n1"] = dc_links.apply( 230 | lambda x: x.bus0 if x.bus0 > x.bus1 else x.bus1, axis=1 231 | ) 232 | dc_links["bus0"] = dc_links["n0"] 233 | dc_links["bus1"] = dc_links["n1"] 234 | dc_links.drop(columns=["n0", "n1"], inplace=True) 235 | 236 | network.links.drop(index=dc_links.index, inplace=True) 237 | network.links = pd.concat([network.links, dc_links]) 238 | 239 | return network 240 | 241 | network = arrange_dc_bus0_bus1(network) 242 | 243 | if carriers is None: 244 | carriers = network.links.carrier.unique() 245 | 246 | links_agg_b = network.links.carrier.isin(carriers) 247 | links = network.links.loc[links_agg_b] 248 | grouper = [links.bus0, links.bus1, links.carrier] 249 | 250 | weighting = links.p_nom.groupby(grouper, axis=0).transform( 251 | normed_or_uniform 252 | ) 253 | strategies = strategies_links() 254 | strategies.update(cus_strateg) 255 | strategies.pop("topo") 256 | strategies.pop("geom") 257 | 258 | new_df = links.groupby(grouper).agg(strategies) 259 | new_df.index = flatten_multiindex(new_df.index).rename("name") 260 | new_df = pd.concat( 261 | [new_df, network.links.loc[~links_agg_b]], axis=0, sort=False 262 | ) 263 | new_df["new_id"] = np.arange(len(new_df)).astype(str) 264 | cluster_id = new_df["new_id"].to_dict() 265 | new_df.set_index("new_id", inplace=True) 266 | new_df.index = new_df.index.rename("Link") 267 | 268 | new_pnl = dict() 269 | if with_time: 270 | for attr, df in network.links_t.items(): 271 | pnl_links_agg_b = df.columns.to_series().map(links_agg_b) 272 | df_agg = df.loc[:, pnl_links_agg_b].astype(float) 273 | if not df_agg.empty: 274 | if attr in ["efficiency", "p_max_pu", "p_min_pu"]: 275 | df_agg = df_agg.multiply( 276 | weighting.loc[df_agg.columns], axis=1 277 | ) 278 | pnl_df = df_agg.T.groupby(grouper).sum().T 279 | pnl_df.columns = flatten_multiindex(pnl_df.columns).rename( 280 | "name" 281 | ) 282 | new_pnl[attr] = pd.concat( 283 | [df.loc[:, ~pnl_links_agg_b], pnl_df], axis=1, sort=False 284 | ) 285 | new_pnl[attr].columns = new_pnl[attr].columns.map(cluster_id) 286 | else: 287 | new_pnl[attr] = network.links_t[attr] 288 | 289 | new_pnl = pypsa.descriptors.Dict(new_pnl) 290 | 291 | return new_df, new_pnl 292 | 293 | 294 | def graph_from_edges(edges): 295 | """ 296 | Constructs an undirected multigraph from a list containing data on 297 | weighted edges. 298 | 299 | Parameters 300 | ---------- 301 | edges : list 302 | List of tuples each containing first node, second node, weight, key. 303 | 304 | Returns 305 | ------- 306 | M : :class:`networkx.classes.multigraph.MultiGraph` 307 | """ 308 | 309 | M = nx.MultiGraph() 310 | 311 | for e in edges: 312 | n0, n1, weight, key = e 313 | 314 | M.add_edge(n0, n1, weight=weight, key=key) 315 | 316 | return M 317 | 318 | 319 | def gen(nodes, n, graph): 320 | # TODO There could be a more convenient way of doing this. 
This generator's
321 | # single purpose is to prepare data for multiprocessing's starmap function.
322 |     """
323 |     Generator for applying multiprocessing.
324 | 
325 |     Parameters
326 |     ----------
327 |     nodes : list
328 |         List of nodes in the system.
329 |     n : int
330 |         Number of desired multiprocessing units.
331 |     graph : :class:`networkx.classes.multigraph.MultiGraph`
332 |         Graph representation of an electrical grid.
333 | 
334 |     Yields
335 |     ------
336 |     tuple
337 |         A chunk of at most `n` nodes together with a copy of the graph.
338 |     """
339 |     g = graph.copy()
340 | 
341 |     for i in range(0, len(nodes), n):
342 |         yield (nodes[i : i + n], g)
343 | 
344 | 
345 | def shortest_path(paths, graph):
346 |     """
347 |     Finds the minimum path lengths between node pairs defined in paths.
348 | 
349 |     Parameters
350 |     ----------
351 |     paths : list
352 |         List of pairs containing a source and a target node
353 |     graph : :class:`networkx.classes.multigraph.MultiGraph`
354 |         Graph representation of an electrical grid.
355 | 
356 |     Returns
357 |     -------
358 |     df : pd.DataFrame
359 |         DataFrame holding source and target node and the minimum path length.
360 |     """
361 | 
362 |     idxnames = ["source", "target"]
363 |     idx = pd.MultiIndex.from_tuples(paths, names=idxnames)
364 |     df = pd.DataFrame(index=idx, columns=["path_length"])
365 |     df.sort_index(inplace=True)
366 | 
367 |     df_isna = df.isnull()
368 |     for s, t in paths:
369 |         while df_isna.loc[(s, t), "path_length"]:
370 |             try:
371 |                 s_to_other = nx.single_source_dijkstra_path_length(graph, s)
372 |                 for t in idx.levels[1]:
373 |                     if t in s_to_other:
374 |                         df.loc[(s, t), "path_length"] = s_to_other[t]
375 |                     else:
376 |                         df.loc[(s, t), "path_length"] = np.inf
377 |             except NetworkXNoPath:
378 |                 continue
379 |             df_isna = df.isnull()
380 | 
381 |     return df
382 | 
383 | 
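# Illustrative use of graph_from_edges() and shortest_path() with made-up
# node names and line lengths; a sketch for orientation, not part of the
# actual clustering pipeline:
#
#   M = graph_from_edges([("a", "b", 1.0, "l1"), ("b", "c", 2.0, "l2")])
#   df = shortest_path([("a", "c")], M)
#   assert df.loc[("a", "c"), "path_length"] == 3.0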
384 | def busmap_by_shortest_path(etrago, fromlvl, tolvl, cpu_cores=4):
385 |     """
386 |     Creates a busmap for the EHV-Clustering between voltage levels based
387 |     on Dijkstra's shortest path, mapping each bus below the extra-high
388 |     voltage level (bus0) to its closest extra-high voltage bus (bus1),
389 |     considering the path length between them.
390 |     An AssertionError occurs if buses with a voltage level are not covered
391 |     by the input lists 'fromlvl' or 'tolvl'.
392 | 
393 |     Parameters
394 |     ----------
395 |     etrago : Etrago
396 |         An instance of the Etrago class, providing the network.
397 |     fromlvl : list
398 |         List of voltage-levels to cluster.
399 |     tolvl : list
400 |         List of voltage-levels to remain.
401 |     cpu_cores : int
402 |         Number of CPU-cores.
403 | 
404 |     Returns
405 |     -------
406 |     busmap : dict
407 |         Maps the bus ids of the original network to the bus ids of the
408 |         remaining extra-high voltage network.
411 |     """
412 | 
413 |     # data preparation
414 |     s_buses = buses_grid_linked(etrago.network, fromlvl)
415 |     lines = connected_grid_lines(etrago.network, s_buses)
416 |     transformer = connected_transformer(etrago.network, s_buses)
417 |     mask = transformer.bus1.isin(buses_of_vlvl(etrago.network, tolvl))
418 | 
419 |     dc = etrago.network.links[etrago.network.links.carrier == "DC"]
420 |     dc.index = "DC_" + dc.index
421 |     lines_plus_dc = pd.concat([lines, dc])
422 |     lines_plus_dc = lines_plus_dc[etrago.network.lines.columns]
423 |     lines_plus_dc["carrier"] = "AC"
424 | 
425 |     # temporary end points, later replaced by bus1 pendant
426 |     t_buses = transformer[mask].bus0
427 | 
428 |     # create all possible pathways
429 |     ppaths = list(product(s_buses, t_buses))
430 | 
431 |     # graph creation
432 |     edges = [
433 |         (row.bus0, row.bus1, row.length, ix)
434 |         for ix, row in lines_plus_dc.iterrows()
435 |     ]
436 |     M = graph_from_edges(edges)
437 | 
438 |     # applying multiprocessing
439 |     p = mp.Pool(cpu_cores)
440 | 
441 |     chunksize = ceil(len(ppaths) / cpu_cores)
442 |     container = p.starmap(shortest_path, gen(ppaths, chunksize, M))
443 |     df = pd.concat(container)
444 | 
445 |     # post processing
446 |     df.sort_index(inplace=True)
447 |     df = df.fillna(10000000)
448 | 
449 |     mask = df.groupby(level="source")["path_length"].idxmin()
450 |     df = df.loc[mask, :]
451 | 
452 |     # rename temporary endpoints
453 |     df.reset_index(inplace=True)
454 |     df.target = df.target.map(
455 |         dict(
456 |             zip(
457 |                 etrago.network.transformers.bus0,
458 |                 etrago.network.transformers.bus1,
459 |             )
460 |         )
461 |     )
462 | 
463 |     # append to busmap buses only connected to transformer
464 |     transformer = etrago.network.transformers
465 |     idx = list(
466 |         set(buses_of_vlvl(etrago.network, fromlvl)).symmetric_difference(
467 |             set(s_buses)
468 |         )
469 |     )
470 |     mask = transformer.bus0.isin(idx)
471 | 
472 |     toappend = pd.DataFrame(
473 |         list(zip(transformer[mask].bus0, transformer[mask].bus1)),
474 |         columns=["source", "target"],
475 |     )
476 |     toappend["path_length"] = 0
477 | 
478 |     df = pd.concat([df, toappend], ignore_index=True, axis=0)
479 | 
480 |     # append all other buses
481 |     buses = etrago.network.buses[etrago.network.buses.carrier == "AC"]
482 |     mask = buses.index.isin(df.source)
483 | 
484 |     assert (buses[~mask].v_nom.astype(int).isin(tolvl)).all()
485 | 
486 |     tofill = pd.DataFrame([buses.index[~mask]] * 2).transpose()
487 |     tofill.columns = ["source", "target"]
488 |     tofill["path_length"] = 0
489 | 
490 |     df = pd.concat([df, tofill], ignore_index=True, axis=0)
491 |     df.drop_duplicates(inplace=True)
492 | 
493 |     df.rename(columns={"source": "bus0", "target": "bus1"}, inplace=True)
494 | 
495 |     busmap = pd.Series(df.bus1.values, index=df.bus0).to_dict()
496 | 
497 |     return busmap
498 | 
499 | 
500 | def busmap_ehv_clustering(etrago):
501 |     """
502 |     Generates a busmap that can be used to cluster an electrical network to
503 |     only extra high voltage buses. If a path to a busmap in a csv file is
504 |     passed in the arguments, it loads the csv file and returns it.
505 | 
506 |     Parameters
507 |     ----------
508 |     etrago : Etrago
509 |         An instance of the Etrago class
510 | 
511 |     Returns
512 |     -------
513 |     busmap : dict
514 |         Maps old bus_ids to new bus_ids.
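    Examples
    --------
    The returned mapping has the form shown below (illustrative bus ids
    only; real ids depend on the data model)::

        {"101": "7", "102": "7", "103": "12"}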
515 | """ 516 | 517 | if etrago.args["network_clustering_ehv"]["busmap"] is False: 518 | cpu_cores = etrago.args["network_clustering"]["CPU_cores"] 519 | if cpu_cores == "max": 520 | cpu_cores = mp.cpu_count() 521 | else: 522 | cpu_cores = int(cpu_cores) 523 | 524 | busmap = busmap_by_shortest_path( 525 | etrago, 526 | fromlvl=[110], 527 | tolvl=[220, 380, 400, 450], 528 | cpu_cores=cpu_cores, 529 | ) 530 | pd.DataFrame(busmap.items(), columns=["bus0", "bus1"]).to_csv( 531 | "ehv_elecgrid_busmap_result.csv", 532 | index=False, 533 | ) 534 | else: 535 | busmap = pd.read_csv(etrago.args["network_clustering_ehv"]["busmap"]) 536 | busmap = pd.Series( 537 | busmap.bus1.apply(str).values, index=busmap.bus0.apply(str) 538 | ).to_dict() 539 | 540 | return busmap 541 | 542 | 543 | def kmean_clustering(etrago, selected_network, weight, n_clusters): 544 | """ 545 | Main function of the k-mean clustering approach. Maps an original 546 | network to a new one with adjustable number of nodes and new coordinates. 547 | 548 | Parameters 549 | ---------- 550 | network : pypsa.Network 551 | Container for all network components. 552 | n_clusters : int 553 | Desired number of clusters. 554 | load_cluster : boolean 555 | Loads cluster coordinates from a former calculation. 556 | line_length_factor : float 557 | Factor to multiply the crow-flies distance between new buses in order 558 | to get new line lengths. 559 | remove_stubs: boolean 560 | Removes stubs and stubby trees (i.e. sequentially reducing dead-ends). 561 | use_reduced_coordinates: boolean 562 | If True, do not average cluster coordinates, but take from busmap. 563 | bus_weight_tocsv : str 564 | Creates a bus weighting based on conventional generation and load 565 | and save it to a csv file. 566 | bus_weight_fromcsv : str 567 | Loads a bus weighting from a csv file to apply it to the clustering 568 | algorithm. 569 | 570 | Returns 571 | ------- 572 | network : pypsa.Network 573 | Container for all network components. 574 | """ 575 | network = etrago.network 576 | kmean_settings = etrago.args["network_clustering"] 577 | 578 | with threadpool_limits(limits=kmean_settings["CPU_cores"], user_api=None): 579 | # remove stubs 580 | if kmean_settings["remove_stubs"]: 581 | network.determine_network_topology() 582 | busmap = busmap_by_stubs(network) 583 | network.generators["weight"] = network.generators["p_nom"] 584 | aggregate_one_ports = network.one_port_components.copy() 585 | aggregate_one_ports.discard("Generator") 586 | 587 | # reset coordinates to the new reduced guys, rather than taking an 588 | # average (copied from pypsa.networkclustering) 589 | if kmean_settings["use_reduced_coordinates"]: 590 | # TODO : FIX THIS HACK THAT HAS UNEXPECTED SIDE-EFFECTS, 591 | # i.e. network is changed in place!! 
592 |                 network.buses.loc[busmap.index, ["x", "y"]] = (
593 |                     network.buses.loc[busmap, ["x", "y"]].values
594 |                 )
595 | 
596 |             clustering = get_clustering_from_busmap(
597 |                 network,
598 |                 busmap,
599 |                 aggregate_generators_weighted=True,
600 |                 one_port_strategies=strategies_one_ports(),
601 |                 generator_strategies=strategies_generators(),
602 |                 aggregate_one_ports=aggregate_one_ports,
603 |                 line_length_factor=kmean_settings["line_length_factor"],
604 |             )
605 |             etrago.network = clustering.network
606 | 
607 |             weight = weight.groupby(busmap.values).sum()
608 | 
609 |         # k-means clustering
610 |         busmap = busmap_by_kmeans(
611 |             selected_network,
612 |             bus_weightings=pd.Series(weight),
613 |             n_clusters=n_clusters,
614 |             n_init=kmean_settings["n_init"],
615 |             max_iter=kmean_settings["max_iter"],
616 |             tol=kmean_settings["tol"],
617 |             random_state=kmean_settings["random_state"],
618 |         )
619 | 
620 |     return busmap
621 | 
622 | 
623 | def dijkstras_algorithm(buses, connections, medoid_idx, cpu_cores):
624 |     """
625 |     Function for the combination of k-medoids clustering and Dijkstra's
626 |     algorithm. Creates a busmap assigning the nodes of an original network
627 |     to the nodes of a clustered network, considering the electrical
628 |     distances based on Dijkstra's shortest path.
629 | 
630 |     Parameters
631 |     ----------
632 |     buses : pandas.DataFrame
633 |         Buses of the original network.
634 |     connections : pandas.DataFrame
635 |         Connections of the original network (links or lines).
636 |     medoid_idx : pandas.Series
637 |         Indices of the k-medoids.
638 |     cpu_cores : string
639 |         Number of cores used during multiprocessing ("max" uses all).
640 | 
641 |     Returns
642 |     -------
643 |     busmap : pandas.Series
644 |         Mapping from bus ids to medoid ids.
645 |     """
646 | 
647 |     # original data
648 |     o_buses = buses.index
649 |     # k-medoids centers
650 |     medoid_idx = medoid_idx.astype("str")
651 |     c_buses = medoid_idx.tolist()
652 | 
653 |     # list of all possible pathways
654 |     ppathss = list(product(o_buses, c_buses))
655 | 
656 |     # graph creation
657 |     edges = [
658 |         (row.bus0, row.bus1, row.length, ix)
659 |         for ix, row in connections.iterrows()
660 |     ]
661 |     M = graph_from_edges(edges)
662 | 
663 |     # processor count
664 |     if cpu_cores == "max":
665 |         cpu_cores = mp.cpu_count()
666 |     else:
667 |         cpu_cores = int(cpu_cores)
668 | 
669 |     # calculation of shortest path between original points and k-medoids
670 |     # centers using multiprocessing
671 |     p = mp.Pool(cpu_cores)
672 |     chunksize = ceil(len(ppathss) / cpu_cores)
673 |     container = p.starmap(shortest_path, gen(ppathss, chunksize, M))
674 |     df = pd.concat(container)
675 | 
676 |     # assignment of data points to closest k-medoids centers
677 |     df["path_length"] = pd.to_numeric(df["path_length"])
678 |     mask = df.groupby(level="source")["path_length"].idxmin()
679 |     df_dijkstra = df.loc[mask, :]
680 |     df_dijkstra.reset_index(inplace=True)
681 | 
682 |     # delete double entries in df due to multiprocessing
683 |     df_dijkstra.drop_duplicates(inplace=True)
684 |     df_dijkstra.index = df_dijkstra["source"]
685 | 
686 |     # creation of new busmap with final assignment (format: medoid indices)
687 |     busmap_ind = pd.Series(df_dijkstra["target"], dtype=object).rename(
688 |         "final_assignment", inplace=True
689 |     )
690 |     busmap_ind.index = df_dijkstra["source"]
691 | 
692 |     # adaption of busmap to format with labels (necessary for aggregation)
693 |     busmap = busmap_ind.copy()
694 |     mapping = pd.Series(index=medoid_idx, data=medoid_idx.index)
695 |     busmap = busmap_ind.map(mapping).astype(str)
696 |     busmap.index = list(busmap.index.astype(str))
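    # at this point `busmap` maps every original bus id (as string) to the
    # cluster label of its electrically closest medoid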
697 | 
698 |     return busmap
699 | 
700 | 
701 | def kmedoids_dijkstra_clustering(
702 |     etrago, buses, connections, weight, n_clusters
703 | ):
704 |     """
705 |     Applies a k-medoids clustering on the given network and calls the
706 |     function conducting Dijkstra's algorithm afterwards to consider the
707 |     network's topology in the spatial clustering.
708 | 
709 |     Parameters
710 |     ----------
711 |     etrago : Etrago
712 |         An instance of the Etrago class
713 |     buses : pandas.DataFrame
714 |         DataFrame with information about the buses of the network.
715 |     connections : pandas.DataFrame
716 |         DataFrame with information about the connections of the network
717 |         (links or lines).
718 |     weight : pandas.Series
719 |         Series with the weight for each bus.
720 |     n_clusters : int
721 |         The number of clusters to create.
722 | 
723 |     Returns
724 |     -------
725 |     Tuple containing:
726 |         busmap : pandas.Series
727 |             Series containing the mapping of buses to their respective medoids
728 |         medoid_idx : pandas.Series
729 |             Series containing the medoid indices
730 |     """
731 | 
732 |     settings = etrago.args["network_clustering"]
733 | 
734 |     # n_jobs was deprecated for the function fit(). scikit-learn recommends
735 |     # to use threadpool_limits:
736 |     # https://scikit-learn.org/stable/computing/parallelism.html
737 |     with threadpool_limits(limits=settings["CPU_cores"], user_api=None):
738 |         # remove stubs
739 |         if settings["remove_stubs"]:
740 |             logger.info(
741 |                 """options remove_stubs and use_reduced_coordinates not
742 |                 reasonable for k-medoids Dijkstra Clustering"""
743 |             )
744 | 
745 |         bus_weightings = pd.Series(weight)
746 |         buses_i = buses.index
747 |         points = buses.loc[buses_i, ["x", "y"]].values.repeat(
748 |             bus_weightings.reindex(buses_i).astype(int), axis=0
749 |         )
750 | 
751 |         kmeans = KMeans(
752 |             init="k-means++",
753 |             n_clusters=n_clusters,
754 |             n_init=settings["n_init"],
755 |             max_iter=settings["max_iter"],
756 |             tol=settings["tol"],
757 |             random_state=settings["random_state"],
758 |         )
759 |         kmeans.fit(points)
760 | 
761 |         busmap = pd.Series(
762 |             data=kmeans.predict(buses.loc[buses_i, ["x", "y"]].values),
763 |             index=buses_i,
764 |             dtype=object,
765 |         )
766 | 
767 |         # identify medoids per cluster -> k-medoids clustering
768 | 
769 |         distances = pd.DataFrame(
770 |             data=kmeans.transform(buses.loc[buses_i, ["x", "y"]].values),
771 |             index=buses_i,
772 |             dtype=object,
773 |         )
774 |         distances = distances.apply(pd.to_numeric)
775 | 
776 |         medoid_idx = distances.idxmin()
777 | 
778 |         if len(busmap) > n_clusters:
779 |             # dijkstra's algorithm
780 |             busmap = dijkstras_algorithm(
781 |                 buses,
782 |                 connections,
783 |                 medoid_idx,
784 |                 etrago.args["network_clustering"]["CPU_cores"],
785 |             )
786 |         elif len(busmap) < n_clusters:
787 |             logger.warning(
788 |                 f"""
789 |                 The number supplied to the parameter n_clusters for
790 |                 {buses.carrier[0]} buses is larger than the actual number of
791 |                 buses in the network.
792 |                 """
793 |             )
794 | 
795 |         busmap.index.name = "bus_id"
796 | 
797 |     return busmap, medoid_idx
798 | 
799 | 
800 | def drop_nan_values(network):
801 |     """
802 |     Drops NaN values after clustering and replaces output time series
803 |     with empty DataFrames.
804 | 
805 |     Parameters
806 |     ----------
807 |     network : pypsa.Network
808 |         Container for all network components.
809 | 
810 |     Returns
811 |     -------
812 |     None.
800 | def drop_nan_values(network):
801 |     """
802 |     Drops NaN values after clustering and replaces output time series
803 |     with empty DataFrames.
804 | 
805 |     Parameters
806 |     ----------
807 |     network : pypsa.Network
808 |         Container for all network components.
809 | 
810 |     Returns
811 |     -------
812 |     None.
813 | 
814 |     """
815 | 
816 |     # Fill NaN values in unit commitment attributes after clustering
817 |     network.links.min_up_time.fillna(0, inplace=True)
818 |     network.links.min_down_time.fillna(0, inplace=True)
819 |     network.links.up_time_before.fillna(0, inplace=True)
820 |     network.links.down_time_before.fillna(0, inplace=True)
821 |     # Replace output time series with empty DataFrames after clustering
822 |     for c in network.iterate_components():
823 |         for pnl in c.attrs[
824 |             (c.attrs.status == "Output") & (c.attrs.varying)
825 |         ].index:
826 |             c.pnl[pnl] = pd.DataFrame(index=network.snapshots)
--------------------------------------------------------------------------------
/etrago/data/unit_commitment.csv:
--------------------------------------------------------------------------------
1 | attribute,OCGT,CCGT,coal,lignite,nuclear
2 | ramp_limit_up,1,1,1,1,0.3
3 | ramp_limit_start_up,0.2,0.45,0.38,0.4,0.5
4 | ramp_limit_shut_down,0.2,0.45,0.38,0.4,0.5
5 | p_min_pu,0.2,0.45,0.325,0.4,0.5
6 | min_up_time,,3,5,7,6
7 | min_down_time,,2,6,6,10
8 | start_up_cost,9.6,34.2,35.64,19.14,16.5
--------------------------------------------------------------------------------
/etrago/disaggregate/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | 
3 | """
4 | 
5 | __copyright__ = "tba"
6 | __license__ = "tba"
7 | __author__ = "tba"
--------------------------------------------------------------------------------
/etrago/disaggregate/temporal.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2023 Flensburg University of Applied Sciences,
3 | # Europa-Universität Flensburg,
4 | # Centre for Sustainable Energy Systems,
5 | # DLR-Institute for Networked Energy Systems
6 | #
7 | # This program is free software; you can redistribute it and/or
8 | # modify it under the terms of the GNU Affero General Public License as
9 | # published by the Free Software Foundation; either version 3 of the
10 | # License, or (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Affero General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Affero General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | 
20 | # File description
21 | """
22 | temporal.py defines the dispatch disaggregation for the etrago object.
23 | """ 24 | import logging 25 | import os 26 | import time 27 | 28 | import pandas as pd 29 | 30 | logger = logging.getLogger(__name__) 31 | 32 | if "READTHEDOCS" not in os.environ: 33 | 34 | from etrago.execute import iterate_lopf 35 | from etrago.tools.constraints import Constraints 36 | 37 | 38 | __copyright__ = ( 39 | "Flensburg University of Applied Sciences, " 40 | "Europa-Universität Flensburg, " 41 | "Centre for Sustainable Energy Systems, " 42 | "DLR-Institute for Networked Energy Systems" 43 | ) 44 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 45 | __author__ = ( 46 | "ulfmueller, s3pp, wolfbunke, mariusves, lukasol, KathiEsterl, " 47 | "ClaraBuettner, CarlosEpia, AmeliaNadal" 48 | ) 49 | 50 | 51 | def dispatch_disaggregation(self): 52 | """ 53 | Function running the tempral disaggregation meaning the optimization 54 | of dispatch in the temporally fully resolved network; therfore, the problem 55 | is reduced to smaller subproblems by slicing the whole considered time span 56 | while keeping inforation on the state of charge of storage units and stores 57 | to ensure compatibility and to reproduce saisonality. 58 | 59 | Returns 60 | ------- 61 | None. 62 | 63 | """ 64 | 65 | if self.args["temporal_disaggregation"]["active"]: 66 | x = time.time() 67 | 68 | if self.args["temporal_disaggregation"]["no_slices"]: 69 | # split dispatch_disaggregation into subproblems 70 | # keep some information on soc in beginning and end of slices 71 | # to ensure compatibility and to reproduce saisonality 72 | 73 | # define number of slices and corresponding slice length 74 | no_slices = self.args["temporal_disaggregation"]["no_slices"] 75 | slice_len = int(len(self.network.snapshots) / no_slices) 76 | 77 | # transition snapshots defining start and end of slices 78 | transits = self.network.snapshots[0::slice_len] 79 | if len(transits) > 1: 80 | transits = transits[1:] 81 | if transits[-1] != self.network.snapshots[-1]: 82 | transits = transits.insert( 83 | (len(transits)), self.network.snapshots[-1] 84 | ) 85 | # for stores, exclude emob and dsm because of their special 86 | # constraints 87 | sto = self.network.stores[ 88 | ~self.network.stores.carrier.isin( 89 | ["battery_storage", "battery storage", "dsm"] 90 | ) 91 | ] 92 | 93 | # save state of charge of storage units and stores at those 94 | # transition snapshots 95 | self.conduct_dispatch_disaggregation = pd.DataFrame( 96 | columns=self.network.storage_units.index.append(sto.index), 97 | index=transits, 98 | ) 99 | for storage in self.network.storage_units.index: 100 | self.conduct_dispatch_disaggregation[storage] = ( 101 | self.network.storage_units_t.state_of_charge[storage] 102 | ) 103 | for store in sto.index: 104 | self.conduct_dispatch_disaggregation[store] = ( 105 | self.network.stores_t.e[store] 106 | ) 107 | 108 | extra_func = self.args["extra_functionality"] 109 | self.args["extra_functionality"] = {} 110 | 111 | load_shedding = self.args["load_shedding"] 112 | if not load_shedding: 113 | self.args["load_shedding"] = True 114 | self.load_shedding(temporal_disaggregation=True) 115 | 116 | iterate_lopf( 117 | self, 118 | Constraints( 119 | self.args, self.conduct_dispatch_disaggregation 120 | ).functionality, 121 | method=self.args["method"], 122 | ) 123 | 124 | # switch to temporally fully resolved network as standard network, 125 | # temporally reduced network is stored in network_tsa 126 | network1 = self.network.copy() 127 | self.network = self.network_tsa.copy() 128 | self.network_tsa = 
129 |         network1 = 0
130 | 
131 |         # keep original settings
132 | 
133 |         if self.args["temporal_disaggregation"]["no_slices"]:
134 |             self.args["extra_functionality"] = extra_func
135 |             self.args["load_shedding"] = load_shedding
136 | 
137 |         self.network.lines["s_nom_extendable"] = self.network_tsa.lines[
138 |             "s_nom_extendable"
139 |         ]
140 |         self.network.links["p_nom_extendable"] = self.network_tsa.links[
141 |             "p_nom_extendable"
142 |         ]
143 |         self.network.transformers.s_nom_extendable = (
144 |             self.network_tsa.transformers.s_nom_extendable
145 |         )
146 |         self.network.storage_units["p_nom_extendable"] = (
147 |             self.network_tsa.storage_units["p_nom_extendable"]
148 |         )
149 |         self.network.stores["e_nom_extendable"] = self.network_tsa.stores[
150 |             "e_nom_extendable"
151 |         ]
152 |         self.network.storage_units.cyclic_state_of_charge = (
153 |             self.network_tsa.storage_units.cyclic_state_of_charge
154 |         )
155 |         self.network.stores.e_cyclic = self.network_tsa.stores.e_cyclic
156 | 
157 |         if self.args["csv_export"]:
158 |             path = self.args["csv_export"]
159 |             self.export_to_csv(path)
160 |             self.export_to_csv(path + "/temporal_disaggregation")
161 | 
162 |         y = time.time()
163 |         z = (y - x) / 60
164 |         logger.info("Time for LOPF [min]: {}".format(round(z, 2)))
--------------------------------------------------------------------------------
/etrago/execute/grid_optimization.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2023 Flensburg University of Applied Sciences,
3 | # Europa-Universität Flensburg,
4 | # Centre for Sustainable Energy Systems,
5 | # DLR-Institute for Networked Energy Systems
6 | #
7 | # This program is free software; you can redistribute it and/or
8 | # modify it under the terms of the GNU Affero General Public License as
9 | # published by the Free Software Foundation; either version 3 of the
10 | # License, or (at your option) any later version.
11 | #
12 | # This program is distributed in the hope that it will be useful,
13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 | # GNU Affero General Public License for more details.
16 | #
17 | # You should have received a copy of the GNU Affero General Public License
18 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
19 | 20 | # File description 21 | """ 22 | Defines the market optimization within eTraGo 23 | """ 24 | import os 25 | 26 | if "READTHEDOCS" not in os.environ: 27 | import logging 28 | 29 | import numpy as np 30 | import pandas as pd 31 | 32 | logger = logging.getLogger(__name__) 33 | 34 | __copyright__ = ( 35 | "Flensburg University of Applied Sciences, " 36 | "Europa-Universität Flensburg, " 37 | "Centre for Sustainable Energy Systems, " 38 | "DLR-Institute for Networked Energy Systems" 39 | ) 40 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 41 | __author__ = "ulfmueller, ClaraBuettner, CarlosEpia" 42 | 43 | 44 | def grid_optimization( 45 | self, 46 | factor_redispatch_cost=1, 47 | management_cost=0, 48 | time_depended_cost=True, 49 | fre_mangement_fee=0, 50 | ): 51 | logger.info("Start building grid optimization model") 52 | 53 | # Drop existing ramping generators 54 | self.network.mremove( 55 | "Generator", 56 | self.network.generators[ 57 | self.network.generators.index.str.contains("ramp") 58 | ].index, 59 | ) 60 | self.network.mremove( 61 | "Link", 62 | self.network.links[ 63 | self.network.links.index.str.contains("ramp") 64 | ].index, 65 | ) 66 | 67 | fix_chp_generation(self) 68 | 69 | add_redispatch_generators( 70 | self, 71 | factor_redispatch_cost, 72 | management_cost, 73 | time_depended_cost, 74 | fre_mangement_fee, 75 | ) 76 | 77 | if not self.args["method"]["market_optimization"]["redispatch"]: 78 | self.network.mremove( 79 | "Generator", 80 | self.network.generators[ 81 | self.network.generators.index.str.contains("ramp") 82 | ].index, 83 | ) 84 | self.network.mremove( 85 | "Link", 86 | self.network.links[ 87 | self.network.links.index.str.contains("ramp") 88 | ].index, 89 | ) 90 | logger.info("Start solving grid optimization model") 91 | 92 | # Replace NaN values in quadratic costs to keep problem linear 93 | self.network.generators.marginal_cost_quadratic.fillna(0.0, inplace=True) 94 | self.network.links.marginal_cost_quadratic.fillna(0.0, inplace=True) 95 | 96 | # Replacevery small values with zero to avoid numerical problems 97 | self.network.generators_t.p_max_pu.where( 98 | self.network.generators_t.p_max_pu.abs() > 1e-5, 99 | other=0.0, 100 | inplace=True, 101 | ) 102 | self.network.generators_t.p_min_pu.where( 103 | self.network.generators_t.p_min_pu.abs() > 1e-5, 104 | other=0.0, 105 | inplace=True, 106 | ) 107 | self.network.links_t.p_max_pu.where( 108 | self.network.links_t.p_max_pu.abs() > 1e-5, other=0.0, inplace=True 109 | ) 110 | 111 | self.network.links_t.p_min_pu.where( 112 | self.network.links_t.p_min_pu.abs() > 1e-5, other=0.0, inplace=True 113 | ) 114 | 115 | self.network.links.loc[ 116 | ( 117 | self.network.links.bus0.isin( 118 | self.network.buses[self.network.buses.country == "GB"].index 119 | ) 120 | ) 121 | & ( 122 | self.network.links.bus1.isin( 123 | self.network.buses[self.network.buses.country == "GB"].index 124 | ) 125 | ) 126 | & (self.network.links.carrier == "DC"), 127 | "p_nom_max", 128 | ] = np.inf 129 | 130 | self.network.storage_units.loc[ 131 | ( 132 | self.network.storage_units.bus.isin( 133 | self.network.buses[self.network.buses.country != "DE"].index 134 | ) 135 | ) 136 | & (self.network.storage_units.carrier == "battery"), 137 | "p_nom_max", 138 | ] = np.inf 139 | 140 | if self.args["method"]["type"] == "lopf": 141 | self.lopf() 142 | else: 143 | self.sclopf( 144 | post_lopf=False, 145 | n_process=4, 146 | delta=0.01, 147 | n_overload=0, 148 | div_ext_lines=False, 149 | ) 150 | 151 | 152 | def 
fix_chp_generation(self): 153 | # Select generator and link components that are fixed after 154 | # the market optimization. 155 | gens_fixed = self.network.generators[ 156 | self.network.generators.carrier.str.endswith("_CHP") 157 | ].index 158 | 159 | links_fixed = self.network.links[ 160 | self.network.links.carrier.str.endswith("_CHP") 161 | ].index 162 | 163 | # Fix generator dispatch from market simulation: 164 | # Set p_max_pu of generators using results from (disaggregated) market 165 | # model 166 | self.network.generators_t.p_max_pu.loc[:, gens_fixed] = ( 167 | self.market_model.generators_t.p[gens_fixed].mul( 168 | 1.01 / self.market_model.generators.p_nom[gens_fixed] 169 | ) 170 | ) 171 | 172 | # Set p_min_pu of generators using results from (disaggregated) market 173 | # model 174 | self.network.generators_t.p_min_pu.loc[:, gens_fixed] = ( 175 | self.market_model.generators_t.p[gens_fixed].mul( 176 | 0.99 / self.market_model.generators.p_nom[gens_fixed] 177 | ) 178 | ) 179 | 180 | # Fix link dispatch (gas turbines) from market simulation 181 | # Set p_max_pu of links using results from (disaggregated) market model 182 | self.network.links_t.p_max_pu.loc[:, links_fixed] = ( 183 | self.market_model.links_t.p0[links_fixed].mul( 184 | 1.01 / self.market_model.links.p_nom[links_fixed] 185 | ) 186 | ) 187 | 188 | # Set p_min_pu of links using results from (disaggregated) market model 189 | self.network.links_t.p_min_pu.loc[:, links_fixed] = ( 190 | self.market_model.links_t.p0[links_fixed].mul( 191 | 0.99 / self.market_model.links.p_nom[links_fixed] 192 | ) 193 | ) 194 | 195 | 196 | def add_redispatch_generators( 197 | self, 198 | factor_redispatch_cost, 199 | management_cost, 200 | time_depended_cost, 201 | fre_mangement_fee, 202 | ): 203 | """Add components and parameters to model redispatch with costs 204 | 205 | This function currently assumes that the market_model includes all 206 | generators and links for the spatial resolution of the grid optimization 207 | 208 | Returns 209 | ------- 210 | None. 211 | 212 | """ 213 | 214 | # Select generator and link components that are considered in redispatch 215 | # all others can be redispatched without any extra costs 216 | gens_redispatch = self.network.generators[ 217 | ( 218 | self.network.generators.carrier.isin( 219 | [ 220 | "coal", 221 | "lignite", 222 | "nuclear", 223 | "oil", 224 | "others", 225 | "reservoir", 226 | "run_of_river", 227 | "solar", 228 | "wind_offshore", 229 | "wind_onshore", 230 | "solar_rooftop", 231 | "biomass", 232 | "OCGT", 233 | ] 234 | ) 235 | & (~self.network.generators.index.str.contains("ramp")) 236 | ) 237 | ].index 238 | 239 | # this function is called here before p_max_pu is modified to set the 240 | # dispatch values from the market optimization. 
241 |     p_max_pu_all = self.network.get_switchable_as_dense(
242 |         "Generator", "p_max_pu"
243 |     ).copy()
244 | 
245 |     links_redispatch = self.network.links[
246 |         (
247 |             self.network.links.carrier.isin(["OCGT", "CCGT"])
248 |             & (~self.network.links.index.str.contains("ramp"))
249 |         )
250 |     ].index
251 | 
252 |     management_cost_carrier = pd.Series(
253 |         index=self.network.generators.loc[gens_redispatch].carrier.unique(),
254 |         data=management_cost,
255 |     )
256 |     management_cost_carrier["OCGT"] = management_cost
257 |     management_cost_carrier["CCGT"] = management_cost
258 |     if fre_mangement_fee:
259 |         management_cost_carrier[
260 |             ["wind_onshore", "wind_offshore", "solar", "solar_rooftop"]
261 |         ] = fre_mangement_fee
262 | 
263 |     management_cost_per_generator = management_cost_carrier.loc[
264 |         self.network.generators.loc[gens_redispatch, "carrier"].values
265 |     ]
266 |     management_cost_per_generator.index = gens_redispatch
267 | 
268 |     management_cost_per_link = management_cost_carrier.loc[
269 |         self.network.links.loc[links_redispatch, "carrier"].values
270 |     ]
271 |     management_cost_per_link.index = links_redispatch
272 | 
273 |     if time_depended_cost:
274 |         management_cost_per_generator = pd.DataFrame(
275 |             index=self.network.snapshots,
276 |             columns=management_cost_per_generator.index,
277 |         )
278 |         management_cost_per_link = pd.DataFrame(
279 |             index=self.network.snapshots,
280 |             columns=management_cost_per_link.index,
281 |         )
282 |         for i in self.network.snapshots:
283 |             management_cost_per_generator.loc[i, :] = (
284 |                 management_cost_carrier.loc[
285 |                     self.network.generators.loc[
286 |                         gens_redispatch, "carrier"
287 |                     ].values
288 |                 ].values
289 |             )
290 | 
291 |             management_cost_per_link.loc[i, :] = management_cost_carrier.loc[
292 |                 self.network.links.loc[links_redispatch, "carrier"].values
293 |             ].values
294 | 
295 |     # Fix generator dispatch from market simulation:
296 |     # Set p_max_pu of generators using results from (disaggregated) market
297 |     # model
298 |     self.network.generators_t.p_max_pu.loc[:, gens_redispatch] = (
299 |         self.market_model.generators_t.p[gens_redispatch].mul(
300 |             1 / self.market_model.generators.p_nom[gens_redispatch]
301 |         )
302 |     )
303 | 
304 |     # Set p_min_pu of generators using results from (disaggregated) market
305 |     # model
306 |     self.network.generators_t.p_min_pu.loc[:, gens_redispatch] = (
307 |         self.market_model.generators_t.p[gens_redispatch].mul(
308 |             1 / self.market_model.generators.p_nom[gens_redispatch]
309 |         )
310 |     )
311 | 
312 |     # Fix link dispatch (gas turbines) from market simulation
313 |     # Set p_max_pu of links using results from (disaggregated) market model
314 |     self.network.links_t.p_max_pu.loc[:, links_redispatch] = (
315 |         self.market_model.links_t.p0[links_redispatch]
316 |         .clip(lower=0.0)
317 |         .mul(1 / self.market_model.links.p_nom[links_redispatch])
318 |     )
319 | 
320 |     # Set p_min_pu of links using results from (disaggregated) market model
321 |     self.network.links_t.p_min_pu.loc[:, links_redispatch] = (
322 |         self.market_model.links_t.p0[links_redispatch]
323 |         .clip(lower=0.0)
324 |         .mul(1 / self.market_model.links.p_nom[links_redispatch])
325 |     )
326 | 
327 |     # Calculate costs for redispatch
328 |     # Extract prices per market zone from market model results
329 |     market_price_per_bus = self.market_model.buses_t.marginal_price.copy()
330 | 
331 |     # Set market price for each disaggregated generator according to its bus;
332 |     # it can be reduced linearly by setting factor_redispatch_cost < 1
333 |     market_price_per_generator = (
334 |         market_price_per_bus.loc[
335 |             :, self.market_model.generators.loc[gens_redispatch, "bus"]
336 |         ]
337 |         * factor_redispatch_cost
338 |     )
339 | 
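# ----------------------------------------------------------------------
# Editor's illustration: how a per-generator price series falls out of
# the per-bus market prices above. Toy numbers; not the eTraGo API.
import pandas as pd

market_price_per_bus = pd.DataFrame(
    {"zone_DE": [40.0, 55.0], "zone_FR": [38.0, 50.0]},
    index=pd.RangeIndex(2, name="snapshot"),
)
generator_bus = pd.Series({"gen_a": "zone_DE", "gen_b": "zone_FR"})

# selecting the bus column per generator repeats the zonal price for
# every generator in that zone; columns are then relabelled
market_price_per_generator = market_price_per_bus.loc[:, generator_bus]
market_price_per_generator.columns = generator_bus.index
print(market_price_per_generator)
# ----------------------------------------------------------------------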
340 |     market_price_per_link = (
341 |         market_price_per_bus.loc[
342 |             :, self.market_model.links.loc[links_redispatch, "bus1"]
343 |         ]
344 |         * factor_redispatch_cost
345 |     )
346 | 
347 |     if not time_depended_cost:
348 |         market_price_per_generator = market_price_per_generator.median()
349 |         market_price_per_generator.index = gens_redispatch
350 |         market_price_per_link = market_price_per_link.median()
351 |         market_price_per_link.index = links_redispatch
352 |     else:
353 |         market_price_per_generator.columns = gens_redispatch
354 |         market_price_per_link.columns = links_redispatch
355 |         market_price_per_generator = market_price_per_generator.loc[
356 |             self.network.snapshots
357 |         ]
358 | 
359 |     # Costs for ramp-up generators are first set to the marginal_cost of
360 |     # each generator
361 |     if time_depended_cost:
362 |         ramp_up_costs = pd.DataFrame(
363 |             index=self.network.snapshots,
364 |             columns=gens_redispatch,
365 |         )
366 |         for i in ramp_up_costs.index:
367 |             ramp_up_costs.loc[i, gens_redispatch] = (
368 |                 self.network.generators.loc[
369 |                     gens_redispatch, "marginal_cost"
370 |                 ].values
371 |             )
372 | 
373 |     else:
374 |         ramp_up_costs = self.network.generators.loc[
375 |             gens_redispatch, "marginal_cost"
376 |         ]
377 | 
378 |     # In case the market price is higher than the marginal_cost (e.g. for
379 |     # renewables), ramp-up costs are set to the market price. This way,
380 |     # every generator earns at least the market price.
381 |     # In case the marginal costs are higher, e.g. because of fuel costs,
382 |     # the real marginal price is paid for redispatch.
383 | 
384 |     if time_depended_cost:
385 |         ramp_up_costs[market_price_per_generator > ramp_up_costs] = (
386 |             market_price_per_generator
387 |         )
388 | 
389 |     else:
390 |         ramp_up_costs[
391 |             market_price_per_generator
392 |             > self.network.generators.loc[gens_redispatch, "marginal_cost"]
393 |         ] = market_price_per_generator
394 | 
395 |     ramp_up_costs = ramp_up_costs + management_cost_per_generator.values
396 | 
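# ----------------------------------------------------------------------
# Editor's illustration: the ramp-up pricing rule described above, i.e.
# max(marginal cost, market price) plus a management fee. Toy numbers;
# the fee of 4 EUR/MWh is just an example value.
import pandas as pd

marginal_cost = pd.Series({"wind": 0.0, "coal": 30.0, "gas": 60.0})
market_price = pd.Series({"wind": 45.0, "coal": 45.0, "gas": 45.0})
management_fee = 4.0  # EUR/MWh

ramp_up_cost = (
    marginal_cost.where(marginal_cost > market_price, market_price)
    + management_fee
)
print(ramp_up_cost)  # wind/coal: market price + fee, gas: fuel cost + fee
# ----------------------------------------------------------------------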
397 |     # Costs for ramp-down generators consist of the market price,
398 |     # which is still paid for the generation. Fuel costs can be saved,
399 |     # therefore the ramp-down costs are reduced by the marginal costs.
400 |     if time_depended_cost:
401 |         ramp_down_costs = (
402 |             market_price_per_generator
403 |             - self.network.generators.loc[
404 |                 gens_redispatch, "marginal_cost"
405 |             ].values
406 |         )
407 |         ramp_down_costs.columns = gens_redispatch + " ramp_down"
408 |     else:
409 |         ramp_down_costs = (
410 |             market_price_per_generator
411 |             - self.network.generators.loc[
412 |                 gens_redispatch, "marginal_cost"
413 |             ].values
414 |         )
415 |     ramp_down_costs = ramp_down_costs + management_cost_per_generator.values
416 |     # Add ramp-up generators to the network for the grid optimization.
417 |     # Marginal costs are increased by the management fee.
418 |     self.network.madd(
419 |         "Generator",
420 |         gens_redispatch + " ramp_up",
421 |         bus=self.network.generators.loc[gens_redispatch, "bus"].values,
422 |         p_nom=self.network.generators.loc[gens_redispatch, "p_nom"].values,
423 |         carrier=self.network.generators.loc[gens_redispatch, "carrier"].values,
424 |     )
425 | 
426 |     if time_depended_cost:
427 |         ramp_up_costs.columns += " ramp_up"
428 |         self.network.generators_t.marginal_cost = pd.concat(
429 |             [self.network.generators_t.marginal_cost, ramp_up_costs], axis=1
430 |         )
431 |     else:
432 |         self.network.generators.loc[
433 |             gens_redispatch + " ramp_up", "marginal_cost"
434 |         ] = ramp_up_costs.values
435 | 
436 |     # Set maximum feed-in limit for ramp-up generators based on feed-in of
437 |     # (disaggregated) generators from the market optimization and potential
438 |     # feed-in time series
439 | 
440 |     self.network.generators_t.p_max_pu.loc[:, gens_redispatch + " ramp_up"] = (
441 |         (
442 |             p_max_pu_all.loc[:, gens_redispatch].mul(
443 |                 self.network.generators.loc[gens_redispatch, "p_nom"]
444 |             )
445 |             - (
446 |                 self.market_model.generators_t.p.loc[
447 |                     self.network.snapshots, gens_redispatch
448 |                 ]
449 |             )
450 |         )
451 |         .clip(lower=0.0)
452 |         .mul(1 / self.network.generators.loc[gens_redispatch, "p_nom"])
453 |         .values
454 |     )
455 | 
456 |     # Add ramp-up links to the network for the grid optimization.
457 |     # Marginal costs are increased by the management fee.
458 |     if time_depended_cost:
459 |         ramp_up_costs_links = pd.DataFrame(
460 |             index=self.network.snapshots,
461 |             columns=links_redispatch,
462 |         )
463 |         for i in ramp_up_costs.index:
464 |             ramp_up_costs_links.loc[i, links_redispatch] = (
465 |                 self.network.links.loc[
466 |                     links_redispatch, "marginal_cost"
467 |                 ].values
468 |             )
469 | 
470 |         ramp_up_costs_links[
471 |             market_price_per_link.loc[self.network.snapshots]
472 |             > ramp_up_costs_links
473 |         ] = market_price_per_link
474 | 
475 |     else:
476 |         ramp_up_costs_links = self.network.links.loc[
477 |             links_redispatch, "marginal_cost"
478 |         ]
479 | 
480 |         ramp_up_costs_links[
481 |             market_price_per_link
482 |             > self.network.links.loc[links_redispatch, "marginal_cost"]
483 |         ] = market_price_per_link
484 | 
485 |     ramp_up_costs_links = ramp_up_costs_links + management_cost_per_link.values
486 | 
487 |     self.network.madd(
488 |         "Link",
489 |         links_redispatch + " ramp_up",
490 |         bus0=self.network.links.loc[links_redispatch, "bus0"].values,
491 |         bus1=self.network.links.loc[links_redispatch, "bus1"].values,
492 |         p_nom=self.network.links.loc[links_redispatch, "p_nom"].values,
493 |         carrier=self.network.links.loc[links_redispatch, "carrier"].values,
494 |         efficiency=self.network.links.loc[
495 |             links_redispatch, "efficiency"
496 |         ].values,
497 |     )
498 | 
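# ----------------------------------------------------------------------
# Editor's illustration: the feed-in headroom behind the p_max_pu limits
# of the ramp-up components above and below. Toy numbers only.
import pandas as pd

p_nom = 100.0                              # installed capacity in MW
potential = pd.Series([80.0, 50.0])        # available feed-in per snapshot
market_dispatch = pd.Series([60.0, 55.0])  # dispatch from the market model

# a ramp-up unit may only add what the market dispatch left unused;
# negative headroom (dispatch above potential) is clipped to zero
p_max_pu_ramp_up = (potential - market_dispatch).clip(lower=0.0) / p_nom
print(p_max_pu_ramp_up)  # 0.2 in the first snapshot, 0.0 in the second
# ----------------------------------------------------------------------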
+= " ramp_up" 501 | self.network.links_t.marginal_cost = pd.concat( 502 | [self.network.links_t.marginal_cost, ramp_up_costs_links], axis=1 503 | ) 504 | else: 505 | self.network.links.loc[ 506 | links_redispatch + " ramp_up", "marginal_cost" 507 | ] = ramp_up_costs_links 508 | 509 | # Set maximum feed-in limit for ramp up links based on feed-in of 510 | # (disaggregated) links from the market optimization 511 | self.network.links_t.p_max_pu.loc[:, links_redispatch + " ramp_up"] = ( 512 | ( 513 | self.network.links.loc[links_redispatch, "p_nom"] 514 | - ( 515 | self.market_model.links_t.p0.loc[ 516 | self.network.snapshots, links_redispatch 517 | ] 518 | ) 519 | ) 520 | .clip(lower=0.0) 521 | .mul(1 / self.network.links.loc[links_redispatch, "p_nom"]) 522 | .values 523 | ) 524 | 525 | # Add ramp down generators to the network for the grid optimization 526 | # Marginal cost are incread by a management fee of 4 EUR/MWh, since the 527 | # feedin is negative, the costs are multiplyed by (-1) 528 | self.network.madd( 529 | "Generator", 530 | gens_redispatch + " ramp_down", 531 | bus=self.network.generators.loc[gens_redispatch, "bus"].values, 532 | p_nom=self.network.generators.loc[gens_redispatch, "p_nom"].values, 533 | carrier=self.network.generators.loc[gens_redispatch, "carrier"].values, 534 | ) 535 | 536 | if time_depended_cost: 537 | self.network.generators_t.marginal_cost = pd.concat( 538 | [self.network.generators_t.marginal_cost, -ramp_down_costs], axis=1 539 | ) 540 | else: 541 | self.network.generators.loc[ 542 | gens_redispatch + " ramp_down", "marginal_cost" 543 | ] = -(ramp_down_costs.values) 544 | 545 | # Ramp down generators can not feed-in addtional energy 546 | self.network.generators_t.p_max_pu.loc[ 547 | :, gens_redispatch + " ramp_down" 548 | ] = 0.0 549 | # Ramp down can be at maximum as high as the feed-in of the 550 | # (disaggregated) generators in the market model 551 | self.network.generators_t.p_min_pu.loc[ 552 | :, gens_redispatch + " ramp_down" 553 | ] = ( 554 | -( 555 | self.market_model.generators_t.p.loc[ 556 | self.network.snapshots, gens_redispatch 557 | ] 558 | .clip(lower=0.0) 559 | .mul(1 / self.network.generators.loc[gens_redispatch, "p_nom"]) 560 | ) 561 | ).values 562 | 563 | # Add ramp down links to the network for the grid optimization 564 | # Marginal cost are currently only the management fee of 4 EUR/MWh, 565 | # other costs are somehow complicated due to the gas node and fuel costs 566 | # this is still an open ToDO. 
567 | self.network.madd( 568 | "Link", 569 | links_redispatch + " ramp_down", 570 | bus0=self.network.links.loc[links_redispatch, "bus0"].values, 571 | bus1=self.network.links.loc[links_redispatch, "bus1"].values, 572 | p_nom=self.network.links.loc[links_redispatch, "p_nom"].values, 573 | marginal_cost=-(management_cost), 574 | carrier=self.network.links.loc[links_redispatch, "carrier"].values, 575 | efficiency=self.network.links.loc[ 576 | links_redispatch, "efficiency" 577 | ].values, 578 | ) 579 | 580 | # Ramp down links can not feed-in addtional energy 581 | self.network.links_t.p_max_pu.loc[:, links_redispatch + " ramp_down"] = 0.0 582 | 583 | # Ramp down can be at maximum as high as the feed-in of the 584 | # (disaggregated) links in the market model 585 | self.network.links_t.p_min_pu.loc[:, links_redispatch + " ramp_down"] = ( 586 | -( 587 | self.market_model.links_t.p0.loc[ 588 | self.network.snapshots, links_redispatch 589 | ] 590 | .clip(lower=0.0) 591 | .mul(1 / self.network.links.loc[links_redispatch, "p_nom"]) 592 | ) 593 | ).values 594 | 595 | # Check if the network contains any problems 596 | self.network.consistency_check() 597 | 598 | # just for the current status2019 scenario a quick fix for buses which 599 | # do not have a connection 600 | # self.network.buses.drop( 601 | # self.network.buses[ 602 | # self.network.buses.index.isin(['47085', '47086', '37865', '37870' 603 | # ])].index, inplace=True) 604 | 605 | 606 | def extra_functionality(): 607 | return None 608 | -------------------------------------------------------------------------------- /etrago/execute/market_optimization.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2016-2023 Flensburg University of Applied Sciences, 3 | # Europa-Universität Flensburg, 4 | # Centre for Sustainable Energy Systems, 5 | # DLR-Institute for Networked Energy Systems 6 | # 7 | # This program is free software; you can redistribute it and/or 8 | # modify it under the terms of the GNU Affero General Public License as 9 | # published by the Free Software Foundation; either version 3 of the 10 | # License, or (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU Affero General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU Affero General Public License 18 | # along with this program. If not, see . 
19 | 20 | # File description 21 | """ 22 | Defines the market optimization within eTraGo 23 | """ 24 | import os 25 | 26 | if "READTHEDOCS" not in os.environ: 27 | import logging 28 | 29 | from pypsa.components import component_attrs 30 | import pandas as pd 31 | 32 | from etrago.cluster.electrical import postprocessing, preprocessing 33 | from etrago.cluster.spatial import group_links 34 | from etrago.tools.constraints import Constraints 35 | 36 | logger = logging.getLogger(__name__) 37 | 38 | __copyright__ = ( 39 | "Flensburg University of Applied Sciences, " 40 | "Europa-Universität Flensburg, " 41 | "Centre for Sustainable Energy Systems, " 42 | "DLR-Institute for Networked Energy Systems" 43 | ) 44 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 45 | __author__ = "ulfmueller, ClaraBuettner, CarlosEpia" 46 | 47 | from etrago.tools.utilities import adjust_chp_model, adjust_PtH2_model 48 | 49 | 50 | def market_optimization(self): 51 | logger.info("Start building pre market model") 52 | 53 | unit_commitment = True 54 | 55 | build_market_model(self, unit_commitment) 56 | self.pre_market_model.determine_network_topology() 57 | 58 | logger.info("Start solving pre market model") 59 | 60 | if self.args["method"]["formulation"] == "pyomo": 61 | self.pre_market_model.lopf( 62 | solver_name=self.args["solver"], 63 | solver_options=self.args["solver_options"], 64 | pyomo=True, 65 | extra_functionality=Constraints(self.args, False).functionality, 66 | formulation=self.args["model_formulation"], 67 | ) 68 | elif self.args["method"]["formulation"] == "linopy": 69 | status, condition = self.pre_market_model.optimize( 70 | solver_name=self.args["solver"], 71 | solver_options=self.args["solver_options"], 72 | extra_functionality=Constraints(self.args, False).functionality, 73 | linearized_unit_commitment=True, 74 | ) 75 | 76 | if status != "ok": 77 | logger.warning( 78 | f"""Optimization failed with status {status} 79 | and condition {condition}""" 80 | ) 81 | 82 | else: 83 | logger.warning("Method type must be either 'pyomo' or 'linopy'") 84 | 85 | # Export results of pre-market model 86 | if self.args["csv_export"]: 87 | path = self.args["csv_export"] 88 | if not os.path.exists(path): 89 | os.makedirs(path, exist_ok=True) 90 | self.pre_market_model.export_to_csv_folder(path + "/pre_market") 91 | logger.info("Preparing short-term UC market model") 92 | 93 | build_shortterm_market_model(self, unit_commitment) 94 | 95 | self.market_model.determine_network_topology() 96 | logger.info("Start solving short-term UC market model") 97 | 98 | # Set 'linopy' as formulation to make sure that constraints are added 99 | method_args = self.args["method"]["formulation"] 100 | self.args["method"]["formulation"] = "linopy" 101 | 102 | optimize_with_rolling_horizon( 103 | self.market_model, 104 | self.pre_market_model, 105 | snapshots=None, 106 | horizon=self.args["method"]["market_optimization"]["rolling_horizon"][ 107 | "planning_horizon" 108 | ], 109 | overlap=self.args["method"]["market_optimization"]["rolling_horizon"][ 110 | "overlap" 111 | ], 112 | solver_name=self.args["solver"], 113 | extra_functionality=Constraints( 114 | self.args, False, apply_on="market_model" 115 | ).functionality, 116 | args=self.args, 117 | ) 118 | 119 | # Reset formulation to previous setting of args 120 | self.args["method"]["formulation"] = method_args 121 | 122 | # Export results of market model 123 | if self.args["csv_export"]: 124 | path = self.args["csv_export"] 125 | if not os.path.exists(path): 126 | 
os.makedirs(path, exist_ok=True)
127 |         self.market_model.export_to_csv_folder(path + "/market")
128 | 
129 | 
130 | def optimize_with_rolling_horizon(
131 |     n,
132 |     pre_market,
133 |     snapshots,
134 |     horizon,
135 |     overlap,
136 |     solver_name,
137 |     extra_functionality,
138 |     args,
139 | ):
140 |     """
141 |     Optimizes the network in a rolling horizon fashion.
142 | 
143 |     Parameters
144 |     ----------
145 |     n : pypsa.Network
146 |     pre_market : pypsa.Network
147 |         Solved pre-market model used for storage boundary conditions.
148 |     snapshots : list-like
149 |         Snapshots to consider in the optimization; all snapshots if None.
150 |     horizon : int
151 |         Number of snapshots to consider in each iteration.
152 |     overlap : int
153 |         Number of snapshots to overlap between two iterations.
154 |     solver_name : str
155 |         Solver passed to `linopy.Model.solve`; `extra_functionality` and
156 |         `args` supply extra constraints and the eTraGo settings.
157 | 
158 |     """
159 |     if snapshots is None:
160 |         snapshots = n.snapshots
161 | 
162 |     if horizon <= overlap:
163 |         raise ValueError("overlap must be smaller than horizon")
164 | 
165 |     # Make sure that quadratic costs are zero and not NaN
166 |     n.links.marginal_cost_quadratic = 0.0
167 | 
168 |     starting_points = range(0, len(snapshots), horizon - overlap)
169 |     for i, start in enumerate(starting_points):
170 |         end = min(len(snapshots), start + horizon)
171 |         sns = snapshots[start:end]
172 |         logger.info(
173 |             f"""Optimizing network for snapshot horizon
174 |             [{sns[0]}:{sns[-1]}] ({i+1}/{len(starting_points)})."""
175 |         )
176 | 
177 |         if not n.stores.empty:
178 |             stores_no_dsm = n.stores[
179 |                 ~n.stores.carrier.isin(
180 |                     [
181 |                         "PtH2_waste_heat",
182 |                         "PtH2_O2",
183 |                         "dsm",
184 |                         "battery_storage",
185 |                         "central_heat_store",
186 |                         "H2_overground",
187 |                         "CH4",
188 |                         "H2_underground",
189 |                     ]
190 |                 )
191 |             ].index
192 |             if start != 0:
193 |                 n.stores.loc[stores_no_dsm, "e_initial"] = n.stores_t.e.loc[
194 |                     snapshots[start - 1], stores_no_dsm
195 |                 ]
196 |             else:
197 |                 n.stores.loc[stores_no_dsm, "e_initial"] = (
198 |                     pre_market.stores_t.e.loc[
199 |                         snapshots[start - 1], stores_no_dsm
200 |                     ]
201 |                 )
202 | 
203 |             # Select seasonal stores
204 |             seasonal_stores = n.stores.index[
205 |                 n.stores.carrier.isin(
206 |                     [
207 |                         "central_heat_store",
208 |                         "H2_overground",
209 |                         "CH4",
210 |                         "H2_underground",
211 |                     ]
212 |                 )
213 |             ]
214 | 
215 |             # Set e_initial from pre_market model for seasonal stores
216 |             n.stores.e_initial[seasonal_stores] = pre_market.stores_t.e.loc[
217 |                 snapshots[start - 1], seasonal_stores
218 |             ]
219 | 
220 |             # Set e at the end of the horizon
221 |             # by setting e_max_pu and e_min_pu
222 |             n.stores_t.e_max_pu.loc[snapshots[end - 1], seasonal_stores] = (
223 |                 pre_market.stores_t.e.loc[snapshots[end - 1], seasonal_stores]
224 |                 .div(pre_market.stores.e_nom_opt[seasonal_stores])
225 |                 .clip(lower=0.0)
226 |                 * 1.01
227 |             )
228 |             n.stores_t.e_min_pu.loc[snapshots[end - 1], seasonal_stores] = (
229 |                 pre_market.stores_t.e.loc[snapshots[end - 1], seasonal_stores]
230 |                 .div(pre_market.stores.e_nom_opt[seasonal_stores])
231 |                 .clip(lower=0.0)
232 |                 * 0.99
233 |             )
234 |             n.stores_t.e_min_pu.fillna(0.0, inplace=True)
235 |             n.stores_t.e_max_pu.fillna(1.0, inplace=True)
236 | 
237 |         if not n.storage_units.empty:
238 |             n.storage_units.state_of_charge_initial = (
239 |                 n.storage_units_t.state_of_charge.loc[snapshots[start - 1]]
240 |             )
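# ----------------------------------------------------------------------
# Editor's illustration: how horizon and overlap translate into the
# optimization windows used above. Toy numbers only.
snapshots = list(range(10))
horizon, overlap = 4, 1

starting_points = range(0, len(snapshots), horizon - overlap)
windows = [
    snapshots[start : min(len(snapshots), start + horizon)]
    for start in starting_points
]
# -> [0..3], [3..6], [6..9], [9]; consecutive windows share one snapshot,
# which carries the storage levels from one subproblem to the next
print(windows)
# ----------------------------------------------------------------------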
241 |             # Make sure that state of charge of batteries and pumped hydro
242 |             # plants are cyclic over the year by using the state_of_charges
243 |             # from the pre_market_model
244 |             if i == 0:
245 |                 n.storage_units.state_of_charge_initial = (
246 |                     pre_market.storage_units_t.state_of_charge.iloc[-1]
247 |                 )
248 |                 seasonal_storage = pre_market.storage_units[
249 |                     pre_market.storage_units.carrier == "reservoir"
250 |                 ].index
251 | 
252 |                 soc_value = pre_market.storage_units_t.state_of_charge.loc[
253 |                     snapshots[end - 1], seasonal_storage
254 |                 ]
255 | 
256 |                 args_addition = {
257 |                     "pre_market_seasonal_soc": soc_value,
258 |                 }
259 | 
260 |                 extra_functionality = Constraints(
261 |                     {**args, **args_addition}, False, apply_on="market_model"
262 |                 ).functionality
263 | 
264 |             elif i == len(starting_points) - 1:
265 |                 if len(snapshots) > 1000:
266 |                     extra_functionality = Constraints(
267 |                         args, False, apply_on="last_market_model"
268 |                     ).functionality
269 |                 else:
270 |                     seasonal_storage = pre_market.storage_units[
271 |                         pre_market.storage_units.carrier == "reservoir"
272 |                     ].index
273 | 
274 |                     soc_value = pre_market.storage_units_t.state_of_charge.loc[
275 |                         snapshots[end - 1], seasonal_storage
276 |                     ]
277 | 
278 |                     args_addition = {
279 |                         "pre_market_seasonal_soc": soc_value,
280 |                     }
281 | 
282 |                     extra_functionality = Constraints(
283 |                         {**args, **args_addition}, False, apply_on="market_model"
284 |                     ).functionality
285 | 
286 |         status, condition = n.optimize(
287 |             sns,
288 |             solver_name=solver_name,
289 |             extra_functionality=extra_functionality,
290 |             assign_all_duals=True,
291 |             linearized_unit_commitment=True,
292 |         )
293 | 
294 |         if status != "ok":
295 |             logger.warning(
296 |                 f"""Optimization failed with status {status}
297 |                 and condition {condition}"""
298 |             )
299 |             n.model.print_infeasibilities()
300 |             import pdb
301 | 
302 |             pdb.set_trace()
303 |     return n
304 | 
305 | 
306 | def build_market_model(self, unit_commitment=False):
307 |     """Builds the market model based on the network imported into eTraGo.
308 | 
309 | 
310 |     - import market regions from file or database
311 |     - cluster the network to market regions
312 |     - consider marginal cost incl. generator noise when grouping
313 |       electrical generation capacities
314 | 
315 |     Returns
316 |     -------
317 |     None.
318 | 
319 |     """
320 | 
321 |     # use existing preprocessing to get only the electricity system
322 |     net, weight, n_clusters, busmap_foreign = preprocessing(
323 |         self, apply_on="market_model"
324 |     )
325 | 
326 |     # Define market regions based on settings.
327 |     # Currently the only option is 'status_quo' which means that the current
328 |     # regions are used. When other market zone options are introduced, they
329 |     # can be assigned here.
330 |     if (
331 |         self.args["method"]["market_optimization"]["market_zones"]
332 |         == "status_quo"
333 |     ):
334 |         df = pd.DataFrame(
335 |             {
336 |                 "country": net.buses.country.unique(),
337 |                 "marketzone": net.buses.country.unique(),
338 |             },
339 |             columns=["country", "marketzone"],
340 |         )
341 | 
342 |         df.loc[(df.country == "DE") | (df.country == "LU"), "marketzone"] = (
343 |             "DE/LU"
344 |         )
345 | 
346 |         df["cluster"] = df.groupby(df.marketzone).grouper.group_info[0]
347 | 
348 |         for i in net.buses.country.unique():
349 |             net.buses.loc[net.buses.country == i, "cluster"] = df.loc[
350 |                 df.country == i, "cluster"
351 |             ].values[0]
352 | 
353 |         busmap = pd.Series(
354 |             net.buses.cluster.astype(int).astype(str), net.buses.index
355 |         )
356 |         medoid_idx = pd.Series(dtype=str)
357 | 
358 |     else:
359 |         logger.warning(
360 |             f"""
361 |             Market zone setting {self.args['method']['market_optimization']['market_zones']}
362 |             is not available. Please use one of ['status_quo']."""
363 |         )
364 | 
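# ----------------------------------------------------------------------
# Editor's illustration: the status-quo market zone busmap built above,
# where DE and LU share one bidding zone. Toy data only.
import pandas as pd

buses = pd.DataFrame(
    {"country": ["DE", "DE", "LU", "FR", "AT"]},
    index=["b1", "b2", "b3", "b4", "b5"],
)

zone = buses.country.replace({"DE": "DE/LU", "LU": "DE/LU"})
# one integer cluster id per market zone -> the busmap used for clustering
busmap = pd.Series(pd.factorize(zone)[0].astype(str), index=buses.index)
print(busmap)  # b1, b2 and b3 end up in the same cluster
# ----------------------------------------------------------------------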
365 |     logger.info("Start market zone specific clustering")
366 | 
367 |     clustering, busmap = postprocessing(
368 |         self,
369 |         busmap,
370 |         busmap_foreign,
371 |         medoid_idx,
372 |         aggregate_generators_carriers=[],
373 |         aggregate_links=False,
374 |         apply_on="market_model",
375 |     )
376 | 
377 |     net = clustering.network
378 | 
379 |     # Adjust positions of foreign buses
380 |     foreign = self.network.buses[self.network.buses.country != "DE"].copy()
381 |     foreign = foreign[foreign.index.isin(self.network.loads.bus)]
382 |     foreign = foreign.drop_duplicates(subset="country")
383 |     foreign = foreign.set_index("country")
384 | 
385 |     for country in foreign.index:
386 |         bus_for = net.buses.index[net.buses.country == country]
387 |         net.buses.loc[bus_for, "x"] = foreign.at[country, "x"]
388 |         net.buses.loc[bus_for, "y"] = foreign.at[country, "y"]
389 | 
390 |     # links_col = net.links.columns
391 |     ac = net.lines[net.lines.carrier == "AC"]
392 |     str1 = "transshipment_"
393 |     ac.index = str1 + ac.index
394 |     net.import_components_from_dataframe(
395 |         ac.loc[:, ["bus0", "bus1", "capital_cost", "length"]]
396 |         .assign(p_nom=ac.s_nom)
397 |         .assign(p_nom_min=ac.s_nom_min)
398 |         .assign(p_nom_max=ac.s_nom_max)
399 |         .assign(p_nom_extendable=ac.s_nom_extendable)
400 |         .assign(p_max_pu=ac.s_max_pu)
401 |         .assign(p_min_pu=-1.0)
402 |         .assign(carrier="DC")
403 |         .set_index(ac.index),
404 |         "Link",
405 |     )
406 |     net.lines.drop(
407 |         net.lines.loc[net.lines.carrier == "AC"].index, inplace=True
408 |     )
409 |     # net.buses.loc[net.buses.carrier == 'AC', 'carrier'] = "DC"
410 | 
411 |     net.generators_t.p_max_pu = self.network_tsa.generators_t.p_max_pu
412 | 
413 |     # Set stores and storage_units to cyclic
414 |     if len(self.network_tsa.snapshots) > 1000:
415 |         net.stores.loc[net.stores.carrier != "battery_storage", "e_cyclic"] = (
416 |             True
417 |         )
418 |         net.storage_units.cyclic_state_of_charge = True
419 |         net.stores.loc[net.stores.carrier == "dsm", "e_cyclic"] = False
420 |         net.storage_units.cyclic_state_of_charge = True
421 | 
422 |     self.pre_market_model = net
423 | 
424 |     gas_clustering_market_model(self)
425 | 
426 |     if unit_commitment:
427 |         set_unit_commitment(self, apply_on="pre_market_model")
428 | 
429 |     self.pre_market_model.links.loc[
430 |         self.pre_market_model.links.carrier.isin(
431 |             ["CH4", "DC", "AC", "H2_grid", "H2_saltcavern"]
432 |         ),
433 |         "p_min_pu",
434 |     ] = -1.0
435 | 
436 |     self.pre_market_model = adjust_PtH2_model(self)
437 |     logger.info("PtH2 model adjusted in pre_market_network")
438 | 
439 |     self.pre_market_model = adjust_chp_model(self)
440 |     logger.info(
441 |         "CHP model in foreign countries adjusted in pre_market_network"
442 |     )
443 | 
444 |     # Set country tags for market model
445 |     self.buses_by_country(apply_on="pre_market_model")
446 |     self.geolocation_buses(apply_on="pre_market_model")
447 | 
448 |     self.market_model = self.pre_market_model.copy()
449 | 
450 |     self.pre_market_model.links, self.pre_market_model.links_t = group_links(
451 |         self.pre_market_model,
452 |         carriers=[
453 |             "central_heat_pump",
454 |             "central_resistive_heater",
455 |             "rural_heat_pump",
456 |             "rural_resistive_heater",
457 |             "BEV_charger",
458 |             "dsm",
459 |             "central_gas_boiler",
460 |             "rural_gas_boiler",
461 |         ],
462 |     )
463 |     self.pre_market_model.links.min_up_time = (
464 |         self.pre_market_model.links.min_up_time.astype(int)
465 |     )
466 |     self.pre_market_model.links.min_down_time = (
467 |         self.pre_market_model.links.min_down_time.astype(int)
468 |     )
469 | 
self.pre_market_model.links.down_time_before = ( 470 | self.pre_market_model.links.down_time_before.astype(int) 471 | ) 472 | self.pre_market_model.links.up_time_before = ( 473 | self.pre_market_model.links.up_time_before.astype(int) 474 | ) 475 | self.pre_market_model.links.min_down_time = ( 476 | self.pre_market_model.links.min_down_time.astype(int) 477 | ) 478 | self.pre_market_model.links.min_up_time = ( 479 | self.pre_market_model.links.min_up_time.astype(int) 480 | ) 481 | 482 | 483 | def build_shortterm_market_model(self, unit_commitment=False): 484 | 485 | self.market_model.storage_units.loc[ 486 | self.market_model.storage_units.p_nom_extendable, "p_nom" 487 | ] = self.pre_market_model.storage_units.loc[ 488 | self.pre_market_model.storage_units.p_nom_extendable, "p_nom_opt" 489 | ].clip( 490 | lower=0 491 | ) 492 | self.market_model.stores.loc[ 493 | self.market_model.stores.e_nom_extendable, "e_nom" 494 | ] = self.pre_market_model.stores.loc[ 495 | self.pre_market_model.stores.e_nom_extendable, "e_nom_opt" 496 | ].clip( 497 | lower=0 498 | ) 499 | 500 | # Fix oder of bus0 and bus1 of DC links 501 | dc_links = self.market_model.links[self.market_model.links.carrier == "DC"] 502 | bus0 = dc_links[dc_links.bus0.astype(int) < dc_links.bus1.astype(int)].bus1 503 | bus1 = dc_links[dc_links.bus0.astype(int) < dc_links.bus1.astype(int)].bus0 504 | self.market_model.links.loc[bus0.index, "bus0"] = bus0.values 505 | self.market_model.links.loc[bus1.index, "bus1"] = bus1.values 506 | 507 | dc_links = self.pre_market_model.links[ 508 | self.pre_market_model.links.carrier == "DC" 509 | ] 510 | bus0 = dc_links[dc_links.bus0.astype(int) < dc_links.bus1.astype(int)].bus1 511 | bus1 = dc_links[dc_links.bus0.astype(int) < dc_links.bus1.astype(int)].bus0 512 | self.pre_market_model.links.loc[bus0.index, "bus0"] = bus0.values 513 | self.pre_market_model.links.loc[bus1.index, "bus1"] = bus1.values 514 | 515 | grouped_links = ( 516 | self.market_model.links.loc[self.market_model.links.p_nom_extendable] 517 | .groupby(["carrier", "bus0", "bus1"]) 518 | .p_nom.sum() 519 | .reset_index() 520 | ) 521 | for link in grouped_links.index: 522 | print(link) 523 | bus0 = grouped_links.loc[link, "bus0"] 524 | bus1 = grouped_links.loc[link, "bus1"] 525 | carrier = grouped_links.loc[link, "carrier"] 526 | 527 | self.market_model.links.loc[ 528 | (self.market_model.links.bus0 == bus0) 529 | & (self.market_model.links.bus1 == bus1) 530 | & (self.market_model.links.carrier == carrier), 531 | "p_nom", 532 | ] = ( 533 | self.pre_market_model.links.loc[ 534 | (self.pre_market_model.links.bus0 == bus0) 535 | & (self.pre_market_model.links.bus1 == bus1) 536 | & (self.pre_market_model.links.carrier == carrier), 537 | "p_nom_opt", 538 | ] 539 | .clip(lower=0) 540 | .values 541 | ) 542 | 543 | self.market_model.lines.loc[ 544 | self.market_model.lines.s_nom_extendable, "s_nom" 545 | ] = self.pre_market_model.lines.loc[ 546 | self.pre_market_model.lines.s_nom_extendable, "s_nom_opt" 547 | ].clip( 548 | lower=0 549 | ) 550 | 551 | self.market_model.storage_units.p_nom_extendable = False 552 | self.market_model.stores.e_nom_extendable = False 553 | self.market_model.links.p_nom_extendable = False 554 | self.market_model.lines.s_nom_extendable = False 555 | 556 | self.market_model.mremove( 557 | "Store", 558 | self.market_model.stores[self.market_model.stores.e_nom == 0].index, 559 | ) 560 | self.market_model.stores.e_cyclic = False 561 | self.market_model.storage_units.cyclic_state_of_charge = False 562 | 563 | if 
unit_commitment:
564 |         set_unit_commitment(self, apply_on="market_model")
565 | 
566 |     self.market_model.links.loc[
567 |         self.market_model.links.carrier.isin(
568 |             ["CH4", "DC", "AC", "H2_grid", "H2_saltcavern"]
569 |         ),
570 |         "p_min_pu",
571 |     ] = -1.0
572 | 
573 |     # Set country tags for market model
574 |     self.buses_by_country(apply_on="market_model")
575 |     self.geolocation_buses(apply_on="market_model")
576 | 
577 | 
578 | def set_unit_commitment(self, apply_on):
579 | 
580 |     if apply_on == "market_model":
581 |         network = self.market_model
582 |     elif apply_on == "pre_market_model":
583 |         network = self.pre_market_model
584 |     else:
585 |         logger.warning(f"Cannot be applied on {apply_on} yet.")
586 |         return
587 | 
588 |     # set UC constraints
589 |     unit_commitment_fpath = os.path.join(
590 |         os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
591 |         "data",
592 |         "unit_commitment.csv",
593 |     )
594 |     unit_commitment = pd.read_csv(unit_commitment_fpath, index_col=0)
595 |     unit_commitment.fillna(0, inplace=True)
596 |     committable_attrs = network.generators.carrier.isin(
597 |         unit_commitment
598 |     ).to_frame("committable")
599 | 
600 |     for attr in unit_commitment.index:
601 |         default = component_attrs["Generator"].default[attr]
602 |         committable_attrs[attr] = network.generators.carrier.map(
603 |             unit_commitment.loc[attr]
604 |         ).fillna(default)
605 |         committable_attrs[attr] = committable_attrs[attr].astype(
606 |             network.generators.carrier.map(unit_commitment.loc[attr]).dtype
607 |         )
608 | 
609 |     network.generators[committable_attrs.columns] = committable_attrs
610 |     network.generators.min_up_time = network.generators.min_up_time.astype(int)
611 |     network.generators.min_down_time = network.generators.min_down_time.astype(
612 |         int
613 |     )
614 | 
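# ----------------------------------------------------------------------
# Editor's illustration: how one row of unit_commitment.csv is mapped
# onto components by carrier, as in the loop above. Toy data only.
import pandas as pd

unit_commitment = pd.DataFrame(
    {"OCGT": [0.2], "CCGT": [0.45]}, index=["p_min_pu"]
)
carrier = pd.Series({"g1": "OCGT", "g2": "CCGT", "g3": "wind"})

# carriers without an entry in the table keep the PyPSA default
default = 0.0
p_min_pu = carrier.map(unit_commitment.loc["p_min_pu"]).fillna(default)
print(p_min_pu)  # g1 -> 0.2, g2 -> 0.45, g3 -> 0.0
# ----------------------------------------------------------------------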
615 |     # Address link carriers, i.e. OCGT
616 |     committable_links = network.links.carrier.isin(unit_commitment).to_frame(
617 |         "committable"
618 |     )
619 | 
620 |     for attr in unit_commitment.index:
621 |         default = component_attrs["Link"].default[attr]
622 |         committable_links[attr] = network.links.carrier.map(
623 |             unit_commitment.loc[attr]
624 |         ).fillna(default)
625 |         committable_links[attr] = committable_links[attr].astype(
626 |             network.links.carrier.map(unit_commitment.loc[attr]).dtype
627 |         )
628 | 
629 |     network.links[committable_links.columns] = committable_links
630 |     network.links.min_up_time = network.links.min_up_time.astype(int)
631 |     network.links.min_down_time = network.links.min_down_time.astype(int)
632 | 
633 |     # fillna on a .loc slice operates on a copy, so assign the result back
634 |     network.generators.loc[
635 |         network.generators.committable, "ramp_limit_down"
636 |     ] = network.generators.ramp_limit_down[network.generators.committable].fillna(1.0)
637 |     network.links.loc[network.links.committable, "ramp_limit_down"] = (
638 |         network.links.ramp_limit_down[network.links.committable].fillna(1.0)
639 |     )
640 |     if apply_on == "pre_market_model":
641 |         # Set all start_up and shut_down costs to 0 to simplify unit commitment
642 |         network.links.loc[network.links.committable, "start_up_cost"] = 0.0
643 |         network.links.loc[network.links.committable, "shut_down_cost"] = 0.0
644 | 
645 |         # Set all start_up and shut_down costs to 0 to simplify unit commitment
646 |         network.generators.loc[
647 |             network.generators.committable, "start_up_cost"
648 |         ] = 0.0
649 |         network.generators.loc[
650 |             network.generators.committable, "shut_down_cost"
651 |         ] = 0.0
652 | 
653 |     logger.info(f"Unit commitment set for {apply_on}")
654 | 
655 | 
656 | def gas_clustering_market_model(self):
657 |     from etrago.cluster.gas import (
658 |         gas_postprocessing,
659 |         preprocessing as gas_preprocessing,
660 |     )
661 | 
662 |     ch4_network, weight_ch4, n_clusters_ch4 = gas_preprocessing(
663 |         self, "CH4", apply_on="market_model"
664 |     )
665 | 
666 |     df = pd.DataFrame(
667 |         {
668 |             "country": ch4_network.buses.country.unique(),
669 |             "marketzone": ch4_network.buses.country.unique(),
670 |         },
671 |         columns=["country", "marketzone"],
672 |     )
673 | 
674 |     df.loc[(df.country == "DE") | (df.country == "LU"), "marketzone"] = "DE/LU"
675 | 
676 |     df["cluster"] = df.groupby(df.marketzone).grouper.group_info[0]
677 | 
678 |     for i in ch4_network.buses.country.unique():
679 |         ch4_network.buses.loc[ch4_network.buses.country == i, "cluster"] = (
680 |             df.loc[df.country == i, "cluster"].values[0]
681 |         )
682 | 
683 |     busmap = pd.Series(
684 |         ch4_network.buses.cluster.astype(int).astype(str),
685 |         ch4_network.buses.index,
686 |     )
687 | 
688 |     if "H2_grid" in self.network.links.carrier.unique():
689 |         h2_network, weight_h2, n_clusters_h2 = gas_preprocessing(
690 |             self, "H2_grid", apply_on="market_model"
691 |         )
692 | 
693 |         df_h2 = pd.DataFrame(
694 |             {
695 |                 "country": h2_network.buses.country.unique(),
696 |                 "marketzone": h2_network.buses.country.unique(),
697 |             },
698 |             columns=["country", "marketzone"],
699 |         )
700 | 
701 |         df_h2.loc[
702 |             (df_h2.country == "DE") | (df_h2.country == "LU"), "marketzone"
703 |         ] = "DE/LU"
704 | 
705 |         df_h2["cluster"] = df_h2.groupby(df_h2.marketzone).grouper.group_info[
706 |             0
707 |         ] + len(df)
708 | 
709 |         for i in h2_network.buses.country.unique():
710 |             h2_network.buses.loc[h2_network.buses.country == i, "cluster"] = (
711 |                 df_h2.loc[df_h2.country == i, "cluster"].values[0]
712 |             )
713 | 
714 |         busmap = pd.concat(
715 |             [
716 |                 busmap,
717 |                 pd.Series(
718 |                     h2_network.buses.cluster.astype(int).astype(str),
719 |                     h2_network.buses.index,
720 |                 ),
721 |             ]
722 |         )
723 | 
724 |     medoid_idx = pd.Series()
725 |     # 
Set country tags for market model 726 | self.buses_by_country(apply_on="pre_market_model") 727 | self.geolocation_buses(apply_on="pre_market_model") 728 | 729 | self.pre_market_model, busmap_new = gas_postprocessing( 730 | self, busmap, medoid_idx=medoid_idx, apply_on="market_model" 731 | ) 732 | -------------------------------------------------------------------------------- /etrago/network.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2016-2023 Flensburg University of Applied Sciences, 3 | # Europa-Universität Flensburg, 4 | # Centre for Sustainable Energy Systems, 5 | # DLR-Institute for Networked Energy Systems 6 | # 7 | # This program is free software; you can redistribute it and/or 8 | # modify it under the terms of the GNU Affero General Public License as 9 | # published by the Free Software Foundation; either version 3 of the 10 | # License, or (at your option) any later version. 11 | # 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU Affero General Public License for more details. 16 | # 17 | # You should have received a copy of the GNU Affero General Public License 18 | # along with this program. If not, see . 19 | 20 | # File description 21 | """ 22 | Define class Etrago 23 | """ 24 | 25 | import logging 26 | import os 27 | 28 | from pypsa.components import Network 29 | from sqlalchemy.orm import sessionmaker 30 | import pandas as pd 31 | 32 | if "READTHEDOCS" not in os.environ: 33 | from etrago.tools import db 34 | 35 | from etrago import __version__ 36 | from etrago.analyze.calc_results import ( 37 | ac_export, 38 | ac_export_per_country, 39 | calc_atlas_results, 40 | calc_etrago_results, 41 | dc_export, 42 | dc_export_per_country, 43 | german_network, 44 | system_costs_germany, 45 | ) 46 | from etrago.analyze.plot import ( 47 | bev_flexibility_potential, 48 | demand_side_management, 49 | flexibility_usage, 50 | heat_stores, 51 | hydrogen_stores, 52 | plot_carrier, 53 | plot_clusters, 54 | plot_gas_generation, 55 | plot_gas_summary, 56 | plot_grid, 57 | plot_h2_generation, 58 | plot_h2_summary, 59 | plot_heat_loads, 60 | plot_heat_summary, 61 | shifted_energy, 62 | ) 63 | from etrago.cluster.electrical import ehv_clustering, run_spatial_clustering 64 | from etrago.cluster.gas import run_spatial_clustering_gas 65 | from etrago.cluster.temporal import skip_snapshots, snapshot_clustering 66 | from etrago.disaggregate.spatial import run_disaggregation 67 | from etrago.disaggregate.temporal import dispatch_disaggregation 68 | from etrago.execute import lopf, optimize, run_pf_post_lopf 69 | from etrago.execute.grid_optimization import ( 70 | add_redispatch_generators, 71 | grid_optimization, 72 | ) 73 | from etrago.execute.market_optimization import ( 74 | build_market_model, 75 | market_optimization, 76 | ) 77 | from etrago.execute.sclopf import ( 78 | iterate_sclopf, 79 | post_contingency_analysis_lopf, 80 | ) 81 | from etrago.tools.extendable import extendable 82 | from etrago.tools.io import ( 83 | NetworkScenario, 84 | add_ch4_h2_correspondence, 85 | decommissioning, 86 | extension, 87 | ) 88 | from etrago.tools.utilities import ( 89 | add_missing_components, 90 | adjust_CH4_gen_carriers, 91 | adjust_chp_model, 92 | adjust_PtH2_model, 93 | buses_by_country, 94 | check_args, 95 | convert_capital_costs, 96 | crossborder_capacity, 
97 | delete_dispensable_ac_buses, 98 | delete_irrelevant_oneports, 99 | drop_sectors, 100 | export_to_csv, 101 | filter_links_by_carrier, 102 | foreign_links, 103 | geolocation_buses, 104 | get_args_setting, 105 | get_clustering_data, 106 | levelize_abroad_inland_parameters, 107 | load_shedding, 108 | manual_fixes_datamodel, 109 | set_branch_capacity, 110 | set_control_strategies, 111 | set_line_costs, 112 | set_q_foreign_loads, 113 | set_q_national_loads, 114 | set_random_noise, 115 | set_trafo_costs, 116 | update_busmap, 117 | ) 118 | 119 | logger = logging.getLogger(__name__) 120 | 121 | __copyright__ = ( 122 | "Flensburg University of Applied Sciences, " 123 | "Europa-Universität Flensburg, " 124 | "Centre for Sustainable Energy Systems, " 125 | "DLR-Institute for Networked Energy Systems" 126 | ) 127 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 128 | __author__ = """AmeliaNadal, CarlosEpia, ClaraBuettner, KathiEsterl, gnn, 129 | fwitte, ulfmueller, pieterhexen""" 130 | 131 | 132 | class Etrago: 133 | """ 134 | Object containing pypsa.Network including the transmission grid, not 135 | electric sectors, input parameters and optimization results. 136 | 137 | Parameters 138 | ---------- 139 | args : dict 140 | Dictionary including all input parameters. 141 | csv_folder_name : string 142 | Name of folder from which to import CSVs of network data. 143 | name : string, default "" 144 | Network name. 145 | ignore_standard_types : boolean, default False 146 | If True, do not read in PyPSA standard types into standard types 147 | DataFrames. 148 | kwargs 149 | Any remaining attributes to set 150 | 151 | Returns 152 | ------- 153 | None 154 | 155 | Examples 156 | -------- 157 | """ 158 | 159 | def __init__( 160 | self, 161 | args=None, 162 | csv_folder_name=None, 163 | ignore_standard_types=False, 164 | json_path=None, 165 | name="", 166 | **kwargs, 167 | ): 168 | self.tool_version = __version__ 169 | 170 | self.clustering = None 171 | 172 | self.results = pd.DataFrame() 173 | 174 | self.network = Network() 175 | 176 | self.network_tsa = Network() 177 | 178 | self.disaggregated_network = Network() 179 | 180 | self.__re_carriers = [ 181 | "wind_onshore", 182 | "wind_offshore", 183 | "solar", 184 | "biomass", 185 | "run_of_river", 186 | "reservoir", 187 | ] 188 | self.__vre_carriers = ["wind_onshore", "wind_offshore", "solar"] 189 | 190 | self.busmap = {} 191 | 192 | self.ch4_h2_mapping = {} 193 | 194 | if args is not None: 195 | self.args = args 196 | 197 | self.get_args_setting(json_path) 198 | 199 | conn = db.connection(section=self.args["db"]) 200 | 201 | session = sessionmaker(bind=conn) 202 | 203 | self.engine = conn 204 | 205 | self.session = session() 206 | 207 | self.check_args() 208 | 209 | elif csv_folder_name is not None: 210 | self.get_args_setting(csv_folder_name + "/args.json") 211 | 212 | self.network = Network( 213 | csv_folder_name, name, ignore_standard_types 214 | ) 215 | 216 | if self.args["spatial_disaggregation"] is not None: 217 | self.disaggregated_network = Network( 218 | csv_folder_name + "/disaggregated_network", 219 | name, 220 | ignore_standard_types, 221 | ) 222 | 223 | if self.args["method"]["market_optimization"]: 224 | try: 225 | self.market_model = Network( 226 | csv_folder_name + "/market", 227 | name, 228 | ignore_standard_types, 229 | ) 230 | except ValueError: 231 | logger.warning( 232 | """ 233 | Could not import a market_model but the selected 234 | method in the args indicated that it should be there. 
235 | This happens when the exported network was not solved 236 | yet. Run 'etrago.optimize()' to build and solve the 237 | market model. 238 | """ 239 | ) 240 | 241 | self.get_clustering_data(csv_folder_name) 242 | 243 | else: 244 | logger.error("Set args or csv_folder_name") 245 | 246 | # Add functions: bind the imported standalone functions as Etrago methods 247 | get_args_setting = get_args_setting 248 | 249 | check_args = check_args 250 | 251 | geolocation_buses = geolocation_buses 252 | 253 | add_missing_components = add_missing_components 254 | 255 | load_shedding = load_shedding 256 | 257 | set_random_noise = set_random_noise 258 | 259 | set_q_national_loads = set_q_national_loads 260 | 261 | set_q_foreign_loads = set_q_foreign_loads 262 | 263 | foreign_links = foreign_links 264 | 265 | crossborder_capacity = crossborder_capacity 266 | 267 | convert_capital_costs = convert_capital_costs 268 | 269 | extendable = extendable 270 | 271 | extension = extension 272 | 273 | set_branch_capacity = set_branch_capacity 274 | 275 | decommissioning = decommissioning 276 | 277 | add_ch4_h2_correspondence = add_ch4_h2_correspondence 278 | 279 | spatial_clustering = run_spatial_clustering 280 | 281 | spatial_clustering_gas = run_spatial_clustering_gas 282 | 283 | skip_snapshots = skip_snapshots 284 | 285 | ehv_clustering = ehv_clustering 286 | 287 | snapshot_clustering = snapshot_clustering 288 | 289 | add_redispatch_generators = add_redispatch_generators 290 | 291 | build_market_model = build_market_model 292 | 293 | grid_optimization = grid_optimization 294 | 295 | market_optimization = market_optimization 296 | 297 | lopf = lopf 298 | 299 | optimize = optimize 300 | 301 | temporal_disaggregation = dispatch_disaggregation 302 | 303 | pf_post_lopf = run_pf_post_lopf 304 | 305 | spatial_disaggregation = run_disaggregation 306 | 307 | calc_results = calc_etrago_results 308 | 309 | calc_atlas_results = calc_atlas_results 310 | 311 | calc_ac_export = ac_export 312 | 313 | calc_ac_export_per_country = ac_export_per_country 314 | 315 | calc_dc_export = dc_export 316 | 317 | calc_dc_export_per_country = dc_export_per_country 318 | 319 | export_to_csv = export_to_csv 320 | 321 | filter_links_by_carrier = filter_links_by_carrier 322 | 323 | german_network = german_network 324 | 325 | set_line_costs = set_line_costs 326 | 327 | set_trafo_costs = set_trafo_costs 328 | 329 | system_costs_germany = system_costs_germany 330 | 331 | drop_sectors = drop_sectors 332 | 333 | buses_by_country = buses_by_country 334 | 335 | update_busmap = update_busmap 336 | 337 | plot_grid = plot_grid 338 | 339 | plot_clusters = plot_clusters 340 | 341 | plot_carrier = plot_carrier 342 | 343 | plot_gas_generation = plot_gas_generation 344 | 345 | plot_gas_summary = plot_gas_summary 346 | 347 | plot_h2_generation = plot_h2_generation 348 | 349 | plot_h2_summary = plot_h2_summary 350 | 351 | plot_heat_loads = plot_heat_loads 352 | 353 | plot_heat_summary = plot_heat_summary 354 | 355 | plot_flexibility_usage = flexibility_usage 356 | 357 | demand_side_management = demand_side_management 358 | 359 | bev_flexibility_potential = bev_flexibility_potential 360 | 361 | heat_stores = heat_stores 362 | 363 | hydrogen_stores = hydrogen_stores 364 | 365 | delete_dispensable_ac_buses = delete_dispensable_ac_buses 366 | 367 | delete_irrelevant_oneports = delete_irrelevant_oneports 368 | 369 | get_clustering_data = get_clustering_data 370 | 371 | adjust_CH4_gen_carriers = adjust_CH4_gen_carriers 372 | 373 | manual_fixes_datamodel = manual_fixes_datamodel 374 | 375 | shifted_energy = shifted_energy 376 |
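# Python's descriptor protocol makes these plain-function attributes behave # as bound methods: the instance is passed as the first argument, so e.g. # ``etrago.calc_results()`` is equivalent to ``calc_etrago_results(etrago)``.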
377 | post_contingency_analysis = post_contingency_analysis_lopf 378 | 379 | sclopf = iterate_sclopf 380 | 381 | adjust_PtH2_model = adjust_PtH2_model 382 | 383 | adjust_chp_model = adjust_chp_model 384 | 385 | levelize_abroad_inland_parameters = levelize_abroad_inland_parameters 386 | 387 | def dc_lines(self): 388 | return self.filter_links_by_carrier("DC", like=False) 389 | 390 | def build_network_from_db(self): 391 | """Function that imports the transmission grid from the chosen database 392 | 393 | Returns 394 | ------- 395 | None. 396 | 397 | """ 398 | self.scenario = NetworkScenario( 399 | self.engine, 400 | self.session, 401 | version=self.args["gridversion"], 402 | start_snapshot=self.args["start_snapshot"], 403 | end_snapshot=self.args["end_snapshot"], 404 | scn_name=self.args["scn_name"], 405 | ) 406 | 407 | self.network = self.scenario.build_network() 408 | 409 | self.extension() 410 | 411 | self.decommissioning() 412 | 413 | if ("H2_grid" in self.network.buses.carrier.unique()) and ( 414 | "H2_grid" not in self.network.links.carrier.unique() 415 | ): 416 | self.add_ch4_h2_correspondence() 417 | 418 | logger.info("Imported network from db") 419 | 420 | def adjust_network(self): 421 | """ 422 | Function that adjusts the network imported from the database according 423 | to the given input parameters. 424 | 425 | Returns 426 | ------- 427 | None. 428 | 429 | """ 430 | 431 | self.manual_fixes_datamodel() 432 | 433 | self.geolocation_buses() 434 | 435 | self.load_shedding() 436 | 437 | self.adjust_CH4_gen_carriers() 438 | 439 | self.set_random_noise(0.01) 440 | 441 | self.set_q_national_loads(cos_phi=0.9) 442 | 443 | self.set_q_foreign_loads(cos_phi=0.9) 444 | 445 | self.foreign_links() 446 | 447 | self.crossborder_capacity() 448 | 449 | self.set_branch_capacity() 450 | 451 | self.extendable( 452 | grid_max_D=self.args["extendable"]["upper_bounds_grid"][ 453 | "grid_max_D" 454 | ], 455 | grid_max_abs_D=self.args["extendable"]["upper_bounds_grid"][ 456 | "grid_max_abs_D" 457 | ], 458 | grid_max_foreign=self.args["extendable"]["upper_bounds_grid"][ 459 | "grid_max_foreign" 460 | ], 461 | grid_max_abs_foreign=self.args["extendable"]["upper_bounds_grid"][ 462 | "grid_max_abs_foreign" 463 | ], 464 | ) 465 | 466 | self.convert_capital_costs() 467 | 468 | self.delete_dispensable_ac_buses() 469 | 470 | self.delete_irrelevant_oneports() 471 | 472 | set_control_strategies(self.network) 473 | 474 | self.levelize_abroad_inland_parameters() 475 | 476 | def _ts_weighted(self, timeseries): 477 | return timeseries.mul(self.network.snapshot_weightings, axis=0) 478 | -------------------------------------------------------------------------------- /etrago/tools/__init__.py: -------------------------------------------------------------------------------- 1 | """Multi-purpose tools that don't fit anywhere else in eTraGo. 2 | """ 3 | 4 | __copyright__ = ( 5 | "Copyright (C) 2023" 6 | " Otto-von-Guericke-University Magdeburg," 7 | " Research group for theoretical computer science" 8 | ) 9 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 10 | __author__ = "gnn " 11 | 12 | 13 | def noop(*ignored_arguments, **ignored_keyword_arguments): 14 | """Do nothing. 15 | 16 | Accept all kinds of arguments, ignore them and do nothing. 17 | """ 18 | pass 19 | 20 | 21 | class Noops: 22 | """Provide arbitrarily named methods that do nothing. 23 | 24 | Any attribute access will return a method that does nothing, i.e. 25 | all methods of this object are :py:func:`noop`s.
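For example, ``noops.anything(1, key=2)`` does nothing and returns ``None``.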
Normally you don't 26 | need to instantiate this class. All instances behave the same, so 27 | the containing module provides one called :py:obj:`noops` which you 28 | can import and use. 29 | """ 30 | 31 | @classmethod 32 | def __getattribute__(cls, ignored_name): 33 | return noop 34 | 35 | 36 | noops = Noops() 37 | """A default :py:class:`Noops` instance so you don't have to create one. 38 | """ 39 | -------------------------------------------------------------------------------- /etrago/tools/db.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Copyright 2016-2018 Flensburg University of Applied Sciences, 3 | # Europa-Universität Flensburg, 4 | # Centre for Sustainable Energy Systems, 5 | # DLR-Institute for Networked Energy Systems 6 | 7 | # This program is free software; you can redistribute it and/or 8 | # modify it under the terms of the GNU Affero General Public License as 9 | # published by the Free Software Foundation; either version 3 of the 10 | # License, or (at your option) any later version. 11 | 12 | # This program is distributed in the hope that it will be useful, 13 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 | # GNU Affero General Public License for more details. 16 | 17 | # You should have received a copy of the GNU Affero General Public License 18 | # along with this program. If not, see <http://www.gnu.org/licenses/>. 19 | 20 | import configparser as cp 21 | import getpass 22 | import os 23 | 24 | from sqlalchemy import create_engine 25 | import keyring 26 | import oedialect 27 | 28 | 29 | def readcfg(filepath, section): 30 | """ 31 | Reads the configuration file. If section is not available, calls 32 | create_oedb_config_file to add the new section to an existing config.ini. 33 | 34 | Parameters 35 | ---------- 36 | filepath : str 37 | Absolute path of config file including the filename itself 38 | section : str 39 | Section in config file which contains connection details 40 | Returns 41 | ------- 42 | cfg : configparser.ConfigParser 43 | The parsed configuration file. 44 | """ 45 | 46 | cfg = cp.ConfigParser() 47 | cfg.read(filepath) 48 | 49 | if not cfg.has_section(section): 50 | print( 51 | 'The section "{sec}" is not in the config file {file}.'.format( 52 | sec=section, file=filepath 53 | ) 54 | ) 55 | cfg = create_oedb_config_file(filepath, section) 56 | 57 | return cfg 58 | 59 | 60 | def get_connection_details(section): 61 | """ 62 | Asks the user for the database connection details and returns them as a 63 | ConfigParser-object. 64 | 65 | Parameters 66 | ---------- 67 | section : str Name of the config file section to store the connection details under 68 | 69 | Returns 70 | ------- 71 | cfg : configparser.ConfigParser 72 | ConfigParser object holding the entered connection details.
73 | """ 74 | print("Please enter your connection details:") 75 | dialect = ( 76 | input("Enter input value for `dialect` (default: psycopg2): ") 77 | or "psycopg2" 78 | ) 79 | username = input("Enter value for `username`: ") 80 | database = input("Enter value for `database`: ") 81 | host = input("Enter value for `host`: ") 82 | port = input("Enter value for `port` (default: 5432): ") or "5432" 83 | 84 | cfg = cp.ConfigParser() 85 | cfg.add_section(section) 86 | cfg.set(section, "dialect", dialect) 87 | cfg.set(section, "username", username) 88 | cfg.set(section, "host", host) 89 | cfg.set(section, "port", port) 90 | cfg.set(section, "database", database) 91 | pw = getpass.getpass( 92 | prompt="Enter your password/token to " 93 | "store it in " 94 | "keyring: ".format(database=section) 95 | ) 96 | keyring.set_password(section, cfg.get(section, "username"), pw) 97 | 98 | return cfg 99 | 100 | 101 | def create_oedb_config_file(filepath, section="oep"): 102 | """ 103 | 104 | Parameters 105 | ---------- 106 | filepath : str 107 | Absolute path of config file including the filename itself 108 | section : str 109 | Section in config file which contains connection details 110 | 111 | Returns 112 | ------- 113 | cfg : configparser.ConfigParser 114 | Used for configuration file parser language. 115 | """ 116 | 117 | cfg = get_connection_details(section) 118 | 119 | print( 120 | "Do you want to store the connection details in the config file {file} ?".format( 121 | file=filepath 122 | ) 123 | ) 124 | choice = "" 125 | while choice not in ["y", "n"]: 126 | choice = input("(y/n): ") 127 | 128 | if choice == "y": 129 | # create egoio dir if not existent 130 | base_path = os.path.split(filepath)[0] 131 | if not os.path.isdir(base_path): 132 | os.mkdir(base_path) 133 | print("The directory {path} was created.".format(path=base_path)) 134 | 135 | with open(filepath, "a") as configfile: 136 | cfg.write(configfile) 137 | pass 138 | 139 | print( 140 | 'Template {0} with section "{1}" created.\nYou can manually edit' 141 | " the config file.".format(filepath, section) 142 | ) 143 | else: 144 | pass 145 | 146 | return cfg 147 | 148 | 149 | def connection(filepath=None, section="oep", readonly=False): 150 | """ 151 | Instantiate a database connection (for the use with SQLAlchemy). 152 | 153 | The keyword argument `filepath` specifies the location of the config file 154 | that contains database connection information. If not given, the default 155 | of `~/.etrago_database/config.ini` applies. 156 | 157 | Parameters 158 | ---------- 159 | filepath : str 160 | Absolute path of config file including the filename itself 161 | section : str 162 | Section in config file containing database connection parameters. 163 | Default: 'oep'. 164 | readonly : bool 165 | Set this option to True for creating a read-only and passwordless 166 | engine for accessing the open energy platform. 167 | Default: False. 168 | 169 | Returns 170 | ------- 171 | conn : sqlalchemy.engine 172 | SQLalchemy engine object containing the connection details 173 | """ 174 | 175 | if readonly: 176 | conn = create_engine("postgresql+oedialect://openenergy-platform.org") 177 | else: 178 | # define default filepath if not provided 179 | if filepath is None: 180 | filepath = os.path.join( 181 | os.path.expanduser("~"), ".etrago_database", "config.ini" 182 | ) 183 | 184 | # does the file exist? 185 | if not os.path.isfile(filepath): 186 | print( 187 | "DB config file {file} not found. " 188 | "This might be the first run of the tool. 
".format( 189 | file=filepath 190 | ) 191 | ) 192 | cfg = create_oedb_config_file(filepath, section=section) 193 | else: 194 | cfg = readcfg(filepath, section) 195 | 196 | try: 197 | pw = cfg.get(section, "password") 198 | except: 199 | pw = keyring.get_password(section, cfg.get(section, "username")) 200 | if pw is None: 201 | pw = getpass.getpass( 202 | prompt='No password found for database "{db}". ' 203 | "Enter your password to " 204 | "store it in keyring: ".format( 205 | db=cfg.get(section, "database") 206 | ) 207 | ) 208 | keyring.set_password(section, cfg.get(section, "username"), pw) 209 | 210 | # establish connection and return it 211 | conn = create_engine( 212 | "postgresql+{dialect}://{user}:{password}@{host}:{port}/{db}".format( 213 | dialect=cfg.get(section, "dialect", fallback="psycopg2"), 214 | user=cfg.get(section, "username"), 215 | password=pw, 216 | host=cfg.get(section, "host"), 217 | port=cfg.get(section, "port"), 218 | db=cfg.get(section, "database"), 219 | ) 220 | ) 221 | 222 | return conn 223 | -------------------------------------------------------------------------------- /etrago/tools/sql_scripts/results_md2grid.sql: -------------------------------------------------------------------------------- 1 | DO $$ 2 | DECLARE 3 | rec RECORD; 4 | new_grid_res INTEGER; 5 | 6 | BEGIN 7 | 8 | FOR rec in SELECT result_id from model_draft.ego_grid_pf_hv_result_meta WHERE safe_results = TRUE 9 | 10 | LOOP 11 | new_grid_res = CASE WHEN (SELECT min(result_id) from grid.ego_pf_hv_result_meta) IS NULL 12 | THEN 1 13 | ELSE (SELECT max(result_id)+1 from grid.ego_pf_hv_result_meta) 14 | END; 15 | 16 | 17 | INSERT INTO grid.ego_pf_hv_result_meta 18 | (result_id, 19 | modeldraft_id , 20 | scn_name, 21 | calc_date, 22 | user_name, 23 | method, 24 | start_snapshot, 25 | end_snapshot, 26 | snapshots, 27 | solver, 28 | settings) 29 | SELECT 30 | new_grid_res, 31 | rec.result_id, 32 | scn_name, 33 | calc_date, 34 | user_name, 35 | method, 36 | start_snapshot, 37 | end_snapshot, 38 | snapshots, 39 | solver, 40 | settings 41 | FROM model_draft.ego_grid_pf_hv_result_meta 42 | WHERE result_id = rec.result_id; 43 | 44 | INSERT INTO grid.ego_pf_hv_result_bus 45 | (result_id, 46 | bus_id , 47 | x, 48 | y, 49 | v_nom, 50 | current_type, 51 | v_mag_pu_min, 52 | v_mag_pu_max, 53 | geom) 54 | SELECT 55 | new_grid_res, 56 | bus_id , 57 | x, 58 | y, 59 | v_nom, 60 | current_type, 61 | v_mag_pu_min, 62 | v_mag_pu_max, 63 | geom 64 | FROM model_draft.ego_grid_pf_hv_result_bus 65 | WHERE result_id = rec.result_id; 66 | 67 | INSERT INTO grid.ego_pf_hv_result_bus_t 68 | (result_id, 69 | bus_id, 70 | v_mag_pu_set, 71 | p, 72 | q, 73 | v_mag_pu, 74 | v_ang, 75 | marginal_price) 76 | SELECT 77 | new_grid_res, 78 | bus_id, 79 | v_mag_pu_set, 80 | p, 81 | q, 82 | v_mag_pu, 83 | v_ang, 84 | marginal_price 85 | FROM model_draft.ego_grid_pf_hv_result_bus_t 86 | WHERE result_id = rec.result_id; 87 | 88 | INSERT INTO grid.ego_pf_hv_result_generator 89 | (result_id, 90 | generator_id, 91 | bus, 92 | dispatch, 93 | control, 94 | p_nom, 95 | p_nom_extendable, 96 | p_nom_min, 97 | p_nom_max, 98 | p_min_pu_fixed, 99 | p_max_pu_fixed, 100 | sign, 101 | source, 102 | marginal_cost, 103 | capital_cost, 104 | efficiency, 105 | p_nom_opt) 106 | SELECT 107 | new_grid_res, 108 | generator_id, 109 | bus, 110 | dispatch, 111 | control, 112 | p_nom, 113 | p_nom_extendable, 114 | p_nom_min, 115 | p_nom_max, 116 | p_min_pu_fixed, 117 | p_max_pu_fixed, 118 | sign, 119 | source, 120 | marginal_cost, 121 | capital_cost, 122 | 
efficiency, 123 | p_nom_opt 124 | FROM model_draft.ego_grid_pf_hv_result_generator 125 | WHERE result_id = rec.result_id; 126 | 127 | INSERT INTO grid.ego_pf_hv_result_generator_t 128 | (result_id, 129 | generator_id, 130 | p_set, 131 | q_set, 132 | p_min_pu, 133 | p_max_pu, 134 | p, 135 | q, 136 | status) 137 | SELECT 138 | new_grid_res, 139 | generator_id, 140 | p_set, 141 | q_set, 142 | p_min_pu, 143 | p_max_pu, 144 | p, 145 | q, 146 | status 147 | FROM model_draft.ego_grid_pf_hv_result_generator_t 148 | WHERE result_id = rec.result_id; 149 | 150 | INSERT INTO grid.ego_pf_hv_result_line 151 | (result_id, 152 | line_id, 153 | bus0, 154 | bus1, 155 | x, 156 | r, 157 | g, 158 | b, 159 | s_nom, 160 | s_nom_extendable, 161 | s_nom_min, 162 | s_nom_max, 163 | capital_cost, 164 | length, 165 | cables, 166 | frequency, 167 | terrain_factor, 168 | x_pu, 169 | r_pu, 170 | g_pu, 171 | b_pu, 172 | s_nom_opt, 173 | geom, 174 | topo) 175 | SELECT 176 | new_grid_res, 177 | line_id, 178 | bus0, 179 | bus1, 180 | x, 181 | r, 182 | g, 183 | b, 184 | s_nom, 185 | s_nom_extendable, 186 | s_nom_min, 187 | s_nom_max, 188 | capital_cost, 189 | length, 190 | cables, 191 | frequency, 192 | terrain_factor, 193 | x_pu, 194 | r_pu, 195 | g_pu, 196 | b_pu, 197 | s_nom_opt, 198 | geom, 199 | topo 200 | FROM model_draft.ego_grid_pf_hv_result_line 201 | WHERE result_id = rec.result_id; 202 | 203 | INSERT INTO grid.ego_pf_hv_result_line_t 204 | (result_id, 205 | line_id, 206 | p0, 207 | q0, 208 | p1, 209 | q1) 210 | SELECT 211 | new_grid_res, 212 | line_id, 213 | p0, 214 | q0, 215 | p1, 216 | q1 217 | FROM model_draft.ego_grid_pf_hv_result_line_t 218 | WHERE result_id = rec.result_id; 219 | 220 | INSERT INTO grid.ego_pf_hv_result_load 221 | (result_id, 222 | load_id, 223 | bus, 224 | sign, 225 | e_annual) 226 | SELECT 227 | new_grid_res, 228 | load_id, 229 | bus, 230 | sign, 231 | e_annual 232 | FROM model_draft.ego_grid_pf_hv_result_load 233 | WHERE result_id = rec.result_id; 234 | 235 | INSERT INTO grid.ego_pf_hv_result_load_t 236 | (result_id, 237 | load_id, 238 | p_set, 239 | q_set, 240 | p, 241 | q) 242 | SELECT 243 | new_grid_res, 244 | load_id, 245 | p_set, 246 | q_set, 247 | p, 248 | q 249 | FROM model_draft.ego_grid_pf_hv_result_load_t 250 | WHERE result_id = rec.result_id; 251 | 252 | INSERT INTO grid.ego_pf_hv_result_storage 253 | (result_id, 254 | storage_id, 255 | bus, 256 | dispatch, 257 | control, 258 | p_nom, 259 | p_nom_extendable, 260 | p_nom_min, 261 | p_nom_max, 262 | p_min_pu_fixed, 263 | p_max_pu_fixed, 264 | sign, 265 | source, 266 | marginal_cost, 267 | capital_cost, 268 | efficiency, 269 | soc_initial, 270 | soc_cyclic, 271 | max_hours, 272 | efficiency_store, 273 | efficiency_dispatch, 274 | standing_loss, 275 | p_nom_opt) 276 | SELECT 277 | new_grid_res, 278 | storage_id, 279 | bus, 280 | dispatch, 281 | control, 282 | p_nom, 283 | p_nom_extendable, 284 | p_nom_min, 285 | p_nom_max, 286 | p_min_pu_fixed, 287 | p_max_pu_fixed, 288 | sign, 289 | source, 290 | marginal_cost, 291 | capital_cost, 292 | efficiency, 293 | soc_initial, 294 | soc_cyclic, 295 | max_hours, 296 | efficiency_store, 297 | efficiency_dispatch, 298 | standing_loss, 299 | p_nom_opt 300 | FROM model_draft.ego_grid_pf_hv_result_storage 301 | WHERE result_id = rec.result_id; 302 | 303 | INSERT INTO grid.ego_pf_hv_result_storage_t 304 | (result_id, 305 | storage_id, 306 | p_set, 307 | q_set, 308 | p_min_pu, 309 | p_max_pu, 310 | soc_set, 311 | inflow, 312 | p, 313 | q, 314 | state_of_charge, 315 | spill) 316 | SELECT 317 | 
new_grid_res, 318 | storage_id, 319 | p_set, 320 | q_set, 321 | p_min_pu, 322 | p_max_pu, 323 | soc_set, 324 | inflow, 325 | p, 326 | q, 327 | state_of_charge, 328 | spill 329 | FROM model_draft.ego_grid_pf_hv_result_storage_t 330 | WHERE result_id = rec.result_id; 331 | 332 | INSERT INTO grid.ego_pf_hv_result_transformer 333 | (result_id, 334 | trafo_id, 335 | bus0, 336 | bus1, 337 | x, 338 | r, 339 | g, 340 | b, 341 | s_nom, 342 | s_nom_extendable, 343 | s_nom_min, 344 | s_nom_max, 345 | tap_ratio, 346 | phase_shift, 347 | capital_cost, 348 | x_pu, 349 | r_pu, 350 | g_pu, 351 | b_pu, 352 | s_nom_opt, 353 | geom, 354 | topo) 355 | SELECT 356 | new_grid_res, 357 | trafo_id, 358 | bus0, 359 | bus1, 360 | x, 361 | r, 362 | g, 363 | b, 364 | s_nom, 365 | s_nom_extendable, 366 | s_nom_min, 367 | s_nom_max, 368 | tap_ratio, 369 | phase_shift, 370 | capital_cost, 371 | x_pu, 372 | r_pu, 373 | g_pu, 374 | b_pu, 375 | s_nom_opt, 376 | geom, 377 | topo 378 | FROM model_draft.ego_grid_pf_hv_result_transformer 379 | WHERE result_id = rec.result_id; 380 | 381 | INSERT INTO grid.ego_pf_hv_result_transformer_t 382 | (result_id, 383 | trafo_id, 384 | p0, 385 | q0, 386 | p1, 387 | q1) 388 | SELECT 389 | new_grid_res, 390 | trafo_id, 391 | p0, 392 | q0, 393 | p1, 394 | q1 395 | FROM model_draft.ego_grid_pf_hv_result_transformer_t 396 | WHERE result_id = rec.result_id; 397 | 398 | UPDATE model_draft.ego_grid_pf_hv_result_meta 399 | SET safe_results = FALSE 400 | WHERE result_id = rec.result_id; 401 | 402 | END LOOP; 403 | 404 | END; $$ 405 | 406 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from pprint import pformat 3 | import platform 4 | 5 | import nox 6 | 7 | cleaned = [ 8 | "etrago/analyze/__init__.py", 9 | "etrago/analyze/calc_results.py", 10 | "etrago/analyze/plot.py", 11 | "etrago/cluster/electrical.py", 12 | "etrago/cluster/gas.py", 13 | "etrago/cluster/spatial.py", 14 | "etrago/cluster/temporal.py", 15 | "etrago/disaggregate/spatial.py", 16 | "etrago/disaggregate/temporal.py", 17 | "etrago/execute/__init__.py", 18 | "etrago/execute/grid_optimization.py", 19 | "etrago/execute/market_optimization.py", 20 | "etrago/network.py", 21 | "etrago/tools/extendable.py", 22 | "etrago/tools/io.py", 23 | "etrago/tools/utilities.py", 24 | "noxfile.py", 25 | "setup.py", 26 | ] 27 | 28 | 29 | def setdefaults(session): 30 | session.env["PYTHONUNBUFFERED"] = "yes" 31 | 32 | 33 | @nox.session(python="3") 34 | def check(session): 35 | """Run custom checks.""" 36 | setdefaults(session) 37 | assert cleaned == sorted(set(cleaned)), ( 38 | "The list of cleaned files contains duplicates and/or isn't sorted" 39 | " alphabetically." 
40 | f"\nExpected:\n{pformat(sorted(set(cleaned)))}" 41 | f"\nGot:\n{pformat(cleaned)}" 42 | ) 43 | 44 | 45 | @nox.session(python="3") 46 | def black(session): 47 | """Check for happy little style accidents with `black`.""" 48 | setdefaults(session) 49 | session.install("black") 50 | session.run("black", "--check", "--diff", *cleaned) 51 | 52 | 53 | @nox.session(python="3") 54 | def isort(session): 55 | """Check import ordering with `isort`.""" 56 | setdefaults(session) 57 | session.install("isort >= 5") 58 | session.run("isort", "--check-only", "--diff", *cleaned) 59 | 60 | 61 | @nox.session(python="3") 62 | def flake8(session): 63 | """Check for happy little style accidents with `flake8`.""" 64 | setdefaults(session) 65 | session.install("Flake8-pyproject", "flake8") 66 | session.run("flake8", "--ignore=E722, W605, W503, E203", *cleaned) 67 | 68 | 69 | @nox.session(python=["3", "3.9", "3.10", "3.11"]) 70 | def build(session): 71 | """Build the package and check for packaging errors.""" 72 | # Get the current Python version and OS 73 | current_version = session.python if session.python else "unknown" 74 | current_os = platform.system() 75 | print(f"Running install on Python {current_version} and OS {current_os}") 76 | 77 | # Check if the current session is Python 3.9 on macOS and skip 78 | if current_version == "3.9" and current_os == "Darwin": 79 | session.skip("Skipping tests for Python 3.9 on macOS") 80 | 81 | setdefaults(session) 82 | session.install("build", "twine") 83 | session.run("python", "-m", "build") 84 | session.run("twine", "check", "dist/*") 85 | 86 | 87 | @nox.session(python=["3", "3.9", "3.10", "3.11"]) 88 | def install(session): 89 | """Install the package.""" 90 | # Get the current Python version and OS 91 | current_version = session.python if session.python else "unknown" 92 | current_os = platform.system() 93 | print(f"Running install on Python {current_version} and OS {current_os}") 94 | 95 | # Check if the current session is Python 3.9 on macOS and skip 96 | if current_version == "3.9" and current_os == "Darwin": 97 | session.skip("Skipping tests for Python 3.9 on macOS") 98 | 99 | setdefaults(session) 100 | session.env["SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL"] = "False" 101 | session.run("python", "-mpip", "install", "--upgrade", "pip") 102 | session.run("python", "-mpip", "install", *Path("dist").glob("*.whl")) 103 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | exclude = ''' 3 | /( 4 | \.git 5 | | build 6 | | dist 7 | )/ 8 | ''' 9 | include = '\.pyi?$' 10 | line-length = 79 11 | target-version = ["py38", "py39", "py310", "py311"] 12 | 13 | [tool.flake8] 14 | max-line-length = 79 15 | extend-ignore = ["E203", "E741", "W503"] 16 | exclude = ["build", "dist"] 17 | 18 | [tool.isort] 19 | combine_as_imports = true 20 | from_first = true 21 | line_length = 79 22 | profile = "black" 23 | -------------------------------------------------------------------------------- /requirements-doc.txt: -------------------------------------------------------------------------------- 1 | # Packages for read the docs 2 | # Using single requirments for docs, see: 3 | # https://github.com/rtfd/readthedocs.org/issues/2070 4 | geoalchemy2 5 | keyring 6 | loguru 7 | matplotlib 8 | nbsphinx 9 | numpydoc 10 | pandas < 2 11 | pyomo != 6.4.3 12 | pypsa == 0.20.1 13 | saio 14 | scikit-learn 15 | sphinx_rtd_theme > 1.2.2 16 | 
sqlalchemy -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from os.path import dirname, join 2 | import io 3 | import re 4 | 5 | from setuptools import find_packages, setup 6 | 7 | __copyright__ = ( 8 | "Flensburg University of Applied Sciences, " 9 | "Europa-Universität Flensburg, " 10 | "Centre for Sustainable Energy Systems, " 11 | "DLR-Institute for Networked Energy Systems" 12 | ) 13 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)" 14 | __author__ = ( 15 | "ulfmueller, wolfbunke, BartelsJ, ClaraBuettner, gnn, " 16 | "simnh, lukasol, s3pp, MGlauer, kimvk, MarlonSchlemminger, " 17 | "mariusves", 18 | "CarlosEpia", 19 | "KathiEsterl", 20 | "pieterhexen", 21 | "fwitte", 22 | "AmeliaNadal", 23 | ) 24 | 25 | 26 | def read(*names, **kwargs): 27 | with io.open( 28 | join(dirname(__file__), *names), 29 | encoding=kwargs.get("encoding", "utf8"), 30 | ) as fh: 31 | return fh.read() 32 | 33 | 34 | setup( 35 | name="eTraGo", 36 | author="DLR VE, ZNES Flensburg", 37 | author_email="", 38 | description="electric transmission grid optimization", 39 | long_description="{}".format( 40 | re.compile("^.. start-badges.*^.. end-header", re.M | re.S).sub( 41 | "", read("README.rst") 42 | ) 43 | ), 44 | long_description_content_type="text/x-rst", 45 | version="0.9.0", 46 | url="https://github.com/openego/eTraGo", 47 | license="GNU Affero General Public License Version 3 (AGPL-3.0)", 48 | packages=find_packages(), 49 | include_package_data=True, 50 | install_requires=[ 51 | "geoalchemy2 >= 0.3.0", 52 | "geopandas", 53 | "keyring", 54 | "loguru", 55 | "matplotlib >= 3.0.3", 56 | "oedialect", 57 | "pandas < 2.2", 58 | "pyomo < 6.6", 59 | "pypsa == 0.26.2", 60 | "rtree", 61 | "saio", 62 | "scikit-learn", 63 | "setuptools >= 54.2.0", 64 | "shapely", 65 | "sqlalchemy < 2", 66 | "tables", 67 | "tilemapbase == 0.4.5", 68 | "tsam", 69 | ], 70 | extras_require={ 71 | "docs": [ 72 | "nbsphinx", 73 | "numpydoc", 74 | "sphinx >= 1.4", 75 | "sphinx_rtd_theme", 76 | ], 77 | "gurobipy": ["gurobipy"], 78 | "cartopy": ["cartopy", "requests"], 79 | }, 80 | package_data={"etrago": [join("tools", "*.json")]}, 81 | ) 82 | --------------------------------------------------------------------------------
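A minimal end-to-end sketch of how the pieces above fit together. It assumes that ``args`` is a complete parameter dictionary (cf. ``etrago/args.json``), that a database connection is configured via ``etrago/tools/db.py``, that ``Etrago`` is importable from the package root (otherwise import it from ``etrago.network``), and that ``export_to_csv`` accepts a target folder; the ``results`` path is hypothetical:

    from etrago import Etrago

    etrago = Etrago(args=args)       # opens the database connection, checks args
    etrago.build_network_from_db()   # imports the transmission grid scenario
    etrago.adjust_network()          # applies the input-parameter adjustments
    etrago.optimize()                # runs the optimization configured in args
    etrago.export_to_csv("results")  # assumed signature: target folder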