├── .flake8
├── .gitignore
├── .isort.cfg
├── .pre-commit-config.yaml
├── LICENSE
├── MANIFEST.in
├── README.rst
├── doc
│   ├── Makefile
│   ├── _static
│   │   ├── ego_example_iplot_map.html
│   │   ├── ribbons.html
│   │   └── total_investment_costs_map.png
│   ├── api.rst
│   ├── api
│   │   ├── ego.tools.rst
│   │   └── modules.rst
│   ├── conf.py
│   ├── developer.rst
│   ├── files
│   │   ├── investment_costs_of_grid_ measures.csv
│   │   └── storage_units.csv
│   ├── getting_started.rst
│   ├── images
│   │   ├── battery.png
│   │   ├── eGo_integration.png
│   │   ├── etrago-storage_parameters.png
│   │   ├── open_ego_icon.svg
│   │   ├── open_ego_icon_web.png
│   │   ├── open_ego_logo.png
│   │   ├── open_ego_models_overview.png
│   │   └── trafo.png
│   ├── index.rst
│   ├── installation.rst
│   ├── make.bat
│   ├── theoretical_background.rst
│   ├── welcome.rst
│   ├── whatsnew.rst
│   └── whatsnew
│       ├── v0-0-1.rst
│       ├── v0-1-0.rst
│       ├── v0-2-0.rst
│       ├── v0-3-0.rst
│       ├── v0-3-1.rst
│       ├── v0-3-2.rst
│       ├── v0-3-3.rst
│       └── v0-3-4.rst
├── ego
│   ├── __init__.py
│   ├── appl.py
│   ├── mv_clustering
│   │   ├── __init__.py
│   │   ├── database.py
│   │   ├── egon_data_io.py
│   │   └── mv_clustering.py
│   ├── run_test.py
│   ├── scenario_setting.json
│   └── tools
│       ├── __init__.py
│       ├── config.json
│       ├── economics.py
│       ├── edisgo_integration.py
│       ├── interface.py
│       ├── io.py
│       ├── plots.py
│       ├── results.py
│       ├── storages.py
│       └── utilities.py
├── pytest.ini
├── requirements.txt
├── setup.py
└── tests
    ├── conftest.py
    ├── data
    │   ├── create_test_grid.ipynb
    │   ├── etrago_test_network_1
    │   │   ├── buses.csv
    │   │   ├── generators-p.csv
    │   │   ├── generators-p_max_pu.csv
    │   │   ├── generators-p_min_pu.csv
    │   │   ├── generators-q.csv
    │   │   ├── generators.csv
    │   │   ├── links-p0.csv
    │   │   ├── links-p1.csv
    │   │   ├── links.csv
    │   │   ├── snapshots.csv
    │   │   ├── storage_units-p.csv
    │   │   ├── storage_units-q.csv
    │   │   ├── storage_units-state_of_charge.csv
    │   │   ├── storage_units.csv
    │   │   ├── stores-e.csv
    │   │   └── stores.csv
    │   └── interface_results_reference_data
    │       ├── dispatchable_generators_active_power.csv
    │       ├── dispatchable_generators_reactive_power.csv
    │       ├── dsm_active_power.csv
    │       ├── dsm_reactive_power.csv
    │       ├── electromobility_active_power.csv
    │       ├── electromobility_reactive_power.csv
    │       ├── feedin_district_heating.csv
    │       ├── heat_pump_central_active_power.csv
    │       ├── heat_pump_central_reactive_power.csv
    │       ├── heat_pump_rural_active_power.csv
    │       ├── heat_pump_rural_reactive_power.csv
    │       ├── renewables_curtailment.csv
    │       ├── renewables_dispatch_reactive_power.csv
    │       ├── renewables_dispatch_reactive_power_max_cosphi.csv
    │       ├── renewables_p_nom.csv
    │       ├── renewables_potential.csv
    │       ├── storage_units_active_power.csv
    │       ├── storage_units_reactive_power.csv
    │       ├── storage_units_soc.csv
    │       ├── thermal_storage_central_soc.csv
    │       └── thermal_storage_rural_soc.csv
    └── tools
        └── test_interface.py
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | extend-exclude = docs
3 | max-line-length = 88
4 | extend-ignore = E203, F841
5 | count = true
6 | statistics = true
7 | show-source = true
8 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | gurobi.log
3 | __pycache__/
4 | results
5 | *.egg-info
6 | ego/scenario_setting.json
7 | .spyproject/
8 | ego/noise_values.csv
9 | .idea/
10 | noise_values.csv
11 | *.pkl
12 | .ipynb_checkpoints
13 |
--------------------------------------------------------------------------------
/.isort.cfg:
--------------------------------------------------------------------------------
1 | [settings]
2 | profile = black
3 | multi_line_output = 3
4 | lines_between_types = 1
5 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v4.3.0
4 | hooks:
5 | - id: check-yaml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/psf/black
9 | rev: 22.8.0
10 | hooks:
11 | - id: black
12 | - repo: https://github.com/pycqa/isort
13 | rev: 5.10.1
14 | hooks:
15 | - id: isort
16 | name: isort (python)
17 | - repo: https://github.com/asottile/pyupgrade
18 | rev: v2.38.0
19 | hooks:
20 | - id: pyupgrade
21 | - repo: https://github.com/pycqa/flake8
22 | rev: 5.0.4
23 | hooks:
24 | - id: flake8
25 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.rst
2 | include LICENSE
3 | include *txt
4 | include MANIFEST.in
5 | include *.json
6 | include *.rst
7 | include *.csv
8 | include ego/scenario_setting.json
9 | include ego/data/*.csv
10 | include ego/tools/*.json
11 | include ego/examples/tutorials/*
12 | include pypsa/component_attrs/*.csv
13 | include pypsa/standard_types/*.csv
14 | include pypsa/components.csv
15 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | |readthedocs| |badge_githubstars| |zenodo|
2 |
3 | -----
4 |
5 |
6 | .. image:: https://openegoproject.files.wordpress.com/2017/02/open_ego_logo_breit.png?w=400
7 |
8 |
9 | *A cross-grid-level electricity grid and storage optimization tool*
10 | | `openegoproject.wordpress.com <https://openegoproject.wordpress.com/>`_
11 |
12 |
13 | ---
14 | eGo
15 | ---
16 |
17 | Integrated optimization of flexibility options and grid extension measures
18 | for power grids based on `eTraGo <https://github.com/openego/eTraGo>`_ and
19 | `eDisGo <https://github.com/openego/eDisGo>`_. The documentation of the eGo tool
20 | can be found on
21 | `openego.readthedocs.io <https://openego.readthedocs.io/>`_.
22 |
23 | .. contents::
24 |
25 | ------------
26 | Installation
27 | ------------
28 |
29 | .. code-block::
30 |
31 | $ pip3 install eGo --process-dependency-links
32 |
33 |
34 | In case of installation errors with the PyPSA fork, use:
35 |
36 | .. code-block::
37 |
38 | $ pip3 install -e git+https://github.com/openego/PyPSA@master#egg=0.11.0fork
39 |
40 |
41 | ----------------------------
42 | Installing Developer Version
43 | ----------------------------
44 |
45 | Create a virtual environment and activate it:
46 |
47 | .. code-block::
48 |
49 | $ virtualenv venv --clear -p python3.8
50 | $ source venv/bin/activate
51 | $ cd path/to/eGo
52 | $ python -m pip install -e .[full]
53 | $ pre-commit install # install pre-commit hooks
54 |
55 |
56 | -------
57 | License
58 | -------
59 |
60 | © Europa-Universität Flensburg,
61 | © Flensburg University of Applied Sciences,
62 | *Centre for Sustainable Energy Systems*
63 | © DLR Institute for Networked Energy Systems,
64 | © Reiner-Lemoine-Institute
65 |
66 | This program is free software: you can redistribute it and/or modify it under
67 | the terms of the GNU Affero General Public License as published by the Free
68 | Software Foundation, either version 3 of the License, or (at your option) any
69 | later version.
70 |
71 | This program is distributed in the hope that it will be useful, but WITHOUT
72 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
73 | FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
74 | details.
75 |
76 | You should have received a copy of the GNU Affero General Public License along
77 | with this program. If not, see https://www.gnu.org/licenses/.
78 |
79 |
80 |
81 | .. |badge_githubstars| image:: https://img.shields.io/github/stars/openego/eGo.svg?style=flat-square&label=github%20stars
82 | :target: https://github.com/openego/eGo/
83 | :alt: GitHub stars
84 |
85 |
86 | .. |readthedocs| image:: https://readthedocs.org/projects/openego/badge/?version=master
87 | :target: http://openego.readthedocs.io/en/latest/?badge=master
88 | :alt: Documentation Status
89 |
90 | .. |zenodo| image:: https://zenodo.org/badge/87306120.svg
91 | :target: https://zenodo.org/badge/latestdoi/87306120
92 |
--------------------------------------------------------------------------------
/doc/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
23 |
24 | help:
25 | @echo "Please use \`make ' where is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " applehelp to make an Apple Help Book"
34 | @echo " devhelp to make HTML files and a Devhelp project"
35 | @echo " epub to make an epub"
36 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
37 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
38 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
39 | @echo " text to make text files"
40 | @echo " man to make manual pages"
41 | @echo " texinfo to make Texinfo files"
42 | @echo " info to make Texinfo files and run them through makeinfo"
43 | @echo " gettext to make PO message catalogs"
44 | @echo " changes to make an overview of all changed/added/deprecated items"
45 | @echo " xml to make Docutils-native XML files"
46 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
47 | @echo " linkcheck to check all external links for integrity"
48 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
49 | @echo " coverage to run coverage check of the documentation (if enabled)"
50 |
51 | clean:
52 | rm -rf $(BUILDDIR)/*
53 |
54 | html:
55 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
56 | @echo
57 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
58 |
59 | dirhtml:
60 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
61 | @echo
62 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
63 |
64 | singlehtml:
65 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
66 | @echo
67 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
68 |
69 | pickle:
70 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
71 | @echo
72 | @echo "Build finished; now you can process the pickle files."
73 |
74 | json:
75 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
76 | @echo
77 | @echo "Build finished; now you can process the JSON files."
78 |
79 | htmlhelp:
80 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
81 | @echo
82 | @echo "Build finished; now you can run HTML Help Workshop with the" \
83 | ".hhp project file in $(BUILDDIR)/htmlhelp."
84 |
85 | qthelp:
86 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
87 | @echo
88 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
89 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
90 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ding0.qhcp"
91 | @echo "To view the help file:"
92 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ding0.qhc"
93 |
94 | applehelp:
95 | $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
96 | @echo
97 | @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
98 | @echo "N.B. You won't be able to view it unless you put it in" \
99 | "~/Library/Documentation/Help or install it in your application" \
100 | "bundle."
101 |
102 | devhelp:
103 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
104 | @echo
105 | @echo "Build finished."
106 | @echo "To view the help file:"
107 | @echo "# mkdir -p $$HOME/.local/share/devhelp/ding0"
108 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ding0"
109 | @echo "# devhelp"
110 |
111 | epub:
112 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
113 | @echo
114 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
115 |
116 | latex:
117 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
118 | @echo
119 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
120 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
121 | "(use \`make latexpdf' here to do that automatically)."
122 |
123 | latexpdf:
124 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
125 | @echo "Running LaTeX files through pdflatex..."
126 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
127 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
128 |
129 | latexpdfja:
130 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
131 | @echo "Running LaTeX files through platex and dvipdfmx..."
132 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
133 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
134 |
135 | text:
136 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
137 | @echo
138 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
139 |
140 | man:
141 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
142 | @echo
143 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
144 |
145 | texinfo:
146 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
147 | @echo
148 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
149 | @echo "Run \`make' in that directory to run these through makeinfo" \
150 | "(use \`make info' here to do that automatically)."
151 |
152 | info:
153 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
154 | @echo "Running Texinfo files through makeinfo..."
155 | make -C $(BUILDDIR)/texinfo info
156 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
157 |
158 | gettext:
159 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
160 | @echo
161 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
162 |
163 | changes:
164 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
165 | @echo
166 | @echo "The overview file is in $(BUILDDIR)/changes."
167 |
168 | linkcheck:
169 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
170 | @echo
171 | @echo "Link check complete; look for any errors in the above output " \
172 | "or in $(BUILDDIR)/linkcheck/output.txt."
173 |
174 | doctest:
175 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
176 | @echo "Testing of doctests in the sources finished, look at the " \
177 | "results in $(BUILDDIR)/doctest/output.txt."
178 |
179 | coverage:
180 | $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
181 | @echo "Testing of coverage in the sources finished, look at the " \
182 | "results in $(BUILDDIR)/coverage/python.txt."
183 |
184 | xml:
185 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
186 | @echo
187 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
188 |
189 | pseudoxml:
190 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
191 | @echo
192 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
193 |
--------------------------------------------------------------------------------
/doc/_static/ribbons.html:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/doc/_static/total_investment_costs_map.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/_static/total_investment_costs_map.png
--------------------------------------------------------------------------------
/doc/api.rst:
--------------------------------------------------------------------------------
1 | .. make doc-string generated documentation appear here
2 |
3 | .. toctree::
4 | :maxdepth: 4
5 | :glob:
6 |
7 |
8 | API
9 |
--------------------------------------------------------------------------------
/doc/api/ego.tools.rst:
--------------------------------------------------------------------------------
1 | ego\.tools package
2 | ==================
3 |
4 |
5 | ego\.tools\.economics
6 | ---------------------
7 |
8 | .. automodule:: ego.tools.economics
9 | :members:
10 | :undoc-members:
11 | :show-inheritance:
12 |
13 | ego\.tools\.edisgo_integration
14 | ------------------------------
15 |
16 | .. automodule:: ego.tools.edisgo_integration
17 | :members:
18 | :undoc-members:
19 | :show-inheritance:
20 |
21 | ego\.tools\.io
22 | --------------
23 |
24 | .. automodule:: ego.tools.io
25 | :members:
26 | :undoc-members:
27 | :show-inheritance:
28 |
29 | ego\.tools\.mv_cluster
30 | ----------------------
31 |
32 | .. automodule:: ego.tools.mv_cluster
33 | :members:
34 | :undoc-members:
35 | :show-inheritance:
36 |
37 | ego\.tools\.plots
38 | -----------------
39 |
40 | .. automodule:: ego.tools.plots
41 | :members:
42 | :undoc-members:
43 | :show-inheritance:
44 |
45 | ego\.tools\.results
46 | -------------------
47 |
48 | .. automodule:: ego.tools.results
49 | :members:
50 | :undoc-members:
51 | :show-inheritance:
52 |
53 |
54 | ego\.tools\.specs
55 | -----------------
56 |
57 | .. automodule:: ego.tools.specs
58 | :members:
59 | :undoc-members:
60 | :show-inheritance:
61 |
62 | ego\.tools\.storages
63 | --------------------
64 |
65 | .. automodule:: ego.tools.storages
66 | :members:
67 | :undoc-members:
68 | :show-inheritance:
69 |
70 | ego\.tools\.utilities
71 | ---------------------
72 |
73 | .. automodule:: ego.tools.utilities
74 | :members:
75 | :undoc-members:
76 | :show-inheritance:
77 |
--------------------------------------------------------------------------------
/doc/api/modules.rst:
--------------------------------------------------------------------------------
1 | ===
2 | eGo
3 | ===
4 |
5 |
6 |
7 | Overview of modules
8 | ===================
9 |
10 |
11 | .. toctree::
12 | :maxdepth: 7
13 |
14 | ego.tools
15 |
16 | scenario_setting.json
17 | ======================
18 |
19 | With the ``scenario_setting.json`` file you set up your calculation.
20 | The file can be found on
21 | `github <https://github.com/openego/eGo>`_.
22 |
23 | .. json:object:: scenario_setting.json
24 |
25 | This file contains all input settings for the eGo tool.
26 |
27 | :property global: Global (superordinate) settings that are valid for both eTraGo and eDisGo.
28 | :proptype global: :json:object:`global`
29 | :property eTraGo: eTraGo settings, only valid for eTraGo runs.
30 | :proptype eTraGo: :json:object:`eTraGo`
31 | :property eDisGo: eDisGo settings, only valid for eDisGo runs.
32 | :proptype eDisGo: :json:object:`eDisGo`
33 |
34 |
35 | .. json:object:: global
36 |
37 | :property bool eTraGo: Decide if you want to run the eTraGo tool (HV/EHV grid optimization).
38 | :property bool eDisGo: Decide if you want to run the eDisGo tool (MV grid optimization). Please note: eDisGo requires eTraGo = ``true``.
39 | :property string csv_import_eTraGo: ``false`` or path to previously calculated eTraGo results (in order to reload the results instead of performing a new run).
40 | :property string csv_import_eDisGo: ``false`` or path to previously calculated eDisGo results (in order to reload the results instead of performing a new run).
41 |
42 |
43 | .. json:object:: eTraGo
44 |
45 | This section of :json:object:`scenario_setting.json` contains all input parameters for the eTraGo tool. A description of the parameters can be found in the `eTraGo documentation <https://etrago.readthedocs.io/>`_.
46 |
47 |
48 | .. json:object:: eDisGo
49 |
50 | This section of :json:object:`scenario_setting.json` contains all input parameters for the eDisGo tool and the clustering of MV grids.
51 |
52 | :property string gridversion: This parameter is currently not used.
53 | :property string grid_path: Path to the MV grid files (created by `ding0 <https://github.com/openego/ding0>`_) (e.g. ``''data/MV_grids/20180713110719''``)
54 | :property string choice_mode: Mode that eGo uses to choose MV grids out of the files in **grid_path** (e.g. ``''manual''``, ``''cluster''`` or ``''all''``). If ``''manual''`` is chosen, the parameter **manual_grids** must contain a list of the desired grids. If ``''cluster''`` is chosen, **n_clusters** must specify the desired number of clusters and **cluster_attributes** must specify the applied cluster attributes. If ``''all''`` is chosen, all MV grids from **grid_path** are calculated.
55 | :property list cluster_attributes: List of strings containing the desired cluster attributes. Available attributes are all attributes returned from :py:func:`~ego.mv_clustering.mv_clustering.get_cluster_attributes`.
56 | :property bool only_cluster: If ``true``, eGo only identifies cluster results, but performs no eDisGo run. Please note that for **only_cluster** an eTraGo run or dataset must be provided.
57 | :property list manual_grids: List of MV grid IDs in case of **choice_mode** = ``''manual''`` (e.g. ``[1718,1719]``). Otherwise this parameter is ignored.
58 | :property int n_clusters: Number of MV grid clusters (from all grids in **grid_path**, a specified number of representative clusters is calculated) in case of **choice_mode** = ``''cluster''``. Otherwise this parameter is ignored.
59 | :property bool parallelization: If ``false``, the MV grids are calculated consecutively (this may take a very long time). In order to increase the performance of MV grid simulations, ``true`` allows the parallel calculation of MV grids. If **parallelization** = ``true``, **max_calc_time** and **max_workers** must be specified.
60 | :property float max_calc_time: Maximum calculation time in hours for eDisGo simulations. The calculation is terminated after this time and all costs are extrapolated based on the unfinished simulation. Please note that this parameter is only used if **parallelization** = ``true``.
61 | :property int max_workers: Number of workers (CPUs) that are allocated to the simulation. If the given value exceeds the number of available workers, it is reduced to the number of available workers. Please note that this parameter is only used if **parallelization** = ``true``.
62 | :property float max_cos_phi_renewable: Maximum power factor for wind and solar generators in MV grids (e.g. ``0.9``). If the reactive power (as calculated by eTraGo) exceeds this power factor, the reactive power is reduced in order to reach the power factor conditions.
63 | :property string solver: Solver eDisGo uses to optimize the curtailment and storage integration (e.g. ``''gurobi''``).
64 | :property string results: Path to folder where eDisGo's results will be saved.
65 | :property list tasks: List of strings defining the tasks to run. The eDisGo calculation for each MV grid can be divided into separate tasks, which is helpful in case one task fails and calculations do not need to be restarted from the beginning. The following tasks exist: ``''1_setup_grid''``, ``''2_specs_overlying_grid''``, ``''3_temporal_complexity_reduction''``, ``''4_optimisation''``, ``''5_grid_reinforcement''``.
66 |
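The overall structure of the file, expressed here as a Python dictionary for illustration, could look roughly as follows. This is only a sketch: the key names mirror the properties described above, all values are placeholders, and the eTraGo block is abbreviated.

.. code-block:: python

    # Sketch of the scenario_setting.json structure (placeholder values only).
    scenario_setting = {
        "global": {
            "eTraGo": True,               # run the eTraGo (HV/EHV) optimization
            "eDisGo": True,               # run the eDisGo (MV) calculation
            "csv_import_eTraGo": False,   # or a path to previous eTraGo results
            "csv_import_eDisGo": False,   # or a path to previous eDisGo results
        },
        "eTraGo": {
            # eTraGo parameters, see the eTraGo documentation
        },
        "eDisGo": {
            "grid_path": "data/MV_grids/20180713110719",
            "choice_mode": "cluster",     # "manual", "cluster" or "all"
            "n_clusters": 5,
            "only_cluster": False,
            "parallelization": True,
            "max_calc_time": 10,
            "max_workers": 2,
            "max_cos_phi_renewable": 0.9,
            "solver": "gurobi",
            "results": "results/edisgo",
        },
    }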
67 |
68 |
69 | appl.py
70 | ===========
71 |
72 | This is the application file for the tool eGo. The application eGo combines the
73 | transmission grid optimization of eTraGo with the distribution grid calculations of eDisGo.
74 |
75 | .. note:: The data source of eGo relies on
76 | the Open Energy Database. The registration for the publicly
77 | accessible API can be found on
78 | `openenergy-platform.org/login <http://openenergy-platform.org/login/>`_.
79 |
80 | Run the ``appl.py`` file with:
81 |
82 | .. code-block:: bash
83 |
84 | >>> python3 -i appl.py
85 | >>> ...
86 | >>> INFO:ego:Start calculation
87 | >>> ...
88 |
89 | The eGo application works like:
90 |
91 | .. code-block:: python
92 |
93 | >>> from ego.tools.io import eGo
94 | >>> ego = eGo(jsonpath='scenario_setting.json')
95 | >>> ego.etrago_line_loading()
96 | >>> print(ego.etrago.storage_costs)
97 | >>> ...
98 | >>> INFO:ego:Start calculation
99 | >>> ...
100 |
--------------------------------------------------------------------------------
/doc/conf.py:
--------------------------------------------------------------------------------
1 | """This file is part of
2 |
3 | It is developed in the project open_eGo: https://openegoproject.wordpress.com
4 |
5 | eGo lives at github: https://github.com/openego/eGo/
6 | The documentation is available on RTD: https://openego.readthedocs.io"""
7 |
8 |
9 | __copyright__ = (
10 | "Flensburg University of Applied Sciences, Europa-Universität "
11 | "Flensburg, Centre for Sustainable Energy Systems, DLR-Institute "
12 | "for Networked Energy Systems"
13 | )
14 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
15 | __author__ = "wolf_bunke"
16 |
17 |
18 | # -*- coding: utf-8 -*-
19 | #
20 | # eGo documentation build configuration file, created by
21 | # sphinx-quickstart on Fri Sep 29 10:55:47 2017.
22 | #
23 | # This file is execfile()d with the current directory set to its
24 | # containing dir.
25 | #
26 | # Note that not all possible configuration values are present in this
27 | # autogenerated file.
28 | #
29 | # All configuration values have a default; values that are commented out
30 | # serve to show the default.
31 |
32 | import os
33 | import sys
34 |
35 | from unittest.mock import MagicMock
36 |
37 | import sphinx_rtd_theme
38 |
39 | # from mock import Mock as MagicMock
40 |
41 | # If extensions (or modules to document with autodoc) are in another directory,
42 | # add these directories to sys.path here. If the directory is relative to the
43 | # documentation root, use os.path.abspath to make it absolute, like shown here.
44 | # sys.path.insert(0, os.path.abspath('.'))
45 | sys.path.insert(0, os.path.abspath("../"))
46 | sys.path.insert(0, os.path.abspath("../.."))
47 |
48 | # -- General configuration ------------------------------------------------
49 |
50 | # If your documentation needs a minimal Sphinx version, state it here.
51 | # needs_sphinx = '1.0'
52 |
53 | # Add any Sphinx extension module names here, as strings. They can be
54 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
55 | # ones.
56 | extensions = [
57 | "sphinx.ext.autodoc",
58 | "sphinx.ext.intersphinx",
59 | "sphinx.ext.todo",
60 | "sphinx.ext.coverage",
61 | "sphinx.ext.imgmath",
62 | "sphinx.ext.viewcode",
63 | "sphinx.ext.autosummary",
64 | "sphinx.ext.napoleon", # enable Napoleon Sphinx v>1.3
65 | "numpydoc",
66 | "sphinxcontrib.httpdomain", # for restfull API
67 | "sphinxcontrib.autohttp.flask",
68 | "sphinx.ext.extlinks", # enables external links with a key
69 | "sphinxjsondomain",
70 | ]
71 |
72 |
73 | # https://stackoverflow.com/questions/12206334/sphinx-autosummary-toctree-contains-reference-to-nonexisting-document-warnings
74 | numpydoc_show_class_members = False
75 |
76 | # Napoleon settings
77 | napoleon_google_docstring = True
78 | napoleon_numpy_docstring = True
79 | napoleon_include_init_with_doc = False
80 | napoleon_include_private_with_doc = False
81 | napoleon_include_special_with_doc = False
82 | napoleon_use_admonition_for_examples = False
83 | napoleon_use_admonition_for_notes = False
84 | napoleon_use_admonition_for_references = False
85 | napoleon_use_ivar = False
86 | napoleon_use_param = True
87 | napoleon_use_rtype = True
88 | napoleon_use_keyword = False
89 |
90 |
91 | # Dictionary of external links
92 | extlinks = {
93 | "pandas": ("http://pandas.pydata.org/pandas-docs/stable/api.html#%s", "pandas."),
94 | "sqlalchemy": (
95 | "http://docs.sqlalchemy.org/en/latest/orm/session_basics.html%s",
96 | "SQLAlchemy session object",
97 | ),
98 | "shapely": ("http://toblerity.org/shapely/manual.html#%s", "Shapely object"),
99 | }
100 |
101 |
102 | # test oedb implementation
103 | def rstjinja(app, docname, source):
104 | """
105 | Render our pages as a jinja template for fancy templating goodness.
106 | """
107 | # Make sure we're outputting HTML
108 | if app.builder.format != "html":
109 | return
110 | src = source[0]
111 | rendered = app.builder.templates.render_string(src, app.config.html_context)
112 | source[0] = rendered
113 |
114 |
115 | def setup(app):
116 | app.connect("source-read", rstjinja)
117 |
118 |
119 | # import requests
120 |
121 | # oep_url= 'http://oep.iks.cs.ovgu.de/'
122 |
123 | # get data from oedb test
124 |
125 | # import json
126 | # path = os.getcwd()
127 | # json_file = '../ego/scenario_setting.json'
128 |
129 | # with open(path +'/'+json_file) as f:
130 | # scn_set = json.load(f)
131 | #
132 | # json_global = list(scn_set['eTraGo'])
133 | #
134 | # html_context = {
135 | # 'power_class': power_class,
136 | # 'scn_setting': scn_set
137 | # }
138 |
139 |
140 | # add RestFull API
141 | httpexample_scheme = "https"
142 |
143 |
144 | # Add any paths that contain templates here, relative to this directory.
145 | templates_path = ["_templates"]
146 |
147 | # The suffix(es) of source filenames.
148 | # You can specify multiple suffix as a list of string:
149 | # source_suffix = ['.rst', '.md']
150 | source_suffix = ".rst"
151 |
152 | # The encoding of source files.
153 | # source_encoding = 'utf-8-sig'
154 |
155 | # The master toctree document.
156 | master_doc = "index"
157 |
158 | # General information about the project.
159 | project = "eGo"
160 | copyright = "2015-2018, open_eGo-Team"
161 | author = "open_eGo-Team"
162 |
163 |
164 | # The version info for the project you're documenting, acts as replacement for
165 | # |version| and |release|, also used in various other places throughout the
166 | # built documents.
167 | #
168 | # The short X.Y version.
169 | version = "0.3.4"
170 | # The full version, including alpha/beta/rc tags.
171 | release = "0.3.4"
172 |
173 |
174 | # The language for content autogenerated by Sphinx. Refer to documentation
175 | # for a list of supported languages.
176 | #
177 | # This is also used if you do content translation via gettext catalogs.
178 | # Usually you set "language" from the command line for these cases.
179 | language = "en"
180 |
181 | # There are two options for replacing |today|: either, you set today to some
182 | # non-false value, then it is used:
183 | # today = ''
184 | # Else, today_fmt is used as the format for a strftime call.
185 | # today_fmt = '%B %d, %Y'
186 |
187 | # List of patterns, relative to source directory, that match files and
188 | # directories to ignore when looking for source files.
189 | exclude_patterns = ["_build", "whatsnew", "_static"]
190 |
191 | # The reST default role (used for this markup: `text`) to use for all
192 | # documents.
193 | # default_role = None
194 |
195 | # If true, '()' will be appended to :func: etc. cross-reference text.
196 | # add_function_parentheses = True
197 |
198 | # If true, the current module name will be prepended to all description
199 | # unit titles (such as .. function::).
200 | # add_module_names = True
201 |
202 | # If true, sectionauthor and moduleauthor directives will be shown in the
203 | # output. They are ignored by default.
204 | # show_authors = False
205 |
206 | # The name of the Pygments (syntax highlighting) style to use.
207 | pygments_style = "sphinx"
208 |
209 | # A list of ignored prefixes for module index sorting.
210 | # modindex_common_prefix = []
211 |
212 | # If true, keep warnings as "system message" paragraphs in the built documents.
213 | # keep_warnings = False
214 |
215 | # If true, `todo` and `todoList` produce output, else they produce nothing.
216 | todo_include_todos = True
217 |
218 |
219 | # Fix import error of modules which depend on C modules (mock out the imports for
220 | # these modules)
221 | # see http://read-the-docs.readthedocs.io/en/latest/faq.html#i-get-import-
222 | # errors-on-libraries-that-depend-on-c-modules
223 |
224 |
225 | if "READTHEDOCS" in os.environ:
226 |
227 | class Mock(MagicMock):
228 | @classmethod
229 | def __getattr__(cls, name):
230 | return MagicMock()
231 |
232 | MOCK_MODULES = ["ding0", "ding0.results", "shapely"]
233 | sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
234 |
235 | MOCK_MODULES = [
236 | "libgeos",
237 | "geos",
238 | "libgeos_c",
239 | "geos_c",
240 | "libgeos_c.so.1",
241 | "libgeos_c.so",
242 | "shapely",
243 | "geoalchemy2",
244 | "geoalchemy2.shape ",
245 | ]
246 |
247 |
248 | # -- Options for HTML output ----------------------------------------------
249 |
250 | # The theme to use for HTML and HTML Help pages. See the documentation for
251 | # a list of builtin themes.
252 | # html_theme = 'alabaster'
253 |
254 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
255 | html_theme = "sphinx_rtd_theme"
256 |
257 | # Theme options are theme-specific and customize the look and feel of a theme
258 | # further. For a list of options available for each theme, see the
259 | # documentation.
260 | # html_theme_options = {}
261 |
262 | # Add any paths that contain custom themes here, relative to this directory.
263 | # html_theme_path = []
264 |
265 | # The name for this set of Sphinx documents. If None, it defaults to
266 | # " v documentation".
267 | # html_title = None
268 |
269 | # A shorter title for the navigation bar. Default is the same as html_title.
270 | # html_short_title = None
271 |
272 | # The name of an image file (relative to this directory) to place at the top
273 | # of the sidebar.
274 | # html_logo = None
275 |
276 | # The name of an image file (within the static path) to use as favicon of the
277 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
278 | # pixels large.
279 | # html_favicon = None
280 |
281 | # Add any paths that contain custom static files (such as style sheets) here,
282 | # relative to this directory. They are copied after the builtin static files,
283 | # so a file named "default.css" will overwrite the builtin "default.css".
284 | html_static_path = ["_static"]
285 |
286 | # Add any extra paths that contain custom files (such as robots.txt or
287 | # .htaccess) here, relative to this directory. These files are copied
288 | # directly to the root of the documentation.
289 | # html_extra_path = []
290 |
291 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
292 | # using the given strftime format.
293 | # html_last_updated_fmt = '%b %d, %Y'
294 |
295 | # If true, SmartyPants will be used to convert quotes and dashes to
296 | # typographically correct entities.
297 | # html_use_smartypants = True
298 |
299 | # Custom sidebar templates, maps document names to template names.
300 | # html_sidebars = {}
301 |
302 | # Additional templates that should be rendered to pages, maps page names to
303 | # template names.
304 | # html_additional_pages = {}
305 |
306 | # If false, no module index is generated.
307 | # html_domain_indices = True
308 |
309 | # If false, no index is generated.
310 | # html_use_index = True
311 |
312 | # If true, the index is split into individual pages for each letter.
313 | # html_split_index = False
314 |
315 | # If true, links to the reST sources are added to the pages.
316 | # html_show_sourcelink = True
317 |
318 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
319 | # html_show_sphinx = True
320 |
321 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
322 | # html_show_copyright = True
323 |
324 | # If true, an OpenSearch description file will be output, and all pages will
325 | # contain a <link> tag referring to it. The value of this option must be the
326 | # base URL from which the finished HTML is served.
327 | # html_use_opensearch = ''
328 |
329 | # This is the file name suffix for HTML files (e.g. ".xhtml").
330 | # html_file_suffix = None
331 |
332 | # Language to be used for generating the HTML full-text search index.
333 | # Sphinx supports the following languages:
334 | # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
335 | # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
336 | # html_search_language = 'en'
337 |
338 | # A dictionary with options for the search language support, empty by default.
339 | # Now only 'ja' uses this config value
340 | # html_search_options = {'type': 'default'}
341 |
342 | # The name of a javascript file (relative to the configuration directory) that
343 | # implements a search results scorer. If empty, the default will be used.
344 | # html_search_scorer = 'scorer.js'
345 |
346 | # Output file base name for HTML help builder.
347 | htmlhelp_basename = "eGodoc"
348 |
349 | # -- Options for LaTeX output ---------------------------------------------
350 |
351 | latex_elements = {
352 | # The paper size ('letterpaper' or 'a4paper').
353 | # 'papersize': 'letterpaper',
354 | # The font size ('10pt', '11pt' or '12pt').
355 | # 'pointsize': '10pt',
356 | # Additional stuff for the LaTeX preamble.
357 | # 'preamble': '',
358 | # Latex figure (float) alignment
359 | # 'figure_align': 'htbp',
360 | }
361 |
362 | # Grouping the document tree into LaTeX files. List of tuples
363 | # (source start file, target name, title,
364 | # author, documentclass [howto, manual, or own class]).
365 | latex_documents = [
366 | (master_doc, "eGo.tex", "eGo Documentation", r"open\_eGo-Team", "manual"),
367 | ]
368 |
369 | # The name of an image file (relative to this directory) to place at the top of
370 | # the title page.
371 | # latex_logo = None
372 |
373 | # For "manual" documents, if this is true, then toplevel headings are parts,
374 | # not chapters.
375 | # latex_use_parts = False
376 |
377 | # If true, show page references after internal links.
378 | # latex_show_pagerefs = False
379 |
380 | # If true, show URL addresses after external links.
381 | # latex_show_urls = False
382 |
383 | # Documents to append as an appendix to all manuals.
384 | # latex_appendices = []
385 |
386 | # If false, no module index is generated.
387 | # latex_domain_indices = True
388 |
389 |
390 | # -- Options for manual page output ---------------------------------------
391 |
392 | # One entry per manual page. List of tuples
393 | # (source start file, name, description, authors, manual section).
394 | man_pages = [(master_doc, "eGo", "eGo Documentation", [author], 1)]
395 |
396 | # If true, show URL addresses after external links.
397 | # man_show_urls = False
398 |
399 |
400 | # -- Options for Texinfo output -------------------------------------------
401 |
402 | # Grouping the document tree into Texinfo files. List of tuples
403 | # (source start file, target name, title, author,
404 | # dir menu entry, description, category)
405 | texinfo_documents = [
406 | (master_doc, "eGo", "eGo Documentation", author, "eGo", "Title", "Miscellaneous"),
407 | ]
408 |
409 | # Documents to append as an appendix to all manuals.
410 | # texinfo_appendices = []
411 |
412 | # If false, no module index is generated.
413 | # texinfo_domain_indices = True
414 |
415 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
416 | # texinfo_show_urls = 'footnote'
417 |
418 | # If true, do not generate a @detailmenu in the "Top" node's menu.
419 | # texinfo_no_detailmenu = False
420 |
421 |
422 | # Example configuration for intersphinx: refer to the Python standard library.
423 | intersphinx_mapping = {
424 | "python": ("https://docs.python.org/3", None),
425 | "etrago": ("https://etrago.readthedocs.io/en/latest", None),
426 | "edisgo": ("http://edisgo.readthedocs.io/en/dev", None),
427 | "ding0": ("https://dingo.readthedocs.io/en/dev", None),
428 | "pypsa": ("https://pypsa.org/doc/", None),
429 | "sqlalchemy": ("https://docs.sqlalchemy.org/en/latest/", None),
430 | }
431 |
432 | # Numbered figures
433 | numfig = True
434 |
435 | autodoc_member_order = "bysource"
436 |
--------------------------------------------------------------------------------
/doc/developer.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Developer notes
3 | ===============
4 |
5 |
6 | Installation
7 | ============
8 |
9 | .. note::
10 | Installation is only tested on Linux (Ubuntu 16.04).
11 |
12 | Please read the Installation Guideline :ref:`ego.doc.installation`.
13 |
14 |
15 | 1. Use virtual environment
16 | --------------------------
17 |
18 | Create a virtual environment and activate it:
19 |
20 | .. code-block:: bash
21 |
22 | $ virtualenv --clear -p python3.5 ego_dev
23 | $ cd ego_dev/
24 | $ source bin/activate
25 |
26 |
27 | 2. Get eGo
28 | ----------
29 |
30 | Clone eGo from github.com by running the following command in your terminal:
31 |
32 | .. code-block:: bash
33 |
34 | $ git clone https://github.com/openego/eGo
35 |
36 |
37 | With your activated environment `cd` to the cloned directory and run
38 | ``pip3 install -e eGo --process-dependency-links`` .
39 | This will install all needed packages into your environment.
40 |
41 | 3. Get your Database login data
42 | -------------------------------
43 |
44 | `Learn more here `_.
45 |
46 | 4. Create Dingo grids
47 | ----------------------
48 |
49 | Install ding0 from github.com and run the ``example_parallel_multiple_grid_districts.py``
50 | script, which can be found under ``ding0/ding0/examples/``.
51 |
52 | .. code-block:: bash
53 |
54 | $ git clone https://github.com/openego/ding0.git
55 | $ pip3 install -e ding0
56 | $ python3 ding0/ding0/examples/example_parallel_multiple_grid_districts.py
57 |
58 | `Learn more about Dingo <https://dingo.readthedocs.io/>`_.
59 | Before you run the script, also check the configs of Dingo and eDisGo in order to
60 | use the right database version. You find these files under
61 | ``ding0/ding0/config/config_db_tables.cfg`` and
62 | ``~/.edisgo/config/config_db_tables.cfg``. Your created ding0 grids are stored in
63 | ``~/.ding0/``.
64 |
65 |
66 |
67 | eDisGo and eTraGo
68 | -----------------
69 |
70 | Please read the Developer notes of
71 | `eDisGo `_ and
72 | `eTraGo `_.
73 |
74 |
75 | Error handling
76 | --------------
77 |
78 | 1. Installation error: use pip 18.1 for your installation:
79 | ``pip install --upgrade pip==18.1``
80 |
81 | 2. Installation error of eTraGo, eDisGo, the PyPSA fork or ding0.
82 | If you have problems with one of those packages, please clone it from
83 | *github.com* and install it from the master or dev branch. For example
84 | ``pip3 install -e git+https://github.com/openego/PyPSA.git@master#egg=pypsafork``
85 |
86 | 3. Matplotlib error on servers and a few other systems. Please change your settings
87 | in ``matplotlibrc`` from ``backend : TkAgg`` to ``backend : PDF``. You can
88 | find the file for example in a virtual environment under
89 | ``~/env/lib/python3.5/site-packages/matplotlib/mpl-data$ vim matplotlibrc``.
90 | `Learn more here. `_.
91 |
92 | 4. Geopandas error caused by Rtree: ``Could not find libspatialindex_c library``.
93 | Please reinstall Rtree with ``sudo pip3 install Rtree`` or install
94 | ``libspatialindex_c`` via ``sudo apt install python3-rtree``. On Windows or
95 | macOS you may have to install ``libspatialindex_c`` straight from source.
96 |
--------------------------------------------------------------------------------
/doc/files/investment_costs_of_grid_ measures.csv:
--------------------------------------------------------------------------------
1 | voltage level,component,capital costs,unit,source
2 | 110,AC overhead transmission lines,230,EUR/MVA,Dena 2012
3 | 220,AC overhead transmission lines,290,EUR/MVA,NEP 2015
4 | 380,AC overhead transmission lines,85,EUR/MVA,NEP 2015
5 | DC,DC overhead transmission lines,375,EUR/MVA,NEP 2015
6 | 110/220,transformer,7500,EUR/MVA,Dena 2012
7 | 110/380,transformer,17333,EUR/MVA,NEP 2015
8 | 220/380,transformer,14166,EUR/MVA,NEP 2015
9 |
--------------------------------------------------------------------------------
/doc/files/storage_units.csv:
--------------------------------------------------------------------------------
1 | attribute, type, unit, default, description, status
2 | name, string, n/a, n/a, Unique name, Input (required)
3 | bus, string, n/a, n/a,Name of bus to which storage unit is attached., Input (required)
4 | control, string, n/a, PQ,"P, Q, V control strategy for PF, must be ""PQ"", ""PV"" or ""Slack"".", Input (optional)
5 | type, string, n/a, n/a,Placeholder for storage unit type. Not yet implemented., Input (optional)
6 | p_nom, float,kW,0, Nominal power for limits in OPF., Input (optional)
7 | p_nom_extendable, boolean, n/a,FALSE, Switch to allow capacity p_nom to be extended in OPF., Input (optional)
8 | p_nom_min, float, kW,0,"If p_nom is extendable in OPF, set its minimum value.", Input (optional)
9 | p_nom_max, float, kW, inf,"If p_nom is extendable in OPF, set its maximum value (e.g. limited by potential).", Input (optional)
10 | p_min_pu, static or series, per unit,-1,The minimum output for each snapshot per unit of p_nom for the OPF (negative sign implies storing mode withdrawing power from bus)., Input (optional)
11 | p_max_pu, static or series, per unit,1,The maximum output for each snapshot per unit of p_nom for the OPF., Input (optional)
12 | p_set, static or series, kW,0, active power set point (for PF), Input (optional)
13 | q_set, static or series, kVar,0, reactive power set point (for PF), Input (optional)
14 | sign, float, n/a,1, power sign, Input (optional)
15 | carrier, string, n/a, n/a,"Prime mover energy carrier (e.g. coal, gas, wind, solar); required for global constraints on primary energy in OPF", Input (optional)
16 | marginal_cost, static or series, currency/kWh,0,Marginal cost of production of 1 kWh., Input (optional)
17 | capital_cost, float, currency/kW,0,Capital cost of extending p_nom by 1 kW., Input (optional)
18 | state_of_charge_initial, float, kWh,0, State of charge before the snapshots in the OPF., Input (optional)
19 | state_of_charge_set, static or series, kWh, NaN, State of charge set points for snapshots in the OPF., Input (optional)
20 | cyclic_state_of_charge, boolean, n/a,FALSE,"Switch: if True, then state_of_charge_initial is ignored and the initial state of charge is set to the final state of charge for the group of snapshots in the OPF (soc[-1] = soc[len(snapshots)-1]).", Input (optional)
21 | max_hours, float, hours,1, Maximum state of charge capacity in terms of hours at full output capacity p_nom, Input (optional)
22 | efficiency_store, float, per unit,1, Efficiency of storage on the way into the storage., Input (optional)
23 | efficiency_dispatch, float, per unit,1, Efficiency of storage on the way out of the storage., Input (optional)
24 | standing_loss, float, per unit,0, Losses per hour to state of charge., Input (optional)
25 | inflow, static or series, kW,0,"Inflow to the state of charge, e.g. due to river inflow in hydro reservoir.", Input (optional)
26 | p, series, kW,0, active power at bus (positive if net generation), Output
27 | q, series, kVar,0, reactive power (positive if net generation), Output
28 | state_of_charge, series, kWh, NaN, State of charge as calculated by the OPF., Output
29 | spill, series, kW,0, Spillage for each snapshot., Output
30 | p_nom_opt, float, kW,0, Optimised nominal power., Output
31 |
--------------------------------------------------------------------------------
/doc/getting_started.rst:
--------------------------------------------------------------------------------
1 | ===============
2 | Getting started
3 | ===============
4 |
5 | In order to start and run the eGo tool, a few steps need to be done.
6 |
7 | Steps to run eGo
8 | ================
9 |
10 | 1. Are you registered on the OpenEnergy Platform?
11 | The registration for the publicly accessible API can be found on
12 | `openenergy-platform.org/login <http://openenergy-platform.org/login/>`_.
13 |
14 | 2. Do you have Python 3 installed? If not, install it, for example via the
15 | `Anaconda <https://www.anaconda.com/download/>`_ Python distribution.
16 |
17 | 3. Install and use a virtual environment for your installation (optional).
18 |
19 | 4. Install the eGo tool ``pip3 install eGo --process-dependency-links``.
20 |
21 | 5. Create mid and low voltage distribution grids with ding0.
22 | Learn more about Ding0 on `dingo.readthedocs.io <https://dingo.readthedocs.io/>`_.
23 |
24 | 6. Check and prepare your eGo setting in ``ego/scenario_setting.json``. Add your
25 | local paths and prepare your parameters.
26 |
27 | 7. Start your calculation and run the tool, for example from the folder
28 | ``eGo/ego`` with ``python3 appl.py``. You can also use any other Python
29 | terminal, Jupyter Notebook or editor.
30 |
31 |
32 |
33 | How to use eGo?
34 | ===============
35 |
36 | Start and use eGo from the terminal.
37 |
38 | .. code-block:: bash
39 |
40 | >>> python3 appl.py
41 | >>> ...
42 | >>> INFO:ego:Start calculation
43 | >>> ...
44 |
45 |
46 |
47 | Examples
48 | --------
49 | Inside the appl.py
50 |
51 | .. code-block:: python
52 |
53 | # import the eGo tool
54 | from ego.tools.io import eGo
55 |
56 | # Run your scenario
57 | ego = eGo(jsonpath='scenario_setting.json')
58 |
59 | # Analyse your results on extra high voltage level (etrago)
60 | ego.etrago_line_loading()
61 |
62 |
63 |
64 | Tutorials as Jupyter Notebook
65 | =============================
66 |
67 | Learn more about Jupyter Notebook and how to install and use it
68 | on `jupyter.org <https://jupyter.org/>`_.
69 |
70 |
71 | `Workshop open_eGo Session eGo (in German) `_
72 |
73 | `Workshop open_eGo Session eTraGo (in German) `_
74 |
75 | `Workshop open_eGo Session DinGo (in German) `_
76 |
77 | `Workshop open_eGo Session eDisGo (in German) `_
78 |
79 | `OpenMod eTraGo Tutorial (in English) `_
80 |
81 |
82 |
83 |
84 | eGo Result Example of Germany
85 | =============================
86 |
87 |
88 | A small example of the eGo results is displayed below. The full page can be found `here `_
89 |
90 |
91 | .. raw:: html
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 | The plot is created by the eGo function:
100 |
101 | .. code-block:: python
102 |
103 | ego.iplot
104 |
--------------------------------------------------------------------------------
/doc/images/battery.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/battery.png
--------------------------------------------------------------------------------
/doc/images/eGo_integration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/eGo_integration.png
--------------------------------------------------------------------------------
/doc/images/etrago-storage_parameters.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/etrago-storage_parameters.png
--------------------------------------------------------------------------------
/doc/images/open_ego_icon.svg:
--------------------------------------------------------------------------------
1 |
2 |
112 |
--------------------------------------------------------------------------------
/doc/images/open_ego_icon_web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/open_ego_icon_web.png
--------------------------------------------------------------------------------
/doc/images/open_ego_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/open_ego_logo.png
--------------------------------------------------------------------------------
/doc/images/open_ego_models_overview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/open_ego_models_overview.png
--------------------------------------------------------------------------------
/doc/images/trafo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/openego/eGo/dfab09eff2617927905e3021ef3a666d1e5ff311/doc/images/trafo.png
--------------------------------------------------------------------------------
/doc/index.rst:
--------------------------------------------------------------------------------
1 | ===============================
2 | Welcome to eGo's documentation!
3 | ===============================
4 |
5 |
6 |
7 | .. image:: images/open_ego_logo.png
8 | :scale: 40%
9 |
10 |
11 |
12 |
13 | .. note:: The data source of eGo relies on
14 | the Open Energy Database. The registration for the publicly
15 | accessible API can be found on
16 | `openenergy-platform.org/login <http://openenergy-platform.org/login/>`_.
17 |
18 |
19 | Overview
20 | ========
21 |
22 | .. toctree::
23 | :maxdepth: 2
24 |
25 | welcome
26 | installation
27 | getting_started
28 | theoretical_background
29 | developer
30 | whatsnew
31 | api
32 |
33 |
34 | Also take a look at the documentation of
35 | `eTraGo <https://etrago.readthedocs.io/>`_ and
36 | `eDisGo <https://edisgo.readthedocs.io/>`_, which are part of eGo.
37 |
38 |
39 |
40 | Indices and tables
41 | ==================
42 |
43 | * :ref:`genindex`
44 | * :ref:`modindex`
45 | * :ref:`search`
46 |
47 |
48 |
49 | .. raw:: html
50 | :file: _static/ribbons.html
51 |
--------------------------------------------------------------------------------
/doc/installation.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 | eGo is designed as a Python package, therefore it is mandatory to have
5 | `Python 3 <https://www.python.org/>`_ installed. If you have a
6 | working Python 3 environment, use PyPI to install the latest eGo version.
7 | We highly recommend using a virtual environment. Use the following pip
8 | command in order to install eGo:
9 |
10 | .. code-block:: bash
11 |
12 | $ pip3 install eGo --process-dependency-links
13 |
14 | Please ensure that you are using pip version 18.1.
15 | Use ``pip install --upgrade pip==18.1`` to get the right pip version.
16 | In case of problems with the installation and the ``dependency_links`` of
17 | the PyPSA fork, please install PyPSA from the github.com/openego repository.
18 |
19 | .. code-block:: bash
20 |
21 | $ pip3 install -e git+https://github.com/openego/PyPSA@master#egg=0.11.0fork
22 |
23 |
24 | Using virtual environment
25 | =========================
26 |
27 | At first create a virtual environment and activate it:
28 |
29 | .. code-block:: bash
30 |
31 | $ virtualenv venv --clear -p python3.5
32 | $ source venv/bin/activate
33 | $ cd venv
34 |
35 | Inside your virtual environment you can install eGo with the pip command.
36 |
37 | Linux and Ubuntu
38 | ================
39 |
40 | The package eGo is tested with Ubuntu 16.04 and 18.04 inside a virtual
41 | environment of `virtualenv `_.
42 | The installation is shown above.
43 |
44 |
45 |
46 | Windows or Mac OSX users
47 | ========================
48 |
49 | For Windows and/or Mac OSX users we highly recommend installing and using Anaconda
50 | for your Python 3 installation. First install Anaconda including Python version 3.5 or
51 | higher from https://www.anaconda.com/download/, then open an Anaconda
52 | prompt as administrator and run:
53 |
54 | .. code-block:: bash
55 |
56 | $ conda install pip
57 | $ conda config --add channels conda-forge
58 | $ conda install shapely
59 | $ pip3 install eGo --process-dependency-links
60 |
61 | The full documentation can be found
62 | `on this page `_. We use Anaconda
63 | with its own environment in order to reduce problems with packages and different
64 | versions on our system. Learn more about
65 | `Anaconda `_
66 | environments.
67 |
68 |
69 |
70 | Setup database connection
71 | =========================
72 | The package ``ego.io`` gives you a Python SQLAlchemy representation of
73 | the **OpenEnergy-Database** (oedb) and access to it by using the
74 | `oedialect <https://github.com/openego/oedialect>`_ - an SQLAlchemy binding
75 | Python package for the REST API used by the OpenEnergy Platform (OEP). Your API
76 | access / login data will be saved in the folder ``.egoio`` in the file
77 | ``config.ini``. You can create a new account on
78 | `openenergy-platform.org/login <http://openenergy-platform.org/login/>`_.
79 |
80 |
81 | oedialect connection
82 | --------------------
83 |
84 | .. code-block:: bash
85 |
86 | [oedb]
87 | dialect = oedialect
88 | username =
89 | database = oedb
90 | host = openenergy-platform.org
91 | port = 80
92 | password =
93 |
94 |
95 | Local database connection
96 | -------------------------
97 |
98 | .. code-block:: bash
99 |
100 | [local]
101 | username = YourOEDBUserName
102 | database = YourLocalDatabaseName
103 | host = localhost or 127.0.0.1
104 | port = 5433
105 | pw = YourLocalPassword
106 |
107 |
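If you want to check such a connection outside of eGo, the values of the ``[local]``
section map directly onto a standard SQLAlchemy/psycopg2 connection string. The
following minimal sketch is not part of eGo's API; the credentials are the
placeholder values from the example above:

.. code-block:: python

    from sqlalchemy import create_engine

    # Placeholder values from the [local] example above
    username = "YourOEDBUserName"
    password = "YourLocalPassword"
    host = "127.0.0.1"
    port = 5433
    database = "YourLocalDatabaseName"

    # Build a PostgreSQL engine from the config values
    engine = create_engine(
        "postgresql+psycopg2://{}:{}@{}:{}/{}".format(
            username, password, host, port, database
        )
    )
    print(engine)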
108 |
109 | Old developer connection
110 | ------------------------
111 |
112 | .. code-block:: bash
113 |
114 | [oedb]
115 | username = YourOEDBUserName
116 | database = oedb
117 | host = oe2.iws.cs.ovgu.de
118 | port = 5432
119 | pw = YourOEDBPassword
120 |
121 |
122 |
123 | Please find more information in the *Developer notes*.
124 |
--------------------------------------------------------------------------------
/doc/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. xml to make Docutils-native XML files
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes
38 | echo. linkcheck to check all external links for integrity
39 | echo. doctest to run all doctests embedded in the documentation if enabled
40 | echo. coverage to run coverage check of the documentation if enabled
41 | goto end
42 | )
43 |
44 | if "%1" == "clean" (
45 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
46 | del /q /s %BUILDDIR%\*
47 | goto end
48 | )
49 |
50 |
51 | REM Check if sphinx-build is available and fallback to Python version if any
52 | %SPHINXBUILD% 2> nul
53 | if errorlevel 9009 goto sphinx_python
54 | goto sphinx_ok
55 |
56 | :sphinx_python
57 |
58 | set SPHINXBUILD=python -m sphinx.__init__
59 | %SPHINXBUILD% 2> nul
60 | if errorlevel 9009 (
61 | echo.
62 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
63 | echo.installed, then set the SPHINXBUILD environment variable to point
64 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
65 | echo.may add the Sphinx directory to PATH.
66 | echo.
67 | echo.If you don't have Sphinx installed, grab it from
68 | echo.http://sphinx-doc.org/
69 | exit /b 1
70 | )
71 |
72 | :sphinx_ok
73 |
74 |
75 | if "%1" == "html" (
76 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
77 | if errorlevel 1 exit /b 1
78 | echo.
79 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
80 | goto end
81 | )
82 |
83 | if "%1" == "dirhtml" (
84 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
85 | if errorlevel 1 exit /b 1
86 | echo.
87 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
88 | goto end
89 | )
90 |
91 | if "%1" == "singlehtml" (
92 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
93 | if errorlevel 1 exit /b 1
94 | echo.
95 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
96 | goto end
97 | )
98 |
99 | if "%1" == "pickle" (
100 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
101 | if errorlevel 1 exit /b 1
102 | echo.
103 | echo.Build finished; now you can process the pickle files.
104 | goto end
105 | )
106 |
107 | if "%1" == "json" (
108 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
109 | if errorlevel 1 exit /b 1
110 | echo.
111 | echo.Build finished; now you can process the JSON files.
112 | goto end
113 | )
114 |
115 | if "%1" == "htmlhelp" (
116 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
117 | if errorlevel 1 exit /b 1
118 | echo.
119 | echo.Build finished; now you can run HTML Help Workshop with the ^
120 | .hhp project file in %BUILDDIR%/htmlhelp.
121 | goto end
122 | )
123 |
124 | if "%1" == "qthelp" (
125 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
126 | if errorlevel 1 exit /b 1
127 | echo.
128 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
129 | .qhcp project file in %BUILDDIR%/qthelp, like this:
130 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\ding0.qhcp
131 | echo.To view the help file:
132 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\ding0.qhc
133 | goto end
134 | )
135 |
136 | if "%1" == "devhelp" (
137 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
138 | if errorlevel 1 exit /b 1
139 | echo.
140 | echo.Build finished.
141 | goto end
142 | )
143 |
144 | if "%1" == "epub" (
145 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
146 | if errorlevel 1 exit /b 1
147 | echo.
148 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
149 | goto end
150 | )
151 |
152 | if "%1" == "latex" (
153 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
154 | if errorlevel 1 exit /b 1
155 | echo.
156 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
157 | goto end
158 | )
159 |
160 | if "%1" == "latexpdf" (
161 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
162 | cd %BUILDDIR%/latex
163 | make all-pdf
164 | cd %~dp0
165 | echo.
166 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
167 | goto end
168 | )
169 |
170 | if "%1" == "latexpdfja" (
171 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
172 | cd %BUILDDIR%/latex
173 | make all-pdf-ja
174 | cd %~dp0
175 | echo.
176 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
177 | goto end
178 | )
179 |
180 | if "%1" == "text" (
181 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
182 | if errorlevel 1 exit /b 1
183 | echo.
184 | echo.Build finished. The text files are in %BUILDDIR%/text.
185 | goto end
186 | )
187 |
188 | if "%1" == "man" (
189 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
190 | if errorlevel 1 exit /b 1
191 | echo.
192 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
193 | goto end
194 | )
195 |
196 | if "%1" == "texinfo" (
197 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
198 | if errorlevel 1 exit /b 1
199 | echo.
200 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
201 | goto end
202 | )
203 |
204 | if "%1" == "gettext" (
205 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
206 | if errorlevel 1 exit /b 1
207 | echo.
208 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
209 | goto end
210 | )
211 |
212 | if "%1" == "changes" (
213 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
214 | if errorlevel 1 exit /b 1
215 | echo.
216 | echo.The overview file is in %BUILDDIR%/changes.
217 | goto end
218 | )
219 |
220 | if "%1" == "linkcheck" (
221 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
222 | if errorlevel 1 exit /b 1
223 | echo.
224 | echo.Link check complete; look for any errors in the above output ^
225 | or in %BUILDDIR%/linkcheck/output.txt.
226 | goto end
227 | )
228 |
229 | if "%1" == "doctest" (
230 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
231 | if errorlevel 1 exit /b 1
232 | echo.
233 | echo.Testing of doctests in the sources finished, look at the ^
234 | results in %BUILDDIR%/doctest/output.txt.
235 | goto end
236 | )
237 |
238 | if "%1" == "coverage" (
239 | %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
240 | if errorlevel 1 exit /b 1
241 | echo.
242 | echo.Testing of coverage in the sources finished, look at the ^
243 | results in %BUILDDIR%/coverage/python.txt.
244 | goto end
245 | )
246 |
247 | if "%1" == "xml" (
248 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
249 | if errorlevel 1 exit /b 1
250 | echo.
251 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
252 | goto end
253 | )
254 |
255 | if "%1" == "pseudoxml" (
256 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
257 | if errorlevel 1 exit /b 1
258 | echo.
259 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
260 | goto end
261 | )
262 |
263 | :end
264 |
--------------------------------------------------------------------------------
/doc/theoretical_background.rst:
--------------------------------------------------------------------------------
1 | ======================
2 | Theoretical background
3 | ======================
4 |
5 | .. contents::
6 |
7 |
8 | Models overview
9 | ===============
10 |
11 |
12 | .. figure:: images/open_ego_models_overview.png
13 | :width: 1123px
14 | :height: 794px
15 | :scale: 70%
16 | :alt: Overview of Models and processes which are used by eGo
17 | :align: center
18 |
19 |
20 | eTraGo's theoretical Background
21 | ===============================
22 |
23 | Learn more about eTraGo's theoretical background of methods and assumptions
24 | `here `_.
25 |
26 | eDisGo's theoretical Background
27 | ===============================
28 |
29 | Learn more about eDisGo's theoretical background of methods and assumptions
30 | `here `_.
31 |
32 |
33 | eDisGo Cluster Method
34 | =====================
35 |
36 | In order to achieve acceptable computation times, the problem's complexity can be reduced by applying a k-means clustering algorithm to the MV grids. The algorithm identifies a specified number of representative MV grids and assigns a weighting to each grid. As described `here `_, the available clustering attributes are:
37 |
38 | * cumulative installed **wind capacity**,
39 | * cumulative installed **solar capacity**,
40 | * distance between transition point and **farthest node** of the MV grid
41 | * installed **battery capacity** (as a result of eTraGo's investment optimization)
42 |
43 | Subsequent to the MV grid simulations with the reduced number of representative grids, the cluster weighting is used to extrapolate the costs back to the original number of MV grids.
44 |
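The following minimal sketch illustrates this clustering step with scikit-learn's
``KMeans``. The attribute values and grid IDs are made up and the normalisation is
reduced to a per-attribute maximum; the actual implementation can be found in
``ego/mv_clustering/mv_clustering.py``.

.. code-block:: python

    import pandas as pd

    from sklearn.cluster import KMeans

    # Hypothetical attribute table: one row per MV grid, one column per attribute.
    attributes = pd.DataFrame(
        {
            "wind_capacity_mw": [12.0, 0.5, 30.0, 8.0],
            "solar_capacity_mw": [5.0, 2.0, 9.0, 4.0],
        },
        index=[101, 102, 103, 104],  # MV grid IDs (made up)
    )

    # Normalise each attribute to its maximum value.
    normalised = attributes / attributes.max()

    # Identify e.g. two clusters of MV grids.
    kmeans = KMeans(n_clusters=2, random_state=42)
    labels = kmeans.fit_predict(normalised.to_numpy())

    # The cluster sizes serve as weighting to extrapolate the costs of the
    # representative grids back to the original number of MV grids.
    weighting = pd.Series(labels, index=attributes.index).value_counts()
    print(weighting)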
45 |
46 | Economic calculation
47 | ====================
48 |
49 | The tool *eGo* unites the extra high (ehv) and high voltage (hv) models with the
50 | medium (mv) and low voltage (lv) models to ascertain the costs per selected
51 | measure and scenario. This yields a cross-grid-level economic evaluation of
52 | the electrical grid and storage optimisation.
53 |
54 |
55 | Overnight costs
56 | ---------------
57 |
58 | The *overnight costs* represent the investment costs of the components or
59 | construction project without any interest, as if the project was completed
60 | "overnight". The overnight costs (:math:`C_{\text{Overnight}}` ) of the grid measures
61 | (lines and transformers) are calculated as:
62 |
63 |
64 | .. math::
65 | C_{Line~extension} = S_{Extension}~[MVA] * C_{assumption}~[\frac{EUR}{MVA}] * L_{Line~length}~[km]
66 |
67 | .. math::
68 | C_{Transformer~extension} = S_{Extension}~[MVA] * C_{assumption}~[\frac{EUR}{MVA}]
69 |
70 |
71 | The total overnight grid extension costs are given by:
72 |
73 | .. math::
74 | C_{overnight} = \sum C_{Line~extension} + \sum C_{Transformer~extension}
75 |
76 |
77 |
78 | The conversion of the given annuity costs of *eTraGo* is done in
79 | :func:`~ego.tools.economics.etrago_convert_overnight_cost`.
80 |
81 |
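As a quick illustration, the following sketch transcribes the formulas above for a
single, hypothetical line and transformer extension (all numbers are made up and are
not eGo's cost assumptions):

.. code-block:: python

    # Hypothetical values, only to illustrate the formulas above.
    s_extension_mva = 100.0               # S_Extension in MVA
    cost_assumption_eur_per_mva = 1000.0  # C_assumption in EUR/MVA
    line_length_km = 25.0                 # L_Line_length in km

    c_line_extension = (
        s_extension_mva * cost_assumption_eur_per_mva * line_length_km
    )
    c_transformer_extension = s_extension_mva * cost_assumption_eur_per_mva

    # Total overnight grid extension costs (here a single line and transformer).
    c_overnight = c_line_extension + c_transformer_extension
    print(c_overnight)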
82 |
83 |
84 | Annuity costs
85 | -------------
86 |
87 | The *annuity costs* represent the project investment costs including interest,
88 | expressed as the present value of an annuity. The number of investment years *T* and
89 | the interest rate *p* are defined as defaults in *eGo* with an interest rate ( :math:`p` ) of ``0.05``
90 | and a number of investment years ( :math:`T` ) of ``40 years``. The values are
91 | based on [StromNEV_A1]_ for the grid investment regulation in Germany.
92 |
93 | The present value of an annuity (PVA) is calculated as:
94 |
95 | .. math::
96 | PVA = \frac{1}{p}- \frac{1}{\left ( p*\left (1 + p \right )^T \right )}
97 |
98 |
99 | In order to calculate :math:`C_{annuity}` for a given period of less than a
100 | year, the annuity costs are scaled by the ratio of the hours of a full year ( :math:`t_{year}=8760` ) to the defined calculation period.
101 |
102 | .. math::
103 | t_{period} = t_{\text{end\_snapshot}} - t_{\text{start\_snapshot}} ~[h]
104 |
105 |
106 | The annuity costs ( :math:`C_{annuity}` ) are calculated as:
107 |
108 | .. math::
109 | C_{annuity} = C_{\text{overnight}} * PVA * \left ( \frac{t_{year}}{\left ( t_{\text{period}}+ 1 \right )} \right )
110 |
111 |
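A minimal sketch that transcribes the formulas above literally, using the default
values :math:`p = 0.05` and :math:`T = 40`; the overnight costs and the calculation
period are made-up example values:

.. code-block:: python

    p = 0.05       # interest rate
    T = 40         # number of investment years
    t_year = 8760  # hours of a full year

    # Present value of an annuity (PVA).
    pva = 1 / p - 1 / (p * (1 + p) ** T)

    c_overnight = 1000000.0  # hypothetical overnight costs in EUR
    t_period = 336           # hypothetical calculation period in hours

    # Annuity costs of the calculation period, as defined above.
    c_annuity = c_overnight * pva * (t_year / (t_period + 1))
    print(round(pva, 2), round(c_annuity, 2))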
112 |
113 |
114 | Investment costs ehv/hv
115 | -----------------------
116 |
117 | The investment costs of the grid and storage expansion are taken from the studies
118 | [NEP2015a]_ for the extra high and high voltage components and from [Dena]_. The
119 | given costs are transformed into the PyPSA *[€/MVA]* component format [PyPSA]_
120 | for the optimisation.
121 |
122 |
123 | **Overview of grid cost assumptions:**
124 |
125 | The table displays the transformer and line costs which are used for the
126 | calculation with *eTraGo*.
127 |
128 | .. csv-table:: Overview of grid cost assumptions
129 | :file: files/investment_costs_of_grid_ measures.csv
130 | :delim: ,
131 | :header-rows: 1
132 |
133 | The *eTraGo* calculation of the annuity costs per simulation period is defined
134 | in :func:`~etrago.tools.utilities.set_line_costs` and
135 | :func:`~etrago.tools.utilities.set_trafo_costs`.
136 |
137 | **Overview of storage cost assumptions:**
138 |
139 | .. figure:: images/etrago-storage_parameters.png
140 | :scale: 80%
141 | :alt: Overview of eTraGo storage parameters and costs
142 |
143 | Investment costs mv/lv
144 | ----------------------
145 |
146 | The tool *eDisGo* calculates all grid expansion measures as capital or
147 | *overnight* costs. In order to get the annuity costs of eDisGo's optimisation
148 | results, the function :func:`~ego.tools.economics.edisgo_convert_capital_costs`
149 | is used. The cost assumptions of [eDisGo]_ are taken from the [Dena]_
150 | and [CONSENTEC]_ studies. Depending on the component, the costs including earthwork
151 | can depend on the population density according to [Dena]_.
152 |
153 |
154 |
155 | References
156 | ==========
157 |
158 |
159 | .. [NEP2015a] Übertragungsnetzbetreiber Deutschland. (2015).
160 | *Netzentwicklungsplan Strom 2025 - Kostenschaetzungen*, Version 2015,
161 | 1. Entwurf, 2015. (``_)
163 |
164 | .. [Dena] dena Verteilnetzstudie. (2012).
165 | *Ausbau- und Innovationsbedarf der Stromverteilnetze in Deutschland bis 2030.*
166 | , Version 2015. (``_)
168 |
169 | .. [PyPSA] PyPSA’s documentation (2018).
170 | *Documentation of components.* , Version v0.11.0. (``_)
171 |
172 | .. [StromNEV_A1] Stromnetzentgeltverordnung - StromNEV Anlage 1 (2018).
173 | *Verordnung über die Entgelte für den Zugang zu Elektrizitätsversorgungsnetzen*
174 | *(Stromnetzentgeltverordnung - StromNEV) Anlage 1 (zu § 6 Abs. 5 Satz 1)*
175 | *Betriebsgewöhnliche Nutzungsdauern*.
176 | (``_)
177 |
178 | .. [Overnight cost] Wikipedia (2018).
179 | *Definition of overnight cost*.
180 | (``_)
181 |
182 | .. [eDisGo] eDisGo - grid expansion costs (2018).
183 | *Cost assumption on mv and lv grid components*.
184 | (``_)
186 |
187 | .. [CONSENTEC] CONSENTEC et.al (2006).
188 | *Untersuchung der Voraussetzungen und möglicher Anwendung analytischer*
189 | *Kostenmodelle in der deutschen Energiewirtschaft*.
190 | (``_)
193 |
--------------------------------------------------------------------------------
/doc/welcome.rst:
--------------------------------------------------------------------------------
1 | ============
2 | The eGo tool
3 | ============
4 |
5 |
6 | The python package eGo is a toolbox and also an application which combines
7 | **eTraGo** - a tool for optimizing flexibility options for transmission grids
8 | based on PyPSA - and **eDisGo** - a toolbox in itself capable of analyzing distribution
9 | grids for grid issues and evaluating measures responding to these.
10 |
11 | .. figure:: images/eGo_integration.png
12 | :width: 1055px
13 | :height: 423px
14 | :scale: 90%
15 |
16 |
17 | The open_eGo project
18 | ====================
19 | This software project is part of the research project
20 | `open_eGo `_.
21 |
22 |
23 | The OpenEnergy Platform
24 | =======================
25 | Within this project we developed the OpenEnergy Platform, which the eGo toolbox
26 | relies upon to get and store in- and output data. Because of this dependency,
27 | a registration on the OpenEnergy Platform is required in order to use eGo. For more
28 | information see
29 | `openenergy-platform `_ and log in.
30 |
31 | The OpenEnergy platform mainly addresses students, researchers and scientists in
32 | the field of energy modelling and analytics, but also welcomes all other interested parties.
33 | The platform provides great tools to make your energy system
34 | modelling process transparent. Data of the open_eGo project are stored on
35 | this platform.
36 | `Learn more about the database access `_.
37 |
38 |
39 | Model overview
40 | ==============
41 |
42 | .. figure:: images/open_ego_models_overview.png
43 | :width: 1123px
44 | :height: 794px
45 | :scale: 70%
46 | :alt: Overview of Models and processes which are used by eGo
47 | :align: center
48 |
49 |
50 | eTraGo
51 | ------
52 |
53 | The python package eTraGo provides an optimization of flexibility options for
54 | transmission grids based on PyPSA. In particular, transmission grids of different
55 | voltage levels, that is 380, 220 and 110 kV in Germany, can be handled.
56 | Conventionally, the 110 kV grid is part of the distribution grid.
57 | The integration of the transmission and ‘upper’ distribution grid
58 | is part of eTraGo.
59 |
60 | The optimization focuses on flexibility options, in particular on
61 | energy storage and grid expansion measures.
62 | `Learn more here `_.
63 |
64 |
65 | eDisGo
66 | ------
67 | The python package eDisGo provides a toolbox for analysis and optimization
68 | of distribution grids. It is closely related to the python project Ding0 as this
69 | project is currently the single data source for eDisGo, providing synthetic
70 | grid data for the whole of Germany. `Learn more here `_.
71 |
72 |
73 | Dataprocessing
74 | --------------
75 |
76 | For the open_eGo project several python packages are developed which are fed
77 | by the input data of the data processing. The data processing is written in
78 | SQL and Python. `Learn more here `_.
79 |
80 | ego.io
81 | ------
82 |
83 | The ``ego.io`` is a `SQLAlchemy `_ interface to
84 | the OpenEnergy database (oedb). The module provides ORM objects mirroring oedb
85 | tables and additionally contains helper functions for I/O operations.
86 | `Learn more here `_.
87 |
88 |
89 | Ding0
90 | -----
91 |
92 | The DIstribution Network GeneratOr (Ding0) is a tool to generate synthetic
93 | medium and low voltage power distribution grids based on open
94 | (or at least accessible) data.
95 | `Learn more here `_.
96 |
97 | Supported by
98 | ============
99 |
100 | This project is supported by the German Federal Ministry for Economic
101 | Affairs and Energy (BMWi).
102 |
103 |
104 | .. image:: https://i0.wp.com/reiner-lemoine-institut.de/wp-content/uploads/2016/07/BMWi_Logo_Englisch_KLEIN.jpg
105 | :scale: 90%
106 | :alt: Supported by BMWi
107 | :target: http://www.bmwi.de/Navigation/EN/Home/home.html
108 |
109 |
110 |
111 |
112 | License
113 | =======
114 |
115 | .. image:: images/open_ego_icon_web.png
116 | :scale: 100%
117 | :align: right
118 |
119 | © Copyright 2015-2018
120 |
121 | Flensburg University of Applied Sciences,
122 | Europa-Universität Flensburg,
123 | Centre for Sustainable Energy Systems
124 |
125 |
126 | This program is free software: you can redistribute it and/or modify it under
127 | the terms of the GNU Affero General Public License as published by the Free
128 | Software Foundation, either version 3 of the License, or (at your option)
129 | any later version.
130 |
131 | This program is distributed in the hope that it will be useful, but WITHOUT
132 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
133 | FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
134 | more details.
135 |
136 | You should have received a copy of the GNU Affero General Public License along
137 | with this program.
138 | If not, see `www.gnu.org/licenses `_.
139 |
140 |
141 |
142 | Partner
143 | =======
144 |
145 |
146 | .. image:: https://i0.wp.com/reiner-lemoine-institut.de/wp-content/uploads/2017/03/Logo_ZNES_farbig_NEU.png
147 | :scale: 90%
148 | :width: 300px
149 | :height: 110px
150 | :alt: ZNES Flensburg
151 | :target: http://www.znes-flensburg.de/project/150?language=en
152 | :align: right
153 |
154 | .. image:: https://i0.wp.com/reiner-lemoine-institut.de/wp-content/uploads/2015/08/RLI_Logo.png
155 | :scale: 90%
156 | :width: 180px
157 | :height: 131px
158 | :alt: RLI
159 | :target: http://reiner-lemoine-institut.de/en/open_ego-open-electricity-grid-optimization/
160 | :align: left
161 |
162 |
163 | .. image:: https://openegoproject.files.wordpress.com/2017/02/dlr_logo_vernetzte_energiesysteme_gb_grau.jpg?w=301&h=141
164 | :scale: 90%
165 | :width: 300px
166 | :height: 141px
167 | :alt: DLR
168 | :target: http://www.dlr.de/ve/en/desktopdefault.aspx/tabid-12472/21440_read-49440/
169 | :align: right
170 |
171 |
172 | .. image:: https://i1.wp.com/reiner-lemoine-institut.de/wp-content/uploads/2016/07/Logo_Uni_Magdeburg.png
173 | :scale: 90%
174 | :width: 300px
175 | :height: 103px
176 | :alt: Uni Magdeburg
177 | :target: http://iks.cs.ovgu.de/IKS.html
178 | :align: left
179 |
--------------------------------------------------------------------------------
/doc/whatsnew.rst:
--------------------------------------------------------------------------------
1 | What's new
2 | ~~~~~~~~~~
3 |
4 |
5 | .. contents:: `Releases`
6 | :depth: 1
7 | :local:
8 | :backlinks: top
9 |
10 | .. include:: whatsnew/v0-3-4.rst
11 | .. include:: whatsnew/v0-3-3.rst
12 | .. include:: whatsnew/v0-3-2.rst
13 | .. include:: whatsnew/v0-3-1.rst
14 | .. include:: whatsnew/v0-3-0.rst
15 | .. include:: whatsnew/v0-2-0.rst
16 | .. include:: whatsnew/v0-1-0.rst
17 | .. include:: whatsnew/v0-0-1.rst
18 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-0-1.rst:
--------------------------------------------------------------------------------
1 | Release v0.0.1 (February 02, 2018)
2 | ++++++++++++++++++++++++++++++++++
3 |
4 | This is the first release of eGo. The tool eGo uses the Python3 packages
5 | eTraGo (Optimization of flexibility options for transmission grids based on
6 | PyPSA) and eDisGo (Optimization of flexibility options and grid expansion for
7 | distribution grids based on PyPSA) for an electrical power calculation from
8 | extra high voltage down to selected low voltage levels.
9 |
10 | Added features
11 | --------------
12 |
13 | * Interface between eTraGo and eDisGo
14 | * Plots with folium
15 | * First result structure
16 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-1-0.rst:
--------------------------------------------------------------------------------
1 | Release v0.1.0 (March 29, 2018)
2 | +++++++++++++++++++++++++++++++
3 |
4 | This is the second release of eGo. This release introduces the results class
5 | and is still under construction and not ready for normal use.
6 |
7 |
8 |
9 |
10 | Added features
11 | --------------
12 |
13 | * Update of interface between eTraGo and eDisGo (specs)
14 | * New structure of eGo module / results class
15 | * Restructuring of functions
16 | * Add import function of eTraGo results from oedb
17 |
18 | Notes
19 | -----
20 |
21 | * The 'direct_specs' function is not working and needs to be set to ``false``
22 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-2-0.rst:
--------------------------------------------------------------------------------
1 | Release v0.2.0 (July 18, 2018)
2 | ++++++++++++++++++++++++++++++
3 |
4 | Fundamental structural changes of the eGo tool are included in this release.
5 | A new feature is the integration of the MV grid power flow simulations,
6 | performed by the tool `eDisGo `_.
7 | Thereby, eGo can be used to perform power flow simulations and optimizations
8 | for EHV, HV (*eTraGo*) and MV (*eDisGo*) grids.
9 |
10 | Moreover, the use of the Dataprocessing versions
11 | ``v0.4.1`` and ``v0.4.2`` is supported. Please note that this release
12 | is still under construction and only recommended for developers of
13 | the *open_eGo* project.
14 |
15 | Furthermore, overall cost aggregation functions are available.
16 |
17 | Added features
18 | --------------
19 |
20 | * Cleaned and restructured eGo classes and functions
21 | * Move classes of eGo from results.py to io.py
22 | * Move several functions
23 |
24 | * Introduce new files for *eDisGo* handling
25 | * edisgo_integration.py
26 | * mv_cluster.py
27 |
28 | * Introduce new file storages.py for eTraGo
29 | * Updated eTraGo 0.6 and integrated eTraGo's new functions and features to eGo
30 | * Updated to eDisGo version 0.0.3, which includes a parallelization feature
31 | for custom functions and other important API changes.
32 | * Started to implement pep8 style in the eGo code
33 | * Implemented logging function for the whole model
34 | * Using the RESTful API for the OpenEnergy Database connection by using
35 | ego.io v0.4.2. A registration is needed and can be done on
36 | `openenergy-platform.org/login `_
37 | * Moved functionalities from ``ego_main.py`` to the eGo class
38 | * Fixed eTraGo scenario import of ``etrago_from_oedb()``
39 |
40 |
41 | Notes
42 | -----
43 | * As an external user you need to have an account on the
44 | `openenergy-platform.org/login `_
45 | * In future versions, all MV grids (*ding0* grids) will be queried from your
46 | database. However, in this version all MV grids have to be generated with
47 | the tool `ding0 `_ and stored in *eGo*'s
48 | *data* folder.
49 | * Total operational costs are missing in this release
50 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-3-0.rst:
--------------------------------------------------------------------------------
1 | Release v0.3.0 (September 07, 2018)
2 | +++++++++++++++++++++++++++++++++++
3 |
4 | Power Flow and Clustering. eGo now uses eTraGo's non-linear power flows based
5 | on optimization results and its disaggregation of clustered results
6 | back to the original spatial complexity. With the new eDisGo release, speed-up options,
7 | a new storage integration methodology and more are now available.
8 |
9 |
10 | Added features
11 | --------------
12 |
13 | * Update of dependencies
14 | * Implementation of Ding0 grid parallelization
15 | * Redesign of scenario settings and API simplifications
16 | * Adding and using the Power Flow of eTraGo in eGo
17 | * Testing and using new dataprocessing Version v0.4.3, v0.4.4 and v0.4.5
18 | * make eGo installable from pip via ``pip3 install eGo --process-dependency-links``
19 | * Implementing eDisGo's storage distribution for MV and LV grids
20 | * Improved logging and the creation of status files
21 | * Maximal calculation time for ding0 grids can be set by user
22 | * eDisGo results import and export (all eGo-relevant data from eDisGo can be re-imported after a run)
23 | * Storage-related investment costs are also allocated to MV grids
24 | * Update of cluster plots
25 | * Plot of investment costs per line and bus
26 | * Update of ``ego.iplot`` for an interactive visualization
27 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-3-1.rst:
--------------------------------------------------------------------------------
1 | Release v0.3.1 (October 27, 2018)
2 | +++++++++++++++++++++++++++++++++
3 |
4 | This release contains documentation and bug fixes for the new features
5 | introduced in 0.3.0.
6 |
7 | Added features
8 | --------------
9 |
10 | * Update of interactive plot (iplot)
11 | * Update of Documentation
12 | * Update of eTraGo functionalities
13 | * Update of eDisGo functionalities
14 | * Change and update of API file scenario_setting.json
15 | * Improved cluster plot of ``ego.plot_edisgo_cluster()``
16 | * Improved cost differentiation
17 | * Add jupyter notebook eGo tutorials
18 |
19 |
20 | Fixes
21 | -----
22 | * Fix installation problems of the pypsa 0.11.0 fork (use pip 18.1)
23 | * Fix parallel calculation of mv results
24 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-3-2.rst:
--------------------------------------------------------------------------------
1 | Release v0.3.2 (October 27, 2018)
2 | +++++++++++++++++++++++++++++++++
3 |
4 | Making eGo citable with Zenodo.
5 |
6 | Added features
7 | --------------
8 |
9 | * Registration at zenodo.org
10 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-3-3.rst:
--------------------------------------------------------------------------------
1 | Release v0.3.3 (December 7, 2018)
2 | +++++++++++++++++++++++++++++++++
3 |
4 | Add interactive map for documentation.
5 |
6 | Added features
7 | --------------
8 |
9 | * Create and add interactive result map
10 | * Add workshop jupyter notebook to docs
11 |
12 | Fixes
13 | -----
14 |
15 | * Fix bug of period calculation
16 | * removed duplicate matplotlib from setup.py
17 | * fixed csv import
18 |
--------------------------------------------------------------------------------
/doc/whatsnew/v0-3-4.rst:
--------------------------------------------------------------------------------
1 | Release v0.3.4 (December 10, 2018)
2 | ++++++++++++++++++++++++++++++++++
3 |
4 | Update eDisGo release.
5 |
6 | Added features
7 | --------------
8 |
9 | * Update eDisGo version from 0.0.8 to 0.0.9
10 |
--------------------------------------------------------------------------------
/ego/__init__.py:
--------------------------------------------------------------------------------
1 | # This program is free software; you can redistribute it and/or
2 | # modify it under the terms of the GNU Affero General Public License as
3 | # published by the Free Software Foundation; either version 3 of the
4 | # License, or (at your option) any later version.
5 |
6 | # This program is distributed in the hope that it will be useful,
7 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
8 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9 | # GNU Affero General Public License for more details.
10 | # You should have received a copy of the GNU Affero General Public License
11 | # along with this program. If not, see .
12 |
13 | __version__ = "0.3.4"
14 | __copyright__ = (
15 | "Europa-Universität Flensburg, " " Centre for Sustainable Energy Systems"
16 | )
17 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
18 | __author__ = "wolf_bunke"
19 |
20 |
21 | import logging
22 |
23 | logging.basicConfig(level=logging.INFO)
24 |
--------------------------------------------------------------------------------
/ego/appl.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | # File description
20 | """
21 | This is the application file for the tool eGo. The application eGo calculates
22 | the distribution and transmission grids of eTraGo and eDisGo.
23 |
24 | .. note:: The data source of eGo relies on
25 | the Open Energy Database. The registration for the publicly
26 | accessible API can be found on
27 | `openenergy-platform.org/login `_.
28 | """
29 |
30 | import os
31 |
32 | if "READTHEDOCS" not in os.environ:
33 | from tools.io import eGo
34 | from tools.utilities import define_logging
35 |
36 | logger = define_logging(name="ego")
37 |
38 | __copyright__ = (
39 | "Flensburg University of Applied Sciences, "
40 | "Europa-Universität Flensburg, "
41 | "Centre for Sustainable Energy Systems"
42 | )
43 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
44 | __author__ = "wolf_bunke, maltesc"
45 |
46 |
47 | if __name__ == "__main__":
48 |
49 | logger.info("Start calculation")
50 |
51 | ego = eGo(jsonpath="scenario_setting.json")
52 | # logger.info('Print results')
53 | # ego.etrago_line_loading()
54 | # print(ego.etrago.generator)
55 | # print(ego.etrago.storage_costs)
56 | # print(ego.etrago.operating_costs)
57 |
--------------------------------------------------------------------------------
/ego/mv_clustering/__init__.py:
--------------------------------------------------------------------------------
1 | from ego.mv_clustering.mv_clustering import cluster_workflow # noqa: F401
2 |
--------------------------------------------------------------------------------
/ego/mv_clustering/database.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import subprocess
3 | import sys
4 | import time
5 |
6 | from contextlib import contextmanager
7 | from functools import wraps
8 |
9 | import saio
10 |
11 | from sqlalchemy import create_engine
12 | from sqlalchemy.orm import sessionmaker
13 |
14 | logger = logging.getLogger(__name__)
15 |
16 |
17 | def get_engine(config=None):
18 | config = config["database"]
19 | engine = create_engine(
20 | f"postgresql+psycopg2://{config['user']}:"
21 | f"{config['password']}@{config['host']}:"
22 | f"{int(config['port'])}/{config['database_name']}",
23 | echo=False,
24 | )
25 | logger.info(f"Created engine: {engine}.")
26 | return engine
27 |
28 |
29 | @contextmanager
30 | def sshtunnel(config=None):
31 | ssh_config = config["ssh"]
32 | if ssh_config["enabled"]:
33 | try:
34 | logger.info("Open ssh tunnel.")
35 | proc = subprocess.Popen(
36 | [
37 | "ssh",
38 | "-N",
39 | "-L",
40 | f"{ssh_config['local_port']}"
41 | f":{ssh_config['local_address']}"
42 | f":{ssh_config['port']}",
43 | f"{ssh_config['user']}@{ssh_config['ip']}",
44 | ],
45 | stdout=subprocess.PIPE,
46 | stderr=subprocess.PIPE,
47 | )
48 | time.sleep(2)
49 | yield proc
50 | finally:
51 | logger.info("Close ssh tunnel.")
52 | proc.kill()
53 | outs, errs = proc.communicate()
54 | logger.info(
55 | f"SSH process output STDOUT:{outs.decode('utf-8')}, "
56 | f"STDERR:{errs.decode('utf-8')}"
57 | )
58 | else:
59 | try:
60 | logger.info("Don't use an ssh tunnel.")
61 | yield None
62 | finally:
63 | logger.info("Close contextmanager.")
64 |
65 |
66 | @contextmanager
67 | def session_scope(engine):
68 | Session = sessionmaker(bind=engine)
69 | session = Session()
70 | try:
71 | yield session
72 | session.commit()
73 | except: # noqa: E722
74 | session.rollback()
75 | raise
76 | finally:
77 | session.close()
78 |
79 |
80 | def session_decorator(f):
81 | @wraps(f)
82 | def wrapper(*args, **kwargs):
83 | with session_scope(kwargs["engine"]) as session:
84 | kwargs["session"] = session
85 | kwargs.pop("engine")
86 | logger.info(f"Calling {f.__name__}")
87 | return f(*args, **kwargs)
88 |
89 | return wrapper
90 |
91 |
92 | def register_tables_in_saio(engine):
93 | db_tables = {
94 | "egon_mv_grid_district": "grid.egon_mv_grid_district",
95 | "generators_pv_status_quo": "supply.egon_power_plants_pv",
96 | "generators_pv_rooftop": "supply.egon_power_plants_pv_roof_building",
97 | "generators_wind_status_quo": "supply.egon_power_plants_wind",
98 | "generators": "supply.egon_power_plants",
99 | "etrago_load": "grid.egon_etrago_load",
100 | "etrago_load_timeseries": "grid.egon_etrago_load_timeseries",
101 | "heat_pump_capacity_individual": "supply.egon_individual_heating",
102 | "pth_capacity_district_heating": "grid.egon_etrago_link",
103 | }
104 | orm = {}
105 |
106 | for name, table_str in db_tables.items():
107 | table_list = table_str.split(".")
108 | table_schema = table_list[0]
109 | table_name = table_list[1]
110 | saio.register_schema(table_schema, engine)
111 | orm[name] = sys.modules[f"saio.{table_schema}"].__getattr__(table_name)
112 | return orm
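

# Example usage (a sketch, mirroring ego.mv_clustering.mv_clustering; the config
# dict must provide the "ssh" and "database" sections expected above):
#
#     from ego.mv_clustering import egon_data_io as db_io
#
#     with sshtunnel(config=config):
#         engine = get_engine(config=config)
#         orm = register_tables_in_saio(engine)
#         # Functions wrapped with @session_decorator take the engine as a keyword
#         # argument and receive an open session in its place.
#         grid_ids_df = db_io.get_grid_ids(engine=engine, orm=orm)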
113 |
--------------------------------------------------------------------------------
/ego/mv_clustering/egon_data_io.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | import pandas as pd
4 |
5 | from sqlalchemy import func
6 |
7 | from ego.mv_clustering.database import session_decorator
8 |
9 | logger = logging.getLogger(__name__)
10 |
11 |
12 | def func_within(geom_a, geom_b, srid=3035):
13 | """
14 | Checks if geometry a is completely within geometry b.
15 |
16 | Parameters
17 | ----------
18 | geom_a : Geometry
19 | Geometry within `geom_b`.
20 | geom_b : Geometry
21 | Geometry containing `geom_a`.
22 | srid : int
23 | SRID geometries are transformed to in order to use the same SRID for both
24 | geometries.
25 |
26 | """
27 | return func.ST_Within(
28 | func.ST_Transform(
29 | geom_a,
30 | srid,
31 | ),
32 | func.ST_Transform(
33 | geom_b,
34 | srid,
35 | ),
36 | )
37 |
38 |
39 | @session_decorator
40 | def get_grid_ids(orm=None, session=None):
41 | """
42 | Gets all MV grid IDs and the area of each grid in m^2.
43 |
44 | Parameters
45 | -----------
46 | orm : dict
47 | Dictionary with tables to retrieve data from.
48 |
49 | Returns
50 | -------
51 | pandas.DataFrame
52 | Dataframe with grid ID in index and corresponding area in m^2 in column
53 | "area_m2".
54 |
55 | """
56 | query = session.query(
57 | orm["egon_mv_grid_district"].bus_id,
58 | orm["egon_mv_grid_district"].area.label("area_m2"),
59 | )
60 | return pd.read_sql_query(query.statement, session.bind, index_col="bus_id")
61 |
62 |
63 | @session_decorator
64 | def get_solar_capacity(scenario, grid_ids, orm=None, session=None):
65 | """
66 | Gets PV capacity (rooftop and ground mounted) in MW per grid in specified scenario.
67 |
68 | Parameters
69 | -----------
70 | scenario : str
71 | Scenario to obtain data for. Possible options are "status_quo", "eGon2035",
72 | and "eGon100RE".
73 | grid_ids : list(int)
74 | List of grid IDs to obtain data for.
75 | orm : dict
76 | Dictionary with tables to retrieve data from.
77 |
78 | Returns
79 | -------
80 | pandas.DataFrame
81 | DataFrame with grid ID in index and corresponding PV capacity in MW in column
82 | "pv_capacity_mw".
83 |
84 | """
85 | # get PV ground mounted capacity per grid
86 | if scenario == "status_quo":
87 | query = (
88 | session.query(
89 | orm["generators_pv_status_quo"].bus_id,
90 | func.sum(orm["generators_pv_status_quo"].capacity).label("p_openspace"),
91 | )
92 | .filter(
93 | orm["generators_pv_status_quo"].bus_id.in_(grid_ids),
94 | orm["generators_pv_status_quo"].site_type == "Freifläche",
95 | orm["generators_pv_status_quo"].status == "InBetrieb",
96 | orm["generators_pv_status_quo"].capacity <= 20,
97 | orm["generators_pv_status_quo"].voltage_level.in_([4, 5, 6, 7]),
98 | )
99 | .group_by(
100 | orm["generators_pv_status_quo"].bus_id,
101 | )
102 | )
103 | cap_open_space_df = pd.read_sql(
104 | sql=query.statement, con=session.bind, index_col="bus_id"
105 | )
106 | else:
107 | query = (
108 | session.query(
109 | orm["generators"].bus_id,
110 | func.sum(orm["generators"].el_capacity).label("p_openspace"),
111 | )
112 | .filter(
113 | orm["generators"].scenario == scenario,
114 | orm["generators"].bus_id.in_(grid_ids),
115 | orm["generators"].voltage_level >= 4,
116 | orm["generators"].el_capacity <= 20,
117 | orm["generators"].carrier == "solar",
118 | )
119 | .group_by(
120 | orm["generators"].bus_id,
121 | )
122 | )
123 | cap_open_space_df = pd.read_sql(
124 | sql=query.statement, con=session.bind, index_col="bus_id"
125 | )
126 | # get PV rooftop capacity per grid
127 | query = (
128 | session.query(
129 | orm["generators_pv_rooftop"].bus_id,
130 | func.sum(orm["generators_pv_rooftop"].capacity).label("p_rooftop"),
131 | )
132 | .filter(
133 | orm["generators_pv_rooftop"].bus_id.in_(grid_ids),
134 | orm["generators_pv_rooftop"].scenario == scenario,
135 | orm["generators_pv_rooftop"].capacity <= 20,
136 | orm["generators_pv_rooftop"].voltage_level.in_([4, 5, 6, 7]),
137 | )
138 | .group_by(
139 | orm["generators_pv_rooftop"].bus_id,
140 | )
141 | )
142 | cap_rooftop_df = pd.read_sql(
143 | sql=query.statement, con=session.bind, index_col="bus_id"
144 | )
145 |
146 | return (
147 | cap_open_space_df.join(cap_rooftop_df, how="outer")
148 | .fillna(value=0)
149 | .sum(axis="columns")
150 | .to_frame("pv_capacity_mw")
151 | )
152 |
153 |
154 | @session_decorator
155 | def get_wind_capacity(scenario, grid_ids, orm=None, session=None):
156 | """
157 | Gets wind onshore capacity in MW per grid in specified scenario.
158 |
159 | Parameters
160 | -----------
161 | scenario : str
162 | Scenario to obtain data for. Possible options are "status_quo", "eGon2035",
163 | and "eGon100RE".
164 | grid_ids : list(int)
165 | List of grid IDs to obtain data for.
166 | orm : dict
167 | Dictionary with tables to retrieve data from.
168 |
169 | Returns
170 | -------
171 | pandas.DataFrame
172 | DataFrame with grid ID in index and corresponding Wind capacity in MW in
173 | column "wind_capacity_mw".
174 |
175 | """
176 | if scenario == "status_quo":
177 | query = (
178 | session.query(
179 | orm["generators_wind_status_quo"].bus_id,
180 | func.sum(orm["generators_wind_status_quo"].capacity).label(
181 | "wind_capacity_mw"
182 | ),
183 | )
184 | .filter(
185 | orm["generators_wind_status_quo"].bus_id.in_(grid_ids),
186 | orm["generators_wind_status_quo"].site_type == "Windkraft an Land",
187 | orm["generators_wind_status_quo"].status == "InBetrieb",
188 | orm["generators_wind_status_quo"].capacity <= 20,
189 | orm["generators_wind_status_quo"].voltage_level.in_([4, 5, 6, 7]),
190 | )
191 | .group_by(
192 | orm["generators_wind_status_quo"].bus_id,
193 | )
194 | )
195 | cap_wind_df = pd.read_sql(
196 | sql=query.statement, con=session.bind, index_col="bus_id"
197 | )
198 | else:
199 | query = (
200 | session.query(
201 | orm["generators"].bus_id,
202 | func.sum(orm["generators"].el_capacity).label("wind_capacity_mw"),
203 | )
204 | .filter(
205 | orm["generators"].scenario == scenario,
206 | orm["generators"].bus_id.in_(grid_ids),
207 | orm["generators"].voltage_level >= 4,
208 | orm["generators"].el_capacity <= 20,
209 | orm["generators"].carrier == "wind_onshore",
210 | )
211 | .group_by(
212 | orm["generators"].bus_id,
213 | )
214 | )
215 | cap_wind_df = pd.read_sql(
216 | sql=query.statement, con=session.bind, index_col="bus_id"
217 | )
218 | return cap_wind_df
219 |
220 |
221 | @session_decorator
222 | def get_electromobility_maximum_load(scenario, grid_ids, orm=None, session=None):
223 | """
224 | Parameters
225 | -----------
226 | scenario : str
227 | Scenario to obtain data for. Possible options are "status_quo", "eGon2035",
228 | and "eGon100RE".
229 | grid_ids : list(int)
230 | List of grid IDs to obtain data for.
231 | orm : dict
232 | Dictionary with tables to retrieve data from.
233 |
234 | Returns
235 | -------
236 | pandas.DataFrame
237 | DataFrame with grid ID in index and corresponding maximum electromobility load
238 | in MW in column "electromobility_max_load_mw".
239 |
240 | """
241 | if scenario == "status_quo":
242 | return pd.DataFrame(columns=["electromobility_max_load_mw"])
243 | else:
244 | load_timeseries_nested = (
245 | session.query(
246 | orm["etrago_load"].bus.label("bus_id"),
247 | orm["etrago_load_timeseries"].p_set,
248 | )
249 | .join(
250 | orm["etrago_load_timeseries"],
251 | orm["etrago_load_timeseries"].load_id == orm["etrago_load"].load_id,
252 | )
253 | .filter(
254 | orm["etrago_load"].scn_name == f"{scenario}_lowflex",
255 | orm["etrago_load"].carrier == "land_transport_EV",
256 | orm["etrago_load"].bus.in_(grid_ids),
257 | )
258 | ).subquery(name="load_timeseries_nested")
259 | load_timeseries_unnested = (
260 | session.query(
261 | load_timeseries_nested.c.bus_id,
262 | func.unnest(load_timeseries_nested.c.p_set).label("p_set"),
263 | )
264 | ).subquery(name="load_timeseries_unnested")
265 | load_timeseries_maximal = (
266 | session.query(
267 | load_timeseries_unnested.c.bus_id,
268 | func.max(load_timeseries_unnested.c.p_set).label("p_set_max"),
269 | ).group_by(
270 | load_timeseries_unnested.c.bus_id,
271 | )
272 | ).subquery(name="load_timeseries_maximal")
273 | load_p_nom = session.query(
274 | load_timeseries_maximal.c.bus_id,
275 | load_timeseries_maximal.c.p_set_max.label("electromobility_max_load_mw"),
276 | )
277 | return pd.read_sql(
278 | sql=load_p_nom.statement, con=session.bind, index_col="bus_id"
279 | )
280 |
281 |
282 | @session_decorator
283 | def get_pth_capacity(scenario, grid_ids, orm=None, session=None):
284 | """
285 | Gets PtH capacity (individual heating and district heating) in MW per grid
286 | in specified scenario.
287 |
288 | Parameters
289 | -----------
290 | scenario : str
291 | Scenario to obtain data for. Possible options are "status_quo", "eGon2035",
292 | and "eGon100RE".
293 | grid_ids : list(int)
294 | List of grid IDs to obtain data for.
295 | orm : dict
296 | Dictionary with tables to retrieve data from.
297 |
298 | Returns
299 | -------
300 | pandas.DataFrame
301 | DataFrame with grid ID in index and corresponding PtH capacity in MW in
302 | column "pth_capacity_mw".
303 |
304 | """
305 | if scenario == "status_quo":
306 | return pd.DataFrame(columns=["pth_capacity_mw"])
307 | else:
308 | # get individual heat pump capacity
309 | query = (
310 | session.query(
311 | orm["heat_pump_capacity_individual"].mv_grid_id.label("bus_id"),
312 | func.sum(orm["heat_pump_capacity_individual"].capacity).label(
313 | "cap_individual"
314 | ),
315 | )
316 | .filter(
317 | orm["heat_pump_capacity_individual"].mv_grid_id.in_(grid_ids),
318 | orm["heat_pump_capacity_individual"].carrier == "heat_pump",
319 | orm["heat_pump_capacity_individual"].scenario == scenario,
320 | )
321 | .group_by(
322 | orm["heat_pump_capacity_individual"].mv_grid_id,
323 | )
324 | )
325 | cap_individual_df = pd.read_sql(
326 | sql=query.statement, con=session.bind, index_col="bus_id"
327 | )
328 | # get central heat pump and resistive heater capacity
329 | query = (
330 | session.query(
331 | orm["pth_capacity_district_heating"].bus0,
332 | func.sum(orm["pth_capacity_district_heating"].p_nom).label("p_set"),
333 | )
334 | .filter(
335 | orm["pth_capacity_district_heating"].bus0.in_(grid_ids),
336 | orm["pth_capacity_district_heating"].scn_name == scenario,
337 | orm["pth_capacity_district_heating"].carrier.in_(
338 | ["central_heat_pump", "central_resistive_heater"]
339 | ),
340 | orm["pth_capacity_district_heating"].p_nom <= 20.0,
341 | )
342 | .group_by(
343 | orm["pth_capacity_district_heating"].bus0,
344 | )
345 | )
346 | cap_dh_df = pd.read_sql(sql=query.statement, con=session.bind, index_col="bus0")
347 | return (
348 | cap_individual_df.join(cap_dh_df, how="outer")
349 | .fillna(value=0)
350 | .sum(axis="columns")
351 | .to_frame("pth_capacity_mw")
352 | )
353 |
--------------------------------------------------------------------------------
/ego/mv_clustering/mv_clustering.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see .
18 |
19 | # File description
20 | """
21 | This file contains all functions regarding the clustering of MV grids
22 | """
23 | __copyright__ = (
24 | "Flensburg University of Applied Sciences, "
25 | "Europa-Universität Flensburg, "
26 | "Centre for Sustainable Energy Systems"
27 | )
28 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
29 | __author__ = "wolf_bunke, maltesc, mltja"
30 |
31 | import logging
32 | import os
33 |
34 | if "READTHEDOCS" not in os.environ:
35 | import numpy as np
36 | import pandas as pd
37 |
38 | from sklearn.cluster import KMeans
39 |
40 | import ego.mv_clustering.egon_data_io as db_io
41 |
42 | from ego.mv_clustering.database import (
43 | get_engine,
44 | register_tables_in_saio,
45 | sshtunnel,
46 | )
47 |
48 | logger = logging.getLogger(__name__)
49 |
50 |
51 | def get_cluster_attributes(attributes_path, scenario, config=None):
52 | """
53 | Determines attributes to cluster MV grids by.
54 |
55 | Considered attributes are PV, wind onshore and PtH capacity, as well as
56 | maximum load of EVs (in case of uncoordinated charging). All attributes are given
57 | in MW as well as in MW per km^2.
58 |
59 | Data is written to csv file and returned.
60 |
61 | Parameters
62 | ----------
63 | attributes_path : str
64 | Path to save attributes csv to, including the file name.
65 | scenario : str
66 | Scenario to determine attributes for. Possible options are "status_quo",
67 | "eGon2035", and "eGon100RE".
68 | config : dict
69 | Config dict.
70 |
71 | Returns
72 | -------
73 | pandas.DataFrame
74 | DataFrame with grid ID in index and corresponding attributes in columns:
75 | * "area" : area of MV grid in m^2
76 | * "pv_capacity_mw" : PV capacity in MW
77 | * "pv_capacity_mw_per_km2" : PV capacity in MW per km^2
78 | * "pv_capacity_expansion_mw" : PV expansion from status quo to given
79 | scenario in MW
80 | * "pv_capacity_expansion_mw_per_km2" : PV expansion from status quo to given
81 | scenario in MW per km^2
82 | * "wind_capacity_mw" : wind onshore capacity in MW
83 | * "wind_capacity_mw_per_km2" : wind onshore capacity in MW per km^2
84 | * "wind_capacity_expansion_mw" : wind onshore expansion from status quo to given
85 | scenario in MW
86 | * "wind_capacity_expansion_mw_per_km2" : wind onshore expansion from status quo
87 | to given scenario in MW per km^2
88 | * "electromobility_max_load_mw" : maximum load of EVs (in case of
89 | uncoordinated charging) in MW
90 | * "electromobility_max_load_mw_per_km2" : maximum load of EVs (in case of
91 | uncoordinated charging) in MW per km^2
92 | * "electromobility_max_load_expansion_mw" : increase in maximum load of EVs
93 | from status quo to given scenario (in case of uncoordinated charging) in MW
94 | * "electromobility_max_load_expansion_mw_per_km2" : increase in maximum load of
95 | EVs from status quo to given scenario (in case of uncoordinated charging)
96 | in MW per km^2
97 | * "pth_capacity_mw" : PtH capacity (for individual and district
98 | heating) in MW
99 | * "pth_capacity_mw_per_km2" : PtH capacity (for individual and
100 | district heating) in MW per km^2
101 | * "pth_capacity_expansion_mw" : increase in PtH capacity (for individual and
102 | district heating) from status quo to given scenario in MW
103 | * "pth_capacity_expansion_mw_per_km2" : increase in PtH capacity (for individual
104 | and district heating) from status quo to given scenario in MW per km^2
105 |
106 | """
107 | # get attributes from database
108 | with sshtunnel(config=config):
109 | engine = get_engine(config=config)
110 | orm = register_tables_in_saio(engine)
111 |
112 | grid_ids_df = db_io.get_grid_ids(engine=engine, orm=orm)
113 | solar_capacity_df = db_io.get_solar_capacity(
114 | scenario, grid_ids_df.index, orm, engine=engine
115 | )
116 | if scenario == "status_quo":
117 | solar_capacity_sq_df = solar_capacity_df
118 | else:
119 | solar_capacity_sq_df = db_io.get_solar_capacity(
120 | "status_quo", grid_ids_df.index, orm, engine=engine
121 | )
122 | wind_capacity_df = db_io.get_wind_capacity(
123 | scenario, grid_ids_df.index, orm, engine=engine
124 | )
125 | if scenario == "status_quo":
126 | wind_capacity_sq_df = wind_capacity_df
127 | else:
128 | wind_capacity_sq_df = db_io.get_wind_capacity(
129 | "status_quo", grid_ids_df.index, orm, engine=engine
130 | )
131 | emob_capacity_df = db_io.get_electromobility_maximum_load(
132 | scenario, grid_ids_df.index, orm, engine=engine
133 | )
134 | if scenario == "status_quo":
135 | emob_capacity_sq_df = emob_capacity_df
136 | else:
137 | emob_capacity_sq_df = db_io.get_electromobility_maximum_load(
138 | "status_quo", grid_ids_df.index, orm, engine=engine
139 | )
140 | pth_capacity_df = db_io.get_pth_capacity(
141 | scenario, grid_ids_df.index, orm, engine=engine
142 | )
143 | if scenario == "status_quo":
144 | pth_capacity_sq_df = pth_capacity_df
145 | else:
146 | pth_capacity_sq_df = db_io.get_pth_capacity(
147 | "status_quo", grid_ids_df.index, orm, engine=engine
148 | )
149 | emob_rename_col = "electromobility_max_load_expansion_mw"
150 | df = pd.concat(
151 | [
152 | grid_ids_df,
153 | solar_capacity_df,
154 | wind_capacity_df,
155 | emob_capacity_df,
156 | pth_capacity_df,
157 | solar_capacity_sq_df.rename(
158 | columns={"pv_capacity_mw": "pv_capacity_expansion_mw"}
159 | ),
160 | wind_capacity_sq_df.rename(
161 | columns={"wind_capacity_mw": "wind_capacity_expansion_mw"}
162 | ),
163 | emob_capacity_sq_df.rename(
164 | columns={"electromobility_max_load_mw": emob_rename_col}
165 | ),
166 | pth_capacity_sq_df.rename(
167 | columns={"pth_capacity_mw": "pth_capacity_expansion_mw"}
168 | ),
169 | ],
170 | axis="columns",
171 | ).fillna(0)
172 |
173 | # calculate expansion values
174 | df["pv_capacity_expansion_mw"] = (
175 | df["pv_capacity_mw"] - df["pv_capacity_expansion_mw"]
176 | )
177 | df["wind_capacity_expansion_mw"] = (
178 | df["wind_capacity_mw"] - df["wind_capacity_expansion_mw"]
179 | )
180 | df["electromobility_max_load_expansion_mw"] = (
181 | df["electromobility_max_load_mw"] - df["electromobility_max_load_expansion_mw"]
182 | )
183 | df["pth_capacity_expansion_mw"] = (
184 | df["pth_capacity_mw"] - df["pth_capacity_expansion_mw"]
185 | )
186 |
187 | # calculate relative values
188 | df["pv_capacity_mw_per_km2"] = df["pv_capacity_mw"] / (df["area_m2"] / 1e6)
189 | df["wind_capacity_mw_per_km2"] = df["wind_capacity_mw"] / (df["area_m2"] / 1e6)
190 | df["electromobility_max_load_mw_per_km2"] = df["electromobility_max_load_mw"] / (
191 | df["area_m2"] / 1e6
192 | )
193 | df["pth_capacity_mw_per_km2"] = df["pth_capacity_mw"] / (df["area_m2"] / 1e6)
194 | df["pv_capacity_expansion_mw_per_km2"] = df["pv_capacity_expansion_mw"] / (
195 | df["area_m2"] / 1e6
196 | )
197 | df["wind_capacity_expansion_mw_per_km2"] = df["wind_capacity_expansion_mw"] / (
198 | df["area_m2"] / 1e6
199 | )
200 | df["electromobility_max_load_expansion_mw_per_km2"] = df[
201 | "electromobility_max_load_expansion_mw"
202 | ] / (df["area_m2"] / 1e6)
203 | df["pth_capacity_expansion_mw_per_km2"] = df["pth_capacity_expansion_mw"] / (
204 | df["area_m2"] / 1e6
205 | )
206 |
207 | # write to csv
208 | df.to_csv(attributes_path)
209 | return df
210 |
211 |
212 | def mv_grid_clustering(cluster_attributes_df, working_grids=None, config=None):
213 | """
214 |     Cluster the MV grids based on the given attributes into a set number of clusters.
215 |
216 | Parameters
217 | ----------
218 | cluster_attributes_df : pandas.DataFrame
219 |         Dataframe with data to cluster grids by. Columns contain the attributes to
220 |         cluster by and the index contains the MV grid IDs.
221 | working_grids : pandas.DataFrame
222 | DataFrame with information on whether MV grid can be used for calculations.
223 | Index of the dataframe contains the MV grid ID and boolean value in column
224 | "working" specifies whether respective grid can be used.
225 | config : dict
226 | Config dict.
227 |
228 | Returns
229 | -------
230 | pandas.DataFrame
231 | Dataframe containing the clustered MV grids and their weightings
232 |
233 | """
234 | random_seed = config["eGo"]["random_seed"]
235 | n_clusters = config["eDisGo"]["n_clusters"]
236 |
237 | # Norm attributes
238 | for attribute in cluster_attributes_df:
239 | attribute_max = cluster_attributes_df[attribute].max()
240 | cluster_attributes_df[attribute] = (
241 | cluster_attributes_df[attribute] / attribute_max
242 | )
243 |
244 | # Starting KMeans clustering
245 | logger.info(
246 | f"Used clustering attributes: {cluster_attributes_df.columns.to_list()}"
247 | )
248 | kmeans = KMeans(n_clusters=n_clusters, random_state=random_seed)
249 | data_array = cluster_attributes_df.to_numpy()
250 | labels = kmeans.fit_predict(data_array)
251 | centroids = kmeans.cluster_centers_
252 |
253 | result_df = pd.DataFrame(index=cluster_attributes_df.index)
254 | result_df["label"] = labels
255 | # For each sample, calculate the distance to its assigned centroid.
256 | result_df["centroid_distance"] = np.linalg.norm(
257 | data_array - centroids[labels], axis=1
258 | )
259 | result_df["representative"] = False
260 |
261 | if working_grids is None:
262 | result_df["working"] = True
263 | else:
264 | result_df["working"] = result_df.join(working_grids).fillna(False)["working"]
265 |
266 | failing_labels = []
267 | for label in np.unique(labels):
268 | try:
269 | rep = result_df.loc[
270 | result_df["working"] & (result_df["label"] == label),
271 | "centroid_distance",
272 | ].idxmin()
273 | rep_orig = result_df.loc[
274 | result_df["label"] == label, "centroid_distance"
275 | ].idxmin()
276 | result_df.loc[rep, "representative"] = True
277 | result_df.loc[rep, "representative_orig"] = rep_orig
278 | except ValueError:
279 | failing_labels.append(label)
280 |
281 | if len(failing_labels) > 0:
282 | logger.warning(
283 | f"There are {len(failing_labels)} clusters for which no representative "
284 | f"could be determined."
285 | )
286 |
287 | n_grids = result_df.shape[0]
288 | df_data = []
289 | columns = [
290 | "representative",
291 | "n_grids_per_cluster",
292 | "relative_representation",
293 | "represented_grids",
294 | "representative_orig",
295 | ]
296 | for label in np.unique(labels):
297 | represented_grids = result_df[result_df["label"] == label].index.to_list()
298 | n_grids_per_cluster = len(represented_grids)
299 | relative_representation = (n_grids_per_cluster / n_grids) * 100
300 | try:
301 | representative = result_df[
302 | result_df["representative"] & (result_df["label"] == label)
303 | ].index.values[0]
304 | except IndexError:
305 | representative = False
306 | try:
307 | representative_orig = result_df[
308 | result_df["representative"] & (result_df["label"] == label)
309 | ].representative_orig.values[0]
310 | representative_orig = (
311 | True if representative == representative_orig else False
312 | )
313 | except IndexError:
314 | representative_orig = False
315 |
316 | row = [
317 | representative,
318 | n_grids_per_cluster,
319 | relative_representation,
320 | represented_grids,
321 | representative_orig,
322 | ]
323 | df_data.append(row)
324 |
325 | cluster_df = pd.DataFrame(df_data, index=np.unique(labels), columns=columns)
326 | cluster_df.index.name = "cluster_id"
327 |
328 | return cluster_df.sort_values("n_grids_per_cluster", ascending=False)
329 |
330 |
331 | def cluster_workflow(config=None):
332 | """
333 | Get cluster attributes per grid if needed and conduct MV grid clustering.
334 |
335 | Parameters
336 | ----------
337 | config : dict
338 | Config dict from config json. Can be obtained by calling
339 | ego.tools.utilities.get_scenario_setting(jsonpath=config_path).
340 |
341 | Returns
342 | --------
343 | pandas.DataFrame
344 | DataFrame with clustering results. Columns are "representative" containing
345 | the grid ID of the representative grid, "n_grids_per_cluster" containing the
346 | number of grids that are represented, "relative_representation" containing the
347 | percentage of grids represented, "represented_grids" containing a list of
348 | grid IDs of all represented grids and "representative_orig" containing
349 | information on whether the representative is the actual cluster center (in which
350 | case this value is True) or chosen because the grid in the cluster center is
351 | not a working grid.
352 |
353 | """
354 | # determine cluster attributes
355 | logger.info("Determine cluster attributes.")
356 | attributes_path = os.path.join(
357 | config["eDisGo"]["results"], "mv_grid_cluster_attributes.csv"
358 | )
359 | if not os.path.exists(config["eDisGo"]["results"]):
360 | os.makedirs(config["eDisGo"]["results"])
361 | scenario = config["eTraGo"]["scn_name"]
362 | cluster_attributes_df = get_cluster_attributes(
363 | attributes_path=attributes_path, scenario=scenario, config=config
364 | )
365 |
366 | # select attributes to cluster by
367 | cluster_attributes_df = cluster_attributes_df[
368 | config["eDisGo"]["cluster_attributes"]
369 | ]
370 | working_grids_path = os.path.join(
371 | config["eDisGo"]["grid_path"], "working_grids.csv"
372 | )
373 | if os.path.isfile(working_grids_path):
374 | working_grids = pd.read_csv(working_grids_path, index_col=0)
375 | else:
376 | raise FileNotFoundError(
377 | "working_grids.csv is missing. Cannot conduct MV grid clustering."
378 | )
379 | # conduct MV grid clustering
380 | cluster_df = mv_grid_clustering(
381 | cluster_attributes_df, working_grids=working_grids, config=config
382 | )
383 | cluster_results_path = os.path.join(
384 | config["eDisGo"]["results"], "mv_grid_cluster_results_new.csv"
385 | )
386 | cluster_df.to_csv(cluster_results_path)
387 | return cluster_df
388 |
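389 | # Illustrative usage sketch (not part of the regular eGo workflow): cluster a
390 | # handful of hypothetical MV grids with ``mv_grid_clustering``. The grid IDs,
391 | # attribute values and the minimal config dict below are made up for the
392 | # example; the module's own dependencies (pandas, numpy, scikit-learn) need to
393 | # be installed for it to run.
394 | if __name__ == "__main__":
395 |     demo_attributes = pd.DataFrame(
396 |         {
397 |             "pv_capacity_expansion_mw_per_km2": [0.1, 0.2, 3.0, 2.9],
398 |             "wind_capacity_expansion_mw_per_km2": [0.05, 0.1, 1.5, 1.4],
399 |         },
400 |         index=pd.Index([1001, 1002, 1003, 1004], name="mv_grid_id"),
401 |     )
402 |     demo_config = {"eGo": {"random_seed": 42}, "eDisGo": {"n_clusters": 2}}
403 |     # With working_grids=None every grid is assumed to be usable; the grid
404 |     # closest to each cluster centre becomes the representative.
405 |     print(mv_grid_clustering(demo_attributes, config=demo_config))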
--------------------------------------------------------------------------------
/ego/run_test.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import os
3 | import sys
4 |
5 | from datetime import datetime
6 |
7 | import matplotlib.pyplot as plt
8 | import pandas as pd
9 |
10 | from pycallgraph import Config, PyCallGraph
11 | from pycallgraph.output import GraphvizOutput
12 | from tools.io import eGo
13 | from tools.utilities import define_logging
14 |
15 | logger = define_logging(name="ego")
16 |
17 |
18 | def ego_testing(ego):
19 | """Call and test all ego Funktion"""
20 | # full networks
21 | try:
22 | logger.info("ego.etrago.network: {} ".format(ego.etrago.network))
23 | logger.info(
24 | "ego.etrago.disaggregated_network: {} ".format(
25 | ego.etrago.disaggregated_network
26 | )
27 | )
28 |
29 | # aggregated results
30 | logger.info("Testing of aggregated results ego.etrago. ")
31 | logger.info(
32 | "storage_investment_costs: {} ".format(ego.etrago.storage_investment_costs)
33 | )
34 | logger.info("storage_charges: {} ".format(ego.etrago.storage_charges))
35 |
36 | ego.etrago.operating_costs
37 | ego.etrago.generator
38 | ego.etrago.grid_investment_costs
39 | # eTraGo functions
40 | try:
41 | ego.etrago.plot_line_loading()
42 | ego.etrago.plot_stacked_gen()
43 | ego.etrago.plot_curtailment()
44 | ego.etrago.plot_gen_dist()
45 | ego.etrago.plot_storage_distribution(scaling=1, filename=None)
46 | ego.etrago.plot_full_load_hours()
47 | # ego.etrago.plot_line_loading_diff(networkB=) # Error
48 | # ego.etrago.plot_plot_residual_load() # Error
49 | # ego.etrago.plot_voltage() # Error
50 | ego.etrago.plot_nodal_gen_dispatch()
51 | except:
52 | logger.info("eTraGo plotting failed testing")
53 |
54 | except:
55 | logger.info("eTraGo failed testing")
56 | # eDisGo
57 | try:
58 | logger.info("ego.edisgo: {} ".format(ego.edisgo))
59 | except:
60 | logger.info("ego.ego.edisgo failed testing")
61 | try:
62 | logger.info("ego.edisgo.network: {} ".format(ego.edisgo.network))
63 | except:
64 | logger.info("ego.edisgo.network failed testing")
65 | try:
66 | logger.info(
67 | "ego.edisgo.grid_investment_costs: {} ".format(
68 | ego.edisgo.grid_investment_costs
69 | )
70 | )
71 | except:
72 | logger.info("ego.edisgo.grid_investment_costs failed testing")
73 | try:
74 | logger.info("ego.edisgo.grid_choice: {} ".format(ego.edisgo.grid_choice))
75 | except:
76 | logger.info("ego.edisgo.grid_choice failed testing")
77 | try:
78 | logger.info(
79 | "ego.edisgo.successfull_grids: {} ".format(ego.edisgo.successfull_grids)
80 | )
81 | except:
82 | logger.info("ego.edisgo.successfull_grids failed testing")
83 | # eGo
84 | logger.info("ego.total_investment_costs: {} ".format(ego.total_investment_costs))
85 | logger.info("ego.total_operation_costs: {} ".format(ego.total_operation_costs))
86 | # ego plot functions
87 | try:
88 | ego.plot_total_investment_costs(
89 | filename="results/plot_total_investment_costs.pdf"
90 | )
91 | except:
92 | logger.info("ego.plot_total_investment_costs failed testing")
93 | try:
94 | ego.plot_power_price(filename="results/plot_power_price.pdf")
95 | except:
96 | logger.info("ego.plot_power_price failed testing")
97 | try:
98 | ego.plot_storage_usage(filename="results/plot_storage_usage.pdf")
99 | except:
100 | logger.info("ego.plot_storage_usage failed testing")
101 | try:
102 | ego.iplot
103 | except:
104 | logger.info("ego.iplot failed testing")
105 | try:
106 | ego.plot_edisgo_cluster(filename="results/plot_edisgo_cluster.pdf")
107 | except:
108 | logger.info(" plot_edisgo_cluster failed testing")
109 | try:
110 | ego.plot_line_expansion(
111 | column="investment_costs", filename="results/investment_costs.pdf"
112 | )
113 | except:
114 | logger.info(" plot_line_expansion failed testing")
115 | try:
116 | ego.plot_line_expansion(
117 | column="overnight_costs", filename="results/overnight_costs.pdf"
118 | )
119 | except:
120 | logger.info(" plot_line_expansion failed testing")
121 | try:
122 | ego.plot_line_expansion(
123 | column="s_nom_expansion", filename="results/s_nom_expansion.pdf"
124 | )
125 | except:
126 | logger.info(" plot_line_expansion failed testing")
127 | try:
128 | ego.plot_storage_expansion(
129 | column="overnight_costs", filename="results/storage_capital_investment.pdf"
130 | )
131 | except:
132 | logger.info(" plot_storage_expansion failed testing")
133 |
134 |
135 | def main():
136 | logger.info("Start calculation")
137 | graphviz = GraphvizOutput()
138 | date = str(datetime.now().strftime("%Y-%m-%d-%H-%M-%S"))
139 |
140 | graphviz.output_file = "results/" + str(date) + "_basic_process_plot.png"
141 | logger.info("Time: {} ".format(date))
142 |
143 | with PyCallGraph(output=graphviz, config=Config(groups=True)):
144 |
145 | ego = eGo(jsonpath="scenario_setting_local.json")
146 | logger.info("Start testing")
147 | ego_testing(ego)
148 |
149 | # object size
150 | logger.info("eGo object size: {} ".format(sys.getsizeof(ego)))
151 |
152 | logger.info("Time: {} ".format(str(datetime.now())))
153 |
154 |
155 | if __name__ == "__main__":
156 | main()
157 |
--------------------------------------------------------------------------------
/ego/scenario_setting.json:
--------------------------------------------------------------------------------
1 | {
2 | "eGo": {
3 | "eTraGo": true,
4 | "eDisGo": true,
5 | "csv_import_eTraGo": false,
6 | "csv_import_eDisGo": false,
7 | "random_seed": 42
8 | },
9 | "eTraGo": {
10 | "db": "egon-data",
11 | "gridversion": null,
12 | "method": {
13 | "type": "lopf",
14 | "n_iter": 4,
15 | "pyomo": true
16 | },
17 | "pf_post_lopf": {
18 | "active": true,
19 | "add_foreign_lopf": true,
20 | "q_allocation": "p_nom"
21 | },
22 | "start_snapshot": 1,
23 | "end_snapshot": 2,
24 | "solver": "gurobi",
25 | "solver_options": {
26 | "BarConvTol": 1e-05,
27 | "FeasibilityTol": 1e-05,
28 | "method": 2,
29 | "crossover": 0,
30 | "logFile": "solver_etragos.log",
31 | "threads": 4
32 | },
33 | "model_formulation": "kirchhoff",
34 | "scn_name": "eGon2035",
35 | "scn_extension": null,
36 | "scn_decommissioning": null,
37 | "lpfile": false,
38 | "csv_export": "test",
39 | "extendable": {
40 | "extendable_components": [
41 | "as_in_db"
42 | ],
43 | "upper_bounds_grid": {
44 | "grid_max_D": null,
45 | "grid_max_abs_D": {
46 | "380": {
47 | "i": 1020,
48 | "wires": 4,
49 | "circuits": 4
50 | },
51 | "220": {
52 | "i": 1020,
53 | "wires": 4,
54 | "circuits": 4
55 | },
56 | "110": {
57 | "i": 1020,
58 | "wires": 4,
59 | "circuits": 2
60 | },
61 | "dc": 0
62 | },
63 | "grid_max_foreign": 4,
64 | "grid_max_abs_foreign": null
65 | }
66 | },
67 | "generator_noise": 789456,
68 | "extra_functionality": {},
69 | "network_clustering": {
70 | "random_state": 42,
71 | "active": true,
72 | "method": "kmedoids-dijkstra",
73 | "n_clusters_AC": 30,
74 | "cluster_foreign_AC": false,
75 | "method_gas": "kmedoids-dijkstra",
76 | "n_clusters_gas": 20,
77 | "cluster_foreign_gas": false,
78 | "k_busmap": false,
79 | "kmeans_gas_busmap": false,
80 | "line_length_factor": 1,
81 | "remove_stubs": false,
82 | "use_reduced_coordinates": false,
83 | "bus_weight_tocsv": null,
84 | "bus_weight_fromcsv": null,
85 | "gas_weight_tocsv": null,
86 | "gas_weight_fromcsv": null,
87 | "n_init": 10,
88 | "max_iter": 100,
89 | "tol": 1e-06,
90 | "CPU_cores": 4
91 | },
92 | "sector_coupled_clustering": {
93 | "active": true,
94 | "carrier_data": {
95 | "central_heat": {
96 | "base": [
97 | "CH4",
98 | "AC"
99 | ],
100 | "strategy": "simultaneous"
101 | }
102 | }
103 | },
104 | "network_clustering_ehv": false,
105 | "disaggregation": "uniform",
106 | "snapshot_clustering": {
107 | "active": false,
108 | "method": "segmentation",
109 | "extreme_periods": null,
110 | "how": "daily",
111 | "storage_constraints": "soc_constraints",
112 | "n_clusters": 5,
113 | "n_segments": 5
114 | },
115 | "skip_snapshots": false,
116 | "dispatch_disaggregation": false,
117 | "branch_capacity_factor": {
118 | "HV": 0.5,
119 | "eHV": 0.7
120 | },
121 | "load_shedding": false,
122 | "foreign_lines": {
123 | "carrier": "AC",
124 | "capacity": "osmTGmod"
125 | },
126 | "comments": null
127 | },
128 | "eDisGo": {
129 | "grid_path": "/path/to_your/.dingo/grids",
130 | "choice_mode": "cluster",
131 | "cluster_attributes":["pv_capacity_expansion_mw_per_km2", "wind_capacity_expansion_mw_per_km2", "electromobility_max_load_expansion_mw_per_km2", "pth_capacity_expansion_mw_per_km2"],
132 | "only_cluster": false,
133 | "manual_grids": [],
134 | "n_clusters": 2,
135 | "parallelization":true,
136 | "max_calc_time": 0.5,
137 | "max_workers":2,
138 | "max_cos_phi_renewable": 0.9,
139 | "results": "results/another_result",
140 | "solver": "gurobi",
141 | "tasks": ["1_setup_grid", "2_specs_overlying_grid", "3_temporal_complexity_reduction", "4_optimisation", "5_grid_reinforcement"],
142 | "gridversion": "v0.4.5"
143 | },
144 | "database": {
145 | "database_name": "",
146 | "host": "127.0.0.1",
147 | "port": "59700",
148 | "user": "",
149 | "password": ""
150 | },
151 | "ssh": {
152 | "enabled": true,
153 | "user": "",
154 | "ip": "",
155 | "port": "",
156 | "local_address": "127.0.0.1",
157 | "local_port": "59700"
158 | },
159 | "external_config": "~/.ego/secondary_ego_config.json"
160 | }
161 |
--------------------------------------------------------------------------------
/ego/tools/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | """
3 |
4 | __copyright__ = "Europa-Universität Flensburg, Centre for Sustainable Energy Systems"
5 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
6 | __author__ = "wolf_bunke"
7 |
--------------------------------------------------------------------------------
/ego/tools/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "lopf":
3 | {
4 | "Bus": null,
5 | "Generator":
6 | {
7 | "GeneratorPqSet": ["p_set", "p_max_pu"]
8 | },
9 | "Line": null,
10 | "Transformer": null,
11 | "Load":
12 | {
13 | "LoadPqSet": ["p_set", "q_set"]
14 | },
15 | "Storage":
16 | {
17 | "StoragePqSet": ["p_set"]
18 | }
19 | },
20 | "pf":
21 | {
22 | "Bus":
23 | {
24 | "BusVMagSet":["v_mag_pu_set"]
25 | },
26 | "Generator":
27 | {
28 | "GeneratorPqSet": ["p_set", "q_set"]
29 | },
30 | "Line": null,
31 | "Transformer": null,
32 | "Load":
33 | {
34 | "LoadPqSet": ["p_set", "q_set"]
35 | },
36 | "Storage":
37 | {
38 | "StoragePqSet": ["p_set", "q_set"]
39 | }
40 | },
41 | "results":
42 | {
43 | "Bus": null,
44 | "BusT":
45 | {
46 | "BusT":[ "p", "v_mag_pu","v_ang","marginal_price"]
47 | },
48 | "Storage": null,
49 | "StorageT":
50 | {
51 | "StorageT": ["p","state_of_charge","spill"]
52 | },
53 | "Generator": null,
54 | "GeneratorT":
55 | {
56 | "GeneratorT": ["p_set","p_max_pu", "p" ]
57 | },
58 | "Line": null,
59 | "LineT":
60 | {
61 | "LineT": ["p0", "p1"]
62 | },
63 | "Load": null,
64 | "LoadT":
65 | {
66 | "LoadT": ["p_set","q_set","p"]
67 | },
68 | "Transformer": null,
69 | "TransformerT":
70 | {
71 | "TransformerT": ["p0", "p1"]
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/ego/tools/economics.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 |
19 | # File description
20 | """This module collects useful functions for economic calculation of eGo
21 | which can mainly be distinguished into operational and investment costs.
22 | """
23 |
24 | import io
25 | import logging
26 | import os
27 | import pkgutil
28 |
29 | logger = logging.getLogger("ego")
30 |
31 | if not "READTHEDOCS" in os.environ:
32 | import numpy as np
33 | import pandas as pd
34 |
35 | from etrago.tools.utilities import geolocation_buses
36 |
37 | from ego.tools.utilities import get_time_steps
38 |
39 | __copyright__ = (
40 | "Flensburg University of Applied Sciences, Europa-Universität"
41 | "Flensburg, Centre for Sustainable Energy Systems"
42 | )
43 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
44 | __author__ = "wolfbunke"
45 |
46 |
47 | # calculate annuity per time step or periode
48 | def annuity_per_period(capex, n, wacc, t, p):
49 | """Calculate per given period
50 |
51 | Parameters
52 | ----------
53 | capex : float
54 | Capital expenditure (NPV of investment)
55 | n : int
56 | Number of years that the investment is used (economic lifetime)
57 | wacc : float
58 | Weighted average cost of capital
59 | t : int
60 | Timesteps in hours
61 | p : float
62 | interest rate
63 |
64 | """
65 |
66 |     # ToDo: change formula to hourly annuity costs
67 | return capex * (wacc * (1 + wacc) ** n) / ((1 + wacc) ** n - 1)
68 |
69 |
70 | def edisgo_convert_capital_costs(overnight_cost, t, p, json_file):
71 | """Get scenario and calculation specific annuity cost by given capital
72 | costs and lifetime.
73 |
74 |
75 | Parameters
76 | ----------
77 |     overnight_cost : numeric
78 |         Overnight costs of the investment in EUR
79 |     t : int
80 |         Lifetime of the investment in years
81 |     p : numeric
82 |         Interest rate of the investment
83 |     json_file : :obj:`dict`
84 |         Dictionary of the ``scenario_setting.json`` file; the snapshot
85 |         range is used to scale the annuity to the calculated
86 |         time period
87 |
88 | Returns
89 | -------
90 | annuity_cost : numeric
91 | Scenario and calculation specific annuity cost by given capital
92 | costs and lifetime
93 |
94 | Examples
95 | --------
96 | .. math::
97 |
98 | PVA = (1 / p) - (1 / (p*(1 + p)^t))
99 |
100 | """
101 | # Based on eTraGo calculation in
102 | # https://github.com/openego/eTraGo/blob/dev/etrago/tools/utilities.py#L651
103 |
104 | # Calculate present value of an annuity (PVA)
105 | PVA = (1 / p) - (1 / (p * (1 + p) ** t))
106 |
107 | year = 8760
108 | # get period of calculation
109 | period = json_file["eTraGo"]["end_snapshot"] - json_file["eTraGo"]["start_snapshot"]
110 |
111 | # calculation of capital_cost
112 | annuity_cost = overnight_cost / (PVA * (year / (period + 1)))
113 |
114 | return annuity_cost
115 |
116 |
117 | def etrago_convert_overnight_cost(annuity_cost, json_file, t=40, p=0.05):
118 | """Get annuity cost of simulation and calculation total
119 | ``overnight_costs`` by given capital costs and lifetime.
120 |
121 | Parameters
122 | ----------
123 |     annuity_cost : numeric
124 |         Annuity costs, e.g. the ``capital_cost`` of the optimization,
125 |         in EUR
126 |     json_file : :obj:`dict`
127 |         Dictionary of the ``scenario_setting.json`` file; the snapshot
128 |         range is used to scale the costs to the calculated time period
129 |     t : int
130 |         Lifetime of the investment in years. Default: 40
131 |     p : numeric
132 |         Interest rate of the investment. Default: 0.05
133 |
134 | Returns
135 | -------
136 | overnight_cost : numeric
137 | Scenario and calculation total ``overnight_costs`` by given
138 | annuity capital costs and lifetime.
139 |
140 | Examples
141 | --------
142 | .. math::
143 |
144 | PVA = (1 / p) - (1 / (p*(1 + p)^t))
145 |
146 |         K_{ON} = K_a*PVA*(year/(period+1))
147 |
148 | """
149 | # Based on eTraGo calculation in
150 | # https://github.com/openego/eTraGo/blob/dev/etrago/tools/utilities.py#L651
151 |
152 | # Calculate present value of an annuity (PVA)
153 | PVA = (1 / p) - (1 / (p * (1 + p) ** t))
154 |
155 | year = 8760
156 | # get period of calculation
157 | period = json_file["eTraGo"]["end_snapshot"] - json_file["eTraGo"]["start_snapshot"]
158 |
159 | # calculation of overnight_cost
160 | overnight_cost = annuity_cost * (PVA * (year / (period + 1)))
161 |
162 | return overnight_cost
163 |
164 |
165 | def etrago_operating_costs(network):
166 | """Function to get all operating costs of eTraGo.
167 |
168 | Parameters
169 | ----------
170 |     network : :class:`etrago.tools.io.NetworkScenario`
171 | eTraGo network object compiled by :meth:`etrago.appl.etrago`
172 |
173 | Returns
174 | -------
175 | operating_costs : :pandas:`pandas.Dataframe`
176 | DataFrame with aggregate operational costs per component and voltage
177 | level in [EUR] per calculated time steps.
178 |
179 | Example
180 | -------
181 |
182 | .. code-block:: python
183 |
184 | >>> from ego.tools.io import eGo
185 | >>> ego = eGo(jsonpath='scenario_setting.json')
186 | >>> ego.etrago.operating_costs
187 |
188 | +-------------+-------------------+------------------+
189 | | component |operation_costs | voltage_level |
190 | +=============+===================+==================+
191 | |biomass | 27.0 | ehv |
192 | +-------------+-------------------+------------------+
193 | |line losses | 0.0 | ehv |
194 | +-------------+-------------------+------------------+
195 | |wind_onshore | 0.0 | ehv |
196 | +-------------+-------------------+------------------+
197 |
198 | """
199 |
200 | etg = network
201 |
202 | # get v_nom
203 | _bus = pd.DataFrame(etg.buses["v_nom"])
204 | _bus.index.name = "name"
205 | _bus.reset_index(level=0, inplace=True)
206 |
207 | # Add voltage level
208 | idx = etg.generators.index
209 | etg.generators = pd.merge(etg.generators, _bus, left_on="bus", right_on="name")
210 | etg.generators.index = idx
211 |
212 | etg.generators["voltage_level"] = "unknown"
213 |
214 | # add ehv
215 | ix_ehv = etg.generators[etg.generators["v_nom"] >= 380].index
216 | etg.generators.set_value(ix_ehv, "voltage_level", "ehv")
217 | # add hv
218 | ix_hv = etg.generators[
219 | (etg.generators["v_nom"] <= 220) & (etg.generators["v_nom"] >= 110)
220 | ].index
221 | etg.generators.set_value(ix_hv, "voltage_level", "hv")
222 |
223 | # get voltage_level index
224 | ix_by_ehv = etg.generators[etg.generators.voltage_level == "ehv"].index
225 | ix_by_hv = etg.generators[etg.generators.voltage_level == "hv"].index
226 | ix_slack = etg.generators[etg.generators.control != "Slack"].index
227 |
228 | ix_by_ehv = ix_slack.join(
229 | ix_by_ehv, how="left", level=None, return_indexers=False, sort=False
230 | )
231 | ix_by_hv = ix_slack.join(
232 | ix_by_hv, how="right", level=None, return_indexers=False, sort=False
233 | )
234 |
235 | # groupby v_nom ehv
236 | operating_costs_ehv = (
237 | etg.generators_t.p[ix_by_ehv] * etg.generators.marginal_cost[ix_by_ehv]
238 | )
239 | operating_costs_ehv = (
240 | operating_costs_ehv.groupby(etg.generators.carrier, axis=1).sum().sum()
241 | )
242 |
243 | operating_costs = pd.DataFrame(operating_costs_ehv)
244 | operating_costs.columns = ["operation_costs"]
245 | operating_costs["voltage_level"] = "ehv"
246 | # groupby v_nom ehv
247 | operating_costs_hv = (
248 | etg.generators_t.p[ix_by_hv] * etg.generators.marginal_cost[ix_by_hv]
249 | )
250 | operating_costs_hv = (
251 | operating_costs_hv.groupby(etg.generators.carrier, axis=1).sum().sum()
252 | )
253 |
254 | opt_costs_hv = pd.DataFrame(operating_costs_hv)
255 | opt_costs_hv.columns = ["operation_costs"]
256 | opt_costs_hv["voltage_level"] = "hv"
257 | # add df
258 | operating_costs = operating_costs.append(opt_costs_hv)
259 |
260 | tpc_ehv = pd.DataFrame(
261 | operating_costs_ehv.sum(),
262 | columns=["operation_costs"],
263 | index=["total_power_costs"],
264 | )
265 | tpc_ehv["voltage_level"] = "ehv"
266 | operating_costs = operating_costs.append(tpc_ehv)
267 |
268 | tpc_hv = pd.DataFrame(
269 | operating_costs_hv.sum(),
270 | columns=["operation_costs"],
271 | index=["total_power_costs"],
272 | )
273 | tpc_hv["voltage_level"] = "hv"
274 | operating_costs = operating_costs.append(tpc_hv)
275 |
276 | # add Grid and Transform Costs
277 | try:
278 | etg.lines["voltage_level"] = "unknown"
279 | ix_ehv = etg.lines[etg.lines["v_nom"] >= 380].index
280 | etg.lines.set_value(ix_ehv, "voltage_level", "ehv")
281 | ix_hv = etg.lines[
282 | (etg.lines["v_nom"] <= 220) & (etg.lines["v_nom"] >= 110)
283 | ].index
284 | etg.lines.set_value(ix_hv, "voltage_level", "hv")
285 |
286 | losses_total = sum(etg.lines.losses) + sum(etg.transformers.losses)
287 | losses_costs = losses_total * np.average(etg.buses_t.marginal_price)
288 |
289 | # add Transform and Grid losses
290 | # etg.lines[['losses','voltage_level']].groupby('voltage_level',
291 | # axis=0).sum().reset_index()
292 |
293 | except AttributeError:
294 | logger.info(
295 | "No Transform and Line losses are calcualted! \n"
296 | "Use eTraGo pf_post_lopf method"
297 | )
298 | losses_total = 0
299 | losses_costs = 0
300 | # total grid losses costs
301 | tgc = pd.DataFrame(
302 | losses_costs, columns=["operation_costs"], index=["total_grid_losses"]
303 | )
304 | tgc["voltage_level"] = "ehv/hv"
305 | operating_costs = operating_costs.append(tgc)
306 |
307 | # power_price = power_price.T.iloc[0]
308 |
309 | return operating_costs
310 |
311 |
312 | def etrago_grid_investment(network, json_file, session):
313 | """Function to get grid expantion costs from eTraGo
314 |
315 | Parameters
316 | ----------
317 |
318 |     network : :class:`etrago.tools.io.NetworkScenario`
319 | eTraGo network object compiled by :meth:`etrago.appl.etrago`
320 | json_file : :obj:dict
321 | Dictionary of the ``scenario_setting.json`` file
322 |
323 | Returns
324 | -------
325 | grid_investment_costs : :pandas:`pandas.Dataframe`
326 | Dataframe with ``voltage_level``, ``number_of_expansion`` and
327 | ``capital_cost`` per calculated time steps
328 |
329 | Example
330 | -------
331 |
332 | .. code-block:: python
333 |
334 | >>> from ego.tools.io import eGo
335 | >>> ego = eGo(jsonpath='scenario_setting.json')
336 | >>> ego.etrago.grid_investment_costs
337 |
338 | +---------------+---------------+-------------------+--------------+
339 | |differentiation| voltage_level |number_of_expansion| capital_cost|
340 | +===============+===============+===================+==============+
341 | | cross-border | ehv | 27.0 | 31514.1305 |
342 | +---------------+---------------+-------------------+--------------+
343 | | domestic | hv | 0.0 | 0.0 |
344 | +---------------+---------------+-------------------+--------------+
345 | """
346 |
347 | # check settings for extendable
348 | if "network" not in json_file["eTraGo"]["extendable"]:
349 | logger.info(
350 | "The optimizition was not using parameter"
351 | " 'extendable': network \n"
352 | "No grid expantion costs from etrago"
353 | )
354 |
355 | if "network" in json_file["eTraGo"]["extendable"]:
356 |
357 | network = geolocation_buses(network, session)
358 | # differentiation by country_code
359 |
360 | network.lines["differentiation"] = "none"
361 |
362 | network.lines["bus0_c"] = network.lines.bus0.map(network.buses.country_code)
363 | network.lines["bus1_c"] = network.lines.bus1.map(network.buses.country_code)
364 |
365 | for idx, val in network.lines.iterrows():
366 |
367 | check = val["bus0_c"] + val["bus1_c"]
368 |
369 | if "DE" in check:
370 | network.lines["differentiation"][idx] = "cross-border"
371 | if "DEDE" in check:
372 | network.lines["differentiation"][idx] = "domestic"
373 | if "DE" not in check:
374 | network.lines["differentiation"][idx] = "foreign"
375 |
376 | lines = network.lines[
377 | [
378 | "v_nom",
379 | "capital_cost",
380 | "s_nom",
381 | "s_nom_min",
382 | "s_nom_opt",
383 | "differentiation",
384 | ]
385 | ].reset_index()
386 |
387 | lines["s_nom_expansion"] = lines.s_nom_opt.subtract(lines.s_nom, axis="index")
388 | lines["capital_cost"] = lines.s_nom_expansion.multiply(
389 | lines.capital_cost, axis="index"
390 | )
391 | lines["number_of_expansion"] = lines.s_nom_expansion > 0.0
392 | lines["time_step"] = get_time_steps(json_file)
393 |
394 | # add v_level
395 | lines["voltage_level"] = "unknown"
396 |
397 | ix_ehv = lines[lines["v_nom"] >= 380].index
398 | lines.set_value(ix_ehv, "voltage_level", "ehv")
399 |
400 | ix_hv = lines[(lines["v_nom"] <= 220) & (lines["v_nom"] >= 110)].index
401 | lines.set_value(ix_hv, "voltage_level", "hv")
402 |
403 | # based on eTraGo Function:
404 | # https://github.com/openego/eTraGo/blob/dev/etrago/tools/utilities.py#L651
405 | # Definition https://pypsa.org/doc/components.html#line
406 |
407 | trafo = pd.DataFrame()
408 | # get costs of transfomers
409 | if json_file["eTraGo"]["network_clustering_kmeans"] == False:
410 |
411 | network.transformers["differentiation"] = "none"
412 |
413 | trafos = network.transformers[
414 | [
415 | "v_nom0",
416 | "v_nom1",
417 | "capital_cost",
418 | "s_nom_extendable",
419 | "s_nom",
420 | "s_nom_opt",
421 | ]
422 | ]
423 |
424 | trafos.columns.name = ""
425 | trafos.index.name = ""
426 | trafos.reset_index()
427 |
428 | trafos["s_nom_extendable"] = trafos.s_nom_opt.subtract(
429 | trafos.s_nom, axis="index"
430 | )
431 |
432 | trafos["capital_cost"] = trafos.s_nom_extendable.multiply(
433 | trafos.capital_cost, axis="index"
434 | )
435 | trafos["number_of_expansion"] = trafos.s_nom_extendable > 0.0
436 | trafos["time_step"] = get_time_steps(json_file)
437 | # add v_level
438 | trafos["voltage_level"] = "unknown"
439 |
440 | # TODO check
441 | ix_ehv = trafos[trafos["v_nom0"] >= 380].index
442 | trafos.set_value(ix_ehv, "voltage_level", "ehv")
443 |
444 | ix_hv = trafos[(trafos["v_nom0"] <= 220) & (trafos["v_nom0"] >= 110)].index
445 | trafos.set_value(ix_hv, "voltage_level", "hv")
446 | # aggregate trafo
447 | trafo = (
448 | trafos[["voltage_level", "capital_cost", "differentiation"]]
449 | .groupby(["differentiation", "voltage_level"])
450 | .sum()
451 | .reset_index()
452 | )
453 |
454 | # aggregate lines
455 | line = (
456 | lines[["voltage_level", "capital_cost", "differentiation"]]
457 | .groupby(["differentiation", "voltage_level"])
458 | .sum()
459 | .reset_index()
460 | )
461 |
462 | # merge trafos and line
463 | frames = [line, trafo]
464 |
465 | grid_investment_costs = pd.concat(frames)
466 |
467 | return grid_investment_costs
468 |
469 | # ToDo: add .agg({'number_of_expansion':lambda x: x.count(),
470 | # 's_nom_expansion': np.sum,
471 | # 'grid_costs': np.sum}) <- time_step
472 | pass
473 |
474 |
475 | def edisgo_grid_investment(edisgo, json_file):
476 | """
477 | Function aggregates all costs, based on all calculated eDisGo
478 | grids and their weightings
479 | Parameters
480 | ----------
481 | edisgo : :class:`ego.tools.edisgo_integration.EDisGoNetworks`
482 | Contains multiple eDisGo networks
483 | Returns
484 | -------
485 | None or :pandas:`pandas.DataFrame`
486 | Dataframe containing annuity costs per voltage level
487 | """
488 |
489 | t = 40
490 | p = 0.05
491 | logger.info("For all components T={} and p={} is used".format(t, p))
492 |
493 | costs = pd.DataFrame(columns=["voltage_level", "annuity_costs", "overnight_costs"])
494 |
495 | # Loop through all calculated eDisGo grids
496 | for key, value in edisgo.network.items():
497 |
498 | if not hasattr(value, "network"):
499 | logger.warning("No results available for grid {}".format(key))
500 | continue
501 |
502 | # eDisGo results (overnight costs) for this grid
503 | costs_single = value.network.results.grid_expansion_costs
504 | costs_single.rename(columns={"total_costs": "overnight_costs"}, inplace=True)
505 |
506 | # continue if this grid was not reinforced
507 | if costs_single["overnight_costs"].sum() == 0.0:
508 | logger.info("No expansion costs for grid {}".format(key))
509 | continue
510 |
511 | # Overnight cost translated in annuity costs
512 | costs_single["capital_cost"] = edisgo_convert_capital_costs(
513 | costs_single["overnight_costs"], t=t, p=p, json_file=json_file
514 | )
515 |
516 |         # Weighting (retrieves the single (absolute) weighting for this grid)
517 | choice = edisgo.grid_choice
518 | weighting = choice.loc[choice["the_selected_network_id"] == key][
519 | "no_of_points_per_cluster"
520 | ].values[0]
521 |
522 | costs_single[["capital_cost", "overnight_costs"]] = (
523 | costs_single[["capital_cost", "overnight_costs"]] * weighting
524 | )
525 |
526 | # Append costs of this grid
527 | costs = costs.append(
528 | costs_single[["voltage_level", "capital_cost", "overnight_costs"]],
529 | ignore_index=True,
530 | )
531 |
532 | if len(costs) == 0:
533 | logger.info("No expansion costs in any MV grid")
534 | return None
535 |
536 | else:
537 | aggr_costs = costs.groupby(["voltage_level"]).sum().reset_index()
538 |
539 | # In eDisGo all costs are in kEuro (eGo only takes Euro)
540 | aggr_costs[["capital_cost", "overnight_costs"]] = (
541 | aggr_costs[["capital_cost", "overnight_costs"]] * 1000
542 | )
543 |
544 | successfull_grids = edisgo.successfull_grids
545 | if successfull_grids < 1:
546 | logger.warning(
547 | "Only {} % of the grids were calculated.\n".format(
548 | "{:,.2f}".format(successfull_grids * 100)
549 | )
550 | + "Costs are extrapolated..."
551 | )
552 |
553 | aggr_costs[["capital_cost", "overnight_costs"]] = (
554 | aggr_costs[["capital_cost", "overnight_costs"]] / successfull_grids
555 | )
556 |
557 | return aggr_costs
558 |
559 |
560 | def get_generator_investment(network, scn_name):
561 | """Get investment costs per carrier/ generator."""
562 | etg = network
563 |
564 | try:
565 |
566 | data = pkgutil.get_data("ego", "data/investment_costs.csv")
567 | invest = pd.read_csv(
568 | io.BytesIO(data), encoding="utf8", sep=",", index_col="carriers"
569 | )
570 |
571 | except FileNotFoundError:
572 | path = os.getcwd()
573 | filename = "investment_costs.csv"
574 | invest = pd.DataFrame.from_csv(path + "/data/" + filename)
575 |
576 | if scn_name in ["SH Status Quo", "Status Quo"]:
577 | invest_scn = "Status Quo"
578 |
579 | if scn_name in ["SH NEP 2035", "NEP 2035"]:
580 | invest_scn = "NEP 2035"
581 |
582 | if scn_name in ["SH eGo 100", "eGo 100"]:
583 | invest_scn = "eGo 100"
584 |
585 | gen_invest = pd.concat(
586 | [invest[invest_scn], etg.generators.groupby("carrier")["p_nom"].sum()],
587 | axis=1,
588 | join="inner",
589 | )
590 |
591 | gen_invest = pd.concat(
592 | [invest[invest_scn], etg.generators.groupby("carrier")["p_nom"].sum()],
593 | axis=1,
594 | join="inner",
595 | )
596 | gen_invest["carrier_costs"] = (
597 | gen_invest[invest_scn] * gen_invest["p_nom"] * 1000
598 | ) # in MW
599 |
600 | return gen_invest
601 |
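602 | # Illustrative usage sketch of the annuity helpers above (not executed on
603 | # import; it assumes the eTraGo/eGo dependencies imported at the top of this
604 | # module are installed). The numbers are made up: 1 MEUR overnight costs, a
605 | # 40-year lifetime, 5 % interest and the two-snapshot window that is also used
606 | # in ``scenario_setting.json``.
607 | if __name__ == "__main__":
608 |     demo_settings = {"eTraGo": {"start_snapshot": 1, "end_snapshot": 2}}
609 |
610 |     # Classic annuity (capital recovery): roughly 58.3 kEUR per year.
611 |     print(annuity_per_period(capex=1e6, n=40, wacc=0.05, t=None, p=None))
612 |
613 |     # Scale the same overnight costs down to the calculated time period:
614 |     # PVA = 1/0.05 - 1/(0.05 * 1.05**40) ~= 17.16 and 8760 h / 2 snapshots,
615 |     # which yields roughly 13.3 EUR for the two calculated snapshots.
616 |     annuity = edisgo_convert_capital_costs(1e6, t=40, p=0.05, json_file=demo_settings)
617 |     print(annuity)
618 |
619 |     # Converting back recovers the original overnight costs (up to rounding).
620 |     print(etrago_convert_overnight_cost(annuity, demo_settings, t=40, p=0.05))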
--------------------------------------------------------------------------------
/ego/tools/results.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 |
19 | # File description
20 | """This module include the results functions for analyze and creating results
21 | based on eTraGo or eDisGo for eGo.
22 | """
23 | # TODO - write results to database
24 |
25 | import io
26 | import logging
27 | import os
28 |
29 | logger = logging.getLogger("ego")
30 |
31 | if not "READTHEDOCS" in os.environ:
32 | import numpy as np
33 | import pandas as pd
34 |
35 | from ego.tools.economics import get_generator_investment
36 |
37 | __copyright__ = (
38 | "Flensburg University of Applied Sciences, Europa-Universität"
39 | "Flensburg, Centre for Sustainable Energy Systems"
40 | )
41 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
42 | __author__ = "wolfbunke"
43 |
44 |
45 | def create_etrago_results(network, scn_name): # rename function
46 | """
47 | Create eTraGo results
48 |
49 | Parameters
50 | ----------
51 | network : :class:`~.etrago.tools.io.NetworkScenario`
52 | eTraGo ``NetworkScenario`` based on PyPSA Network. See also
53 |         `pypsa.network <https://pypsa.org/doc/components.html#network>`_
54 |
55 | scn_name : str
56 | Name of used scenario
57 |
58 |
59 | Returns
60 | -------
61 | generator : :pandas:`pandas.DataFrame`
62 | Result of generator as DataFrame in ``ego.etrago.generator``
63 |
64 | """
65 |
66 | etg = network
67 | etrago = pd.DataFrame()
68 |
69 | etrago["p_nom"] = etg.generators.groupby("carrier")["p_nom"].sum() # in MW
70 | etrago["p_nom_opt"] = etg.generators.groupby("carrier")["p_nom_opt"].sum() # in MW
71 | # power price
72 | etrago["marginal_cost"] = etg.generators.groupby("carrier")["marginal_cost"].mean()
73 |     # in [EUR/MWh]
74 |
75 | # get power price by production MWh _t.p * marginal_cost
76 | power_price = (
77 | etg.generators_t.p[etg.generators[etg.generators.control != "Slack"].index]
78 | * etg.generators.marginal_cost[
79 | etg.generators[etg.generators.control != "Slack"].index
80 | ]
81 | ) # without Slack
82 |
83 | power_price = power_price.groupby(etg.generators.carrier, axis=1).sum().sum()
84 | etrago["power_price"] = power_price
85 |
86 | # use country code
87 | p_by_carrier = (
88 | pd.concat(
89 | [
90 | etg.generators_t.p[
91 | etg.generators[etg.generators.control != "Slack"].index
92 | ],
93 | etg.generators_t.p[
94 | etg.generators[etg.generators.control == "Slack"].index
95 | ]
96 | .iloc[:, 0]
97 | .apply(lambda x: x if x > 0 else 0),
98 | ],
99 | axis=1,
100 | )
101 | .groupby(etg.generators.carrier, axis=1)
102 | .sum()
103 | ) # in MWh
104 |
105 | etrago["p"] = p_by_carrier.sum()
106 |     # add investment
107 | result_invest = get_generator_investment(network, scn_name)
108 |
109 | etrago = etrago.assign(investment_costs=result_invest["carrier_costs"])
110 |
111 | return etrago
112 |
113 |
114 | if __name__ == "__main__":
115 | pass
116 |
--------------------------------------------------------------------------------
/ego/tools/storages.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 |
19 | # File description
20 | """This module contains functions for storage units.
21 | """
22 |
23 | import io
24 | import logging
25 | import os
26 |
27 | logger = logging.getLogger("ego")
28 |
29 | if not "READTHEDOCS" in os.environ:
30 | import numpy as np
31 | import pandas as pd
32 |
33 | from etrago.tools.utilities import geolocation_buses
34 |
35 | __copyright__ = "Europa-Universität Flensburg, " "Centre for Sustainable Energy Systems"
36 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
37 | __author__ = "wolf_bunke,maltesc"
38 |
39 |
40 | def etrago_storages(network):
41 | """Sum up the pysical storage values of the total scenario based on
42 | eTraGo results.
43 |
44 | Parameters
45 | ----------
46 | network : :class:`etrago.tools.io.NetworkScenario`
47 | eTraGo ``NetworkScenario`` based on PyPSA Network. See also
48 |         `pypsa.network <https://pypsa.org/doc/components.html#network>`_
49 |
50 | Returns
51 | -------
52 | results : :pandas:`pandas.DataFrame`
53 |         Summarizes and returns a ``DataFrame`` of the storage optimization.
54 |
55 | Notes
56 | -----
57 |
58 |     The ``results`` dataframe includes the following parameters:
59 |
60 | charge : numeric
61 | Quantity of charged energy in MWh over scenario time steps
62 | discharge : numeric
63 | Quantity of discharged energy in MWh over scenario time steps
64 |     total_units : int
65 |         Number of storage units with an optimised capacity above zero
66 |     p_nom, extension : numeric
67 |         Installed power capacity and its optimised extension in MW
68 | """
69 | if len(network.storage_units_t.p.sum()) > 0:
70 | charge = (
71 | network.storage_units_t.p[
72 | network.storage_units_t.p[
73 | network.storage_units[network.storage_units.p_nom_opt > 0].index
74 | ].values
75 | > 0.0
76 | ]
77 | .groupby(network.storage_units.carrier, axis=1)
78 | .sum()
79 | .sum()
80 | )
81 |
82 | discharge = (
83 | network.storage_units_t.p[
84 | network.storage_units_t.p[
85 | network.storage_units[network.storage_units.p_nom_opt > 0].index
86 | ].values
87 | < 0.0
88 | ]
89 | .groupby(network.storage_units.carrier, axis=1)
90 | .sum()
91 | .sum()
92 | )
93 |
94 | count = (
95 | network.storage_units.bus[network.storage_units.p_nom_opt > 0]
96 | .groupby(network.storage_units.carrier, axis=0)
97 | .count()
98 | )
99 |
100 | p_nom_sum = network.storage_units.p_nom.groupby(
101 | network.storage_units.carrier, axis=0
102 | ).sum()
103 |
104 | p_nom_o_sum = network.storage_units.p_nom_opt.groupby(
105 | network.storage_units.carrier, axis=0
106 | ).sum()
107 |         p_nom_o = p_nom_o_sum - p_nom_sum  # capacity extension (Zubau)
108 |
109 | results = pd.concat(
110 | [
111 | charge.rename("charge"),
112 | discharge.rename("discharge"),
113 | p_nom_sum,
114 | count.rename("total_units"),
115 | p_nom_o.rename("extension"),
116 | ],
117 | axis=1,
118 | join="outer",
119 | )
120 |
121 | else:
122 | logger.info("No timeseries p for storages!")
123 | results = None
124 |
125 | return results
126 |
127 |
128 | def etrago_storages_investment(network, json_file, session):
129 | """Calculate storage investment costs of eTraGo
130 |
131 | Parameters
132 | ----------
133 | network : :class:`etrago.tools.io.NetworkScenario`
134 | eTraGo ``NetworkScenario`` based on PyPSA Network. See also
135 |         `pypsa.network <https://pypsa.org/doc/components.html#network>`_
136 |
137 |
138 | Returns
139 | -------
140 | storage_costs : numeric
141 | Storage costs of selected snapshots in [EUR]
142 |
143 | """
144 | # check spelling of storages and storage
145 | logger.info(json_file["eTraGo"]["extendable"])
146 |
147 | stos = "storage"
148 |
149 | # check settings for extendable
150 | if stos not in json_file["eTraGo"]["extendable"]:
151 | logger.info(
152 | "The optimizition was not using parameter "
153 | " 'extendable': storage"
154 | "No storage expantion costs from etrago"
155 | )
156 |
157 | if stos in json_file["eTraGo"]["extendable"]:
158 |
159 | network = geolocation_buses(network, session)
160 | # get v_nom
161 | _bus = pd.DataFrame(network.buses[["v_nom", "country_code"]])
162 | _bus.index.name = "name"
163 | _bus.reset_index(level=0, inplace=True)
164 |
165 | _storage = network.storage_units[network.storage_units.p_nom_extendable == True]
166 | _storage.reset_index(level=0, inplace=True)
167 | # provide storage installation costs per voltage level
168 | installed_storages = pd.merge(_storage, _bus, left_on="bus", right_on="name")
169 |
170 | installed_storages["investment_costs"] = (
171 | installed_storages.capital_cost * installed_storages.p_nom_opt
172 | )
173 |
174 | # add voltage_level
175 | installed_storages["voltage_level"] = "unknown"
176 |
177 | ix_ehv = installed_storages[installed_storages["v_nom"] >= 380].index
178 | installed_storages.set_value(ix_ehv, "voltage_level", "ehv")
179 |
180 | ix_hv = installed_storages[
181 | (installed_storages["v_nom"] <= 220) & (installed_storages["v_nom"] >= 110)
182 | ].index
183 | installed_storages.set_value(ix_hv, "voltage_level", "hv")
184 |
185 | # add country differentiation
186 | installed_storages["differentiation"] = "none"
187 |
188 | for idx, val in installed_storages.iterrows():
189 |
190 | check = val["country_code"]
191 |
192 | if "DE" in check:
193 | installed_storages["differentiation"][idx] = "domestic"
194 | if "DE" not in check:
195 | installed_storages["differentiation"][idx] = "foreign"
196 |
197 | storages_investment = (
198 | installed_storages[["voltage_level", "investment_costs", "differentiation"]]
199 | .groupby(["differentiation", "voltage_level"])
200 | .sum()
201 | .reset_index()
202 | )
203 |
204 | storages_investment = storages_investment.rename(
205 | columns={"investment_costs": "capital_cost"}
206 | )
207 |
208 | return storages_investment
209 |
--------------------------------------------------------------------------------
/ego/tools/utilities.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright 2016-2018 Europa-Universität Flensburg,
3 | # Flensburg University of Applied Sciences,
4 | # Centre for Sustainable Energy Systems
5 | #
6 | # This program is free software; you can redistribute it and/or
7 | # modify it under the terms of the GNU Affero General Public License as
8 | # published by the Free Software Foundation; either version 3 of the
9 | # License, or (at your option) any later version.
10 | #
11 | # This program is distributed in the hope that it will be useful,
12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | # GNU Affero General Public License for more details.
15 | #
16 | # You should have received a copy of the GNU Affero General Public License
17 | # along with this program. If not, see <http://www.gnu.org/licenses/>.
18 |
19 | # File description
20 | """This module contains utility functions for the eGo application.
21 | """
22 | import csv
23 | import json
24 | import logging
25 | import os
26 | import sys
27 |
28 | from time import localtime, strftime
29 |
30 | from sqlalchemy.orm import scoped_session, sessionmaker
31 |
32 | if "READTHEDOCS" not in os.environ:
33 | from egoio.tools import db
34 |
35 | logger = logging.getLogger(__name__)
36 |
37 |
38 | __copyright__ = (
39 | "Flensburg University of Applied Sciences, "
40 | "Europa-Universität Flensburg, "
41 | "Centre for Sustainable Energy Systems"
42 | )
43 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
44 | __author__ = "wolf_bunke"
45 |
46 |
47 | def define_logging(name):
48 | """Helps to log your modeling process with eGo and defines all settings.
49 |
50 | Parameters
51 | ----------
52 |     name : str
53 |         Name of the logger and prefix of the log file written to ``logs/``.
54 |
55 | Returns
56 | -------
57 |     logger : :class:`logging.Logger`
58 |         Set up ``logger`` object of the ``logging`` package
59 | """
60 |
61 | # ToDo: Logger should be set up more specific
62 | # add pypsa and other logger INFO to ego.log
63 | now = strftime("%Y-%m-%d_%H%M%S", localtime())
64 |
65 | log_dir = "logs"
66 | if not os.path.exists(log_dir):
67 | os.makedirs(log_dir)
68 |
69 | # Logging
70 | logging.basicConfig(
71 | stream=sys.stdout, format="%(asctime)s %(message)s", level=logging.INFO
72 | )
73 |
74 | logger = logging.getLogger(name)
75 |
76 | formatter = logging.Formatter(
77 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
78 | )
79 |
80 | # logger = logging.FileHandler(log_name, mode='w')
81 | fh = logging.FileHandler(log_dir + "/" + name + "_" + now + ".log", mode="w")
82 | fh.setLevel(logging.INFO)
83 | fh.setFormatter(formatter)
84 | logger.addHandler(fh)
85 |
86 | return logger
87 |
88 |
89 | def get_scenario_setting(jsonpath=None):
90 | """Get and open json file with scenaio settings of eGo.
91 | The settings incluede eGo, eTraGo and eDisGo specific
92 | settings of arguments and parameters for a reproducible
93 | calculation.
94 |
95 | Parameters
96 | ----------
97 |     jsonpath : str
98 |         Path to the scenario setting json file.
99 |         Default: ``scenario_setting.json`` in the current working directory
100 |
101 | Returns
102 | -------
103 | json_file : dict
104 | Dictionary of json file
105 | """
106 | if jsonpath is None:
107 | path = os.getcwd()
108 | # add try ego/
109 | logger.info("Your path is: {}".format(path))
110 | jsonpath = os.path.join(path, "scenario_setting.json")
111 |
112 | with open(jsonpath) as f:
113 | json_file = json.load(f)
114 |
115 | # fix remove result_id
116 | json_file["eGo"].update({"result_id": None})
117 |
118 | # check settings
119 | if json_file["eGo"]["eTraGo"] is False and json_file["eGo"]["eDisGo"] is False:
120 | logger.warning(
121 | "Something went wrong! \n"
122 | "Please contoll your settings and restart. \n"
123 | "Set at least eTraGo = true"
124 | )
125 | return
126 |
127 | if json_file["eGo"]["eTraGo"] is None and json_file["eGo"]["eDisGo"] is None:
128 | logger.warning(
129 | "Something went wrong! \n"
130 | "Please contoll your settings and restart. \n"
131 | "Set at least eTraGo = true"
132 | )
133 | return
134 |
135 | if json_file["eGo"]["result_id"] and json_file["eGo"]["csv_import_eTraGo"]:
136 | logger.warning(
137 | "You set a DB result_id and a csv import path! \n"
138 | "Please remove on of this settings"
139 | )
140 | return
141 | # or ? json_file['eGo']['result_id'] = None
142 |
143 | if json_file["eGo"]["eTraGo"] is None and json_file["eGo"]["eDisGo"]:
144 | logger.info("eDisGo needs eTraGo results. Please change your settings!\n")
145 | return
146 |
147 | if json_file["eGo"]["eTraGo"] is False and json_file["eGo"]["eDisGo"]:
148 | logger.info("eDisGo needs eTraGo results. Please change your settings!\n")
149 | return
150 |
151 | if (
152 | json_file["eGo"]["result_id"] is None
153 | and json_file["eGo"]["csv_import_eTraGo"] is None
154 | ):
155 | logger.info(
156 | "No data import from results is set \n" "eGo runs by given settings"
157 | )
158 |
159 | if json_file["eGo"]["csv_import_eTraGo"] and json_file["eGo"]["csv_import_eDisGo"]:
160 | logger.info("eDisGo and eTraGo results will be imported from csv\n")
161 |
162 | if json_file["eGo"].get("eTraGo") is True:
163 |
164 | logger.info("Using and importing eTraGo settings")
165 |
166 | # special case of SH and model_draft
167 | # TODO: check and maybe remove this part
168 | sh_scen = ["SH Status Quo", "SH NEP 2035", "SH eGo 100"]
169 | if (
170 | json_file["eTraGo"].get("scn_name") in sh_scen
171 | and json_file["eTraGo"].get("gridversion") is not None
172 | ):
173 | json_file["eTraGo"]["gridversion"] = None
174 |
175 | if json_file["eTraGo"].get("extendable") == "['network', 'storages']":
176 | json_file["eTraGo"].update({"extendable": ["network", "storage"]})
177 |
178 | if json_file["eTraGo"].get("extendable") == "['network', 'storage']":
179 | json_file["eTraGo"].update({"extendable": ["network", "storage"]})
180 |
181 | if json_file["eTraGo"].get("extendable") == "['network']":
182 | json_file["eTraGo"].update({"extendable": ["network"]})
183 |
184 | if json_file["eTraGo"].get("extendable") == "['storages']":
185 | json_file["eTraGo"].update({"extendable": ["storage"]})
186 |
187 | if json_file["eTraGo"].get("extendable") == "['storage']":
188 | json_file["eTraGo"].update({"extendable": ["storage"]})
189 |
190 | if json_file["eGo"].get("eDisGo") is True:
191 | logger.info("Using and importing eDisGo settings")
192 |
193 | if isinstance(json_file["external_config"], str):
194 | path_external_config = os.path.expanduser(json_file["external_config"])
195 | logger.info(f"Load external config with path: {path_external_config}")
196 | with open(path_external_config) as f:
197 | external_config = json.load(f)
198 | for key in external_config.keys():
199 | try:
200 | json_file[key].update(external_config[key])
201 | except KeyError:
202 | json_file[key] = external_config[key]
203 | else:
204 | logger.info("Don't load external config.")
205 |
206 | # Serializing json
207 | json_object = json.dumps(json_file, indent=4)
208 |
209 |     # Write the merged settings to config.json in the results directory
210 | results_dir = os.path.join(json_file["eDisGo"]["results"])
211 | if not os.path.exists(results_dir):
212 | os.makedirs(results_dir)
213 | with open(os.path.join(results_dir, "config.json"), "w") as outfile:
214 | outfile.write(json_object)
215 |
216 | return json_file
217 |
218 |
219 | def fix_leading_separator(csv_file, **kwargs):
220 | """
221 | Takes the path to a csv-file. If the first line of this file has a leading
222 | separator in its header, this field is deleted. If this is done the second
223 | field of every row is removed, too.
224 | """
225 | with open(csv_file, "r") as f:
226 | lines = csv.reader(f, **kwargs)
227 | if not lines:
228 | raise Exception("File %s contained no data" % csv_file)
229 | first_line = next(lines)
230 | if first_line[0] == "":
231 | path, fname = os.path.split(csv_file)
232 | tmp_file = os.path.join(path, "tmp_" + fname)
233 | with open(tmp_file, "w+") as out:
234 | writer = csv.writer(out, **kwargs)
235 | writer.writerow(first_line[1:])
236 | for line in lines:
237 | line_selection = line[2:]
238 | line_selection.insert(0, line[0])
239 |                     writer.writerow(line_selection)
240 | os.rename(tmp_file, csv_file)
241 |
242 |
243 | def get_time_steps(json_file):
244 | """Get time step of calculation by scenario settings.
245 |
246 | Parameters
247 | ----------
248 | json_file : :obj:`dict`
249 | Dictionary of the ``scenario_setting.json`` file
250 |
251 | Returns
252 | -------
253 | time_step : int
254 | Number of timesteps of the calculation.
255 | """
256 |
257 | end = json_file["eTraGo"].get("end_snapshot")
258 | start = json_file["eTraGo"].get("start_snapshot")
259 | time_step = end - start
260 |
261 | return time_step
262 |
263 |
264 | def open_oedb_session(ego):
265 | """ """
266 | _db_section = ego.json_file["eTraGo"]["db"]
267 | conn = db.connection(section=_db_section)
268 | session_factory = sessionmaker(bind=conn)
269 | Session = scoped_session(session_factory)
270 | session = Session()
271 |
272 | return session
273 |
--------------------------------------------------------------------------------
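For illustration, a minimal sketch of what ``fix_leading_separator`` from ``ego/tools/utilities.py`` above does to a malformed file; the file name and its contents are made up:

    from ego.tools.utilities import fix_leading_separator

    # Hypothetical malformed CSV: the header starts with the separator, so every
    # data row carries a superfluous second field.
    with open("example.csv", "w") as f:
        f.write(",p_nom,carrier\n")
        f.write("0,,1.0,solar\n")

    fix_leading_separator("example.csv")

    with open("example.csv") as f:
        print(f.read())
    # expected content after the fix:
    # p_nom,carrier
    # 0,1.0,solar
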
/pytest.ini:
--------------------------------------------------------------------------------
1 | # pytest.ini
2 | [pytest]
3 | log_cli = True
4 | log_level = INFO
5 | testpaths =
6 | tests
7 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Packages for Read the Docs
2 | # Using a single requirements file for the docs, see:
3 | # https://github.com/rtfd/readthedocs.org/issues/2070
4 | sphinx_rtd_theme
5 | numpy
6 | numpydoc
7 | sphinxcontrib-httpdomain
8 | aiohttp_jinja2
9 | sphinx-jsondomain
10 | sqlalchemy
11 | dill
12 | multiprocess
13 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # flake8: noqa: F401, F601
4 | import os
5 |
6 | from pip._internal.req import parse_requirements
7 | from setuptools import find_packages, setup
8 |
9 | __copyright__ = (
10 | "Flensburg University of Applied Sciences, "
11 | "Europa-Universität Flensburg, "
12 | "Centre for Sustainable Energy Systems"
13 | )
14 | __license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
15 | __author__ = "wolf_bunke, maltesc"
16 |
17 |
18 | def read(fname):
19 | return open(os.path.join(os.path.dirname(__file__), fname)).read()
20 |
21 |
22 | req = []
23 |
24 | dev_req = [
25 | "pre-commit",
26 | "black",
27 | "isort",
28 | "pyupgrade",
29 | "flake8",
30 | ]
31 |
32 | doc_req = ["numpydoc", "sphinxcontrib.httpdomain", "sphinx-jsondomain"]
33 |
34 | full_req = list(set(dev_req + doc_req))
35 |
36 | extras = {
37 | "dev": dev_req,
38 | "doc": doc_req,
39 | "full": full_req,
40 | }
41 |
42 |
43 | setup(
44 | name="eGo",
45 | version="0.3.4",
46 | author="wolfbunke, maltesc",
47 | author_email="wolf-dieter.bunke@uni-flensburg.de",
48 | description=("A cross-grid-level electricity grid and storage optimization tool."),
49 | long_description=read("README.rst"),
50 | url="https://github.com/openego/eGo",
51 | license="GNU Affero General Public License Version 3 (AGPL-3.0)",
52 | packages=find_packages(),
53 | package_dir={"ego": "ego"},
54 | include_package_data=True,
55 | install_requires=req,
56 | extras_require=extras,
57 | package_data={
58 | "ego": [os.path.join("tools", "*.json")] + [os.path.join("", "*.json")],
59 | },
60 | )
61 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import pytest
4 |
5 |
6 | def pytest_configure(config):
7 | pytest.etrago_test_network_1_path = os.path.join(
8 | os.path.realpath(os.path.dirname(__file__)), "data/etrago_test_network_1"
9 | )
10 | pytest.interface_results_reference_data_path = os.path.join(
11 | os.path.realpath(os.path.dirname(__file__)),
12 | "data/interface_results_reference_data",
13 | )
14 |
15 | config.addinivalue_line("markers", "slow: mark test as slow to run")
16 |
17 |
18 | def pytest_addoption(parser):
19 | parser.addoption(
20 | "--runslow", action="store_true", default=False, help="run slow tests"
21 | )
22 |
23 |
24 | def pytest_collection_modifyitems(config, items):
25 | if config.getoption("--runslow"):
26 | # --runslow given in cli: do not skip slow tests
27 | return
28 | skip_slow = pytest.mark.skip(reason="need --runslow option to run")
29 | for item in items:
30 | if "slow" in item.keywords:
31 | item.add_marker(skip_slow)
32 |
--------------------------------------------------------------------------------
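The ``slow`` marker and ``--runslow`` option registered above follow the common pytest recipe for opt-in long-running tests. A hypothetical test using it (the test name and body are made up) would be collected but skipped unless pytest is invoked with ``--runslow``:

    import pytest


    @pytest.mark.slow
    def test_complete_workflow():
        # long-running end-to-end check; skipped by default, executed only
        # when pytest is called with the --runslow option
        assert True
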
/tests/data/etrago_test_network_1/buses.csv:
--------------------------------------------------------------------------------
1 | name,carrier
2 | 0,AC
3 | 1,CH4
4 | 2,dsm
5 | 3,Li ion
6 | 4,central_heat
7 | 5,central_heat_store
8 | 6,rural_heat
9 | 7,rural_heat_store
10 | 10,junk
11 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/generators-p.csv:
--------------------------------------------------------------------------------
1 | ,0 biomass,10 biomass,0 central_biomass_CHP,10 central_biomass_CHP,0 run_of_river,10 run_of_river,0 gas,10 gas,0 other_non_renewable,10 other_non_renewable,0 reservoir,10 reservoir,0 solar_0,0 solar_1,10 solar_0,10 solar_1,0 solar_rooftop_0,0 solar_rooftop_1,10 solar_rooftop_0,10 solar_rooftop_1,0 wind_onshore_0,0 wind_onshore_1,10 wind_onshore_0,10 wind_onshore_1,4 solar_thermal_collector,10 solar_thermal_collector,4 geo_thermal,10 geo_thermal,0 junk,10 junk
2 | 0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/generators-p_max_pu.csv:
--------------------------------------------------------------------------------
1 | ,0 biomass,10 biomass,0 central_biomass_CHP,10 central_biomass_CHP,0 run_of_river,10 run_of_river,0 gas,10 gas,0 other_non_renewable,10 other_non_renewable,0 reservoir,10 reservoir,0 solar_0,0 solar_1,10 solar_0,10 solar_1,0 solar_rooftop_0,0 solar_rooftop_1,10 solar_rooftop_0,10 solar_rooftop_1,0 wind_onshore_0,0 wind_onshore_1,10 wind_onshore_0,10 wind_onshore_1,4 solar_thermal_collector,10 solar_thermal_collector,4 geo_thermal,10 geo_thermal,0 junk,10 junk
2 | 0,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
3 | 1,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75,0.75
4 | 2,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/generators-p_min_pu.csv:
--------------------------------------------------------------------------------
1 | ,0 biomass,10 biomass,0 central_biomass_CHP,10 central_biomass_CHP,0 run_of_river,10 run_of_river,0 gas,10 gas,0 other_non_renewable,10 other_non_renewable,0 reservoir,10 reservoir,0 solar_0,0 solar_1,10 solar_0,10 solar_1,0 solar_rooftop_0,0 solar_rooftop_1,10 solar_rooftop_0,10 solar_rooftop_1,0 wind_onshore_0,0 wind_onshore_1,10 wind_onshore_0,10 wind_onshore_1,4 solar_thermal_collector,10 solar_thermal_collector,4 geo_thermal,10 geo_thermal,0 junk,10 junk
2 | 0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/generators-q.csv:
--------------------------------------------------------------------------------
1 | ,0 biomass,10 biomass,0 central_biomass_CHP,10 central_biomass_CHP,0 run_of_river,10 run_of_river,0 gas,10 gas,0 other_non_renewable,10 other_non_renewable,0 reservoir,10 reservoir,0 solar_0,0 solar_1,10 solar_0,10 solar_1,0 solar_rooftop_0,0 solar_rooftop_1,10 solar_rooftop_0,10 solar_rooftop_1,0 wind_onshore_0,0 wind_onshore_1,10 wind_onshore_0,10 wind_onshore_1,4 solar_thermal_collector,10 solar_thermal_collector,4 geo_thermal,10 geo_thermal,0 junk,10 junk
2 | 0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
4 | 2,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0,-1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/generators.csv:
--------------------------------------------------------------------------------
1 | name,bus,carrier,p_nom,p_nom_opt
2 | 0 biomass,0,biomass,1.0,1.0
3 | 10 biomass,10,biomass,10.0,10.0
4 | 0 central_biomass_CHP,0,central_biomass_CHP,1.0,1.0
5 | 10 central_biomass_CHP,10,central_biomass_CHP,10.0,10.0
6 | 0 run_of_river,0,run_of_river,1.0,1.0
7 | 10 run_of_river,10,run_of_river,10.0,10.0
8 | 0 gas,0,gas,1.0,1.0
9 | 10 gas,10,gas,10.0,10.0
10 | 0 other_non_renewable,0,other_non_renewable,1.0,1.0
11 | 10 other_non_renewable,10,other_non_renewable,10.0,10.0
12 | 0 reservoir,0,reservoir,1.0,1.0
13 | 10 reservoir,10,reservoir,10.0,10.0
14 | 0 solar_0,0,solar,1.0,1.0
15 | 0 solar_1,0,solar,10.0,10.0
16 | 10 solar_0,10,solar,1.0,1.0
17 | 10 solar_1,10,solar,10.0,10.0
18 | 0 solar_rooftop_0,0,solar_rooftop,1.0,1.0
19 | 0 solar_rooftop_1,0,solar_rooftop,10.0,10.0
20 | 10 solar_rooftop_0,10,solar_rooftop,1.0,1.0
21 | 10 solar_rooftop_1,10,solar_rooftop,10.0,10.0
22 | 0 wind_onshore_0,0,wind_onshore,1.0,1.0
23 | 0 wind_onshore_1,0,wind_onshore,10.0,10.0
24 | 10 wind_onshore_0,10,wind_onshore,1.0,1.0
25 | 10 wind_onshore_1,10,wind_onshore,10.0,10.0
26 | 4 solar_thermal_collector,4,solar_thermal_collector,1.0,1.0
27 | 10 solar_thermal_collector,10,solar_thermal_collector,10.0,10.0
28 | 4 geo_thermal,4,geo_thermal,1.0,1.0
29 | 10 geo_thermal,10,geo_thermal,10.0,10.0
30 | 0 junk,0,junk,100.0,100.0
31 | 10 junk,10,junk,100.0,100.0
32 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/links-p0.csv:
--------------------------------------------------------------------------------
1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16
2 | 0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/links-p1.csv:
--------------------------------------------------------------------------------
1 | ,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16
2 | 0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/links.csv:
--------------------------------------------------------------------------------
1 | name,bus0,bus1,carrier,p_nom,efficiency
2 | 0,1,0,central_gas_CHP,1.0,1.0
3 | 1,10,10,central_gas_CHP,10.0,1.0
4 | 2,0,3,dsm,1.0,1.0
5 | 3,10,10,dsm,10.0,1.0
6 | 4,0,4,central_heat_pump,1.0,1.0
7 | 5,10,10,central_heat_pump,10.0,1.0
8 | 6,0,4,central_resistive_heater,1.0,1.0
9 | 7,10,10,central_resistive_heater,10.0,1.0
10 | 8,4,5,central_heat_store_charger,1.0,0.84
11 | 9,10,10,central_heat_store_charger,10.0,0.84
12 | 10,0,6,rural_heat_pump,1.0,1.0
13 | 11,10,10,rural_heat_pump,10.0,1.0
14 | 12,6,7,rural_heat_store_charger,1.0,0.8
15 | 13,10,10,rural_heat_store_charger,10.0,0.8
16 | 14,0,3,BEV_charger,1.0,1.0
17 | 15,10,10,BEV_charger,10.0,1.0
18 | 16,10,10,junk,100.0,1.0
19 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/snapshots.csv:
--------------------------------------------------------------------------------
1 | ,snapshot
2 | 0,2011-01-01 00:00:00
3 | 1,2011-01-01 12:00:00
4 | 2,2011-01-02 00:00:00
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/storage_units-p.csv:
--------------------------------------------------------------------------------
1 | ,0 battery,10 battery,10 junk
2 | 0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/storage_units-q.csv:
--------------------------------------------------------------------------------
1 | ,0 battery,10 battery,10 junk
2 | 0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/storage_units-state_of_charge.csv:
--------------------------------------------------------------------------------
1 | ,0 battery,10 battery,10 junk
2 | 0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/storage_units.csv:
--------------------------------------------------------------------------------
1 | name,bus,carrier,p_nom_opt,p_nom_extendable,max_hours
2 | 0 battery,0,battery,1.0,True,10.0
3 | 10 battery,10,battery,10.0,True,10.0
4 | 10 junk,10,junk,10.0,True,10.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/stores-e.csv:
--------------------------------------------------------------------------------
1 | ,5 central_heat_store,10 central_heat_store,7 rural_heat_store,10 rural_heat_store
2 | 0,0.0,0.0,0.0,0.0
3 | 1,0.5,0.5,0.5,0.5
4 | 2,1.0,1.0,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/etrago_test_network_1/stores.csv:
--------------------------------------------------------------------------------
1 | name,bus,carrier,e_nom_opt
2 | 5 central_heat_store,5,central_heat_store,1
3 | 10 central_heat_store,10,central_heat_store,10
4 | 7 rural_heat_store,7,rural_heat_store,1
5 | 10 rural_heat_store,10,rural_heat_store,10
6 |
--------------------------------------------------------------------------------
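The CSV files above make up the small test network ``etrago_test_network_1`` in PyPSA's CSV-folder format. A minimal sketch of loading it directly with PyPSA, assuming the repository root as working directory:

    from pypsa import Network

    # import the test network from its CSV folder
    etrago_network = Network("tests/data/etrago_test_network_1")
    print(etrago_network.buses.carrier.unique())
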
/tests/data/interface_results_reference_data/dispatchable_generators_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,biomass,biomass_CHP,run_of_river,gas,other_non_renewable,junk
2 | 2011-01-01 00:00:00,0.0,0.0,0.0,0.0,0.0,0.0
3 | 2011-01-01 12:00:00,0.5,0.5,0.5,0.5,0.5,0.005
4 | 2011-01-02 00:00:00,1.0,1.0,1.0,1.0,1.0,0.01
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/dispatchable_generators_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,biomass,biomass_CHP,run_of_river,gas,other_non_renewable,junk
2 | 2011-01-01 00:00:00,0.0,0.0,0.0,0.0,0.0,0.0
3 | 2011-01-01 12:00:00,0.5,0.5,0.5,0.5,0.5,0.005
4 | 2011-01-02 00:00:00,-1.0,-1.0,-1.0,-1.0,-1.0,-0.01
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/dsm_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,dsm
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/dsm_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,dsm
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.0
4 | 2011-01-02 00:00:00,0.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/electromobility_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,BEV charger
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/electromobility_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,BEV charger
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.0
4 | 2011-01-02 00:00:00,0.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/feedin_district_heating.csv:
--------------------------------------------------------------------------------
1 | snapshot,4
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,1.0
4 | 2011-01-02 00:00:00,2.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/heat_pump_central_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,central_heat_pump
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,1.0
4 | 2011-01-02 00:00:00,2.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/heat_pump_central_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,central_heat_pump
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.0
4 | 2011-01-02 00:00:00,0.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/heat_pump_rural_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,rural_heat_pump
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/heat_pump_rural_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,rural_heat_pump
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.0
4 | 2011-01-02 00:00:00,0.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/renewables_curtailment.csv:
--------------------------------------------------------------------------------
1 | snapshot,solar,wind
2 | 2011-01-01 00:00:00,11.0,5.5
3 | 2011-01-01 12:00:00,14.5,7.25
4 | 2011-01-02 00:00:00,18.0,9.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/renewables_dispatch_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,solar,wind
2 | 2011-01-01 00:00:00,0.0,0.0
3 | 2011-01-01 12:00:00,0.09091,0.09091
4 | 2011-01-02 00:00:00,-0.18182,-0.18182
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/renewables_dispatch_reactive_power_max_cosphi.csv:
--------------------------------------------------------------------------------
1 | snapshot,solar,wind
2 | 2011-01-01 00:00:00,0.0,0.0
3 | 2011-01-01 12:00:00,0.04403,0.04403
4 | 2011-01-02 00:00:00,-0.08806,-0.08806
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/renewables_p_nom.csv:
--------------------------------------------------------------------------------
1 | carrier,p_nom
2 | solar,22.0
3 | wind,11.0
4 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/renewables_potential.csv:
--------------------------------------------------------------------------------
1 | snapshot,solar,wind
2 | 2011-01-01 00:00:00,0.5,0.5
3 | 2011-01-01 12:00:00,0.75,0.75
4 | 2011-01-02 00:00:00,1.0,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/storage_units_active_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,battery
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/storage_units_reactive_power.csv:
--------------------------------------------------------------------------------
1 | snapshot,battery
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/storage_units_soc.csv:
--------------------------------------------------------------------------------
1 | snapshot,battery
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.05
4 | 2011-01-02 00:00:00,0.1
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/thermal_storage_central_soc.csv:
--------------------------------------------------------------------------------
1 | snapshot,4
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/data/interface_results_reference_data/thermal_storage_rural_soc.csv:
--------------------------------------------------------------------------------
1 | snapshot,
2 | 2011-01-01 00:00:00,0.0
3 | 2011-01-01 12:00:00,0.5
4 | 2011-01-02 00:00:00,1.0
5 |
--------------------------------------------------------------------------------
/tests/tools/test_interface.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | import random
4 |
5 | import pandas as pd
6 | import pytest
7 |
8 | from pypsa import Network as PyPSANetwork
9 |
10 | from ego.tools.interface import ETraGoMinimalData, get_etrago_results_per_bus
11 |
12 | logger = logging.getLogger(__name__)
13 |
14 | random.seed(42)
15 |
16 |
17 | class TestSpecs:
18 | @classmethod
19 | def setup_class(cls):
20 | cls.etrago_network = PyPSANetwork(pytest.etrago_test_network_1_path)
21 |
22 | def test_class_etrago_minimal_data(self):
23 | etrago_network = ETraGoMinimalData(self.etrago_network)
24 | assert "p_min_pu" not in etrago_network.generators_t
25 |
26 | def test_get_etrago_results_per_bus(self):
27 |
28 | bus_id = 0
29 | etrago_network = ETraGoMinimalData(self.etrago_network)
30 | pf_post_lopf = True
31 | max_cos_phi_renewable = False
32 |
33 | etrago_results_per_bus = get_etrago_results_per_bus(
34 | bus_id,
35 | etrago_network,
36 | pf_post_lopf,
37 | max_cos_phi_renewable,
38 | )
39 |
40 | for key, value in etrago_results_per_bus.items():
41 | logger.info(f"Check Result: {key}")
42 | if key == "timeindex":
43 | assert type(value) is pd.DatetimeIndex
44 | pd.testing.assert_index_equal(
45 | value,
46 | pd.DatetimeIndex(
47 | data=[
48 | "2011-01-01 00:00:00",
49 | "2011-01-01 12:00:00",
50 | "2011-01-02 00:00:00",
51 | ],
52 | name="snapshot",
53 | ),
54 | )
55 | elif key == "storage_units_p_nom":
56 | assert value == 1.0
57 | elif key == "storage_units_max_hours":
58 | assert value == 10.0
59 | elif key == "thermal_storage_central_capacity":
60 | pd.testing.assert_series_equal(
61 | value, pd.Series(index=["4"], data=[1.0]), check_names=False
62 | )
63 | elif key == "thermal_storage_rural_capacity":
64 | assert value == 1.0
65 | elif key == "heat_pump_rural_p_nom":
66 | assert value == 1.0
67 | elif key == "heat_pump_central_p_nom":
68 | assert value == 2.0
69 | elif key == "thermal_storage_rural_efficiency":
70 | assert value == 0.8
71 | elif key == "thermal_storage_central_efficiency":
72 | assert value == 0.84
73 | else:
74 | path_reference_df = os.path.join(
75 | pytest.interface_results_reference_data_path, f"{key}.csv"
76 | )
77 | if isinstance(value, pd.DataFrame):
78 | reference_df = pd.read_csv(
79 | path_reference_df, index_col=0, parse_dates=True
80 | )
81 | pd.testing.assert_frame_equal(
82 | value, reference_df, check_index_type=False, check_names=False
83 | )
84 | else:
85 | reference_s = pd.read_csv(
86 | path_reference_df, index_col=0, parse_dates=True
87 | ).iloc[:, 0]
88 | pd.testing.assert_series_equal(
89 | value, reference_s, check_index_type=False, check_names=False
90 | )
91 |
92 | def test_get_etrago_results_per_bus_empty(self):
93 |
94 | bus_id = 11
95 | etrago_network = ETraGoMinimalData(self.etrago_network)
96 | pf_post_lopf = True
97 | max_cos_phi_renewable = False
98 |
99 | etrago_results_per_bus = get_etrago_results_per_bus(
100 | bus_id,
101 | etrago_network,
102 | pf_post_lopf,
103 | max_cos_phi_renewable,
104 | )
105 |
106 | float_results = [
107 | "storage_units_p_nom",
108 | "storage_units_max_hours",
109 | "heat_pump_rural_p_nom",
110 | "heat_pump_central_p_nom",
111 | "thermal_storage_rural_capacity",
112 | "thermal_storage_rural_efficiency",
113 | "thermal_storage_central_efficiency",
114 | ]
115 | series_results = [
116 | "renewables_p_nom",
117 | "storage_units_active_power",
118 | "storage_units_reactive_power",
119 | "storage_units_soc",
120 | "dsm_active_power",
121 | "heat_pump_rural_active_power",
122 | "heat_pump_rural_reactive_power",
123 | "thermal_storage_rural_soc",
124 | "heat_central_active_power",
125 | "heat_central_reactive_power",
126 | "thermal_storage_central_capacity",
127 | "electromobility_active_power",
128 | "electromobility_reactive_power",
129 | ]
130 | dataframes_results = [
131 | "dispatchable_generators_active_power",
132 | "dispatchable_generators_reactive_power",
133 | "renewables_potential",
134 | "renewables_curtailment",
135 | "renewables_dispatch_reactive_power",
136 | "thermal_storage_central_soc",
137 | "feedin_district_heating",
138 |         ]
139 |
140 | for key, value in etrago_results_per_bus.items():
141 | if key in float_results:
142 | if value == 0.0:
143 | float_results.remove(key)
144 | elif key in series_results:
145 | if value.empty:
146 | series_results.remove(key)
147 | elif key in dataframes_results:
148 | if len(value.columns) == 0:
149 | dataframes_results.remove(key)
150 |
151 | assert len(float_results) == 0
152 |
153 | def test_get_etrago_results_per_bus_with_set_max_cosphi(self):
154 |
155 | bus_id = 0
156 | etrago_network = ETraGoMinimalData(self.etrago_network)
157 | pf_post_lopf = True
158 | max_cos_phi_renewable = 0.9
159 |
160 | etrago_results_per_bus = get_etrago_results_per_bus(
161 | bus_id,
162 | etrago_network,
163 | pf_post_lopf,
164 | max_cos_phi_renewable,
165 | )
166 | renewables_dispatch_reactive_power = etrago_results_per_bus[
167 | "renewables_dispatch_reactive_power"
168 | ]
169 | path_reference_df = os.path.join(
170 | pytest.interface_results_reference_data_path,
171 | "renewables_dispatch_reactive_power_max_cosphi.csv",
172 | )
173 | reference_df = pd.read_csv(path_reference_df, index_col=0, parse_dates=True)
174 | pd.testing.assert_frame_equal(
175 | renewables_dispatch_reactive_power,
176 | reference_df,
177 | check_index_type=False,
178 | check_names=False,
179 | atol=1e-4,
180 | )
181 |
--------------------------------------------------------------------------------
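To run only these interface tests, including any marked as slow, pytest can also be invoked programmatically; a sketch, again assuming the repository root as working directory:

    import pytest

    # equivalent to calling `pytest --runslow tests/tools/test_interface.py`
    exit_code = pytest.main(["--runslow", "tests/tools/test_interface.py"])
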