├── .bumpversion.cfg
├── .gitattributes
├── .gitignore
├── CHANGELOG.md
├── LICENSE.md
├── MANIFEST.in
├── README.md
├── ct.bat
├── ct.sh
├── doc
│   └── images
│       ├── MTU_HKA_Logo.svg
│       ├── all.svg
│       ├── draf_architecture.svg
│       ├── draf_process.svg
│       └── text+vulcano.svg
├── draf
│   ├── __init__.py
│   ├── abstract_component.py
│   ├── components
│   │   ├── __init__.py
│   │   ├── autocollectors.py
│   │   ├── component_templates.py
│   │   └── user_defined_components.py
│   ├── conventions.py
│   ├── core
│   │   ├── __init__.py
│   │   ├── case_study.py
│   │   ├── datetime_handler.py
│   │   ├── draf_base_class.py
│   │   ├── entity_stores.py
│   │   ├── mappings.py
│   │   ├── scenario.py
│   │   └── time_series_prepper.py
│   ├── data
│   │   ├── demand
│   │   │   └── electricity
│   │   │       └── SLP_BDEW
│   │   │           ├── Repräsentative Profile VDEW.xls
│   │   │           └── readme.txt
│   │   ├── pv
│   │   │   └── backup
│   │   │       └── pv_el.csv
│   │   └── wind
│   │       ├── 2019_wind_kelmarsh2.csv
│   │       └── Prep_Kelmarsh_wind.ipynb
│   ├── helper.py
│   ├── paths.py
│   ├── plotting
│   │   ├── __init__.py
│   │   ├── base_plotter.py
│   │   ├── cs_plotting.py
│   │   ├── plotting_util.py
│   │   └── scen_plotting.py
│   ├── prep
│   │   ├── __init__.py
│   │   ├── data_base.py
│   │   ├── demand.py
│   │   ├── gsee_module
│   │   │   ├── cec_tools.py
│   │   │   ├── pv.py
│   │   │   └── trigon.py
│   │   ├── par_dat.py
│   │   ├── param_funcs.py
│   │   ├── pv.py
│   │   └── weather.py
│   ├── sort_sections.py
│   └── tsa
│       ├── __init__.py
│       ├── demand_analyzer.py
│       └── peak_load.py
├── draf_cheat_sheet.md
├── environment.yml
├── environments
│   ├── environment_py37.yml
│   ├── environment_py37explicit_win64.txt
│   ├── environment_py37general.yml
│   ├── environment_py37specific_all.yml
│   ├── environment_py39all_mac.yml
│   └── environment_py39all_win64.yml
├── examples
│   ├── bev.py
│   ├── der_hut.py
│   ├── minimal.py
│   ├── prod.py
│   ├── pv.py
│   ├── pv_bes.py
│   └── pyomo_pv.py
├── fmt.bat
├── fmt.sh
├── pyproject.toml
├── setup.py
└── tests
    ├── core
    │   ├── test_case_study.py
    │   └── test_scenario.py
    ├── plotting
    │   └── test_cs_plotting.py
    ├── prep
    │   ├── test_demand.py
    │   ├── test_pv.py
    │   └── test_weather.py
    ├── test_examples.py
    ├── test_helper.py
    ├── test_sort_sections.py
    └── test_tsa.py
/.bumpversion.cfg:
--------------------------------------------------------------------------------
1 | [bumpversion]
2 | current_version = 0.3.1
3 | commit = True
4 | tag = True
5 |
6 | [bumpversion:file:setup.py]
7 |
8 | [bumpversion:file:draf/__init__.py]
9 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
2 | *.{cmd,[cC][mM][dD]} text eol=crlf
3 | *.{bat,[bB][aA][tT]} text eol=crlf
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # directories
2 | local/
3 | notebooks/
4 | results/
5 |
6 | # symlinks
7 | cache
8 |
9 | # results
10 | *.p
11 | *.pdf
12 | *.png
13 |
14 | # gurobi logs
15 | *.log
16 |
17 | # models
18 | *.lp
19 |
20 | # Pycharm settings
21 | .idea/
22 |
23 | # Visual Studio Code settings
24 | .vscode/
25 | *.code
26 |
27 | # Spyder project settings
28 | .spyderproject
29 | .spyproject
30 |
31 | # Byte-compiled / optimized / DLL files
32 | __pycache__/
33 | *.py[cod]
34 | *$py.class
35 |
36 | # C extensions
37 | *.so
38 |
39 | # Distribution / packaging
40 | .Python
41 | build/
42 | develop-eggs/
43 | dist/
44 | downloads/
45 | eggs/
46 | .eggs/
47 | lib/
48 | lib64/
49 | parts/
50 | sdist/
51 | var/
52 | wheels/
53 | share/python-wheels/
54 | *.egg-info/
55 | .installed.cfg
56 | *.egg
57 | MANIFEST
58 |
59 | # PyInstaller
60 | # Usually these files are written by a python script from a template
61 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
62 | *.manifest
63 | *.spec
64 |
65 | # Installer logs
66 | pip-log.txt
67 | pip-delete-this-directory.txt
68 |
69 | # Unit test / coverage reports
70 | htmlcov/
71 | .tox/
72 | .nox/
73 | .coverage
74 | .coverage.*
75 | .cache
76 | nosetests.xml
77 | coverage.xml
78 | *.cover
79 | .hypothesis/
80 | .pytest_cache/
81 |
82 | # Translations
83 | *.mo
84 | *.pot
85 |
86 | # Django stuff:
87 | *.log
88 | local_settings.py
89 | db.sqlite3
90 |
91 | # Flask stuff:
92 | instance/
93 | .webassets-cache
94 |
95 | # Scrapy stuff:
96 | .scrapy
97 |
98 | # Sphinx documentation
99 | docs/_build/
100 |
101 | # PyBuilder
102 | target/
103 |
104 | # Jupyter Notebook
105 | .ipynb_checkpoints
106 |
107 | # IPython
108 | profile_default/
109 | ipython_config.py
110 |
111 | # pyenv
112 | .python-version
113 |
114 | # celery beat schedule file
115 | celerybeat-schedule
116 |
117 | # SageMath parsed files
118 | *.sage.py
119 |
120 | # Environments
121 | .env
122 | .venv
123 | env/
124 | venv/
125 | ENV/
126 | env.bak/
127 | venv.bak/
128 |
129 | # Rope project settings
130 | .ropeproject
131 |
132 | # mkdocs documentation
133 | /site
134 |
135 | # mypy
136 | .mypy_cache/
137 | .dmypy.json
138 | dmypy.json
139 |
140 | # Pyre type checker
141 | .pyre/
142 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 |
7 | ## [main]
8 |
9 | - Add cheat sheet
10 |
11 | ## [v0.3.1] - 2023-04-19
12 |
13 | ## [v0.3.0] - 2023-01-14
14 |
15 | ### Added
16 |
17 | - Add `sc.execute_model_func(model_func)`.
18 | - Add `cs.plot()` which interactively provides most important plots and tables.
19 | - Add `cs.plot.collector_table()` and `cs.plot.collector_balance()`.
20 | - Add `cs.plot.heatmap_all_T()` interactive heatmap plotting that considers multi-dimensional time series.
21 | - Add metrics to `cs.plot.eFlex_table()`.
22 | - Add caption to `cs.plot.tables()`.
23 | - Add option to provide series in `sc.plot.ts_balance()`.
24 | - Add `only_scalars` and `number_format` arguments to `cs.plot.table()`.
25 | - Add `cs.plot.capa_TES_table()`.
26 | - Add link to draf demo paper and its case studies.
27 | - Add `maxsell` and `maxbuy` to `EG` component template.
28 | - Add wind turbine (`WT`) component.
29 | - Add hydrogen components (`FC`, `Elc`, `H2S`).
30 | - Add direct air capture (`DAC`) component.
31 | - Add `H_level_target` to thermal generation components (`HOB`, `CHP`, `P2H`, `HP`).
32 | - Add `draf.helper.get_TES_volume()`.
33 | - Add functionality to update dimensions with dictionary through `cs.add_scen()`.
34 | - Add `sc.update_var_bound()`, `sc.update_upper_bound()`, `sc.update_lower_bound()`.
35 | - Add `cs.scens.get()`.
36 | - Add option to iterate over `cs.scens`.
37 | - Add `draf.helper.play_beep_sound()` and `play_sound` argument in `cs.optimize()`.
38 | - Add possibility to use a solved scenario as base scenario.
39 | - Add `sc.fix_vars()`.
40 |
41 | ### Removed
42 |
43 | - Remove `sc.has_thermal_entities` property.
44 | - Remove German EEG levy on own consumption for PV and CHP.
45 |
46 | ### Fixed
47 |
48 | - Fix disappearing collector values and solving times when solving scenarios in parallel.
49 | - Fix missing registration of feed-in (FI) in `P_EL_source_T` for component templates `PV` and `CHP`.
50 | - `dQ_amb_source_` is now "Thermal energy flow to ambient".
51 |
52 | ### Changed
53 |
54 | - Rename heat downgrading component `H2H1` to `HD`.
55 | - Repair, maintenance, and inspection (RMI) costs are now part of the operating expenses (OpEx).
56 |
57 | ## [v0.2.0] - 2022-05-10
58 |
59 | [main]: https://github.com/DrafProject/draf/compare/v0.3.1...main
60 | [v0.3.1]: https://github.com/DrafProject/draf/compare/v0.3.0...v0.3.1
61 | [v0.3.0]: https://github.com/DrafProject/draf/compare/v0.2.0...v0.3.0
62 | [v0.2.0]: https://github.com/DrafProject/draf/releases/tag/v0.2.0
63 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | GNU LESSER GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 |
9 | This version of the GNU Lesser General Public License incorporates
10 | the terms and conditions of version 3 of the GNU General Public
11 | License, supplemented by the additional permissions listed below.
12 |
13 | 0. Additional Definitions.
14 |
15 | As used herein, "this License" refers to version 3 of the GNU Lesser
16 | General Public License, and the "GNU GPL" refers to version 3 of the GNU
17 | General Public License.
18 |
19 | "The Library" refers to a covered work governed by this License,
20 | other than an Application or a Combined Work as defined below.
21 |
22 | An "Application" is any work that makes use of an interface provided
23 | by the Library, but which is not otherwise based on the Library.
24 | Defining a subclass of a class defined by the Library is deemed a mode
25 | of using an interface provided by the Library.
26 |
27 | A "Combined Work" is a work produced by combining or linking an
28 | Application with the Library. The particular version of the Library
29 | with which the Combined Work was made is also called the "Linked
30 | Version".
31 |
32 | The "Minimal Corresponding Source" for a Combined Work means the
33 | Corresponding Source for the Combined Work, excluding any source code
34 | for portions of the Combined Work that, considered in isolation, are
35 | based on the Application, and not on the Linked Version.
36 |
37 | The "Corresponding Application Code" for a Combined Work means the
38 | object code and/or source code for the Application, including any data
39 | and utility programs needed for reproducing the Combined Work from the
40 | Application, but excluding the System Libraries of the Combined Work.
41 |
42 | 1. Exception to Section 3 of the GNU GPL.
43 |
44 | You may convey a covered work under sections 3 and 4 of this License
45 | without being bound by section 3 of the GNU GPL.
46 |
47 | 2. Conveying Modified Versions.
48 |
49 | If you modify a copy of the Library, and, in your modifications, a
50 | facility refers to a function or data to be supplied by an Application
51 | that uses the facility (other than as an argument passed when the
52 | facility is invoked), then you may convey a copy of the modified
53 | version:
54 |
55 | a) under this License, provided that you make a good faith effort to
56 | ensure that, in the event an Application does not supply the
57 | function or data, the facility still operates, and performs
58 | whatever part of its purpose remains meaningful, or
59 |
60 | b) under the GNU GPL, with none of the additional permissions of
61 | this License applicable to that copy.
62 |
63 | 3. Object Code Incorporating Material from Library Header Files.
64 |
65 | The object code form of an Application may incorporate material from
66 | a header file that is part of the Library. You may convey such object
67 | code under terms of your choice, provided that, if the incorporated
68 | material is not limited to numerical parameters, data structure
69 | layouts and accessors, or small macros, inline functions and templates
70 | (ten or fewer lines in length), you do both of the following:
71 |
72 | a) Give prominent notice with each copy of the object code that the
73 | Library is used in it and that the Library and its use are
74 | covered by this License.
75 |
76 | b) Accompany the object code with a copy of the GNU GPL and this license
77 | document.
78 |
79 | 4. Combined Works.
80 |
81 | You may convey a Combined Work under terms of your choice that,
82 | taken together, effectively do not restrict modification of the
83 | portions of the Library contained in the Combined Work and reverse
84 | engineering for debugging such modifications, if you also do each of
85 | the following:
86 |
87 | a) Give prominent notice with each copy of the Combined Work that
88 | the Library is used in it and that the Library and its use are
89 | covered by this License.
90 |
91 | b) Accompany the Combined Work with a copy of the GNU GPL and this license
92 | document.
93 |
94 | c) For a Combined Work that displays copyright notices during
95 | execution, include the copyright notice for the Library among
96 | these notices, as well as a reference directing the user to the
97 | copies of the GNU GPL and this license document.
98 |
99 | d) Do one of the following:
100 |
101 | 0) Convey the Minimal Corresponding Source under the terms of this
102 | License, and the Corresponding Application Code in a form
103 | suitable for, and under terms that permit, the user to
104 | recombine or relink the Application with a modified version of
105 | the Linked Version to produce a modified Combined Work, in the
106 | manner specified by section 6 of the GNU GPL for conveying
107 | Corresponding Source.
108 |
109 | 1) Use a suitable shared library mechanism for linking with the
110 | Library. A suitable mechanism is one that (a) uses at run time
111 | a copy of the Library already present on the user's computer
112 | system, and (b) will operate properly with a modified version
113 | of the Library that is interface-compatible with the Linked
114 | Version.
115 |
116 | e) Provide Installation Information, but only if you would otherwise
117 | be required to provide such information under section 6 of the
118 | GNU GPL, and only to the extent that such information is
119 | necessary to install and execute a modified version of the
120 | Combined Work produced by recombining or relinking the
121 | Application with a modified version of the Linked Version. (If
122 | you use option 4d0, the Installation Information must accompany
123 | the Minimal Corresponding Source and Corresponding Application
124 | Code. If you use option 4d1, you must provide the Installation
125 | Information in the manner specified by section 6 of the GNU GPL
126 | for conveying Corresponding Source.)
127 |
128 | 5. Combined Libraries.
129 |
130 |   You may place library facilities that are a work based on the
131 | Library side by side in a single library together with other library
132 | facilities that are not Applications and are not covered by this
133 | License, and convey such a combined library under terms of your
134 | choice, if you do both of the following:
135 |
136 |    a) Accompany the combined library with a copy of the same work based
137 | on the Library, uncombined with any other library facilities,
138 | conveyed under the terms of this License.
139 |
140 | b) Give prominent notice with the combined library that part of it
141 |     is a work based on the Library, and explaining where to find the
142 | accompanying uncombined form of the same work.
143 |
144 | 6. Revised Versions of the GNU Lesser General Public License.
145 |
146 | The Free Software Foundation may publish revised and/or new versions
147 | of the GNU Lesser General Public License from time to time. Such new
148 | versions will be similar in spirit to the present version, but may
149 | differ in detail to address new problems or concerns.
150 |
151 | Each version is given a distinguishing version number. If the
152 | Library as you received it specifies that a certain numbered version
153 | of the GNU Lesser General Public License "or any later version"
154 | applies to it, you have the option of following the terms and
155 | conditions either of that published version or of any later version
156 | published by the Free Software Foundation. If the Library as you
157 | received it does not specify a version number of the GNU Lesser
158 | General Public License, you may choose any version of the GNU Lesser
159 | General Public License ever published by the Free Software Foundation.
160 |
161 | If the Library as you received it specifies that a proxy can decide
162 | whether future versions of the GNU Lesser General Public License shall
163 | apply, that proxy's public statement of acceptance of any version is
164 | permanent authorization for you to choose that version for the
165 | Library.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include doc/images/**
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | [][draf demo paper]
6 | [](https://www.gnu.org/licenses/lgpl-3.0)
7 | [](https://github.com/DrafProject/draf)
8 | [](https://pycqa.github.io/isort/)
9 | [](https://github.com/psf/black)
10 |
11 | **d**emand **r**esponse **a**nalysis **f**ramework (`draf`) is an analysis and decision support framework for local multi-energy hubs focusing on demand response.
12 | It uses the power of ([mixed integer]) [linear programming] optimization, [`pandas`], [`Plotly`], [`Matplotlib`], [`elmada`], [`GSEE`], [`Jupyter`] and more to help users along the energy system analysis process.
13 | The software is described and demonstrated in the open-access [draf demo paper].
14 | `draf` runs on Windows, macOS, and Linux.
15 | A `draf` version supporting time series aggregation is provided on the [dev-TSA](https://github.com/DrafProject/draf/tree/dev-TSA) branch.
16 |
17 | ## Features
18 |
19 | 
20 |
21 | - **Time series analysis tools:**
22 | - `DemandAnalyzer` - Analyze energy demand profiles
23 | - `PeakLoadAnalyzer` - Analyze peak loads or run simple battery simulation
24 | - **Easily parameterizable [component templates](draf/components/component_templates.py):**
25 | - battery energy storage (`BES`), battery electric vehicle (`BEV`), combined heat and power (`CHP`), heat-only boiler (`HOB`), heat pump (`HP`), power-to-heat (`P2H`), photovoltaic (`PV`), wind turbine (`WT`), thermal energy storage (`TES`), fuel cell (`FC`), electrolyzer (`Elc`), hydrogen storage (`H2S`), production process (`PP`), product storage (`PS`), direct air capture (`DAC`), and more.
26 | - Sensible naming conventions for parameters and variables, see section [Naming conventions](#naming-conventions).
27 | - **Parameter preparation tools:**
28 | - `TimeSeriesPrepper` - For time series data
29 | - Electricity prices via [`elmada`]
30 | - Carbon emission factors via [`elmada`]
31 | - Standard load profiles from [BDEW]
32 |     - PV profiles via [`GSEE`] (in Germany, using weather data from [DWD])
33 | - [`DataBase`](draf/prep/data_base.py) - For scientific data such as cost or efficiency factors
34 | - **Scenario generation tools:** Easily build individual scenarios or sensitivity analyses
35 | - **Multi-objective mathematical optimization** with support of different model languages and solvers:
36 | - [`Pyomo`] - A free and open-source modeling language in Python that supports multiple solvers.
37 | - [`GurobiPy`] - The Python interface to Gurobi, the fastest MILP solver (see [Mittelmann benchmark]).
38 | - **Plotting tools:** Convenient plotting of heatmaps, Sankeys, tables, pareto plots, etc. using [`Plotly`], [`Matplotlib`], and [`seaborn`].
39 | - Support for metadata such as `unit`, `doc`, `src`, and `dims`
40 | - Automatic unit conversion
41 | - **Export tools:**
42 |   - `CaseStudy` objects including all parameters, metadata, and results can be saved to files.
43 | - Data can be exported to [xarray] format.
44 |
45 | ## Quick start
46 |
47 | 1. Install [miniconda] or [anaconda]
48 |
49 | 1. Open a terminal in the directory where you want to place `draf`.
50 |
51 | 1. Clone `draf`:
52 |
53 | ```sh
54 | git clone https://github.com/DrafProject/draf
55 | cd draf
56 | ```
57 |
58 | 1. Create and activate the `draf` conda environment (`conda env create` builds the environment from [environment.yml](environment.yml), installing the newest versions of the required packages together with an editable local version of `draf`):
59 |
60 | ```sh
61 | conda env create
62 | conda activate draf
63 | ```
64 |
65 | 1. (OPTIONAL) If the `draf` environment causes issues, you can install an older but more specific conda environment instead, e.g.:
66 |
67 | ```sh
68 | conda env create --file environments/environment_py39all_mac.yml --force
69 | conda activate draf39
70 | ```
71 |
72 | 1. (OPTIONAL) To use Gurobi (fast optimization), install a valid Gurobi license (it's [free for academics](https://www.gurobi.com/academia/academic-program-and-licenses)).
73 |
74 | 1. Open Jupyter notebook:
75 |
76 | ```sh
77 | jupyter notebook
78 | ```
79 |
80 | 1. Check if the imports work:
81 |
82 | ```py
83 | import draf
84 | import elmada
85 | ```
86 |
87 | 1. (OPTIONAL) To use the latest electricity prices and carbon emission factors from [`elmada`], request an [ENTSO-E API key] and register it with `elmada`:
88 |
89 | ```py
90 | # You have to run this Python code only once (it writes to a permanent file):
91 | import elmada
92 | elmada.set_api_keys(entsoe="YOUR_ENTSOE_KEY")
93 | ```
94 |
95 | 1. Start modeling. Have a look at the [examples](examples).
96 |    Start with the [`minimal`](examples/minimal.py) example if you want to write your own component.
97 |    Start with the [`PV`](examples/pv.py) example if you want to import existing components.
98 |    For more advanced modeling, look at the [draf_demo_case_studies].
99 |    Consider the [DRAF CHEAT SHEET](draf_cheat_sheet.md) and the workflow sketch below.
100 |
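The following is a hypothetical minimal sketch of the overall workflow, not taken verbatim from the examples: the `CaseStudy` and `add_scen` keyword arguments are assumptions, whereas `sc.execute_model_func()`, `cs.optimize()`, and `cs.plot()` are documented in the [CHANGELOG](CHANGELOG.md).

```py
import draf

# Hypothetical sketch -- the keyword arguments below are assumptions;
# see examples/minimal.py for the authoritative version.
cs = draf.CaseStudy(name="my_study", year=2019, freq="60min")
sc = cs.add_scen(name="base")

def model_func(sc, m, d, p, v, c):
    # Define dimensions, parameters, variables, and constraints here,
    # following the (sc, m, d, p, v, c) signature used by draf components.
    ...

sc.execute_model_func(model_func)
cs.optimize()  # solve; accepts a play_sound argument
cs.plot()      # interactive overview of the most important plots and tables
```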
101 | ## Structure
102 |
103 | A `CaseStudy` object can contain several `Scenario` instances:
104 |
105 | 
106 |
107 | ### Naming conventions
108 |
109 | All parameter and variable names must satisfy the structure `<etype>_<comp>_<desc>_<dims>`.
110 | E.g. in `P_EG_buy_T`, `P` is the entity type (here: electrical power), `EG` the component (here: electricity grid), `buy` the descriptor, and `T` the dimension (here: time).
111 | Dimensions are denoted with individual capital letters, so `<dims>` is `TE` if the entity has the dimensions `T` and `E`.
112 | See [conventions.py](draf/conventions.py) for examples of types, components, and descriptors.
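For illustration, the four name parts can be inspected with the helper functions that `draf` uses internally (see e.g. [entity_stores.py](draf/core/entity_stores.py)); a minimal sketch, where the commented outputs are inferred from the naming scheme above:

```py
from draf import helper as hp

name = "P_EG_buy_T"
hp.get_etype(name)      # "P"   - entity type (electrical power)
hp.get_component(name)  # "EG"  - component (electricity grid)
hp.get_desc(name)       # "buy" - descriptor
hp.get_dims(name)       # "T"   - dimensions ("" for scalars such as "C_TOT_")
```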
113 |
114 | ## Contributing
115 |
116 | Contributions in any form are welcome!
117 | Please contact [Markus Fleschutz].
118 |
119 | ## Citing
120 |
121 | If you use `draf` for academic work please cite the [draf demo paper]:
122 |
123 | ```bibtex
124 | @article{Fleschutz2022,
125 | author = {Markus Fleschutz and Markus Bohlayer and Marco Braun and Michael D. Murphy},
126 | title = {Demand Response Analysis Framework ({DRAF}): An Open-Source Multi-Objective Decision Support Tool for Decarbonizing Local Multi-Energy Systems},
127 | publisher = {{MDPI} {AG}},
128 |   journal = {Sustainability},
129 | year = {2022},
130 | volume = {14},
131 | number = {13},
132 | pages = {8025},
133 | url = {https://doi.org/10.3390/su14138025},
134 | doi = {10.3390/su14138025},
135 | }
136 | ```
137 |
138 | ## Publications using `draf`
139 |
140 | - Fleschutz et al. (2023): [Impact of Landing Interruptions on the Optimal Design and Operation of Green Hydrogen Hubs](https://doi.org/10.1109/ISGTEUROPE56780.2023.10408039)
141 | - Fleschutz et al. (2023): [From prosumer to flexumer: Case study on the value of flexibility in decarbonizing the multi-energy system of a manufacturing company](https://doi.org/10.1016/j.apenergy.2023.121430) (open access)
142 | - Fleschutz et al. (2022): [Industrial grid fees vs. demand response: A case study of a multi-use battery in a German chemical plant](https://doi.org/10.1109/EEM54602.2022.9921156)
143 | - Fleschutz et al. (2022): [Demand Response Analysis Framework (DRAF): An Open-Source Multi-Objective Decision Support Tool for Decarbonizing Local Multi-Energy Systems](https://doi.org/10.3390/su14138025) (open access)
144 | - Fleschutz et al. (2021): [The effect of price-based demand response on carbon emissions in European electricity markets: The importance of adequate carbon prices](https://doi.org/10.1016/j.apenergy.2021.117040) (open access)
145 | - Fleschutz et al. (2017): [Electricity Cost Reduction Potential of Industrial Processes using Real Time Pricing in a Production Planning Problem](https://www.cerc-conf.eu/wp-content/uploads/2018/06/CERC-2017-proceedings.pdf) (open access)
146 |
147 | ## License and status
148 |
149 | Copyright (c) 2017-2024 Markus Fleschutz
150 |
151 | License: [LGPL v3]
152 |
153 | The development of `draf` was initiated by [Markus Fleschutz] in 2017 and continued in a cooperative PhD between the [MeSSO Research Group] of the [Munster Technological University], Ireland, and the [Energy System Analysis Research Group] of the [Karlsruhe University of Applied Sciences], Germany.
154 | This project was supported by the MTU Rísam PhD Scholarship scheme and by the Federal Ministry for Economic Affairs and Climate Action (BMWK) on the basis of a decision by the German Bundestag.
155 |
156 | Thank you [Dr. Markus Bohlayer], [Dr. Ing. Adrian Bürger], [Andre Leippi], [Dr. Ing. Marco Braun], and [Dr. Michael D. Murphy] for your valuable feedback.
157 |
158 |
159 |
160 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
161 |
162 |
163 | [`elmada`]: https://github.com/DrafProject/elmada
164 | [`GSEE`]: https://github.com/renewables-ninja/gsee
165 | [`GurobiPy`]: https://pypi.org/project/gurobipy
166 | [`Jupyter`]: https://jupyter.org
167 | [`Matplotlib`]: https://matplotlib.org
168 | [`pandas`]: https://pandas.pydata.org
169 | [`Plotly`]: https://plotly.com
170 | [`Pyomo`]: https://github.com/Pyomo/pyomo
171 | [`seaborn`]: https://seaborn.pydata.org
172 | [anaconda]: https://www.anaconda.com/products/individual
173 | [Andre Leippi]: https://www.linkedin.com/in/andre-leippi-3187a81a7
174 | [BDEW]: https://www.bdew.de
175 | [Dr. Ing. Adrian Bürger]: https://www.linkedin.com/in/adrian-b%C3%BCrger-251205236/
176 | [Dr. Ing. Marco Braun]: https://www.h-ka.de/en/about-hka/organization-people/staff-search/person/marco-braun
177 | [Dr. Markus Bohlayer]: https://www.linkedin.com/in/markus-bohlayer
178 | [Dr. Michael D. Murphy]: https://www.linkedin.com/in/michael-d-murphy-16134118
179 | [draf demo paper]: https://doi.org/10.3390/su14138025
180 | [draf_demo_case_studies]: https://github.com/DrafProject/draf_demo_case_studies
181 | [DWD]: https://www.dwd.de
182 | [Energy System Analysis Research Group]: https://www.h-ka.de/en/ikku/energy-system-analysis
183 | [ENTSO-E API key]: https://transparency.entsoe.eu/content/static_content/Static%20content/web%20api/Guide.html#_authentication_and_authorisation
184 | [Karlsruhe University of Applied Sciences]: https://www.h-ka.de/en
185 | [LGPL v3]: https://www.gnu.org/licenses/lgpl-3.0.de.html
186 | [linear programming]: https://en.wikipedia.org/wiki/Linear_programming
187 | [Markus Fleschutz]: https://mfleschutz.github.io
188 | [MeSSO Research Group]: https://messo.cit.ie
189 | [miniconda]: https://docs.conda.io/en/latest/miniconda.html
190 | [Mittelmann benchmark]: http://plato.asu.edu/ftp/milp.html
191 | [mixed integer]: https://en.wikipedia.org/wiki/Integer_programming
192 | [Munster Technological University]: https://www.mtu.ie
193 | [xarray]: http://xarray.pydata.org/en/stable
--------------------------------------------------------------------------------
/ct.bat:
--------------------------------------------------------------------------------
1 | @REM coverage test
2 | call fmt.bat
3 | pytest --cov-report=html
--------------------------------------------------------------------------------
/ct.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | # format and test with coverage report
4 |
5 | sh fmt.sh
6 | pytest --cov-report=html
7 |
--------------------------------------------------------------------------------
/doc/images/all.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/doc/images/text+vulcano.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/draf/__init__.py:
--------------------------------------------------------------------------------
1 | """The draf module provides a toolbox to simulate and optimize energy systems including data
2 | analysis and visualization.
3 | """
4 |
5 | __title__ = "DRAF"
6 | __summary__ = "Demand Response Analysis Framework"
7 | __uri__ = "https://github.com/DrafProject/draf"
8 |
9 | __version__ = "0.3.1"
10 |
11 | __author__ = "Markus Fleschutz"
12 | __email__ = "mfleschutz@gmail.com"
13 |
14 | __license__ = "LGPLv3"
15 | __copyright__ = f"Copyright (C) 2022 {__author__}"
16 |
17 | from draf.core.case_study import CaseStudy, open_casestudy, open_latest_casestudy
18 | from draf.core.entity_stores import Collectors, Dimensions, Params, Results, Vars
19 | from draf.core.scenario import Scenario
20 | from draf.helper import address2coords
21 |
--------------------------------------------------------------------------------
/draf/abstract_component.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class Component(ABC):
5 | def dim_func(self, sc) -> None:
6 | pass
7 |
8 | @abstractmethod
9 | def param_func(self, sc) -> None:
10 | pass
11 |
12 | @abstractmethod
13 | def model_func(self, sc, m, d, p, v, c) -> None:
14 | pass
15 |
--------------------------------------------------------------------------------
/draf/components/__init__.py:
--------------------------------------------------------------------------------
1 | from .component_templates import *
2 | from .user_defined_components import *
3 |
--------------------------------------------------------------------------------
/draf/components/autocollectors.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List, Tuple
2 |
3 | from draf import Params, Vars
4 | from draf import helper as hp
5 |
6 |
7 | def _agg_cap(capEntName: str, v: Vars) -> float:
8 | """Return aggregated new capacity of an entity such as `P_HP_CAPn_N` or `E_BES_CAPn_`."""
9 | if hp.get_dims(capEntName) == "":
10 | return v.get(capEntName)
11 | else:
12 | return v.get(capEntName).sum()
13 |
14 |
15 | def _capas(v: Vars) -> Dict[str, float]:
16 | """Return new capacities per component type."""
17 | return {hp.get_component(key): _agg_cap(key, v) for key in v.filtered(desc="CAPn")}
18 |
19 |
20 | def C_TOT_inv_(p: Params, v: Vars):
21 | """Return sum product of all scalar capacities and investment costs.
22 |
23 | Example:
24 | >>> model.addConstr((v.C_TOT_inv_ == collectors.C_TOT_inv_(p, v)))
25 | """
26 | return sum([cap * p.get(f"c_{c}_inv_") for c, cap in _capas(v).items()])
27 |
28 |
29 | def C_invAnnual_(p: Params, v: Vars, r: float):
30 | """Return annualized investment costs.
31 |
32 | Example:
33 |         >>> model.addConstr((v.C_invAnnual_ == collectors.C_invAnnual_(p, v, r)))
34 |
35 | """
36 | return sum(
37 | [
38 | cap * p.get(f"c_{c}_inv_") * hp.get_annuity_factor(r=r, N=p.get(f"N_{c}_"))
39 | for c, cap in _capas(v).items()
40 | ]
41 | )
42 |
43 |
44 | def C_TOT_RMI_(p: Params, v: Vars):
45 | """Return linear expression for the repair, maintenance, and inspection per year.
46 |
47 | Example:
48 | >>> model.addConstr((v.C_TOT_RMI_ == collectors.C_TOT_RMI_(p, v)))
49 | """
50 | return sum([cap * p.get(f"c_{c}_inv_") * p.get(f"k_{c}_RMI_") for c, cap in _capas(v).items()])
51 |
--------------------------------------------------------------------------------
/draf/components/user_defined_components.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from dataclasses import dataclass
3 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
4 |
5 | import pandas as pd
6 | from gurobipy import GRB, Model, quicksum
7 |
8 | from draf import Collectors, Dimensions, Params, Results, Scenario, Vars
9 | from draf.abstract_component import Component
10 | from draf.conventions import Descs
11 | from draf.helper import conv, get_annuity_factor
12 | from draf.prep import DataBase as db
13 |
14 | logger = logging.getLogger(__name__)
15 | logger.setLevel(level=logging.WARN)
16 |
17 |
18 | class EXAMPLE(Component):
19 | """An example component"""
20 |
21 | def param_func(self, sc: Scenario):
22 | pass
23 |
24 | def model_func(self, sc: Scenario, m: Model, d: Dimensions, p: Params, v: Vars, c: Collectors):
25 | pass
26 |
--------------------------------------------------------------------------------
/draf/conventions.py:
--------------------------------------------------------------------------------
1 | """Naming conventions for documentation and testing purpose
2 |
3 | In general, entity names should adhere to the following structure:
4 |     <etype>_<comp>_<desc>_<dims>
5 | """
6 |
7 | from dataclasses import dataclass
8 | from typing import List, Optional
9 |
10 |
11 | @dataclass
12 | class Alias:
13 | en: str
14 | de: str
15 | units: Optional[List] = None
16 |
17 |
18 | # fmt: off
19 | class Etypes:
20 | # SORTING_START
21 | A = Alias(en="Area", de="Fläche", units=["m²"])
22 | C = Alias(en="Costs", de="Kosten", units=["k€/a", "k€"])
23 | c = Alias(en="Specific costs", de="Spezifische Kosten", units=["€/kW", "€/kWh", "€/kW_th", "€/kWh_el", "€/kWh_th", "€/kW_el", "€/kW_el/a", "€/kW_peak", "€/tCO2eq", "€/SU", "€/t", "€/change"])
24 | CE = Alias(en="Carbon emissions", de="Kohlenstoff-vergleichs-Emissionen", units=["kgCO2eq/a"])
25 | ce = Alias(en="Specific carbon emissions", de="Spezifische Kohlenstoff-Emissionen", units=["kgCO2eq/kWh_el", "kgCO2eq/kWh"])
26 | cop = Alias(en="Coefficient of performance", de="Leistungszahl")
27 |     dG = Alias(en="Product flow", de="Produktfluss", units=["t/h"])
28 | dH = Alias(en="Hydrogen flow", de="Wasserstofffluss", units=["kW"])
29 | dQ = Alias(en="Heat flow", de="Wärmestrom", units=["kW_th", "kW"])
30 | E = Alias(en="Electrical energy", de="Elektrische Energie", units=["kWh_el"])
31 | eta = Alias(en="Efficiency", de="Effizienz", units=["", "kW_th/kW", "kW_el/kW", "kWh_th/kWh", "t/kWh_el"])
32 | F = Alias(en="Fuel", de="Brennstoff", units=["kW", "kWh"])
33 | G = Alias(en="Product", de="Produkt", units=["t"])
34 | H = Alias(en="Hydrogen", de="Wasserstoff", units=["kWh"])
35 | k = Alias(en="A ratio", de="Ein Verhältnis", units=["", "h", "m²/kW_peak"])
36 | n = Alias(en="A natural number", de="Eine natürliche Zahl")
37 | N = Alias(en="Operation life", de="Betriebsdauer", units=["a"])
38 | P = Alias(en="Electrical power", de="Elektrische Leistung", units=["kW_el", "kW_peak", "kW_el/kW_peak"])
39 | Q = Alias(en="Thermal Energy", de="Thermische Energie", units=["kWh_th"])
40 | T = Alias(en="Temperature", de="Temperatur", units=["°C"])
41 | t = Alias(en="Time", de="Zeit", units=["seconds", "h", "a"])
42 | X = Alias(en="A real number", de="Eine reelle Zahl")
43 | y = Alias(en="Binary indicator", de="Binärindikator")
44 | z = Alias(en="Binary allowance indicator", de="Binärindikator")
45 | # SORTING_END
46 |
47 |
48 | class Descs:
49 | # SORTING_START
50 | amb = Alias(en="ambient", de="Umgebungs-")
51 | CAPn = Alias(en="New Capacity", de="Neue Kapazität")
52 | CAPx = Alias(en="Existing Capacity", de="Bestehende Kapazität")
53 | ch = Alias(en="Charging", de="Lade-")
54 | dis = Alias(en="Discharging", de="Entlade-")
55 | FI = Alias(en="Feed-in", de="Einspeisungsanteil")
56 | inv = Alias(en="Investment", de="Investitionen")
57 | invAnn = Alias(en="Annualized investment", de="Annualisierte Investitionen")
58 | MEF = Alias(en="Marginal Power Plant Emission Factors", de="Marginale CO2-Emissionsfaktoren des Stromsystems")
59 | OC = Alias(en="Own consumption", de="Eigenerzeugungsanteil")
60 | RMI = Alias(en="Repair, maintenance, and inspection per year and investment cost", de="Reparatur, Wartung und Inspektion pro Jahr und Investitionskosten")
61 | RTP = Alias(en="Real-time-prices", de="Dynamische Strompreise")
62 | TOU = Alias(en="Time-of-use", de="Zeitabhängige Strompreise")
63 | XEF = Alias(en="Average Electricity Mix Emission Factors", de="Durchschnittliche CO2-Emissionsfaktoren des Stromsystems")
64 | # SORTING_END
65 |
66 |
67 | class Components:
68 | # SORTING_START
69 | BES = Alias(en="Battery energy storage", de="Batterie-Energiespeicher")
70 | BEV = Alias(en="Battery electric vehicle", de="Batterie-Elektrofahrzeug")
71 | cDem = Alias(en="Cooling demand", de="Kältebedarf")
72 | CHP = Alias(en="Combined heat and power", de="Kraft-Wärme-Kopplung (BHKW)")
73 | CM = Alias(en="Cooling machine", de="Kältemaschine")
74 | DAC = Alias(en="Direct air capture", de="CO2-Abscheidung")
75 | Dem = Alias(en="Demands", de="Bedarf")
76 | eDem = Alias(en="Electricity demand", de="Strombedarf")
77 | EG = Alias(en="Electricity grid", de="Stromnetz")
78 | Elc = Alias(en="Electrolyzer", de="Elektrolyseur")
79 | FC = Alias(en="Fuel cell", de="Brennstoffzelle")
80 | Fuel = Alias(en="Fuels", de="Brennstoffe")
81 | H2H = Alias(en="Heat downgrading", de="Wärmeabstufung")
82 | H2S = Alias(en="Hydrogen storage", de="Wasserstoffspeicher")
83 | hDem = Alias(en="Heat demand", de="Heizbedarf")
84 | HOB = Alias(en="Heat-only boiler", de="Heizkessel")
85 | HP = Alias(en="Heat pump", de="Wärmepumpe")
86 | HSB = Alias(en="High-speed steam boiler", de="Schnelldampferzeuger")
87 | P2H = Alias(en="Power to heat", de="Strom zu Wärme")
88 | PIT = Alias(en="Powered industrial truck", de="Flurförderfahrzeug")
89 | PP = Alias(en="Production process", de="Produktionsprozess")
90 | PS = Alias(en="Product storage", de="Produktlager")
91 |     PV = Alias(en="Photovoltaic system", de="Fotovoltaikanlage")
92 | SB = Alias(en="Steam boiler", de="Dampfkessel")
93 | SN = Alias(en="Steam network", de="Dampfnetz")
94 |     TES = Alias(en="Thermal energy storage", de="Thermischer Speicher")
95 | TOT = Alias(en="Total", de="Total")
96 | WH = Alias(en="Waste heat", de="Abfallwärme")
97 | WT = Alias(en="Wind turbine", de="Windkraftanlage")
98 | # SORTING_END
99 |
100 | # fmt: on
101 |
--------------------------------------------------------------------------------
/draf/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DrafProject/draf/949c3d22836d4df01ec582c37034ef5bfc920f23/draf/core/__init__.py
--------------------------------------------------------------------------------
/draf/core/datetime_handler.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from abc import ABC
3 | from typing import List, Optional, Tuple, Union
4 |
5 | import pandas as pd
6 |
7 | from draf import helper as hp
8 |
9 | logger = logging.getLogger(__name__)
10 | logger.setLevel(level=logging.WARN)
11 |
12 |
13 | class DateTimeHandler(ABC):
14 | @property
15 | def step_width(self) -> float:
16 | """Returns the step width of the current datetimeindex.
17 | e.g. 0.25 for a frequency of 15min."""
18 | return hp.get_step_width(self.freq)
19 |
20 | @property
21 | def dt_info(self) -> str:
22 | """Get an info string of the chosen time horizon of the case study."""
23 | t1_str = f"{self.dtindex_custom[0].day_name()}, {self.dtindex_custom[0]}"
24 | t2_str = f"{self.dtindex_custom[-1].day_name()}, {self.dtindex_custom[-1]}"
25 | return (
26 | f"t1 = {self._t1:<5} ({t1_str}),\n"
27 | f"t2 = {self._t2:<5} ({t2_str})\n"
28 | f"Length = {self.dtindex_custom.size}"
29 | )
30 |
31 | @property
32 | def steps_per_day(self):
33 | steps_per_hour = 60 / hp.int_from_freq(self.freq)
34 | return int(steps_per_hour * 24)
35 |
36 | @property
37 | def freq_unit(self):
38 | if self.freq == "15min":
39 | return "1/4 h"
40 | elif self.freq == "30min":
41 | return "1/2 h"
42 | elif self.freq == "60min":
43 | return "h"
44 |
45 | def match_dtindex(
46 | self, data: Union[pd.DataFrame, pd.Series], resample: bool = False
47 | ) -> Union[pd.DataFrame, pd.Series]:
48 | if resample:
49 | data = self.resample(data)
50 | return data[self._t1 : self._t2 + 1]
51 |
52 | def resample(self, data: Union[pd.DataFrame, pd.Series]) -> Union[pd.DataFrame, pd.Series]:
53 | return hp.resample(
54 | data, year=self.year, start_freq=hp.estimate_freq(data), target_freq=self.freq
55 | )
56 |
57 | def _set_dtindex(self, year: int, freq: str) -> None:
58 | assert year in range(1980, 2100)
59 | self.year = year
60 | self.freq = freq
61 | self.dtindex = hp.make_datetimeindex(year=year, freq=freq)
62 | self.dtindex_custom = self.dtindex
63 | self._t1 = 0
64 | self._t2 = self.dtindex.size - 1 # =8759 for a normal year
65 |
66 | def _get_int_loc_from_dtstring(self, s: str) -> int:
67 | return self.dtindex.get_loc(f"{self.year}-{s}")
68 |
69 | def _get_first_int_loc_from_dtstring(self, s: str) -> int:
70 | x = self._get_int_loc_from_dtstring(s)
71 | try:
72 | return x.start
73 | except AttributeError:
74 | return x
75 |
76 | def _get_last_int_loc_from_dtstring(self, s: str) -> int:
77 | x = self._get_int_loc_from_dtstring(s)
78 | try:
79 | return x.stop
80 | except AttributeError:
81 | return x
82 |
83 | def _get_integer_locations(self, start, steps, end) -> Tuple[int, int]:
84 | t1 = self._get_first_int_loc_from_dtstring(start) if isinstance(start, str) else start
85 | if steps is not None and end is None:
86 | assert t1 + steps < self.dtindex.size, "Too many steps are given."
87 | t2 = t1 + steps - 1
88 | elif steps is None and end is not None:
89 | t2 = self._get_last_int_loc_from_dtstring(end) if isinstance(end, str) else end
90 | elif steps is None and end is None:
91 | t2 = self.dtindex.size - 1
92 | else:
93 | raise ValueError("One of steps or end must be given.")
94 | return t1, t2
95 |
96 |     def timeslice(self, start: Optional[str], stop: Optional[str]) -> slice:
97 | """Get timeslice from start and stop strings.
98 |
99 | Example slicing from 17th to 26th of August
100 | >>> ts = cs.timeslice("8-17", "8-26")
101 | >>> sc.params.c_EG_T[ts].plot()
102 | """
103 | start_int = None if start is None else self._get_first_int_loc_from_dtstring(start)
104 | stop_int = None if stop is None else self._get_last_int_loc_from_dtstring(stop)
105 | return slice(start_int, stop_int)
106 |
107 | def dated(
108 | self, data: Union[pd.Series, pd.DataFrame], activated=True
109 | ) -> Union[pd.Series, pd.DataFrame]:
110 | """Add datetime index to a data entity.
111 |
112 | The frequency and year are taken from the CaseStudy or the Scenario object.
113 |
114 | Args:
115 | data: A pandas data entity.
116 | activated: If False, the data is returned without modification.
117 |
118 | """
119 | if activated:
120 | assert isinstance(
121 | data, (pd.Series, pd.DataFrame)
122 | ), f"No data given, but type {type(data)}"
123 | data = data.copy()
124 | dtindex_to_use = self.dtindex[data.index.min() : data.index.max() + 1]
125 | data.index = dtindex_to_use
126 | return data
127 |
--------------------------------------------------------------------------------
/draf/core/draf_base_class.py:
--------------------------------------------------------------------------------
1 | import textwrap
2 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
3 |
4 |
5 | class DrafBaseClass:
6 | """This class mainly provides functions."""
7 |
8 | def _build_repr(self, layout: str, which_metadata: Iterable) -> str:
9 |
10 | preface = f"<{self.__class__.__name__} object>"
11 |
12 | self_mapping = {k: k.capitalize() for k in which_metadata}
13 | header = layout.format(bullet=" ", name="Name", **self_mapping)
14 | header += f"{'='*100}\n"
15 | this_list = []
16 |
17 | if hasattr(self, "_meta"):
18 | for ent_name, meta_obj in self._meta.items():
19 | metas = {meta_type: meta_obj.get(meta_type, None) for meta_type in which_metadata}
20 | try:
21 | metas["doc"] = textwrap.shorten(metas["doc"], width=68)
22 | except KeyError:
23 | pass
24 | try:
25 | appender = layout.format(bullet=" ⤷ ", name=ent_name, **metas)
26 | except TypeError:
27 | appender = f" ⤷ {ent_name} ====No Metadata found===="
28 | this_list.append(appender)
29 |
30 | else:
31 | for k, v in self.get_all().items():
32 | metas = {meta_type: getattr(v, meta_type) for meta_type in which_metadata}
33 | this_list.append(layout.format(bullet=" ⤷ ", name=k, **metas))
34 |
35 | if this_list:
36 | data = "".join(this_list)
37 | return f"{preface} preview:\n{header}{data}"
38 | else:
39 | return f"{preface} (empty)"
40 |
41 | def get_all(self) -> Dict:
42 | """Returns a Dict with all public attributes from this container."""
43 | return {k: v for k, v in self.__dict__.items() if not (k.startswith("_"))}
44 |
45 | def delete_all(self) -> None:
46 | """Deletes all objects in the container without deleting the meta data."""
47 | for k in self.get_all().keys():
48 | delattr(self, k)
49 |
50 | def __iter__(self):
51 | return iter(self.get_all().values())
52 |
53 | def __len__(self):
54 | return len(self.get_all())
55 |
--------------------------------------------------------------------------------
/draf/core/entity_stores.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import textwrap
3 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
4 |
5 | import pandas as pd
6 | from tabulate import tabulate
7 |
8 | from draf import helper as hp
9 | from draf.core.draf_base_class import DrafBaseClass
10 |
11 | logger = logging.getLogger(__name__)
12 | logger.setLevel(level=logging.WARN)
13 |
14 |
15 | def make_table(l: List[Tuple], lead_text: str = "", table_prefix=" "):
16 | headers, col_data = zip(*l)
17 | rows = list(zip(*col_data))
18 | return lead_text + textwrap.indent(
19 | text=tabulate(rows, headers=headers, floatfmt=".3n"), prefix=table_prefix
20 | )
21 |
22 |
23 | class Collectors(DrafBaseClass):
24 | """Stores collectors."""
25 |
26 | def __init__(self):
27 | self._meta: Dict[str, Dict] = dict()
28 |
29 | def __repr__(self):
30 | data = self.get_all()
31 | meta = self._meta
32 | l = [
33 | ("Name", list(data.keys())),
34 | ("N", list(map(len, data.values()))),
35 | ("Content", [textwrap.shorten(", ".join(v.keys()), 50) for v in data.values()]),
36 | ("Unit", [meta[name]["unit"] for name in data.keys()]),
37 | ("Doc", [meta[name]["doc"] for name in data.keys()]),
38 | ]
39 |         return make_table(l, lead_text="<Collectors> preview:\n")
40 |
41 |
42 | class Scenarios(DrafBaseClass):
43 | """Stores scenarios."""
44 |
45 | def __repr__(self):
46 | all = self.get_all()
47 | l = [
48 | ("Id", list(all.keys())),
49 | ("Name", [sc.name for sc in all.values()]),
50 | ("Doc", [sc.doc for sc in all.values()]),
51 | ]
52 |         return make_table(l, lead_text="<Scenarios> preview:\n")
53 |
54 | def rename(self, old_scen_id: str, new_scen_id: str) -> None:
55 | sc = self.__dict__.pop(old_scen_id)
56 | sc.id = new_scen_id
57 | self.__dict__[new_scen_id] = sc
58 |
59 | def get(self, scen_id) -> "Scenario":
60 | return getattr(self, scen_id)
61 |
62 | def get_by_name(self, name: str) -> "Scenario":
63 | for sc in self.get_all().values():
64 | if sc.name == name:
65 | return sc
66 | else:
67 | return None
68 |
69 |
70 | class Dimensions(DrafBaseClass):
71 | """Stores dimensions."""
72 |
73 | def __init__(self):
74 | self._meta: Dict[str, Dict] = dict()
75 |
76 | def __repr__(self):
77 | data = self.get_all()
78 | meta = self._meta
79 | l = [
80 | ("Name", list(data.keys())),
81 | ("Doc", [meta[name]["doc"] for name in data.keys()]),
82 | ("Unit", [meta[name]["unit"] for name in data.keys()]),
83 | ]
84 |         return make_table(l, lead_text="<Dimensions> preview:\n")
85 |
86 |
87 | class EntityStore(DrafBaseClass):
88 | def __repr__(self):
89 | layout = "{bullet}{name:<20} {etype:<4} {comp:<5} {dims:<5} {unit:<14} {doc}\n"
90 | return self._build_repr(layout, which_metadata=["unit", "etype", "comp", "doc", "dims"])
91 |
92 | def __init__(self):
93 | self._changed_since_last_dic_export: bool = False
94 |
95 | @property
96 | def _empty_dims_dic(self) -> Dict[str, List[str]]:
97 | """Returns an empty dimension dictionary of the shape
98 |         {<dims>: [<ent_name>, ...]}.
99 | """
100 | ents_list = self.get_all().keys()
101 | dims_dic = {ent: hp.get_dims(ent) for ent in ents_list}
102 | dims_set = set(dims_dic.values())
103 | _empty_dims_dic = {dims: [] for dims in dims_set}
104 | for ent, dims in dims_dic.items():
105 | _empty_dims_dic[dims].append(ent)
106 | return _empty_dims_dic
107 |
108 | def _to_dims_dic(self, unstack_to_first_dim: bool = False) -> Dict[str, Union[Dict, pd.Series]]:
109 | """Returns a dimension dictionary where nonscalar params are stored in dataframes per
110 | dimension.
111 |
112 | Args:
113 | unstack_to_first_dim: If MultiIndex are unstacked resulting in Dataframes with a normal
114 | Index.
115 |
116 | """
117 | dims_dic = self._empty_dims_dic.copy()
118 |
119 | for dim in dims_dic:
120 |
121 | if dim == "":
122 | dims_dic[dim] = {ent: self.get(ent) for ent in dims_dic[dim]}
123 |
124 | else:
125 | # use the index of the first entity of given dimension
126 | first_el_of_empty_dims_dic: str = dims_dic[dim][0]
127 | multi_idx = self.get(first_el_of_empty_dims_dic).index
128 | multi_idx.name = dim
129 | dims_dic[dim] = pd.DataFrame(index=multi_idx)
130 |
131 | for ent in self._empty_dims_dic[dim]:
132 | dims_dic[dim][ent] = getattr(self, ent)
133 |
134 | if unstack_to_first_dim:
135 | dims_dic[dim] = dims_dic[dim].unstack(level=list(range(1, len(dim))))
136 |
137 | self._changed_since_last_dic_export = False
138 | return dims_dic
139 |
140 | def filtered(
141 | self,
142 | etype: Optional[str] = None,
143 | comp: Optional[str] = None,
144 | desc: Optional[str] = None,
145 | dims: Optional[str] = None,
146 | func: Optional[Callable] = None,
147 | ) -> Dict:
148 | if func is None:
149 | func = lambda n: True
150 | return {
151 | k: v
152 | for k, v in self.get_all().items()
153 | if (
154 | (hp.get_etype(k) == etype or etype is None)
155 | and (hp.get_component(k) == comp or comp is None)
156 | and (hp.get_desc(k) == desc or desc is None)
157 | and (hp.get_dims(k) == dims or dims is None)
158 | and func(k)
159 | )
160 | }
161 |
162 | def get(self, name: str):
163 | """Returns entity"""
164 | return getattr(self, name)
165 |
166 |
167 | class Params(EntityStore):
168 | """Stores parameters in a convenient way together with its functions."""
169 |
170 | def __init__(self):
171 | super().__init__()
172 | self._meta: Dict[str, Dict] = {}
173 |
174 | def __repr__(self):
175 | data = self.get_all()
176 | meta = self._meta
177 | l = [
178 | ("Name", list(data.keys())),
179 | ("Dims", [hp.get_dims(k) for k in data.keys()]),
180 | ("(⌀) Value", [hp.get_mean(i) for i in data.values()]),
181 | ("Unit", [meta[k]["unit"] for k in data.keys()]),
182 | ("Doc", [textwrap.fill(meta[k]["doc"], width=40) for k in data.keys()]),
183 | ("Source", [textwrap.shorten(meta[k]["src"], width=17) for k in data.keys()]),
184 | ]
185 |         return make_table(l, lead_text=f"<{self.__class__.__name__}> preview:\n")
186 |
187 | def _set_meta(self, ent_name: str, meta_type: str, value: str) -> None:
188 | self._meta.setdefault(ent_name, {})[meta_type] = value
189 |
190 | def _convert_unit(
191 | self,
192 | ent_name: str,
193 | return_unit: str,
194 | conversion_factor: float = None,
195 | conversion_func: Callable = None,
196 | ):
197 | par = self.get(ent_name)
198 | if conversion_factor is not None:
199 | par *= conversion_factor
200 | if conversion_func is not None:
201 | par = conversion_func(par)
202 | self._set_meta(ent_name, "unit", return_unit)
203 |
204 |
205 | class Vars(EntityStore):
206 | """Stores optimization variables."""
207 |
208 | def __init__(self):
209 | super().__init__()
210 | self._meta: Dict[str, Dict] = {}
211 |
212 | def __getstate__(self):
213 | return dict(_meta=self._meta)
214 |
215 |
216 | class Results(EntityStore):
217 | """Stores results in a easy accessible way together with its functions."""
218 |
219 | def __init__(self, sc: "Scenario"):
220 | super().__init__()
221 | self._get_results_from_variables(sc=sc)
222 |
223 | def __repr__(self):
224 | data = self.get_all()
225 | meta = self._meta
226 | l = [
227 | ("Name", list(data.keys())),
228 | ("Dims", [hp.get_dims(k) for k in data.keys()]),
229 | ("(⌀) Value", [hp.get_mean(i) for i in data.values()]),
230 | ("Unit", [meta[k]["unit"] for k in data.keys()]),
231 | ("Doc", [textwrap.fill(meta[k]["doc"], width=50) for k in data.keys()]),
232 | ]
233 |         return make_table(l, lead_text="<Results> preview:\n")
234 |
235 | # TODO: Move _get_results_from_variables, _from_gurobipy, _from_pyomo to Scenario for
236 | # better type hinting
237 |
238 | def _get_results_from_variables(self, sc: "Scenario") -> None:
239 | if sc.mdl_language == "gp":
240 | self._from_gurobipy(sc)
241 | else:
242 | self._from_pyomo(sc)
243 |
244 | self._changed_since_last_dic_export = True
245 | self._meta = sc.vars._meta
246 |
247 | def _from_gurobipy(self, sc: "Scenario") -> None:
248 | for name, var in sc.vars.get_all().items():
249 | dims = hp.get_dims(name)
250 | if dims == "":
251 | data = var.x
252 | else:
253 | dic = sc.mdl.getAttr("x", var)
254 | data = pd.Series(dic, name=name)
255 | data.index = data.index.set_names(list(dims))
256 |
257 | setattr(self, name, data)
258 |
259 | def _from_pyomo(self, sc: "Scenario") -> None:
260 | for name, var in sc.vars.get_all().items():
261 | dims = hp.get_dims(name)
262 | if dims == "":
263 | data = var.value
264 | else:
265 | dic = {index: var[index].value for index in var}
266 | data = pd.Series(dic, name=name)
267 | data.index = data.index.set_names(list(dims))
268 |
269 | setattr(self, name, data)
270 |
271 | def _get_meta(self, ent_name: str, meta_type: str) -> Any:
272 | try:
273 | return self._meta[ent_name][meta_type]
274 | except KeyError:
275 | return None
276 |
277 | def _set_meta(self, ent_name: str, meta_type: str, value: str) -> None:
278 | self._meta.setdefault(ent_name, {})[meta_type] = value
279 |
280 | def _copy_meta(self, source_ent: str, target_ent: str, which_metas: List = None) -> None:
281 | if which_metas is None:
282 | which_metas = ["doc", "unit", "dims"]
283 |
284 | for meta_type in which_metas:
285 | self._set_meta(
286 | ent_name=target_ent,
287 | meta_type=meta_type,
288 | value=self._get_meta(ent_name=source_ent, meta_type=meta_type),
289 | )
290 |
291 | def make_pos_ent(self, source: str, target_neg: str = None, doc_target: str = None) -> None:
292 | """Makes entities positive.
293 |
294 | If a target-entity-name is given, the negative values are stored as positive values in a
295 | new entity.
296 |
297 | Args:
298 | source: Source entity.
299 | target_neg: Negative target entity.
300 | doc_target: The doc string of the target.
301 | """
302 | try:
303 | source_ser = self.get(source)
304 |
305 | if target_neg is None:
306 | if source_ser.min() < -0.1:
307 | logger.warning(
308 | f"Significant negative values (between {source_ser.min():n} and 0) of the"
309 | f" entity '{source}' were clipped"
310 | )
311 |
312 | else:
313 | ser = -source_ser.where(cond=source_ser < 0, other=0)
314 | ser.name = target_neg
315 | setattr(self, target_neg, ser)
316 |
317 | which_metas = ["etype", "comp", "unit", "dims"]
318 | self._copy_meta(source_ent=source, target_ent=target_neg, which_metas=which_metas)
319 |
320 | if isinstance(doc_target, str):
321 | self._set_meta(target_neg, meta_type="doc", value=doc_target)
322 |
323 | source_ser.where(cond=source_ser > 0, other=0, inplace=True)
324 |
325 | except AttributeError as e:
326 | logger.info(f"AttributeError: {e}")
327 |
328 | def set_threshold(self, ent_name: str, threshold: float = 1e-10) -> None:
329 | """Set results to zero if value range is between zero an a given threshold."""
330 | ser = self.get(ent_name)
331 | setattr(self, ent_name, ser.where(cond=ser > threshold, other=0.0))
332 | self._changed_since_last_dic_export = True
333 |
--------------------------------------------------------------------------------
/draf/core/mappings.py:
--------------------------------------------------------------------------------
1 | GRB_OPT_STATUS = {
2 | 1: "LOADED",
3 | 2: "OPTIMAL",
4 | 3: "INFEASIBLE",
5 | 4: "INF_OR_UNBD",
6 | 5: "UNBOUNDED",
7 | 6: "CUTOFF",
8 | 7: "ITERATION_LIMIT",
9 | 8: "NODE_LIMIT",
10 | 9: "TIME_LIMIT",
11 | 10: "SOLUTION_LIMIT",
12 | 11: "INTERRUPTED",
13 | 12: "NUMERIC",
14 | 13: "SUBOPTIMAL",
15 | 14: "INPROGRESS",
16 | 15: "USER_OBJ_LIMIT",
17 | }
18 |
19 | VAR_PAR = {
20 | "p": "par_dic",
21 | "par": "par_dic",
22 | "params": "par_dic",
23 | "v": "res_dic",
24 | "var": "res_dic",
25 | "vars": "res_dic",
26 | "variables": "res_dic",
27 | "r": "res_dic",
28 | "res": "res_dic",
29 | "results": "res_dic",
30 | }
31 |
--------------------------------------------------------------------------------
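Note: `GRB_OPT_STATUS` translates gurobipy's numeric `Model.Status` into a readable name. A small sketch (the status code is hard-coded here instead of coming from a solved model):

```python
from draf.core.mappings import GRB_OPT_STATUS

status_code = 2  # what gurobipy's Model.Status returns after a successful solve
status_name = GRB_OPT_STATUS.get(status_code, "UNKNOWN")
assert status_name == "OPTIMAL"
```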
/draf/core/time_series_prepper.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from typing import Any, List, Optional, Tuple
3 |
4 | import holidays
5 | import numpy as np
6 | import pandas as pd
7 | from elmada import get_emissions, get_prices
8 |
9 | import draf.helper as hp
10 | from draf import prep
11 | from draf.prep.demand import SLP_PROFILES
12 |
13 | logger = logging.getLogger(__name__)
14 | logger.setLevel(level=logging.WARN)
15 |
16 |
17 | class TimeSeriesPrepper:
18 | """This class holds convenience functions for paremeter preparation and hands the scenario
19 | object on them.
20 | """
21 |
22 | from draf.prep import param_funcs as funcs
23 |
24 | def __init__(self, sc):
25 | self.sc = sc
26 |
27 | def __getstate__(self):
28 | """For serialization with pickle."""
29 | return None
30 |
31 | def k__PartYearComp_(self, name: str = "k__PartYearComp_") -> float:
32 | """Add cost weighting factor to compensate part year analysis."""
33 | sc = self.sc
34 | return self.sc.param(
35 | name=name,
36 | unit="",
37 |             doc="Weighting factor to compensate for part-year analysis",
38 | data=len(sc.dtindex) / len(sc.dtindex_custom),
39 | )
40 |
41 | def k__dT_(self, name: str = "k__dT_"):
42 |         return self.sc.param(name=name, unit="h", doc="Time step width", data=self.sc.step_width)
43 |
44 | @hp.copy_doc(get_emissions, start="Args:")
45 | def ce_EG_T(self, name: str = "ce_EG_T", method: str = "XEF_PP", **kwargs) -> pd.Series:
46 | """Add dynamic carbon emission factors."""
47 | sc = self.sc
48 | return sc.param(
49 | name=name,
50 | unit="kgCO2eq/kWh_el",
51 | doc=f"Carbon emission factors (via elmada using year, freq, country, and CEF-method)",
52 | data=sc.match_dtindex(
53 | get_emissions(
54 | year=sc.year, freq=sc.freq, country=sc.country, method=method, **kwargs
55 | )
56 | * hp.conv("g", "kg", 1e-3)
57 | ),
58 | )
59 |
60 | def c_EG_RTP_T(self, name: str = "c_EG_RTP_T", method: str = "hist_EP", **kwargs) -> pd.Series:
61 | """Add Real-time-prices-tariffs."""
62 | sc = self.sc
63 | return self.sc.param(
64 | name=name,
65 | unit="€/kWh_el",
66 | doc=f"Day-ahead-market-prices (via elmada using year, freq, and country)",
67 | data=sc.match_dtindex(
68 | get_prices(year=sc.year, freq=sc.freq, method=method, country=sc.country, **kwargs)
69 | / 1e3
70 | ),
71 | )
72 |
73 | def c_EG_PP_T(self, name: str = "c_EG_PP_T", method: str = "PP") -> pd.Series:
74 | """Add marginal costs from PP-method. Only for Germany."""
75 | sc = self.sc
76 | return sc.param(
77 | name=name,
78 | unit="€/kWh_el",
79 | doc=f"Marginal Costs {sc.year}, {sc.freq}, {sc.country}",
80 | data=sc.match_dtindex(
81 | get_prices(year=sc.year, freq=sc.freq, country=sc.country, method=method)
82 | ),
83 | )
84 |
85 | def c_EG_PWL_T(self, name: str = "c_EG_PWL_T", method: str = "PWL", **kwargs) -> pd.Series:
86 | """Add marginal costs from PWL-method."""
87 | sc = self.sc
88 | return sc.param(
89 | name=name,
90 | unit="€/kWh_el",
91 | doc=f"Marginal Costs {sc.year}, {sc.freq}, {sc.country}",
92 | data=sc.match_dtindex(
93 | get_prices(year=sc.year, freq=sc.freq, country=sc.country, method=method, **kwargs)
94 | ),
95 | )
96 |
97 | def c_EG_TOU_T(
98 | self,
99 | name: str = "c_EG_TOU_T",
100 | prices: Optional[Tuple[float, float]] = None,
101 | prov: str = "BW",
102 | ) -> pd.Series:
103 | """A Time-of-Use tariff with two prices.
104 |         If no prices are given, the corresponding RTP tariff is used as the basis.
105 | """
106 | holis = getattr(holidays, self.sc.country)(subdiv=prov)
107 |         isLowTime_T = np.array(
108 |             [
109 |                 (x.dayofweek >= 5)
110 |                 or not (8 <= x.hour < 20)
111 |                 or (x.date() in holis)
112 |                 for x in self.sc.dtindex_custom
113 |             ]
114 |         )
115 | isHighTime_T = np.invert(isLowTime_T)
116 |
117 | if prices is None:
118 | try:
119 | low_price = self.sc.params.c_EG_RTP_T[isLowTime_T].mean()
120 | high_price = self.sc.params.c_EG_RTP_T[isHighTime_T].mean()
121 | except AttributeError as err:
122 | logger.error(
123 | "Mean price for TOU tariff cannot be inferred"
124 | f" from RTP, since there is no RTP. {err}"
125 | )
126 |
127 | else:
128 | if isinstance(prices, Tuple) and len(prices) == 2:
129 | low_price = min(prices)
130 | high_price = max(prices)
131 |
132 | return self.sc.param(
133 | name=name,
134 | unit="€/kWh_el",
135 | doc=f"Time-Of-Use-tariff (calculated from Real-time-price)",
136 | data=low_price * isLowTime_T + high_price * isHighTime_T,
137 | )
138 |
139 | def c_EG_FLAT_T(self, price: Optional[float] = None, name: str = "c_EG_FLAT_T"):
140 | """Add a flat electricity tariff.
141 |
142 |         If no price is given, the corresponding RTP tariff is used as the basis.
143 | """
144 | if price is None:
145 | try:
146 | price = self.sc.params.c_EG_RTP_T.mean()
147 | except AttributeError as err:
148 | logger.error(
149 | "Mean price for FLAT tariff cannot be inferred"
150 | f" from RTP, since there is no RTP. {err}"
151 | )
152 |
153 | unit = "€/kWh_el"
154 | return self.sc.param(
155 | name=name,
156 | unit=unit,
157 | doc=f"Flat-electricity tariff (calculated from Real-time-price)",
158 | fill=price,
159 | )
160 |
161 | @hp.copy_doc(prep.get_el_SLP)
162 | def P_eDem_T(
163 | self,
164 | name: str = "P_eDem_T",
165 | profile: str = "G1",
166 | peak_load: Optional[float] = None,
167 | annual_energy: Optional[float] = None,
168 | offset: float = 0,
169 | province: Optional[str] = None,
170 | ) -> pd.Series:
171 | """Add an electricity demand"""
172 | sc = self.sc
173 |
174 | return sc.param(
175 | name=name,
176 | unit="kW_el",
177 | doc=f"Electricity demand from standard load profile {profile}: {SLP_PROFILES[profile]}",
178 | data=sc.match_dtindex(
179 | prep.get_el_SLP(
180 | year=sc.year,
181 | freq=sc.freq,
182 | profile=profile,
183 | peak_load=peak_load,
184 | annual_energy=annual_energy,
185 | offset=offset,
186 | country=sc.country,
187 | province=province,
188 | )
189 | ),
190 | )
191 |
192 | @hp.copy_doc(prep.get_heating_demand)
193 | def dQ_hDem_T(
194 | self,
195 | name: str = "dQ_hDem_T",
196 | annual_energy: float = 1e6,
197 | target_temp: float = 22.0,
198 | threshold_temp: float = 15.0,
199 | ) -> pd.Series:
200 | """Create and add a heating demand time series using weather data nearby."""
201 |
202 | sc = self.sc
203 |
204 | ser_amb_temp = prep.get_air_temp(coords=sc.coords, year=sc.year)
205 |
206 | return sc.param(
207 | name=name,
208 | unit="kW_th",
209 | doc=f"Heating demand derived from ambient temperature near coords.",
210 | data=sc.match_dtindex(
211 | prep.get_heating_demand(
212 | year=sc.year,
213 | freq=sc.freq,
214 | ser_amb_temp=ser_amb_temp,
215 | annual_energy=annual_energy,
216 | target_temp=target_temp,
217 | threshold_temp=threshold_temp,
218 | )
219 | ),
220 | )
221 |
222 | @hp.copy_doc(prep.get_cooling_demand)
223 | def dQ_cDem_T(
224 | self,
225 | name: str = "dQ_cDem_T",
226 | annual_energy: float = 1e6,
227 | target_temp: float = 22.0,
228 | threshold_temp: float = 22.0,
229 | ) -> pd.Series:
230 | """Create and add a cooling demand time series using weather data nearby."""
231 | sc = self.sc
232 |
233 | return sc.param(
234 | name=name,
235 | unit="kW_th",
236 | doc=f"Cooling demand derived from ambient temperature near coords.",
237 | data=sc.match_dtindex(
238 | prep.get_cooling_demand(
239 | year=sc.year,
240 | freq=sc.freq,
241 | coords=sc.coords,
242 | annual_energy=annual_energy,
243 | target_temp=target_temp,
244 | threshold_temp=threshold_temp,
245 | )
246 | ),
247 | )
248 |
249 | def P_PV_profile_T(
250 | self,
251 | name: str = "P_PV_profile_T",
252 | use_coords: bool = True,
253 | overwrite_coords: Optional[Tuple] = None,
254 | **gsee_kw,
255 | ) -> pd.Series:
256 | """Add a photovoltaic profile.
257 |
258 | Args:
259 |             use_coords: For Germany only: Whether the `coords` of the CaseStudy are used to
260 | calculate the PV profile via `gsee`. In that case, the weather data from the
261 | nearest available weather station is used.
262 | overwrite_coords: Coordinates that are taken instead of the case study coordinates.
263 | gsee_kw: Keywords used in the `gsee.pv.run_model` (https://gsee.readthedocs.io)
264 | function.
265 | """
266 | sc = self.sc
267 |
268 | if use_coords:
269 | if sc.coords is not None:
270 | coords = sc.coords
271 | if overwrite_coords is not None:
272 | coords = overwrite_coords
273 | logger.info(f"{coords} coordinates used for PV calculation.")
274 | assert coords is not None, "No coordinates given, but `use_coords=True`."
275 | ser = prep.get_pv_power(year=sc.year, coords=coords, **gsee_kw).reset_index(drop=True)
276 | else:
277 | logger.warning(
278 | "No coords given or usage not wanted. Year-independant backup PV profile is used."
279 | )
280 | ser = prep.get_backup_PV_profile()
281 | import calendar
282 |
283 | if calendar.isleap(self.sc.year):
284 | ser = pd.Series(np.concatenate([ser.values, ser[-24:].values]))
285 |
286 | return sc.param(
287 | name=name,
288 | unit="kW_el/kW_peak",
289 | doc="Produced PV-power for 1 kW_peak",
290 | data=sc.match_dtindex(ser, resample=True),
291 | )
292 |
293 | def c_EG_addon_(
294 | self,
295 | name: str = "c_EG_addon_",
296 | AbLa_surcharge=0.00003,
297 | Concession_fee=0.0011,
298 | EEG_surcharge=0, # (German Renewable Energies Act levy) no longer due since 2022-07-01
299 | Electricity_tax=0.01537,
300 | KWK_surcharge=0.0038,
301 | Network_cost=0, # paid per kW peak
302 | NEV_surcharge=0.0027,
303 | Offshore_surcharge=0.00419,
304 | Sales=0.01537,
305 | ) -> float:
306 | """Add electricity price components other than wholesale prices.
307 | Defaults for German Industry [1].
308 |
309 | [1]: https://www.bdew.de/media/documents/221208_BDEW-Strompreisanalyse_Dez2022_08.12.2022_korr_vx5gByn.pdf page 34
310 | """
311 |
312 | price_components = [
313 | AbLa_surcharge,
314 | Concession_fee,
315 | EEG_surcharge,
316 | Electricity_tax,
317 | KWK_surcharge,
318 | Network_cost,
319 | NEV_surcharge,
320 | Offshore_surcharge,
321 | Sales,
322 | ]
323 |
324 | return self.sc.param(
325 | name=name,
326 | unit="€/kWh_el",
327 | doc="Electricity taxes and levies",
328 | data=sum(price_components),
329 | )
330 |
331 | def T__amb_T(self, name: str = "T__amb_T") -> pd.Series:
332 | """Uses coordinates to prepare ambient air temperature time series in °C."""
333 | sc = self.sc
334 | assert isinstance(sc.coords, tuple)
335 | ser = prep.get_air_temp(coords=sc.coords, year=sc.year, with_dt=False)
336 | return sc.param(
337 | name=name,
338 | unit="°C",
339 | doc=f"Ambient temperature",
340 | data=sc.match_dtindex(sc.resample(ser)),
341 | )
342 |
--------------------------------------------------------------------------------
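Note: the low/high-time mask behind `c_EG_TOU_T` can be reproduced standalone. A sketch using the same `holidays` logic (year, region, and prices are illustrative):

```python
import holidays
import numpy as np
import pandas as pd

idx = pd.date_range("2019-01-01", periods=8760, freq="H")
holis = holidays.DE(subdiv="BW")

# Low tariff on weekends, outside 08:00-20:00, and on public holidays
is_low = np.array(
    [(t.dayofweek >= 5) or not (8 <= t.hour < 20) or (t.date() in holis) for t in idx]
)

# Two-price TOU series in €/kWh_el (illustrative prices)
tou = pd.Series(np.where(is_low, 0.18, 0.27), index=idx, name="c_EG_TOU_T")
```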
/draf/data/demand/electricity/SLP_BDEW/Repräsentative Profile VDEW.xls:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/DrafProject/draf/949c3d22836d4df01ec582c37034ef5bfc920f23/draf/data/demand/electricity/SLP_BDEW/Repräsentative Profile VDEW.xls
--------------------------------------------------------------------------------
/draf/data/demand/electricity/SLP_BDEW/readme.txt:
--------------------------------------------------------------------------------
1 | Downloaded on 2021-08-26 by Markus Fleschutz from https://www.bdew.de/energie/standardlastprofile-strom/
2 | direct link: https://www.bdew.de/media/documents/Profile.zip
--------------------------------------------------------------------------------
/draf/data/wind/Prep_Kelmarsh_wind.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "3696e31d",
6 | "metadata": {},
7 | "source": [
8 | "This notebook documents how the `wind_kelmarsh_2019.csv` file was calculated.\n",
9 | "The source file `Turbine_Data_Kelmarsh_2_2019-01-01_-_2020-01-01_229.csv` is not part of draf since it has 150 MB."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 1,
15 | "id": "b6ee70e3",
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "files = {\n",
20 | " # 1: \"Turbine_Data_Kelmarsh_1_2019-01-01_-_2020-01-01_228.csv\",\n",
21 | " 2: \"Turbine_Data_Kelmarsh_2_2019-01-01_-_2020-01-01_229.csv\",\n",
22 | " # 3: \"Turbine_Data_Kelmarsh_3_2019-01-01_-_2020-01-01_230.csv\",\n",
23 | " # 4: \"Turbine_Data_Kelmarsh_4_2019-01-01_-_2020-01-01_231.csv\",\n",
24 | " # 5: \"Turbine_Data_Kelmarsh_5_2019-01-01_-_2020-01-01_232.csv\",\n",
25 | " # 6: \"Turbine_Data_Kelmarsh_6_2019-01-01_-_2020-01-01_233.csv\",\n",
26 | "}\n",
27 | "\n",
28 | "import pandas as pd\n",
29 | "from pathlib import Path\n",
30 | "import numpy as np\n",
31 | "from draf.helper import write\n",
32 | "\n",
33 | "turbine_capacity = 2050 # kW see also https://www.wind-turbine-models.com/turbines/889-senvion-mm92\n",
34 | "\n",
35 | "# downloaded Kelmarsh_SCADA_2019_3085.zip from https://doi.org/10.5281/zenodo.5841834\n",
36 | "# print(Path(files[1]).read_text()[:1500])"
37 | ]
38 | },
39 | {
40 | "cell_type": "code",
41 | "execution_count": 2,
42 | "id": "9c4fc0b6",
43 | "metadata": {},
44 | "outputs": [],
45 | "source": [
46 | "def get_profile(turbine):\n",
47 | " df_orig = pd.read_csv(Path(files[turbine]), skiprows=9, index_col=0, parse_dates=True)\n",
48 | " return df_orig.loc[\"2019\", \"Energy Export (kWh)\"].resample(\"H\").sum().reset_index(drop=True) / turbine_capacity"
49 | ]
50 | },
51 | {
52 | "cell_type": "code",
53 | "execution_count": 3,
54 | "id": "da9fa5cd",
55 | "metadata": {},
56 | "outputs": [],
57 | "source": [
58 | "ser = get_profile(turbine=2)"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 4,
64 | "id": "2943fbc8",
65 | "metadata": {},
66 | "outputs": [
67 | {
68 | "name": "stdout",
69 | "output_type": "stream",
70 | "text": [
71 | "The capacity factor is 0.33\n"
72 | ]
73 | }
74 | ],
75 | "source": [
76 | "print(f\"The capacity factor is {ser.mean():.2f}\")"
77 | ]
78 | },
79 | {
80 | "cell_type": "code",
81 | "execution_count": 5,
82 | "id": "91b150be",
83 | "metadata": {},
84 | "outputs": [],
85 | "source": [
86 | "# convert from UTC to German winter time\n",
87 | "ser = pd.Series(data=np.roll(ser, 1), index=ser.index, name=\"Power\")"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": 6,
93 | "id": "ef42eadd",
94 | "metadata": {},
95 | "outputs": [
96 | {
97 | "data": {
98 | "text/plain": [
99 | "0.3288368972045885"
100 | ]
101 | },
102 | "execution_count": 6,
103 | "metadata": {},
104 | "output_type": "execute_result"
105 | }
106 | ],
107 | "source": [
108 | "ser.mean()"
109 | ]
110 | },
111 | {
112 | "cell_type": "code",
113 | "execution_count": 7,
114 | "id": "37d06dc5",
115 | "metadata": {},
116 | "outputs": [],
117 | "source": [
118 | "assert len(ser) == 8760"
119 | ]
120 | },
121 | {
122 | "cell_type": "code",
123 | "execution_count": 8,
124 | "id": "c7db1ff0",
125 | "metadata": {},
126 | "outputs": [
127 | {
128 | "data": {
129 | "text/plain": [
130 | "count 8760.000000\n",
131 | "mean 0.328837\n",
132 | "std 0.297384\n",
133 | "min 0.000000\n",
134 | "25% 0.081951\n",
135 | "50% 0.235610\n",
136 | "75% 0.510732\n",
137 | "max 1.001951\n",
138 | "Name: Power, dtype: float64"
139 | ]
140 | },
141 | "execution_count": 8,
142 | "metadata": {},
143 | "output_type": "execute_result"
144 | }
145 | ],
146 | "source": [
147 | "ser.describe()"
148 | ]
149 | },
150 | {
151 | "cell_type": "code",
152 | "execution_count": 9,
153 | "id": "7586d3e7",
154 | "metadata": {},
155 | "outputs": [],
156 | "source": [
157 | "write(ser, \"2019_wind_kelmarsh2.csv\")"
158 | ]
159 | }
160 | ],
161 | "metadata": {
162 | "kernelspec": {
163 | "display_name": "Python 3",
164 | "language": "python",
165 | "name": "python3"
166 | },
167 | "language_info": {
168 | "codemirror_mode": {
169 | "name": "ipython",
170 | "version": 3
171 | },
172 | "file_extension": ".py",
173 | "mimetype": "text/x-python",
174 | "name": "python",
175 | "nbconvert_exporter": "python",
176 | "pygments_lexer": "ipython3",
177 | "version": "3.9.13"
178 | },
179 | "toc": {
180 | "base_numbering": 1,
181 | "nav_menu": {},
182 | "number_sections": true,
183 | "sideBar": true,
184 | "skip_h1_title": false,
185 | "title_cell": "Table of Contents",
186 | "title_sidebar": "Contents",
187 | "toc_cell": false,
188 | "toc_position": {},
189 | "toc_section_display": true,
190 | "toc_window_display": false
191 | },
192 | "vscode": {
193 | "interpreter": {
194 | "hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e"
195 | }
196 | }
197 | },
198 | "nbformat": 4,
199 | "nbformat_minor": 5
200 | }
201 |
--------------------------------------------------------------------------------
/draf/paths.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | from appdirs import user_cache_dir
4 |
5 |
6 | def _get_cache_directory() -> Path:
7 | """Returns the path to cache directory and creates it, if not yet existing."""
8 | fp = Path(user_cache_dir(appname="draf", appauthor="DrafProject"))
9 | fp.mkdir(parents=True, exist_ok=True)
10 | return fp
11 |
12 |
13 | CACHE_DIR = _get_cache_directory()
14 | BASE_DIR = Path(__file__).resolve().parent
15 | DATA_DIR = BASE_DIR / "data"
16 | RESULTS_DIR = BASE_DIR / "results"
17 | RESULTS_DIR.mkdir(parents=True, exist_ok=True)
18 |
--------------------------------------------------------------------------------
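Note: where these directories resolve to depends on the operating system (via `appdirs`). A quick check:

```python
from draf import paths

print(paths.CACHE_DIR)  # e.g. ~/.cache/draf on Linux; differs on Windows/macOS
print(paths.DATA_DIR)   # packaged data shipped with draf
```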
/draf/plotting/__init__.py:
--------------------------------------------------------------------------------
1 | """This module provides plotting for scenarios."""
2 |
3 | from draf.plotting.cs_plotting import CsPlotter
4 | from draf.plotting.scen_plotting import ScenPlotter
5 |
--------------------------------------------------------------------------------
/draf/plotting/base_plotter.py:
--------------------------------------------------------------------------------
1 | from IPython import get_ipython
2 |
3 |
4 | class BasePlotter:
5 | def script_type(self) -> str:
6 | """Returns script type to determine if interactive plots are available."""
7 | if get_ipython() is not None:
8 | ipy_str = str(type(get_ipython()))
9 | if "zmqshell" in ipy_str:
10 | return "jupyter"
11 | if "terminal" in ipy_str:
12 | return "ipython"
13 | else:
14 | return "terminal"
15 |
--------------------------------------------------------------------------------
/draf/plotting/plotting_util.py:
--------------------------------------------------------------------------------
1 | from draf.prep.data_base import SRC
2 |
3 |
4 | def make_clickable_src(src: str) -> str:
5 | """Converts a src_key into a html href string"""
6 | try:
7 | url = getattr(SRC, src[1:]).url
8 |         return f'<a href="{url}">{src}</a>'
9 | except AttributeError:
10 | return src
11 |
--------------------------------------------------------------------------------
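Note: a plausible use of `make_clickable_src` is rendering source keys as links in a parameter table via a pandas Styler (the DataFrame here is illustrative):

```python
import pandas as pd

from draf.plotting.plotting_util import make_clickable_src

df = pd.DataFrame({"src": ["@Vartiainen_2019", "@PVMAG_2020"]})
styled = df.style.format({"src": make_clickable_src})  # renders hrefs in notebooks
```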
/draf/prep/__init__.py:
--------------------------------------------------------------------------------
1 | """A toolbox for preparation of timeseries for optimization."""
2 | from draf.prep.data_base import SRC, DataBase, ParDat
3 | from draf.prep.demand import get_cooling_demand, get_el_SLP, get_heating_demand
4 | from draf.prep.pv import get_backup_PV_profile, get_nearestStationData_for_gsee, get_pv_power
5 | from draf.prep.weather import get_air_temp, get_data_for_gsee, get_df_from_DWD, get_nearest_stations
6 |
--------------------------------------------------------------------------------
/draf/prep/demand.py:
--------------------------------------------------------------------------------
1 | """Functions to prepare energy demand time series."""
2 |
3 | import datetime
4 | import logging
5 | import warnings
6 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
7 |
8 | import holidays
9 | import pandas as pd
10 |
11 | from draf import helper as hp
12 | from draf import paths
13 | from draf.prep.weather import get_air_temp
14 |
15 | logger = logging.getLogger(__name__)
16 | logger.setLevel(level=logging.WARN)
17 |
18 | SLP_PROFILES = {
19 | "H0": "Household general",
20 | "G0": "Business in general",
21 | "G1": "Business on weekdays 08:00 - 18:00",
22 | "G2": "Business with heavy evening consumption",
23 | "G3": "Business continuous",
24 | "G4": "shop/hairdresser",
25 | "G5": "Bakeries with bakery",
26 | "G6": "Weekend operation",
27 | "L0": "farms",
28 | "L1": "farms with milk and livestock",
29 | "L2": "other farms",
30 | }
31 |
32 |
33 | def get_el_SLP(
34 | year: int = 2019,
35 | freq: str = "15min",
36 | profile: str = "G1",
37 | peak_load: Optional[float] = None,
38 | annual_energy: Optional[float] = None,
39 | offset: float = 0,
40 | country: str = "DE",
41 | province: Optional[str] = None,
42 | ) -> pd.Series:
43 | """Return synthetic electricity time series based on standard load profiles.
44 |
45 | Args:
46 | year: Year
47 | freq: Desired frequency of the resulting time series. Choose between '60min', '15min'.
48 | profile: Chosen profile. Choose between:
49 | H0: Household general
50 | G0: Business in general
51 | G1: Business on weekdays 08:00 - 18:00
52 | G2: Business with heavy evening consumption
53 | G3: Business continuous
54 |             G4: Shop/hairdresser
55 |             G5: Bakery with bakehouse
56 |             G6: Weekend operation
57 |             L0: Farms
58 |             L1: Farms with milk and livestock
59 |             L2: Other farms
60 | peak_load: Maximum load of the year. If given, the time series is scaled so that the
61 | maximum value matches it.
62 | annual_energy: Yearly energy value in kWh_el. If given, the time series is scaled so
63 | that the yearly total matches the `annual_energy`.
64 |         offset: Value which is added to every time step and also considered when scaling to
65 | `peak_load` and `annual_energy`.
66 | country: Used to determine the public holidays.
67 | E.g. if `country`='DE' and `province`=None public holidays for North Rhine-Westphalia
68 | are taken: Neujahrstag, Karfreitag, Ostersonntag, Ostermontag, Maifeiertag,
69 | Christi Himmelfahrt, Pfingstsonntag, Pfingstmontag, Fronleichnam,
70 | Tag der Deutschen Einheit, Allerheiligen, 1. Weihnachtstag, 2. Weihnachtstag
71 |         province: Narrows the public holidays down to a specific province or state.
72 |
73 | Season Types:
74 | Summer: 15.05. - 14.09.
75 | Winter: 01.11. - 20.03.
76 | Transitional: 21.03. - 14.05. and 15.09. - 31.10.
77 |
78 | Public holidays (dependent on country and province or state) receive the Sunday profile.
79 | """
80 | warnings.filterwarnings("ignore", message="indexing past lexsort depth")
81 | assert profile in SLP_PROFILES
82 |
83 | if offset > 0 and peak_load is not None:
84 | assert offset < peak_load
85 |
86 | fp = paths.DATA_DIR / f"demand/electricity/SLP_BDEW/Repräsentative Profile VDEW.xls"
87 | df = pd.read_excel(io=fp, sheet_name=profile, header=[1, 2], index_col=0, skipfooter=1)
88 | df = df.reset_index(drop=True)
89 |
90 | def _get_season(date):
91 | is_summer = (datetime.datetime(date.year, 5, 15) <= date) and (
92 | date <= datetime.datetime(date.year, 9, 14)
93 | )
94 | is_winter = (date >= datetime.datetime(date.year, 11, 1)) or (
95 | date <= datetime.datetime(date.year, 3, 20)
96 | )
97 | if is_summer:
98 | return "Sommer"
99 | elif is_winter:
100 | return "Winter"
101 | else:
102 | return "Übergangszeit"
103 |
104 | holiday_obj = getattr(holidays, country)(subdiv=province)
105 |
106 | def _get_day_type(date):
107 | is_holiday = date in holiday_obj
108 | is_sunday = date.dayofweek == 6
109 | is_saturday = date.dayofweek == 5
110 | if is_sunday or is_holiday:
111 | return "Sonntag"
112 | elif is_saturday:
113 | return "Samstag"
114 | else:
115 | return "Werktag"
116 |
117 | days = hp.make_datetimeindex(year=year, freq="D")
118 | seasons = [_get_season(day) for day in days]
119 | day_types = [_get_day_type(day) for day in days]
120 |
121 | dt_index = hp.make_datetimeindex(year=year, freq="15min")
122 | ser = pd.Series(index=dt_index, dtype="float64")
123 |
124 | for day, season, day_type in zip(days, seasons, day_types):
125 | ser.loc[day.strftime("%Y-%m-%d")] = df[season, day_type].values
126 |
127 | ser = hp.resample(ser, year=year, start_freq="15min", target_freq=freq, aggfunc="mean")
128 | ser = ser.reset_index(drop=True)
129 |
130 | if peak_load is not None:
131 | ser = ser * (peak_load - offset) / ser.max()
132 |
133 | delta_T = hp.get_step_width(freq)
134 |
135 | if annual_energy is not None:
136 | ser = ser * (annual_energy - (offset * dt_index.size * delta_T)) / (ser.sum() * delta_T)
137 |
138 | ser = ser + offset
139 |
140 | logger.info(
141 | "SLP created\n"
142 | f"\t{str(year)}, {freq}\n"
143 | f"\t{profile} ({SLP_PROFILES[profile]})\n"
144 | f"\tpeak_load: {ser.max()}\n"
145 | f"\tannual_energy{ser.sum() * delta_T}"
146 | )
147 |
148 | return ser
149 |
150 |
151 | def get_heating_demand(
152 | year: int,
153 | freq: str = "60min",
154 | ser_amb_temp: Optional[pd.Series] = None,
155 | annual_energy: float = 1e6,
156 | target_temp: float = 22.0,
157 | threshold_temp: float = 15.0,
158 | coords: Optional[Tuple[float, float]] = None,
159 | ) -> pd.Series:
160 | """Returns a heating demand profile based on the air temperature."""
161 | if ser_amb_temp is None:
162 | assert coords is not None
163 | assert year is not None
164 | ser_amb_temp = get_air_temp(coords=coords, year=year, with_dt=False)
165 | assert target_temp >= threshold_temp
166 | ser_amb_temp[ser_amb_temp > threshold_temp] = target_temp
167 | ser = target_temp - ser_amb_temp
168 | scaling_factor = annual_energy / ser.sum()
169 | ser *= scaling_factor
170 | ser.name = "dQ_hDem_T"
171 | logger.info(
172 | f"Heating demand created with annual energy={annual_energy}, target_temp={target_temp}"
173 | f", threshold_temp={threshold_temp}."
174 | )
175 | ser = hp.resample(ser, year=year, start_freq="60min", target_freq=freq)
176 | return ser
177 |
178 |
179 | def get_cooling_demand(
180 | year: int,
181 | freq: str = "60min",
182 | ser_amb_temp: Optional[pd.Series] = None,
183 | annual_energy: float = 1e6,
184 | target_temp: float = 22.0,
185 | threshold_temp: float = 22.0,
186 | coords: Optional[Tuple[float, float]] = None,
187 | ) -> pd.Series:
188 | """Returns a cooling demand profile based on the ambient air temperature."""
189 | if ser_amb_temp is None:
190 | assert coords is not None
191 | assert year is not None
192 | ser_amb_temp = get_air_temp(coords=coords, year=year, with_dt=False)
193 | assert target_temp <= threshold_temp
194 | ser_amb_temp[ser_amb_temp < threshold_temp] = target_temp
195 | ser = ser_amb_temp - target_temp
196 | scaling_factor = annual_energy / ser.sum()
197 | ser = ser * scaling_factor
198 | ser.name = "dQ_cDem_T"
199 | logger.info(
200 | f"Cooling demand created with annual energy={annual_energy}, target_temp={target_temp}"
201 | f", threshold_temp={threshold_temp}."
202 | )
203 | ser = hp.resample(ser, year=year, start_freq="60min", target_freq=freq)
204 | return ser
205 |
--------------------------------------------------------------------------------
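Note: a usage sketch for the demand generators above (coordinates are illustrative; `get_heating_demand` downloads DWD weather data for the nearest station on first call):

```python
from draf.prep.demand import get_el_SLP, get_heating_demand

# Business-weekday profile scaled so the yearly total is 1 GWh_el,
# including a 50 kW base load (the `offset`)
el = get_el_SLP(year=2019, freq="60min", profile="G1", annual_energy=1e6, offset=50)

# Heating demand of 1 GWh_th per year, derived from air temperature near the coords
th = get_heating_demand(year=2019, coords=(49.01, 8.39), annual_energy=1e6)
```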
/draf/prep/gsee_module/cec_tools.py:
--------------------------------------------------------------------------------
1 | # This file was copied from the gsee repository
2 | # https://github.com/renewables-ninja/gsee/tree/2738600e64a645f97eb96f6c9fb7d3f2856cf24c
3 | # as a workaround to https://github.com/renewables-ninja/gsee/issues/12
4 |
5 | # This is the license of GSEE:
6 |
7 | # BSD 3-Clause License
8 |
9 | # Copyright (c) 2013-2018, Stefan Pfenninger
10 | # All rights reserved.
11 |
12 | # Redistribution and use in source and binary forms, with or without
13 | # modification, are permitted provided that the following conditions are met:
14 |
15 | # * Redistributions of source code must retain the above copyright notice, this
16 | # list of conditions and the following disclaimer.
17 |
18 | # * Redistributions in binary form must reproduce the above copyright notice,
19 | # this list of conditions and the following disclaimer in the documentation
20 | # and/or other materials provided with the distribution.
21 |
22 | # * Neither the name of the copyright holder nor the names of its
23 | # contributors may be used to endorse or promote products derived from
24 | # this software without specific prior written permission.
25 |
26 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
27 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
29 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
30 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
31 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
32 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
33 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
34 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
35 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 |
37 | import pvlib
38 |
39 |
40 | def get_efficiency(irradiance, cell_temperature, module_params):
41 | """
42 | irradiance : float or pandas.Series
43 | Effective irradiance (W/m2) that is converted to photocurrent.
44 | cell_temperature : float or pandas.Series
45 | Average cell temperature of cells within a module in deg C.
46 | module_params : dict
47 | Module params 'alpha_sc', 'a_ref', 'I_L_ref', 'I_o_ref', 'R_sh_ref', 'R_s'.
48 |
49 | """
50 | params = pvlib.pvsystem.calcparams_desoto(
51 | effective_irradiance=irradiance, temp_cell=cell_temperature, **module_params
52 | )
53 |
54 | # Ensure that the shunt resistance is not infinite
55 | # Commented out because we want to still return valid Series when
56 | # some of the values are zero -- NaNs from 0-divisions are filled later
57 | # assert params[3] != math.inf
58 |
59 | dc = pvlib.pvsystem.singlediode(*params)
60 | efficiency = dc["p_mp"] / irradiance
61 | return efficiency
62 |
63 |
64 | def relative_eff(irradiance, cell_temperature, params):
65 | """
66 | Compute relative efficiency of PV module as a function of irradiance
67 | and cell/module temperature, from Huld (2010):
68 |
69 |     .. math:: n_{rel} = \frac{P}{P_{stc} \cdot (G / G_{stc})}
70 |
71 | Where G is in-plane irradiance, P is power output,
72 | and STC conditions are :math:`G = 1000` and
73 | :math:`T_{mod} = 25`.
74 |
75 | When irradiance is zero, a zero relative efficiency is returned.
76 |
77 | Parameters
78 | ----------
79 |
80 | irradiance : float or pandas.Series
81 | Irradiance in W/m2.
82 | cell_temperature : float or pandas.Series
83 | Average cell temperature of cells within a module in deg C.
84 | params : dict
85 | Module params 'alpha_sc', 'a_ref', 'I_L_ref', 'I_o_ref', 'R_sh_ref', 'R_s'.
86 |
87 | """
88 | if isinstance(irradiance, float) and irradiance == 0:
89 | return 0
90 |
91 | power_stc = 1000 * get_efficiency(1000, 25, params)
92 | power = irradiance * get_efficiency(irradiance, cell_temperature, params)
93 |
94 | # Fill NaNs from any possible divisions by zero with 0
95 | return (power / (power_stc * (irradiance / 1000))).fillna(0)
96 |
--------------------------------------------------------------------------------
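Note: the six module parameters expected here carry the same names as entries in pvlib's bundled CEC module database, so a sketch can pull them from there (the module choice is arbitrary):

```python
import pandas as pd
import pvlib

from draf.prep.gsee_module.cec_tools import relative_eff

mods = pvlib.pvsystem.retrieve_sam("CECMod")
module = mods.iloc[:, 0]  # first module in the table (arbitrary choice)
params = {k: module[k] for k in ("alpha_sc", "a_ref", "I_L_ref", "I_o_ref", "R_sh_ref", "R_s")}

irradiance = pd.Series([200.0, 500.0, 800.0])       # W/m²
cell_temp = pd.Series([15.0, 25.0, 40.0])           # °C
print(relative_eff(irradiance, cell_temp, params))  # ~1.0 near STC, lower off-design
```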
/draf/prep/gsee_module/trigon.py:
--------------------------------------------------------------------------------
1 | # This file was copied from the gsee repository
2 | # https://github.com/renewables-ninja/gsee/tree/2738600e64a645f97eb96f6c9fb7d3f2856cf24c
3 | # as a workaround to https://github.com/renewables-ninja/gsee/issues/12
4 |
5 | # This is the license of GSEE:
6 |
7 | # BSD 3-Clause License
8 |
9 | # Copyright (c) 2013-2018, Stefan Pfenninger
10 | # All rights reserved.
11 |
12 | # Redistribution and use in source and binary forms, with or without
13 | # modification, are permitted provided that the following conditions are met:
14 |
15 | # * Redistributions of source code must retain the above copyright notice, this
16 | # list of conditions and the following disclaimer.
17 |
18 | # * Redistributions in binary form must reproduce the above copyright notice,
19 | # this list of conditions and the following disclaimer in the documentation
20 | # and/or other materials provided with the distribution.
21 |
22 | # * Neither the name of the copyright holder nor the names of its
23 | # contributors may be used to endorse or promote products derived from
24 | # this software without specific prior written permission.
25 |
26 | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
27 | # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 | # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
29 | # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
30 | # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
31 | # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
32 | # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
33 | # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
34 | # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
35 | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 |
37 | import datetime
38 |
39 | import ephem
40 | import numpy as np
41 | import pandas as pd
42 |
43 |
44 | def _get_rise_and_set_time(date, sun, obs):
45 | """
46 | Returns a tuple of (rise, set) time for the given date, sun and observer.
47 | """
48 | obs.date = date
49 | sun.compute(obs)
50 |
51 | # Up to and including v0.2.1, old API was implicitly setting use_center
52 | # to True, but considering the sun's radius leads to slightly more
53 | # realistic rise/set time
54 | try:
55 | rising = obs.next_rising(sun, use_center=False)
56 | except (ephem.AlwaysUpError, ephem.NeverUpError):
57 | rising = None
58 |
59 | try:
60 | setting = obs.next_setting(sun, use_center=False)
61 | except (ephem.AlwaysUpError, ephem.NeverUpError):
62 | setting = None
63 |
64 | rise_time = None if not rising else rising.datetime()
65 | set_time = None if not setting else setting.datetime()
66 |
67 | return (rise_time, set_time)
68 |
69 |
70 | def sun_rise_set_times(datetime_index, coords):
71 | """
72 | Returns sunrise and set times for the given datetime_index and coords,
73 | as a Series indexed by date (days, resampled from the datetime_index).
74 | """
75 | sun = ephem.Sun()
76 | obs = ephem.Observer()
77 | obs.lat = str(coords[0])
78 | obs.lon = str(coords[1])
79 |
80 | # Ensure datetime_index is daily
81 | dtindex = pd.DatetimeIndex(datetime_index.to_series().map(pd.Timestamp.date).unique())
82 |
83 | return pd.Series([_get_rise_and_set_time(i, sun, obs) for i in dtindex], index=dtindex)
84 |
85 |
86 | def sun_angles(datetime_index, coords, rise_set_times=None):
87 | """
88 | Calculates sun angles. Returns a dataframe containing `sun_alt`,
89 | `sun_zenith`, `sun_azimuth` and `duration` over the passed datetime index.
90 | Parameters
91 | ----------
92 | datetime_index : pandas datetime index
93 |         Handled as if they were UTC no matter what timezone info
94 | they may supply.
95 | coords : (float, float) or (int, int) tuple
96 | Latitude and longitude.
97 | rise_set_times : list, default None
98 | List of (sunrise, sunset) time tuples, if not passed, is computed
99 | here.
100 | """
101 |
102 | def _sun_alt_azim(sun, obs):
103 | sun.compute(obs)
104 | return sun.alt, sun.az
105 |
106 | # Initialize ephem objects
107 | obs = ephem.Observer()
108 | obs.lat = str(coords[0])
109 | obs.lon = str(coords[1])
110 | sun = ephem.Sun()
111 |
112 | # Calculate daily sunrise/sunset times
113 | if rise_set_times is None:
114 | rise_set_times = sun_rise_set_times(datetime_index, coords)
115 |
116 |     # Calculate hourly altitude, azimuth, and sunshine
117 | alts = []
118 | azims = []
119 | durations = []
120 |
121 | for index, item in enumerate(datetime_index):
122 | obs.date = item
123 | # rise/set times are indexed by day, so need to adjust lookup
124 | rise_time, set_time = rise_set_times.loc[item.date().strftime("%Y-%m-%d")]
125 |
126 | # Set angles, sun altitude and duration based on hour of day:
127 | if rise_time is not None and item.hour == rise_time.hour:
128 | # Special case for sunrise hour
129 | duration = 60 - rise_time.minute - (rise_time.second / 60.0)
130 | obs.date = rise_time + datetime.timedelta(minutes=duration / 2)
131 | sun_alt, sun_azimuth = _sun_alt_azim(sun, obs)
132 | elif set_time is not None and item.hour == set_time.hour:
133 | # Special case for sunset hour
134 | duration = set_time.minute + set_time.second / 60.0
135 | obs.date = item + datetime.timedelta(minutes=duration / 2)
136 | sun_alt, sun_azimuth = _sun_alt_azim(sun, obs)
137 | else:
138 | # All other hours
139 | duration = 60
140 | obs.date = item + datetime.timedelta(minutes=30)
141 | sun_alt, sun_azimuth = _sun_alt_azim(sun, obs)
142 | if sun_alt < 0: # If sun is below horizon
143 | sun_alt, sun_azimuth, duration = 0, 0, 0
144 |
145 | alts.append(sun_alt)
146 | azims.append(sun_azimuth)
147 | durations.append(duration)
148 | df = pd.DataFrame(
149 | {"sun_alt": alts, "sun_azimuth": azims, "duration": durations}, index=datetime_index
150 | )
151 | df["sun_zenith"] = (np.pi / 2) - df.sun_alt
152 | # Sun altitude considered zero if slightly below horizon
153 | df["sun_alt"] = df["sun_alt"].clip(lower=0)
154 | return df
155 |
156 |
157 | def _incidence_fixed(sun_alt, tilt, azimuth, sun_azimuth):
158 | """Returns incidence angle for a fixed panel"""
159 | return np.arccos(
160 | np.sin(sun_alt) * np.cos(tilt)
161 | + np.cos(sun_alt) * np.sin(tilt) * np.cos(azimuth - sun_azimuth)
162 | )
163 |
164 |
165 | def _incidence_single_tracking(sun_alt, tilt, azimuth, sun_azimuth):
166 | """
167 | Returns incidence angle for a 1-axis tracking panel
168 | Parameters
169 | ----------
170 | sun_alt : sun altitude angle
171 | tilt : tilt of tilt axis
172 | azimuth : rotation of tilt axis
173 | sun_azimuth : sun azimuth angle
174 | """
175 | if tilt == 0:
176 | return np.arccos(np.sqrt(1 - np.cos(sun_alt) ** 2 * np.cos(sun_azimuth - azimuth) ** 2))
177 | else:
178 | return np.arccos(
179 | np.sqrt(
180 | 1
181 | - (
182 | np.cos(sun_alt + tilt)
183 | - np.cos(tilt) * np.cos(sun_alt) * (1 - np.cos(sun_azimuth - azimuth))
184 | )
185 | ** 2
186 | )
187 | )
188 |
189 |
190 | def _tilt_single_tracking(sun_alt, tilt, azimuth, sun_azimuth):
191 | """
192 | Returns panel tilt angle for a 1-axis tracking panel
193 | Parameters
194 | ----------
195 | sun_alt : sun altitude angle
196 | tilt : tilt of tilt axis
197 | azimuth : rotation of tilt axis
198 | sun_azimuth : sun azimuth angle
199 | """
200 | if tilt == 0:
201 | return np.arctan(np.sin(sun_azimuth - azimuth) / np.tan(sun_alt))
202 | else:
203 | return np.arctan(
204 | (np.cos(sun_alt) * np.sin(sun_azimuth - azimuth))
205 | / (
206 | np.sin(sun_alt - tilt)
207 | + np.sin(tilt) * np.cos(sun_alt) * (1 - np.cos(sun_azimuth - azimuth))
208 | )
209 | )
210 |
211 |
212 | def aperture_irradiance(
213 | direct, diffuse, coords, tilt=0, azimuth=0, tracking=0, albedo=0.3, dni_only=False, angles=None
214 | ):
215 | """
216 | Parameters
217 | ----------
218 | direct : pandas.Series
219 | Direct horizontal irradiance with a datetime index
220 | diffuse : pandas.Series
221 | Diffuse horizontal irradiance with the same datetime index as `direct`
222 | coords : (float, float)
223 | (lat, lon) tuple of location coordinates
224 | tilt : float, default=0
225 | Angle of panel relative to the horizontal plane.
226 | 0 = flat.
227 | azimuth : float, default=0
228 | Deviation of the tilt direction from the meridian.
229 | 0 = towards pole, going clockwise, 3.14 = towards equator.
230 | tracking : int, default=0
231 | 0 (none, default), 1 (tilt), or 2 (tilt and azimuth).
232 | If 1, `tilt` gives the tilt of the tilt axis relative to horizontal
233 | (tilt=0) and `azimuth` gives the orientation of the tilt axis.
234 | albedo : float, default=0.3
235 | reflectance of the surrounding surface
236 | dni_only : bool, default False
237 | only calculate and directly return a DNI time series (ignores
238 | tilt, azimuth, tracking and albedo arguments).
239 | angles : pandas.DataFrame, optional
240 | Solar angles. If default (None), they are computed automatically.
241 | """
242 | # 0. Correct azimuth if we're on southern hemisphere, so that 3.14
243 | # points north instead of south
244 | if coords[0] < 0:
245 | azimuth = azimuth + np.pi
246 | # 1. Calculate solar angles
247 | if angles is None:
248 | sunrise_set_times = sun_rise_set_times(direct.index, coords)
249 | angles = sun_angles(direct.index, coords, sunrise_set_times)
250 | # 2. Calculate direct normal irradiance
251 | dni = (direct * (angles["duration"] / 60)) / np.cos(angles["sun_zenith"])
252 | if dni_only:
253 | return dni
254 | # 3. Calculate appropriate aperture incidence angle
255 | if tracking == 0:
256 | incidence = _incidence_fixed(angles["sun_alt"], tilt, azimuth, angles["sun_azimuth"])
257 | panel_tilt = tilt
258 | elif tracking == 1:
259 | # 1-axis tracking with horizontal or tilted tracking axis
260 | incidence = _incidence_single_tracking(
261 | angles["sun_alt"], tilt, azimuth, angles["sun_azimuth"]
262 | )
263 | panel_tilt = _tilt_single_tracking(angles["sun_alt"], tilt, azimuth, angles["sun_azimuth"])
264 | elif tracking == 2:
265 | # 2-axis tracking means incidence angle is zero
266 | # Assuming azimuth/elevation tracking for tilt/azimuth angles
267 | incidence = 0
268 | panel_tilt = angles["sun_zenith"]
269 | azimuth = angles["sun_azimuth"]
270 | else:
271 | raise ValueError("Invalid setting for tracking: {}".format(tracking))
272 | # 4. Compute direct and diffuse irradiance on plane
273 | # Clipping ensures that very low panel to sun altitude angles do not
274 | # result in negative direct irradiance (reflection)
275 | plane_direct = (dni * np.cos(incidence)).fillna(0).clip(lower=0)
276 | plane_diffuse = (
277 | diffuse * ((1 + np.cos(panel_tilt)) / 2)
278 | + albedo * (direct + diffuse) * ((1 - np.cos(panel_tilt)) / 2)
279 | ).fillna(0)
280 | return pd.DataFrame({"direct": plane_direct, "diffuse": plane_diffuse})
281 |
--------------------------------------------------------------------------------
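Note: a sketch of calling `aperture_irradiance` on synthetic hourly data; per the docstring, tilt and azimuth are in radians (coordinates and irradiance values are illustrative):

```python
import numpy as np
import pandas as pd

from draf.prep.gsee_module.trigon import aperture_irradiance

idx = pd.date_range("2019-06-01", periods=24, freq="H")
bump = np.clip(np.sin(np.linspace(-np.pi / 2, 3 * np.pi / 2, 24)), 0, None)
direct = pd.Series(600 * bump, index=idx)  # W/m², crude daylight shape
diffuse = pd.Series(100.0, index=idx)      # W/m², constant for simplicity

# 30° tilted, equator-facing fixed panel at roughly Karlsruhe
plane = aperture_irradiance(
    direct, diffuse, coords=(49.01, 8.39), tilt=np.deg2rad(30), azimuth=np.pi
)
print(plane.sum())  # in-plane direct and diffuse totals
```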
/draf/prep/par_dat.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Union
3 |
4 | import pandas as pd
5 |
6 | from draf import helper as hp
7 |
8 |
9 | @dataclass
10 | class Source:
11 | url: str
12 | doc: str = ""
13 | bib: str = ""
14 |
15 |
16 | @dataclass
17 | class ParDat:
18 | name: str
19 | data: Union[float, pd.Series]
20 | doc: str = ""
21 | src: str = ""
22 | unit: str = ""
23 |
24 | @property
25 | def etype(self) -> str:
26 | return hp.get_etype(self.name)
27 |
28 | @property
29 | def comp(self) -> str:
30 | return hp.get_component(self.name)
31 |
32 | @property
33 | def desc(self) -> str:
34 | return hp.get_desc(self.name)
35 |
36 | @property
37 | def dims(self) -> str:
38 | return hp.get_dims(self.name)
39 |
--------------------------------------------------------------------------------
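Note: the derived properties decode draf's entity-naming convention (`<etype>_<component>_<descriptor>_<dims>`). A sketch (the expected outputs are inferred from that convention):

```python
from draf.prep import ParDat

p = ParDat(name="c_PV_inv_", data=460, doc="CAPEX", src="@Vartiainen_2019", unit="€/kW_peak")
print(p.etype, p.comp, p.dims)  # expected: 'c', 'PV', '' (trailing '_' means scalar, no dims)
```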
/draf/prep/param_funcs.py:
--------------------------------------------------------------------------------
1 | """Functions that provide investment prices for energy-related technologies in the industrial
2 | environment.
3 | """
4 | import pandas as pd
5 |
6 | from draf.prep.data_base import ParDat
7 |
8 |
9 | def c_PV_inv_() -> ParDat:
10 | """CAPEX for PV
11 |
12 |     Valid for Europe in 2019 for P_inst >= 500 kW_peak.
13 | """
14 | return ParDat(name="c_PV_inv_", data=460, doc="CAPEX", src="@Vartiainen_2019", unit="€/kW_peak")
15 |
16 |
17 | def c_BES_inv_(estimated_size=100, which="mean") -> ParDat:
18 | """CAPEX for lithium-ion battery energy storages
19 |
20 | Parameters:
21 | estimated_size: 0..50000 kWh_el installed capacity.
22 |         which: One of ('lowest', 'mean', 'highest') of the realized prices.
23 | """
24 | assert 0 <= estimated_size <= 50000
25 | assert which in ("lowest", "mean", "highest")
26 |
27 | sizes = (100, 500, 1000, 10000, 50000)
28 | prices = dict(
29 | highest=(990, 980, 700, 650, 600),
30 | mean=(720, 651, 550, 497, 413),
31 | lowest=(295, 280, 250, 245, 215),
32 | )
33 |
34 | for size, price in zip(sizes, prices[which]):
35 | if estimated_size <= size:
36 | break
37 | return ParDat(name="c_BES_inv_", data=price, doc="CAPEX", src="@PVMAG_2020", unit="€/kWh_el")
38 |
39 |
40 | def c_CHP_inv_(estimated_size=400, fuel_type="bio") -> ParDat:
41 | """CAPEX for combined heat and power
42 |
43 | Parameters:
44 | estimated_size: 0..2500 kW_el nominal electric power.
45 | fuel_type: In['bio', 'ng']
46 |
47 | Formulas:
48 | ng: 9332.6 * estimated_size ** -0.461
49 | bio: 15648 * estimated_size ** -0.5361
50 | """
51 | assert 0 <= estimated_size <= 2500
52 |
53 | if fuel_type == "ng":
54 | value = 9332.6 * estimated_size**-0.461
55 | elif fuel_type == "bio":
56 | value = 15648 * estimated_size**-0.5361
57 | else:
58 | raise ValueError("fuel_type must be in ['bio', 'ng'].")
59 |
60 | return ParDat(name="c_CHP_inv_", data=value, doc="CAPEX", src="@ASUE_2011", unit="€/kW_el")
61 |
62 |
63 | def c_HP_inv_(estimated_size=100) -> ParDat:
64 | """CAPEX for electric heat pumps
65 |
66 | Parameters:
67 |         estimated_size: 0..200 kW_th heating power.
68 |
69 | Formula:
70 | 1520.7 * estimated_size ** -.363
71 |
72 | Source:
73 | Wolf_2017 (https://doi.org/10.18419/opus-9593)
74 |
75 | Other sources:
76 | high temperature HP data, but only specific equipment cost --> (100-400 €/kW_th) @Kosmadakis.2020 (https://doi.org/10.1016/j.enconman.2020.113488)
77 | large scale HPs (3MW_th) --> (670-950 €/kW_th) @DanishEA_2022 (https://ens.dk/en/our-services/projections-and-models/technology-data/technology-data-generation-electricity-and)
78 | (0.5 MW-10 MW) --> (490-1100 €/kW_th) @Sandvall_2017 (https://doi.org/10.1016/j.esr.2017.10.003)
79 | Air-sourced heat pump --> (387-1089 €/kW_th) @Petkov_2020 (https://doi.org/10.1016/j.apenergy.2020.115197)
80 | (>0.1 MW) --> (300-900 €/kW_th) @Meyers_2018 (https://doi.org/10.1016/j.solener.2018.08.011)
81 | """
82 | assert 0 < estimated_size < 200
83 | value = 1520.7 * estimated_size**-0.363
84 |
85 | return ParDat(name="c_HP_inv_", data=value, doc="CAPEX", src="@Wolf_2017", unit="€/kW_el")
86 |
87 |
88 | def c_TES_inv_(estimated_size=100, temp_spread=40) -> ParDat:
89 | """CAPEX for heat storages.
90 |
91 | Parameters:
92 | estimated_size: 30..30000 m³ storage size.
93 | temp_spread: Temperature spread in °C.
94 |
95 | Formula:
96 | price_per_m3 = 8222.6 * estimated_size ** -0.394
97 | """
98 | assert 30 < estimated_size < 30000
99 | assert 0 < temp_spread < 1000
100 | specific_heat = 4.2 # kJ/(kg*K)
101 | kJ_per_kWh = 3600 # kJ/kWh
102 | density = 999.975 # kg/m³
103 | kWh_per_m3 = specific_heat / kJ_per_kWh * temp_spread * density
104 | price_per_m3 = 8222.6 * estimated_size**-0.394
105 | value = price_per_m3 / kWh_per_m3
106 | return ParDat(name="c_TES_inv_", data=value, doc="CAPEX", src="@FFE_2016", unit="€/kW_th")
107 |
108 |
109 | def c_HOB_inv_() -> ParDat:
110 | """CAPEX for gas boiler Vitoplex 300 with 620 kW_th"""
111 | thermal_capa_in_kW_th = 620
112 | cost_factor_for_pipes_etc = 1.5
113 | main_component_costs = pd.Series({"Heizkessel": 18216, "Weishaupt Gebläsebrenner": 5399})
114 | value = cost_factor_for_pipes_etc * main_component_costs.sum() / thermal_capa_in_kW_th
115 | return ParDat(name="c_HOB_inv_", data=value, doc="CAPEX", src="@VIESSMANN", unit="€/kW_th")
116 |
117 |
118 | def eta_CHP_el_(fuel: str = "bio") -> ParDat:
119 | if fuel == "bio":
120 | data, src = 0.42, "@PLAN_BIOGAS"
121 | elif fuel == "ng":
122 | data, src = 0.40, "@Mathiesen_2015"
123 | else:
124 |         raise ValueError("fuel_type must be in ['bio', 'ng'].")
125 | return ParDat(
126 | name="eta_CHP_el_", data=data, doc=f"Electric efficiency", src=src, unit="kW_el/kW"
127 | )
128 |
129 |
130 | def eta_CHP_th_(fuel: str = "bio") -> ParDat:
131 | if fuel == "bio":
132 | data, src = 0.42, "@PLAN_BIOGAS"
133 | elif fuel == "ng":
134 | data, src = 0.45, "@Mathiesen_2015"
135 | else:
136 |         raise ValueError("fuel_type must be in ['bio', 'ng'].")
137 | return ParDat(
138 | name="eta_CHP_th_", data=data, doc=f"Thermal efficiency", src=src, unit="kW_th/kW"
139 | )
140 |
141 |
142 | def eta_CHP_el_F() -> ParDat:
143 | name = "eta_CHP_el_F"
144 | bio = eta_CHP_el_(fuel="bio")
145 | ng = eta_CHP_el_(fuel="ng")
146 | ng.data = pd.Series({"ng": ng.data, "bio": bio.data}, name=name)
147 | ng.name = name
148 | ng.src += " " + bio.src
149 | return ng
150 |
151 |
152 | def eta_CHP_th_F() -> ParDat:
153 | name = "eta_CHP_th_F"
154 | bio = eta_CHP_th_(fuel="bio")
155 | ng = eta_CHP_th_(fuel="ng")
156 | ng.data = pd.Series({"ng": ng.data, "bio": bio.data}, name=name)
157 | ng.name = name
158 | ng.src += " " + bio.src
159 | return ng
160 |
--------------------------------------------------------------------------------
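Note: the size lookup in `c_BES_inv_` picks the first bracket whose upper bound covers the estimate, so 800 kWh_el falls into the 1000 kWh_el bracket. A worked sketch:

```python
from draf.prep.param_funcs import c_BES_inv_, c_CHP_inv_

bes = c_BES_inv_(estimated_size=800, which="mean")
print(bes.data, bes.unit)  # 550 €/kWh_el (mean price of the 1000 kWh_el bracket)

chp = c_CHP_inv_(estimated_size=400, fuel_type="ng")
print(round(chp.data))  # 9332.6 * 400**-0.461 ≈ 589 €/kW_el
```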
/draf/prep/pv.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import warnings
3 | from functools import lru_cache
4 | from typing import Dict, List, Optional, Set, Tuple
5 |
6 | import pandas as pd
7 | from elmada.helper import read
8 |
9 | from draf import paths
10 | from draf.prep.gsee_module.pv import run_model
11 | from draf.prep.weather import get_data_for_gsee, get_nearest_stations
12 |
13 | logger = logging.getLogger(__name__)
14 | logger.setLevel(level=logging.WARN)
15 |
16 |
17 | def fit_gsee_model_to_real(
18 | real: pd.Series, gsee_params: Dict, capa: float, get_whole_year_if_sliced: bool = True
19 | ) -> Tuple[pd.Series, float]:
20 | """Runs the gsee model and fits the resulting power time series according
21 | to the annual energy sum.
22 |
23 | Args:
24 | real: Hourly historic PV power.
25 | gsee_params: Year, tilt, azim, system_loss.
26 | capa: PV Capacity in kW_peak.
27 | get_whole_year_if_sliced: If True, models the whole year even if only part-year
28 | data were given.
29 |
30 | Returns:
31 | pd.Series: Resulting PV-power time series.
32 | float: Additional system losses not caused by panel and inverter (fraction).
33 | """
34 | slicer = _get_slicer(real)
35 | e_gsee = get_pv_power(**gsee_params)
36 | gsee_energy = capa * e_gsee[slicer].sum()
37 | real_energy = real[slicer].sum()
38 | system_loss = (gsee_energy - real_energy) / gsee_energy
39 | ser = capa * e_gsee * (1 - system_loss)
40 | result_slicer = slice(None) if get_whole_year_if_sliced else slicer
41 | return ser[result_slicer], system_loss
42 |
43 |
44 | def _get_slicer(real: pd.Series) -> slice:
45 | ser = real[real.notnull()]
46 | start = ser.index[0]
47 | end = ser.index[-1]
48 | is_whole_year = start.day == 1 and start.month == 1 and end.day == 31 and end.month == 12
49 |
50 | if is_whole_year:
51 | return slice(None)
52 | else:
53 | logger.warning(f"Part-year data were given: {start} - {end}.")
54 | return slice(start, end)
55 |
56 |
57 | @lru_cache(maxsize=5)
58 | def get_pv_power(
59 | year: int,
60 | coords: Tuple[float, float],
61 | tilt: float = 0,
62 | azim: float = 180,
63 | capacity: float = 1.0,
64 | tracking: int = 0,
65 | system_loss: float = 0.0,
66 | **gsee_kw,
67 | ):
68 | """Returns electrical PV power using the gsee.pv model with weather data from
69 | the nearest DWD weather station.
70 |
71 | Args:
72 | coords: Latitude and longitude.
73 | tilt: Tilt angle (degrees).
74 | azim: Azimuth angle (degrees, 180 = towards equator).
75 | tracking: Tracking (0: none, 1: 1-axis, 2: 2-axis).
76 |         capacity: Installed capacity in W.
77 | system_loss: Total system power losses (fraction).
78 | """
79 | warnings.simplefilter(action="ignore", category=FutureWarning)
80 | df = get_nearestStationData_for_gsee(year=year, coords=coords)
81 | return run_model(
82 | data=df,
83 | coords=coords,
84 | tilt=tilt,
85 | azim=azim,
86 | tracking=tracking,
87 | capacity=capacity,
88 | system_loss=system_loss,
89 | **gsee_kw,
90 | )
91 |
92 |
93 | def get_nearestStationData_for_gsee(year: int, coords: Tuple[float, float]):
94 | meta = get_nearest_stations(coords=coords, year=year)
95 | logger.info(f"Used stations:\n{meta.to_string()}")
96 | return get_data_for_gsee(
97 | stations_id_air=meta.loc["Stations_id", "air_temperature"],
98 | stations_id_solar=meta.loc["Stations_id", "solar"],
99 | year=year,
100 | )
101 |
102 |
103 | def get_backup_PV_profile() -> pd.Series:
104 | """Get a 60min backup PV profile for 1 kWh_peak for a unspecific non-leapyear."""
105 | return read(paths.DATA_DIR / "pv/backup/pv_el.csv")
106 |
--------------------------------------------------------------------------------
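Note: a usage sketch (coordinates are illustrative; the first call downloads and caches DWD weather data for the nearest station):

```python
from draf.prep.pv import get_backup_PV_profile, get_pv_power

# Normalized hourly PV profile for a 30° tilted, south-facing system near Karlsruhe
ser = get_pv_power(year=2019, coords=(49.01, 8.39), tilt=30, azim=180)
print(ser.sum())  # energy per unit capacity, i.e. roughly the full-load hours

# Fallback profile for when no coordinates are available
backup = get_backup_PV_profile()
```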
/draf/prep/weather.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from io import BytesIO, StringIO
3 | from pathlib import Path
4 | from typing import Dict, List, Optional, Set, Tuple
5 | from urllib.request import urlopen
6 | from zipfile import ZipFile
7 |
8 | import numpy as np
9 | import pandas as pd
10 | import requests
11 | from bs4 import BeautifulSoup
12 | from elmada.helper import read, write
13 | from geopy.distance import great_circle
14 |
15 | from draf.paths import CACHE_DIR
16 |
17 | # TODO: The new https://wetterdienst.readthedocs.io/ package may improve maintainability.
18 |
19 | # TODO: get 10min data and resample to 15min https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/10_minutes/
20 |
21 |
22 | DWD_BASE = "https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/hourly"
23 | MIDDLE = dict(solar="/solar", air_temperature="/air_temperature/historical")
24 |
25 | ZIP = dict(
26 | solar="/stundenwerte_ST_{stations_id:05}_row.zip",
27 | air_temperature="/stundenwerte_TU_{stations_id:05}_{von_datum}_{bis_datum}_hist.zip",
28 | )
29 |
30 | DOC = dict(
31 | solar="/ST_Stundenwerte_Beschreibung_Stationen.txt",
32 | air_temperature="/TU_Stundenwerte_Beschreibung_Stationen.txt",
33 | )
34 |
35 | PRODUKT_TYPE = dict(solar="ST", air_temperature="TU")
36 |
37 |
38 | def get_data_for_gsee(stations_id_air: int, stations_id_solar: int, year: int):
39 | """Provide solar ('global_horziontal' [W/m²], 'diffuse_fraction' [%]) and air
40 | ('temperature' [°C]) data for gsee, see [1].
41 |
42 | Note: In Germany, the mean global radiation should lie between 100 and 135 W/m², see [2].
43 |
44 | [1] https://gsee.readthedocs.io/en/latest/#power-output-from-a-pv-system-with-fixed-panels
45 | [2] https://de.wikipedia.org/w/index.php?title=Globalstrahlung&oldid=198981241#Messung_und_typische_Werte
46 | [3] https://www.translatorscafe.com/unit-converter/de-DE/heat-flux-density/5-1/joule/second/meter%C2%B2-watt/meter%C2%B2
47 | """
48 | at = get_df_from_DWD("air_temperature", stations_id_air)
49 | at.index = pd.to_datetime(at["MESS_DATUM"], format="%Y%m%d%H")
50 | at = at.loc[str(year)]
51 |
52 | sol = get_df_from_DWD("solar", stations_id_solar)
53 | sol.index = pd.to_datetime(sol["MESS_DATUM_WOZ"], format="%Y%m%d%H:%M")
54 | sol = sol.loc[str(year)]
55 |
56 | # fill values of -999 which indicate nans
57 | for k in ["FG_LBERG", "FD_LBERG"]:
58 | sol.loc[sol[k] < 0, k] = np.nan
59 | sol[k] = sol[k].interpolate()
60 |
61 | global_horizontal = sol["FG_LBERG"]
62 | diffuse = sol["FD_LBERG"]
63 | diffuse_fraction = diffuse / global_horizontal
64 |
65 |     # convert from [J/(h·cm²)] to [W/m²]
66 |     global_horizontal *= 10000 / 3600  # J/(h·cm²) -> J/(s·m²) = W/m², see [3] in docstring
67 |
68 | # fill nans due to dividing by zero
69 | diffuse_fraction.fillna(0, inplace=True)
70 |
71 | return pd.DataFrame(
72 | {
73 | "global_horizontal": global_horizontal,
74 | "diffuse_fraction": diffuse_fraction,
75 | "temperature": at["TT_TU"],
76 | }
77 | )
78 |
79 |
80 | def get_air_temp(coords: Tuple[float, float], year: int, with_dt=False) -> pd.Series:
81 | """Returns hourly air temperature for German locations."""
82 | data_type = "air_temperature"
83 | stations_id = get_nearest_station(coords=coords, data_type=data_type, year=year)["Stations_id"]
84 | df = get_df_from_DWD(data_type=data_type, stations_id=stations_id)
85 | df.index = pd.to_datetime(df["MESS_DATUM"], format="%Y%m%d%H")
86 | ser = df.loc[str(year), "TT_TU"]
87 | ser.index.name = "T"
88 | ser.name = "air_temperature"
89 | if not with_dt:
90 | ser = ser.reset_index(drop=True)
91 | return ser
92 |
93 |
94 | def get_df_from_DWD(data_type: str, stations_id: int):
95 | """For a description of data, see [1] for solar and [2] for air temperature.
96 |
97 | [1] https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/hourly/solar/BESCHREIBUNG_obsgermany_climate_hourly_solar_de.pdf
98 | [2] https://opendata.dwd.de/climate_environment/CDC/observations_germany/climate/hourly/air_temperature/historical/BESCHREIBUNG_obsgermany_climate_hourly_tu_historical_de.pdf
99 |
100 |     data_type 'solar':
101 |         STATIONS_ID: station identification number
102 |         MESS_DATUM: end of interval in UTC [yyyymmddhh:mm]
103 |         QN_592: quality level of the following columns [code, see section "Qualitätsinformation"]
104 |         ATMO_LBERG: hourly sum of atmospheric counter-radiation [J/cm²]
105 |         FD_LBERG: hourly sum of diffuse solar radiation [J/cm²]
106 |         FG_LBERG: hourly sum of global radiation [J/cm²]
107 |         SD_LBERG: hourly sum of sunshine duration [min]
108 |         ZENIT: solar zenith angle at mid-interval [degrees]
109 |         MESS_DATUM_WOZ: end of interval in true local solar time (WOZ) [yyyymmddhh:mm]
110 | 
111 |     data_type 'air_temperature':
112 |         STATIONS_ID: station identification number
113 |         MESS_DATUM: timestamp [yyyymmddhh]
114 |         QN_9: quality level of the following columns [code, see section "Qualitätsinformation"]
115 |         TT_TU: air temperature at 2 m height [°C]
116 |         RF_TU: relative humidity [%]
117 |         eor: end of data record
118 | """
119 | stat_id = get_stations_id_string(stations_id)
120 | type_id = PRODUKT_TYPE[data_type].lower()
121 | globlist = list(CACHE_DIR.glob(f"produkt_{type_id}_stunde_*_{stat_id}.parquet"))
122 | if len(globlist) == 1:
123 | fp = globlist[0]
124 | df = read(fp)
125 | return df
126 | elif len(globlist) == 0:
127 | unzip_and_download(data_type=data_type, stations_id=stations_id)
128 | return get_df_from_DWD(data_type=data_type, stations_id=stations_id)
129 | else:
130 | raise RuntimeError(f"Too many zip files for station id {stat_id}.")
131 |
132 |
133 | def unzip_and_download(data_type: str, stations_id: int):
134 | url = get_zip_url(data_type=data_type, stations_id=stations_id)
135 |
136 | with ZipFile(BytesIO(urlopen(url).read()), "r") as myzip:
137 | name = get_produkt_filename_in_zip(myzip)
138 | fp = CACHE_DIR / f"{name}"
139 | fp = fp.with_suffix(".parquet")
140 | with myzip.open(name) as myfile:
141 | my_bytes = myfile.read()
142 | df = pd.read_csv(BytesIO(my_bytes), sep=";")
143 | write(df, fp)
144 |
145 | print(f"Cached {data_type} for station_id={stations_id} to {fp.name}")
146 |
147 |
148 | def get_produkt_filename_in_zip(zipfile):
149 | produkt_files = [i.filename for i in zipfile.filelist if i.filename.startswith("produkt")]
150 | if len(produkt_files) == 1:
151 | return produkt_files[0]
152 | else:
153 |         raise RuntimeError("No unique produkt-file found in the given zip archive.")
154 |
155 |
156 | def get_zip_file_path(data_type: str, stations_id: int):
157 | stat = get_stations_id_string(stations_id)
158 | zips = list(CACHE_DIR.glob(f"stundenwerte_{PRODUKT_TYPE[data_type]}_{stat}_*"))
159 | if len(zips) == 1:
160 | return zips[0]
161 | else:
162 | raise RuntimeError("No or too many zip files for this station id.")
163 |
164 |
165 | def get_stations_id_string(stations_id: int) -> str:
166 | return f"{stations_id:05}"
167 |
168 |
169 | def get_foldername(data_type: str, stations_id: int) -> str:
170 | fp = get_zip_name(data_type=data_type, stations_id=stations_id)
171 | return Path(fp).stem
172 |
173 |
174 | def download_zip(data_type: str, stations_id: int):
175 |     """DEPRECATED: currently unused; 'unzip_and_download' is used instead."""
176 | url = get_zip_url(data_type=data_type, stations_id=stations_id)
177 | zipresp = urlopen(url)
178 | zipfilename = get_zip_name(data_type=data_type, stations_id=stations_id)
179 | with open(CACHE_DIR / f"{zipfilename}", "wb") as file:
180 | file.write(zipresp.read())
181 |
182 |
183 | def get_zip_url(data_type: str, stations_id: int):
184 | zip_name = get_zip_name(data_type=data_type, stations_id=stations_id)
185 | return DWD_BASE + MIDDLE[data_type] + "/" + zip_name
186 |
187 |
188 | def get_zip_name(data_type: str, stations_id: int):
189 | return get_zip_names(data_type=data_type)[stations_id]
190 |
191 |
192 | def get_zip_names(data_type) -> Dict[int, str]:
193 | url = DWD_BASE + MIDDLE[data_type]
194 | page = requests.get(url).text
195 | soup = BeautifulSoup(page, "lxml")
196 | rows = soup.find_all("a")
197 | d = {}
198 | for i in rows:
199 | zip_name = i.get("href")
200 | if zip_name.startswith("stundenwerte"):
201 | station_id = int(zip_name.split("_")[2])
202 | d[station_id] = zip_name
203 | return d
204 |
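# Example of the href parsing above (hedged; the file name is a hypothetical
# instance of the ZIP template defined at the top of this module): for
# "stundenwerte_TU_00003_19500401_20110331_hist.zip",
# zip_name.split("_")[2] yields "00003", so station_id becomes 3.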
205 |
206 | def get_nearest_stations(coords: Tuple[float, float], year: Optional[int] = None) -> pd.DataFrame:
207 | types = ("solar", "air_temperature")
208 | d = {t: get_nearest_station(coords=coords, data_type=t, year=year) for t in types}
209 | return pd.DataFrame(d)
210 |
211 |
212 | def get_nearest_station(
213 | coords: Tuple[float, float], data_type: str = "solar", year: Optional[int] = None
214 | ) -> pd.Series:
215 | assert data_type in ("solar", "air_temperature")
216 | df = read_stations(data_type=data_type)
217 | df = filter_year(df, year=year)
218 |
219 | lats = df["geoBreite"].values
220 | lons = df["geoLaenge"].values
221 | distance = np.zeros_like(lats)
222 |
223 | for i, lat, lon in zip(df.index, lats, lons):
224 | destination = (lat, lon)
225 | distance[i] = great_circle(coords, destination).km
226 | xmin = distance.argmin()
227 | ser = df.loc[xmin].copy()
228 | ser.loc["distance_in_km"] = distance[xmin]
229 | return ser
230 |
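# Usage sketch (hedged; coordinates are illustrative):
# ser = get_nearest_station(coords=(49.01, 8.39), data_type="solar", year=2019)
# ser["Stations_id"], ser["distance_in_km"]  # nearest station id and its distance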
231 |
232 | def filter_year(df, year):
233 | if year is None:
234 | return df
235 | else:
236 | dt = datetime.datetime(year=year, month=12, day=31)
237 | bis = pd.to_datetime(df["bis_datum"], format="%Y%m%d")
238 | return df[bis >= dt].reset_index(drop=True)
239 |
240 |
241 | def read_stations(data_type: str, cache: bool = False) -> pd.DataFrame:
242 | fp_cache = CACHE_DIR / f"stations_{data_type}.parquet"
243 |
244 | if fp_cache.exists() and cache:
245 | df = read(fp_cache)
246 |
247 | else:
248 | s = read_stations_table(data_type=data_type)
249 | header = s.replace("\r", "").split("\n")[0]
250 | col_names = header.split(" ")
251 | df = pd.read_fwf(
252 | StringIO(s),
253 | widths=[6, 9, 8, 15, 12, 10, 42, 98],
254 | header=None,
255 | encoding="utf-8",
256 | skiprows=[0, 1],
257 | names=col_names,
258 | )
259 | write(df, fp_cache)
260 | return df
261 |
262 |
263 | def read_stations_table(data_type: str) -> str:
264 | fp = DWD_BASE + MIDDLE[data_type] + DOC[data_type]
265 | return urlopen(fp).read().decode("latin-1")
266 |
--------------------------------------------------------------------------------
/draf/sort_sections.py:
--------------------------------------------------------------------------------
1 | def sort_lines_in_string(s: str) -> str:
2 | return "\n".join(sorted(s.split("\n"), key=lambda v: v.upper()))
3 |
4 |
5 | def sort_sections(s: str) -> str:
6 | starter = "\n # SORTING_START\n"
7 | ender = " # SORTING_END\n"
8 |
9 | whole_string = s.split(starter)
10 | new = whole_string[0]
11 | rest = whole_string[1:]
12 |
13 | for part in rest:
14 | x = part.split(ender)
15 | new += starter[:-1] + sort_lines_in_string(x[0]) + "\n" + ender + x[1]
16 | return new
17 |
18 |
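# Example (a hedged sketch): for the input string
#   "head\n # SORTING_START\nb = 2\na = 1\n # SORTING_END\ntail"
# sort_sections() returns the same string with the marked block sorted
# case-insensitively:
#   "head\n # SORTING_START\na = 1\nb = 2\n # SORTING_END\ntail"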
19 | if __name__ == "__main__":
20 | from pathlib import Path
21 |
22 | this_dir = Path(__file__).parent
23 | for filename in ["prep/data_base.py", "conventions.py"]:
24 | fp = this_dir / filename
25 | fp.write_text(sort_sections(fp.read_text()))
26 |
--------------------------------------------------------------------------------
/draf/tsa/__init__.py:
--------------------------------------------------------------------------------
1 | """Module for time series analyses."""
2 |
3 | from draf.tsa.demand_analyzer import DemandAnalyzer
4 | from draf.tsa.peak_load import PeakLoadAnalyzer
5 |
--------------------------------------------------------------------------------
/draf/tsa/demand_analyzer.py:
--------------------------------------------------------------------------------
1 | from typing import List, Tuple
2 |
3 | import holidays
4 | import matplotlib.pyplot as plt
5 | import numpy as np
6 | import pandas as pd
7 | import seaborn as sns
8 |
9 | from draf import helper as hp
10 | from draf.core.datetime_handler import DateTimeHandler
11 | from draf.tsa.peak_load import PeakLoadAnalyzer
12 |
13 |
14 | class DemandAnalyzer(DateTimeHandler):
15 | def __init__(
16 | self, p_el: pd.Series, year: int = 2020, freq: str = "15min", ylabel=r"P$_\mathrm{el}$ (kW)"
17 | ) -> None:
18 | self.p_el = p_el
19 | self.ylabel = ylabel
20 | self._set_dtindex(year=year, freq=freq)
21 |
22 | def get_peak_load_analyzer(self) -> PeakLoadAnalyzer:
23 | return PeakLoadAnalyzer(
24 | self.p_el, year=self.year, freq=self.freq, figsize=(10, 2), ylabel=self.ylabel
25 | )
26 |
27 | def show_peaks(
28 | self, target_percentile: int = 95, c_EG: float = 0.12, c_EG_peak: float = 50.0
29 | ) -> PeakLoadAnalyzer:
30 | pla = PeakLoadAnalyzer(
31 | self.p_el, year=self.year, freq=self.freq, figsize=(10, 2), ylabel=self.ylabel
32 | )
33 | pla.set_prices(c_EG=c_EG, c_EG_peak=c_EG_peak)
34 | pla.histo(target_percentile)
35 | return pla
36 |
37 | def show_stats(self):
38 | self.violin_plot()
39 | self.line_plot()
40 | self.ordered_line_plot()
41 | self.averages_plot()
42 | self.weekdays_plot()
43 |
44 |     def get_stats(self) -> str:
45 | step_width = hp.get_step_width(self.freq)
46 | sum_value, sum_unit = hp.auto_fmt(self.p_el.sum() * step_width, "kWh")
47 | std_value, std_unit = hp.auto_fmt(self.p_el.std(), "kW")
48 | peak_to_average = self.p_el.max() / self.p_el.mean()
49 |
50 | data = [
51 | ("Year:", f"{self.year}", ""),
52 | ("Frequency:", f"{hp.int_from_freq(self.freq)}", "minutes"),
53 | ("Length:", f"{len(self.p_el):,.0f}", "time steps"),
54 | ("Annual sum:", f"{sum_value:,.2f}", sum_unit),
55 | ("Standard deviation:", f"{std_value:,.2f}", std_unit),
56 | ("Peak-to-average ratio:", f"{peak_to_average:,.2f}", ""),
57 | ("Full-load hours:", f"{8760*peak_to_average**-1:,.2f}", "h"),
58 | ]
59 | text_table, table_width = self.make_text_table(data)
60 | header = " Metrics ".center(table_width, "-")
61 | return f"{header}\n{text_table}\n"
62 |
63 |     def make_text_table(self, data: List[Tuple[str, str, str]]):
64 | li = []
65 | col_width = [max([len(word) for word in col]) for col in zip(*data)]
66 | for row in data:
67 | li.append(
68 | " ".join(
69 | [
70 | row[0].rjust(col_width[0]),
71 | row[1].rjust(col_width[1]),
72 | row[2].ljust(col_width[2]),
73 | ]
74 | )
75 | )
76 | text_table = "\n".join(li)
77 | table_width = len(li[0])
78 | return text_table, table_width
79 |
80 | def weekdays_plot(self, consider_holidays: bool = True) -> None:
81 | dated_demand = self.dated(self.p_el)
82 | weekdays = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"]
83 |
84 | if consider_holidays:
85 | weekdays.append("Holiday")
86 |
87 | fig, axes = plt.subplots(nrows=2, ncols=len(weekdays), figsize=(10, 3), sharey=True)
88 | axes = zip(*axes) # transpose
89 | adder = " / Holidays" if consider_holidays else ""
90 | fig.suptitle("Weekdays" + adder, fontweight="bold")
91 |
92 | for weekday_number, (ax_tuple, weekday) in enumerate(zip(axes, weekdays)):
93 |
94 | if consider_holidays:
95 | country = "DE"
96 | holis = getattr(holidays, country)(years=self.year)
97 | is_holiday = pd.Series(
98 | dated_demand.index.map(lambda x: x in holis), index=dated_demand.index
99 | )
100 |
101 | if weekday_number <= 6:
102 | is_given_weekday = dated_demand.index.weekday == weekday_number
103 | if consider_holidays:
104 | ser = dated_demand[is_given_weekday & ~is_holiday]
105 | else:
106 | ser = dated_demand[is_given_weekday]
107 | else:
108 | ser = dated_demand[is_holiday]
109 | df = pd.DataFrame(ser.values.reshape((self.steps_per_day, -1), order="F"))
110 |
111 | df.plot(legend=False, alpha=0.1, color="k", linewidth=1, ax=ax_tuple[0])
112 | sns.violinplot(y=ser, ax=ax_tuple[1], scale="width", color="lightblue", cut=0)
113 |
114 | ndays = df.shape[1]
115 | ax_tuple[0].set_title(f"{ndays} x\n{weekday}")
116 | for ax in ax_tuple:
117 | ax.set_ylabel(self.ylabel)
118 | ax.get_xaxis().set_visible(False)
119 | hp.add_thousands_formatter(ax, x=False)
120 |
121 | plt.tight_layout(w_pad=-1.0, h_pad=0)
122 | sns.despine()
123 |
124 | def averages_plot(self):
125 | timeframes = ["Quarters", "Months", "Weeks"]
126 | resamplers = ["Q", "M", "W"]
127 | fig, axes = plt.subplots(
128 | ncols=len(timeframes),
129 | figsize=(10, 1.6),
130 | gridspec_kw={"width_ratios": [4, 12, 52]},
131 | sharey=True,
132 | )
133 | fig.suptitle("Averages", fontweight="bold")
134 | plt.tight_layout()
135 |
136 | for resampler, timeframe, ax in zip(resamplers, timeframes, axes):
137 | ser = self.dated(self.p_el)
138 | ser = ser.resample(resampler).mean()
139 | ser = ser.set_axis(range(1, len(ser) + 1))
140 | ser.plot.bar(width=0.8, ax=ax, color="darkgray")
141 | ax.set_ylabel(self.ylabel)
142 | ax.tick_params(axis="x", labelrotation=0)
143 | ax.set_title(timeframe)
144 | for i, label in enumerate(ax.xaxis.get_ticklabels()[:-1]):
145 | if i % 4 != 0:
146 | label.set_visible(False)
147 | hp.add_thousands_formatter(ax, x=False)
148 | sns.despine()
149 |
150 | def line_plot(self) -> None:
151 | fig, ax = plt.subplots(1, figsize=(10, 2))
152 | plt.tight_layout()
153 | data = self.dated(self.p_el)
154 | data.plot(linewidth=0.6, ax=ax, color="darkgray")
155 | ax.set_ylabel(self.ylabel)
156 | hp.add_thousands_formatter(ax, x=False)
157 | ax.set_ylim(bottom=0)
158 | sns.despine()
159 | ax.set_title("Load curve", fontdict=dict(fontweight="bold"))
160 |
161 | def ordered_line_plot(self) -> None:
162 | fig, ax = plt.subplots(1, figsize=(10, 2))
163 | plt.tight_layout()
164 | data = self.p_el.sort_values(ascending=False).reset_index(drop=True)
165 | data.plot(linewidth=1.5, ax=ax, color="darkgray")
166 | ax.set_title("Ordered annual duration curve", fontdict=dict(fontweight="bold"))
167 | ax.set_ylabel(self.ylabel)
168 | ax.set_xlabel(f"Time steps ({self.freq_unit})")
169 | hp.add_thousands_formatter(ax)
170 | ax.set_ylim(bottom=0)
171 | sns.despine()
172 |
173 | def violin_plot(self) -> None:
174 | fig, ax = plt.subplots(figsize=(10, 2.5))
175 | plt.tight_layout()
176 | ax = sns.violinplot(y=self.p_el, cut=0, width=0.4, scale="width", color="lightblue", ax=ax)
177 | ax.set_ylabel(self.ylabel)
178 | ax.set_xlim(-0.5, 0.85)
179 | ax.set_ylim(bottom=0)
180 | ax.set_title(self.get_stats(), fontdict=dict(fontweight="bold"), fontfamily="monospace")
181 | hp.add_thousands_formatter(ax, x=False)
182 | ax.get_xaxis().set_visible(False)
183 | self._annotate_violins_left_side(ax)
184 | self._annotate_violins_right_side(ax)
185 | sns.despine(bottom=True)
186 |
187 | def _annotate_violins_left_side(self, ax) -> None:
188 | to_annotate = [
189 | ("Max", self.p_el.max()),
190 | ("Mean", self.p_el.mean()),
191 | ("Min", self.p_el.min()),
192 | ]
193 |
194 | for what, value_string in to_annotate:
195 | ax.text(
196 | x=-0.25,
197 | y=value_string,
198 | s=f"{what}: {value_string:,.0f} kW",
199 | color="k",
200 | ha="right",
201 | va="center",
202 | fontweight="bold",
203 | )
204 | ax.annotate(
205 | "",
206 | (0, value_string),
207 | (-0.25, value_string),
208 | arrowprops=dict(arrowstyle="-", linestyle="--", alpha=1),
209 | )
210 |
211 | def _annotate_violins_right_side(self, ax) -> None:
212 | percentile_range = (50, 60, 70, 80, 90, 95, 97, 99)
213 | percentile_locs = np.linspace(0.1, 0.95, len(percentile_range))
214 | y_max = self.p_el.max()
215 |
216 | for pcnt, pcnt_loc in zip(percentile_range, percentile_locs):
217 | quantile = pcnt / 100
218 | value = self.p_el.quantile(q=quantile)
219 | edge_x = 0.21
220 | highlight = pcnt % 10 == 0
221 | alpha = 0.5 if highlight else 1
222 | ax.annotate(
223 | text="",
224 | xy=(0, value),
225 | xytext=(edge_x, value),
226 | arrowprops=dict(arrowstyle="-", linestyle="--", alpha=alpha, shrinkA=0),
227 | )
228 | ax.annotate(
229 | text="",
230 | xy=(edge_x, value),
231 | xytext=(0.31, pcnt_loc * y_max),
232 | arrowprops=dict(arrowstyle="-", linestyle="--", alpha=alpha, shrinkB=0),
233 | )
234 | ax.text(
235 | x=0.31,
236 | y=pcnt_loc * y_max,
237 | s=f"{pcnt} percentile: {value:,.0f} kW (= {y_max - value:,.0f} kW reduction)",
238 | ha="left",
239 | va="center",
240 | alpha=alpha,
241 | )
242 |
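# Usage sketch (hedged; `p_el` stands for a user-supplied 15-min electricity
# demand Series for the given year):
# da = DemandAnalyzer(p_el, year=2020, freq="15min")
# da.show_stats()                            # violin, line, duration curve, averages, weekdays
# pla = da.show_peaks(target_percentile=95)  # returns a PeakLoadAnalyzer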
--------------------------------------------------------------------------------
/draf/tsa/peak_load.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import textwrap
3 | from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Union
4 |
5 | import matplotlib.patches as patches
6 | import matplotlib.pyplot as plt
7 | import numpy as np
8 | import pandas as pd
9 | import seaborn as sns
10 |
11 | from draf import helper as hp
12 | from draf.core.datetime_handler import DateTimeHandler
13 | from draf.core.entity_stores import Params
14 |
15 | logger = logging.getLogger(__name__)
16 | logger.setLevel(level=logging.WARN)
17 |
18 |
19 | class PeakLoadAnalyzer(DateTimeHandler):
20 | def __init__(
21 | self,
22 | p_el: pd.Series,
23 | year: int,
24 | freq: str,
25 | figsize: Tuple = (10, 3),
26 | ylabel: str = "$P_{el}$ (kW)",
27 | ):
28 | self.p_el = p_el
29 | self._set_dtindex(year=year, freq=freq)
30 | self.figsize = figsize
31 | self.params = Params()
32 | self.set_prices()
33 | self.ylabel = ylabel
34 |
35 | def set_prices(self, c_EG=0.12, c_EG_peak=50.0):
36 | p = self.params
37 | p.c_EG = c_EG
38 | p.c_EG_peak = c_EG_peak
39 | p.p_max = self.p_el.max()
40 | p.e_annual_sum = self.p_el.sum() * self.step_width
41 | p.C_EG = p.c_EG * p.e_annual_sum
42 | p.C_EG_peak = p.c_EG_peak * p.p_max
43 | p.C = p.C_EG + p.C_EG_peak
44 |
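    # Worked example for set_prices() (a hedged sketch with illustrative numbers):
    # with c_EG = 0.12 €/kWh, c_EG_peak = 50 €/kW, an annual demand of 5 GWh,
    # and a peak of 1 MW:
    #   C_EG      = 0.12 * 5_000_000 = 600,000 €/a
    #   C_EG_peak = 50   * 1_000     =  50,000 €/a
    #   C         = C_EG + C_EG_peak = 650,000 €/a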
45 | def histo(self, target_percentile: int = 95):
46 |         """Presents the biggest peaks of a load curve as table and bar chart.
47 | 
48 |         Args:
49 |             target_percentile: Percentile of the load curve that serves as the
50 |                 peak-load threshold; all loads above it are treated as peaks.
51 |         """
52 |         assert 1 < target_percentile < 100, "target_percentile must be strictly between 1 and 100"
53 | p_el = self.p_el
54 | self.target_percentile = target_percentile
55 | self.p_el_ordered = p_el_ordered = p_el.sort_values(ascending=False).reset_index(drop=True)
56 | self.target_peakload = target_peakload = p_el.quantile(target_percentile / 100)
57 | self.trimmed_peaks = trimmed_peaks = p_el_ordered[p_el_ordered > target_peakload]
58 | self.nPeaks = len(trimmed_peaks)
59 |
60 | fig, ax = plt.subplots(nrows=4, figsize=(self.figsize[0], self.figsize[1] * 5))
61 | fig.suptitle(self._get_stats(self.target_percentile), fontweight="bold")
62 | fig.set_facecolor("whitesmoke")
63 | self.load_curve_with_threshold_plot(ax[0])
64 | self.ordered_peaks_plot(ax[1])
65 | self.zoomed_ordered_peaks_plot(ax[2])
66 | self.peak_width_bar_plot(ax[3])
67 | sns.despine()
68 | fig.tight_layout()
69 | plt.subplots_adjust(hspace=0.6)
70 |
71 | def _get_stats(self, target_percentile):
72 | reduc = self.p_el_ordered[0] - self.target_peakload
73 | savings_raw = reduc * self.params.c_EG_peak
74 | rel_savings = savings_raw / self.params.C_EG_peak
75 | savings, savings_unit = hp.auto_fmt(savings_raw, "€/a")
76 | return textwrap.dedent(
77 | f"""\
78 | Peak load reduction of {reduc:,.0f} kW
79 | from {self.p_el.max():,.0f} to {self.target_peakload:,.0f} kW ({target_percentile:.0f} percentile)
80 |
81 |             Savings: {rel_savings:.1%} network costs (= {savings:,.2f} {savings_unit} with {self.params.c_EG_peak:,.0f} €/kW)
82 | """
83 | )
84 |
85 | def load_curve_with_threshold_plot(self, ax):
86 | ax.plot(self.p_el, label="Original load curve", lw=0.6, c="darkgray")
87 | ax.axhline(
88 | self.target_peakload,
89 | c="firebrick",
90 | ls="--",
91 | label=f"Threshold = {self.target_peakload:,.0f} kW",
92 | lw=1,
93 | )
94 | ax.set(ylabel=self.ylabel)
95 | ax.set_ylim(bottom=0, top=self.p_el.max())
96 | ax.set_title("Load curve", fontweight="bold")
97 | ax.margins(y=0.0, x=0.0)
98 | ax.legend(loc="lower center", ncol=3)
99 | hp.add_thousands_formatter(ax)
100 |
101 | def ordered_peaks_plot(self, ax):
102 | trimmed_peaks = self.trimmed_peaks
103 | target_peakload = self.target_peakload
104 | p_el_ordered = self.p_el_ordered
105 | nPeaks = self.nPeaks
106 | ax.add_patch(
107 | patches.Rectangle(
108 | (0, target_peakload),
109 | trimmed_peaks.size,
110 | self.p_el.max() - target_peakload,
111 | linewidth=0.5,
112 | color="firebrick",
113 | alpha=0.1,
114 | )
115 | )
116 | ax.plot(p_el_ordered, lw=2, c="darkgray")
117 | ax.set_title("Ordered load duration curve", fontweight="bold")
118 | ax.plot(trimmed_peaks, c="firebrick", lw=2)
119 | ax.margins(y=0.0, x=0.0)
120 | ax.set(ylabel=self.ylabel, xlabel=f"Time ({self.freq_unit})")
121 | ax.annotate(
122 | textwrap.dedent(
123 | f"""\
124 | Peak loads above {target_peakload:,.0f} kW
125 | occur in {nPeaks:,.0f} time steps (≈ {nPeaks / (self.steps_per_day / 24):,.0f} hours)
126 | """
127 | ),
128 | xy=(nPeaks, target_peakload),
129 | xytext=(len(p_el_ordered) * 0.02, p_el_ordered[0] * 0.01),
130 | arrowprops=dict(
131 | facecolor="lightgray", shrink=0.05, linewidth=0, width=2, headwidth=8, headlength=8
132 | ),
133 | )
134 | ax.set_ylim(bottom=0, top=p_el_ordered[0])
135 | ax.margins(y=0.0)
136 | hp.add_thousands_formatter(ax)
137 |
138 | def zoomed_ordered_peaks_plot(self, ax):
139 | ax.patch.set_facecolor("firebrick")
140 | ax.patch.set_alpha(0.1)
141 | plt.tight_layout()
142 | self.trimmed_peaks.plot(markersize=5, linewidth=3, color="firebrick", ax=ax)
143 | ax.set_title(f"Zoom on {self.nPeaks:,.0f} highest peaks", fontweight="bold")
144 | ax.set(ylabel=self.ylabel, xlabel=f"Time ({self.freq_unit})")
145 | hp.add_thousands_formatter(ax, x=False)
146 |
147 | def peak_width_bar_plot(self, ax):
148 | ser = self._get_peak_widths()
149 | ser.plot.bar(
150 | width=0.9,
151 | label=textwrap.dedent(
152 | f"""\
153 |                 Counts of peak-widths within the {self.nPeaks:,.0f} highest peaks
154 |                 (e.g., a peak duration of {ser.index[0]:,.0f} time steps occurs {ser.iloc[0]:,.0f} times)"""
155 | ),
156 | color="firebrick",
157 | ax=ax,
158 | )
159 | ax.set(xlabel=f"Peak duration ({self.freq_unit})", ylabel="Frequency")
160 | ax.set_title("Peak durations", fontweight="bold")
161 | ax.legend(loc="upper right")
162 | hp.add_thousands_formatter(ax, x=False)
163 |
164 | def _get_peak_widths(self):
165 | p_el_np = self.p_el.values
166 | target_peakload = self.target_peakload
167 | peak_width_list = np.array([])
168 | b = 0
169 | for i in range(len(p_el_np)):
170 | if target_peakload < p_el_np[i] <= p_el_np.max():
171 | b += 1
172 | else:
173 | if b > 0:
174 | peak_width_list = np.append(peak_width_list, b)
175 | b = 0
176 | uni = np.unique(peak_width_list, return_counts=True)
177 | return pd.Series(data=uni[1], index=uni[0].astype(int))
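    # Example (hedged): for p_el = [1, 9, 9, 1, 9, 1] and a threshold of 5, the
    # runs above the threshold have widths [2, 1], so this method returns the
    # Series {1: 1, 2: 1} mapping peak duration to frequency.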
178 |
179 | def simulate_BES(
180 | self,
181 | e_bes_capa: float = 1000.0,
182 | p_bes_max: float = 1000.0,
183 | c_bes_inv: float = 500.0,
184 | threshold: Optional[float] = None,
185 | transfer_threshold_from_histo: bool = True,
186 | ) -> None:
187 |         """Simulate a Battery Energy Storage (BES) with a given capacity and maximum power.
188 |
189 | Args:
190 | e_bes_capa: Capacity of BES in kWh
191 | p_bes_max: Maximum power of BES in kW
192 | c_bes_inv: Investment costs of BES in € / kWh_el
193 |             threshold: Threshold of the charging strategy. The system tries to
194 |                 charge the battery whenever the time series exceeds this value.
195 |             transfer_threshold_from_histo: Whether the threshold shall be transferred from histo().
196 | """
197 |
198 | assert e_bes_capa >= 0
199 | assert p_bes_max >= 0
200 |
201 | if transfer_threshold_from_histo:
202 | if hasattr(self, "target_peakload"):
203 | switch_point = self.target_peakload
204 | else:
205 | raise Exception(
206 | "If `transfer_threshold_from_histo` is switched on, "
207 | "histo() has to be executed first."
208 | )
209 | else:
210 | switch_point = threshold
211 |
212 | p_el = self.p_el.values
213 |
214 | p_BES_T = np.zeros(len(p_el))
215 | p_eex_buy = np.zeros(len(p_el))
216 | load = np.zeros(len(p_el))
217 | unload = np.zeros(len(p_el))
218 |
219 | for t, val in enumerate(p_el):
220 | if t == 0:
221 | p_BES_T[t] = 0
222 | load[t] = 0
223 | elif val > switch_point:
224 | if p_BES_T[t - 1] < (e_bes_capa - (val - switch_point)):
225 | load[t] = min(e_bes_capa - p_BES_T[t - 1], val - switch_point, p_bes_max)
226 | else:
227 | load[t] = 0
228 | elif val < switch_point:
229 | unload[t] = min(p_BES_T[t - 1], switch_point - val, p_bes_max)
230 |
231 | p_BES_T[t] = p_BES_T[t - 1] + load[t] - unload[t]
232 | p_eex_buy[t] = val - load[t] + unload[t]
233 |
234 | # Plot results
235 | fig, ax = plt.subplots(figsize=(10, 4))
236 | ax.plot(p_el, label="P_el", c="r")
237 | ax.plot(p_BES_T, label="p_BES_T")
238 | ax.plot(p_eex_buy, label="P_eex_buy", c="g")
239 | ax.plot(load, label="load")
240 | ax.plot(unload, label="unload")
241 | ax.plot([switch_point for t in range(len(p_el))], label="switch point")
242 | ax.plot([p_eex_buy.max() for t in range(len(p_el))], label="EEX_max")
243 |
244 | def get_success():
245 | if switch_point == p_eex_buy.max():
246 | return (
247 | f"reduce the maximum peak power by {self.p_el.max() - p_eex_buy.max():,.0f} kW."
248 | )
249 | else:
250 | return (
251 | "only reduce the maximum peak power by "
252 | f"{self.p_el.max() - p_eex_buy.max():,.0f} kW instead of "
253 | f"the wanted {self.p_el.max() - switch_point:,.0f} kW."
254 | )
255 |
256 | title = (
257 | f"The battery storage system with {e_bes_capa:,.0f} kWh capacity\n"
258 | f" and {p_bes_max:,.0f} kW loading/unloading power "
259 | f"(~{c_bes_inv * e_bes_capa:,.0f} €) could\n"
260 | f"{get_success()}"
261 | )
262 | ax.margins(0)
263 | ax.set_title(title, y=1.03, fontsize=12, weight="bold")
264 | fig.legend(ncol=1, loc="center right", bbox_to_anchor=(1.15, 0.5))
265 | fig.tight_layout(pad=0.4, w_pad=0.5, h_pad=3.0)
266 | sns.despine()
267 |
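    # Usage sketch (hedged; `pla` stands for a PeakLoadAnalyzer instance and the
    # sizes are illustrative). With the default transfer_threshold_from_histo=True,
    # histo() must run first to set the threshold:
    #   pla.histo(target_percentile=95)
    #   pla.simulate_BES(e_bes_capa=2000.0, p_bes_max=500.0)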
268 |     def simulate_SA(self) -> None:
269 |         """Sensitivity analysis over BES capacity and power using simulate_BES()."""
270 |         for e_bes_capa in np.arange(3000, 10000, 3000):
271 |             for p_bes_max in np.arange(0, 4000, 1000):
272 |                 self.simulate_BES(e_bes_capa=e_bes_capa, p_bes_max=p_bes_max)
273 |
--------------------------------------------------------------------------------
/draf_cheat_sheet.md:
--------------------------------------------------------------------------------
1 | # DRAF CHEAT SHEET
2 |
3 | ## Common work flow
4 |
5 | ```python
6 | # import draf and its pre-defined component templates:
7 | import draf
8 | from draf.components import *
9 |
10 | # create a case study:
11 | cs = draf.CaseStudy("my_case_study", year=2019, country="DE", freq="60min",
12 | coords=(49.01, 8.39), consider_invest=True)
13 |
14 | # define a modeling horizon (default: whole year, here: 2 days):
15 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
16 |
17 | # create and parametrize a reference scenario:
18 | sc = cs.add_REF_scen("REF", components=[BES, Main, EG, eDem, PV])
19 | sc.update_params(E_BES_CAPx_=100)
20 |
21 | # create a second scenario based on "REF" and update a parameter:
22 | sc_new = cs.add_scen("new_scen", based_on="REF")
23 | sc_new.update_params(E_BES_CAPx_=500)
24 |
25 | # solve all scenarios:
26 | cs.optimize()
27 |
28 | # save the case study (including all scenarios and data) to your hard disk:
29 | cs.save()
30 | ```
31 |
32 | ```python
33 | cs = draf.open_latest_casestudy("my_case_study")
34 |
35 | # get an overview of the results:
36 | cs.plot()
37 | ```
38 |
39 | ## Interactive analysis
40 |
41 | - `cs.scens` - show scenarios
42 | - `sc.dims` - show dimensions
43 | - `sc.params` - show parameter entities
44 | - `sc.vars` - show variable entities
45 | - `sc.res` - show result entities
46 |
47 | ### On a case study
48 |
49 | - `cs.scens.REF` - access a specific scenario
50 | - `cs.plot()` - access essential plots and tables
51 | - `cs.plot.describe_interact()` - description of all entities
52 | - `cs.optimize(solver_params=dict(MIPGap=0))` - set a [MIPGap](https://www.gurobi.com/documentation/9.5/refman/mipgap2.html)
53 |
54 | ### On a scenario
55 |
56 | - `sc.params.c_EG_RTP_T` - access a specific parameter entity
57 | - `sc.res.P_EG_buy_T` - access a specific results entity
58 | - `sc.plot.describe()` - description of all entities of this scenario
59 |
60 | ### On an entity
61 |
62 | - `cs.dated(my_entity_T)` - add a date-time index to a time series
63 | - `my_entity_T.plot()` - plot an entity with [pd.Series.plot](https://pandas.pydata.org/docs/reference/api/pandas.Series.plot.html)
64 |
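A minimal sketch combining the two calls above (hedged; assumes a solved scenario `sc` within a case study `cs`):

```python
ser = cs.dated(sc.res.P_EG_buy_T)  # attach the date-time index to the time series
ser.plot()                         # quick inspection via pd.Series.plot
```
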
65 | ### Paths
66 |
67 | - `draf.paths.DATA_DIR` - draf data directory
68 | - `draf.paths.RESULTS_DIR` - draf results directory
69 | - `draf.paths.CACHE_DIR` - draf cache directory
70 | - `elmada.paths.CACHE_DIR` - elmada cache directory
71 |
72 | ### [Elmada](https://github.com/DrafProject/elmada)
73 |
74 | - `import elmada`
75 | - `elmada.get_prices(method="hist_EP")` - get historic day-ahead market prices from ENTSO-E
76 | - `elmada.get_emissions(method="XEF_EP")` - get historic grid-mix emission factors
77 |
78 | ## Building own components
79 |
80 | ```python
81 | from draf import Collectors, Dimensions, Params, Results, Scenario, Vars
82 | from gurobipy import GRB, Model, quicksum
83 | from draf.prep import DataBase as db
84 |
85 | # short version
86 | class MyShortComponent:
87 | def param_func(self, sc): ...
88 | def model_func(self, sc, m, d, p, v, c): ...
89 |
90 | # advanced version
91 | class MyAdvancedComponent(Component):
92 | """Description of your own component"""
93 |
94 | def dim_func(self, sc: Scenario):
95 | sc.dim("F", ["ng", "bio"], doc="Types of fuel")
96 |
97 | def param_func(self, sc: Scenario):
98 | sc.collector("F_fuel_F", doc="Fuel power", unit="kWh")
99 | sc.param(from_db=db.c_Fuel_F)
100 | sc.var("C_Fuel_ceTax_", doc="Total carbon tax on fuel", unit="k€/a")
101 |
102 | def model_func(self, sc: Scenario, m: Model, d: Dimensions, p: Params, v: Vars, c: Collectors):
103 | m.addConstr(v.C_Fuel_ == p.k__dT_ * v.F_fuel_F.prod(p.c_Fuel_F) * conv("€", "k€", 1e-3))
104 | c.CE_TOT_["Fuel"] = v.CE_Fuel_
105 | c.P_EL_source_T["PV"] = lambda t: v.P_PV_FI_T[t] + v.P_PV_OC_T[t]
106 | ```
107 |
108 | ### param_func
109 |
110 | - `sc.collector("P_EL_source_T", doc="...", unit="kW_el")` - add a collector
111 | - `sc.var("C_TOT_", doc="Total costs", unit="k€/a", lb=-GRB.INFINITY)` - define a variable that can be negative
112 | - `sc.var("P_PV_FI_T", doc="Feed-in", unit="kW_el")` - define a variable with time dimension
113 | - `sc.param("c_PV_inv_", data=200, doc="Investment costs", unit="€/kW_p", src="my_data_source")`
114 | - `sc.param(from_db=db.c_Fuel_F)` - use data from the [draf database](draf/prep/data_base.py)
115 | - prepper functions (default parameter name is the function name):
116 | - `sc.prep.c_EG_T()` - real-time-prices-tariffs
117 | - `sc.prep.ce_EG_T()` - dynamic carbon emission factors
118 | - `sc.prep.P_eDem_T(profile="G1", annual_energy=5e6)` - electricity demand with 5 GWh/a
119 | - `sc.prep.dQ_hDem_T(annual_energy=2e6, target_temp=22.0, threshold_temp=15.0)` - heating demand with 2 GWh/a using weather data
120 | - `sc.prep.P_PV_profile_T()` - photovoltaic profile
121 | - `sc.prep.c_EG_addon_(AbLa_surcharge=0.00003, Concession_fee=0.0011, ...)` - electricity price components other than wholesale prices
122 | - `sc.prep.T__amb_T()` - ambient air temperature from nearest weather station
123 |
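A minimal `param_func` sketch combining several of the calls above (hedged; the parameter values and doc strings are illustrative):

```python
def param_func(self, sc: Scenario):
    sc.collector("P_EL_source_T", doc="Electricity sources", unit="kW_el")
    sc.param("c_PV_inv_", data=200, doc="Investment costs", unit="€/kW_p", src="my_data_source")
    sc.param(from_db=db.c_Fuel_F)
    sc.prep.c_EG_T()
    sc.prep.P_eDem_T(profile="G1", annual_energy=5e6)
```
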
124 | ### model_func
125 |
126 | - General
127 | - `d.T` - time index (special dimension)
128 | - `p.k__dT_` - time step (special parameter)
129 | - `p.k__PartYearComp_` - weighting factor to compensate part year analysis (special parameter)
130 | - [GurobiPy Syntax](https://www.gurobi.com/documentation), e.g.:
131 | - `from gurobipy import GRB, Model, quicksum` - import GurobiPy objects
132 | - `m.setObjective((...), GRB.MINIMIZE)` - set objective
133 | - `m.addConstr((v.your == p.constraint * v.goes + p.here), "constr_1")` - add one constraint
134 | - `m.addConstrs((v.your_T[t] == p.constraint_T[t] * v.goes_ + p.here_T[t] for t in d.T), "constr_2")` - add a set of constraints
135 | - [Pyomo Syntax](http://www.pyomo.org/documentation), e.g.:
136 | - `import pyomo.environ as pyo` - import Pyomo
137 | - `pyo.Objective(expr=(...), sense=pyo.minimize)` - set objective
138 | - `m.constr_1 = pyo.Constraint(expr=(v.your == p.constraint * v.goes + p.here))` - add one constraint
139 | - `m.constr_1 = pyo.Constraint(d.T, rule=lambda t: v.your_T[t] == p.constraint_T[t] * v.goes_ + p.here_T[t])` - add a set of constraints
140 |
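A minimal `model_func` sketch in GurobiPy syntax (hedged; the entity names follow the examples above and are illustrative):

```python
def model_func(self, sc, m, d, p, v, c):
    m.addConstrs((v.P_EG_buy_T[t] == p.P_eDem_T[t] for t in d.T), "el_balance")
    m.setObjective(quicksum(p.k__dT_ * p.c_EG_T[t] * v.P_EG_buy_T[t] for t in d.T), GRB.MINIMIZE)
```
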
141 | ## Helper functions
142 |
143 | - `draf.helper.address2coords("Moltkestraße 30, Karlsruhe")` - converts an address to geo coordinates
144 | - `draf.helper.play_beep_sound()` - play a beep sound (is used in the `cs.optimize()` routine)
145 | - `draf.helper.read(...)` - draf default read function
146 | - `draf.helper.write(...)` - draf default write function
147 | - `pd.read_csv(...)` - read an external csv file
148 |
149 | ## Naming conventions
150 |
151 | ### Naming conventions for entities
152 |
153 | In general, entity names should adhere to the following structure:
154 | `<etype>_<component>_<description>_<dimensions>`
155 |
156 | |Examples:|entity 1|entity 2|
157 | |-|-|-|
158 | |Entity name|`P_EG_buy_T`|`c_PV_inv_`|
159 | |Etype|`P`|`c`|
160 | |Component|`EG`|`PV`|
161 | |Description|`buy`|`inv`|
162 | |Dimension|`T`|`-`|
163 |
164 | For typical symbols, see [draf/conventions.py](draf/conventions.py).
165 |
166 | ### Naming conventions for Python objects
167 |
168 | | short | long |
169 | |-------|------------------|
170 | | `cs` | CaseStudy object |
171 | | `sc`, `scen` | Scenario object |
172 | | `m`, `mdl` | Model |
173 | | `d`, `dims` | Dimension container object |
174 | | `p`, `params` | Parameters container object |
175 | | `v`, `vars` | Variables container object |
176 | | `r`, `res` | Results container object |
177 | | `ent` | Entity: a variable or parameter |
178 | | `doc` | Documentation / description string |
179 | | `constr` | Constraint |
180 | | `meta` | Meta data |
181 | | `df` | Pandas `DataFrame` |
182 | | `ser` | Pandas `Series` |
183 | | `fp` | file path |
184 | | `gp` | `gurobipy` - the Gurobi Python Interface |
185 |
186 | ## Versioning of draf
187 |
188 | Bump version (replace `<part>` with `major`, `minor`, or `patch`):
189 |
190 | ```sh
191 | bump2version --dry-run --verbose <part>
192 | bump2version <part>
193 | git push origin <branch>
194 | ```
195 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: draf
2 | channels:
3 | - conda-forge
4 | dependencies: # NOTE: here '=' (single)
5 | - ephem # dependency of gsee_module
6 | - geopy
7 | - glpk
8 | - ipython
9 | - ipywidgets
10 | - jupyter
11 | - matplotlib
12 | - numpy
13 | - numpy-financial
14 | - pandas
15 | - pip
16 | - plotly
17 | - pvlib-python >= 0.6.3 # dependency of gsee_module
18 | - pyomo
19 | - python = 3.9
20 | - seaborn
21 | - pip: # NOTE: here '==' (double)
22 | - elmada
23 | - holidays
24 | - ray
25 | - '--editable=.[dev,jupyter]' # installs a full editable version of draf
26 |
--------------------------------------------------------------------------------
/environments/environment_py37.yml:
--------------------------------------------------------------------------------
1 | name: draf
2 | channels:
3 | - conda-forge
4 | dependencies: # NOTE: here '=' (single)
5 | - glpk=4.65
6 | - gsee=0.3
7 | - ipython=7.22
8 | - ipywidgets=7.6
9 | - jupyter=1.0
10 | - matplotlib=3.3
11 | - numpy=1.19
12 | - pandas=0.24
13 | - pip
14 | - plotly=5.3
15 | - pyomo=5.7
16 | - python=3.7
17 | - seaborn=0.11
18 | - pip: # NOTE: here '==' (double)
19 | - elmada==0.1.*
20 | - holidays==0.9.*
21 | - '--editable=.[dev,jupyter]' # installs a full editable version of draf
22 |
--------------------------------------------------------------------------------
/environments/environment_py37explicit_win64.txt:
--------------------------------------------------------------------------------
1 | # This file may be used to create an environment using:
2 | # $ conda create --name --file
3 | # platform: win-64
4 | @EXPLICIT
5 | https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2021.5.30-h5b45459_0.tar.bz2
6 | https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2021.3.0-h57928b3_3372.tar.bz2
7 | https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
8 | https://conda.anaconda.org/conda-forge/win-64/pandoc-2.14.1-h8ffe710_0.tar.bz2
9 | https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.20348.0-h57928b3_0.tar.bz2
10 | https://conda.anaconda.org/conda-forge/win-64/winpty-0.4.3-4.tar.bz2
11 | https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
12 | https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
13 | https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.29.30037-h902a5da_5.tar.bz2
14 | https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
15 | https://conda.anaconda.org/conda-forge/win-64/vc-14.2-hb210afc_5.tar.bz2
16 | https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.0-h0e60522_0.tar.bz2
17 | https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2
18 | https://conda.anaconda.org/conda-forge/win-64/glpk-4.65-h8ffe710_1004.tar.bz2
19 | https://conda.anaconda.org/conda-forge/win-64/icu-68.1-h0e60522_0.tar.bz2
20 | https://conda.anaconda.org/conda-forge/win-64/jbig-2.1-h8d14728_2003.tar.bz2
21 | https://conda.anaconda.org/conda-forge/win-64/jpeg-9d-h8ffe710_0.tar.bz2
22 | https://conda.anaconda.org/conda-forge/win-64/lerc-2.2.1-h0e60522_0.tar.bz2
23 | https://conda.anaconda.org/conda-forge/win-64/libclang-11.1.0-default_h5c34c98_1.tar.bz2
24 | https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.7-h8ffe710_5.tar.bz2
25 | https://conda.anaconda.org/conda-forge/win-64/libsodium-1.0.18-h8d14728_1.tar.bz2
26 | https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.3-h8ffe710_1.tar.bz2
27 | https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
28 | https://conda.anaconda.org/conda-forge/win-64/openssl-1.1.1k-h8ffe710_1.tar.bz2
29 | https://conda.anaconda.org/conda-forge/win-64/sqlite-3.36.0-h8ffe710_0.tar.bz2
30 | https://conda.anaconda.org/conda-forge/win-64/tbb-2021.3.0-h2d74725_0.tar.bz2
31 | https://conda.anaconda.org/conda-forge/win-64/tk-8.6.10-h8ffe710_1.tar.bz2
32 | https://conda.anaconda.org/conda-forge/win-64/xz-5.2.5-h62dcd97_1.tar.bz2
33 | https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.11-h62dcd97_1010.tar.bz2
34 | https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h0e5069d_3.tar.bz2
35 | https://conda.anaconda.org/conda-forge/win-64/krb5-1.19.2-hbae68bd_0.tar.bz2
36 | https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.37-h1d00b33_2.tar.bz2
37 | https://conda.anaconda.org/conda-forge/win-64/libssh2-1.9.0-h680486a_6.tar.bz2
38 | https://conda.anaconda.org/conda-forge/win-64/libzip-1.8.0-hfed4ece_0.tar.bz2
39 | https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
40 | https://conda.anaconda.org/conda-forge/win-64/mkl-2021.3.0-hb70f87d_564.tar.bz2
41 | https://conda.anaconda.org/conda-forge/win-64/python-3.7.10-h7840368_100_cpython.tar.bz2
42 | https://conda.anaconda.org/conda-forge/win-64/zeromq-4.3.4-h0e60522_0.tar.bz2
43 | https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.0-h6255e5f_0.tar.bz2
44 | https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2
45 | https://conda.anaconda.org/conda-forge/noarch/async_generator-1.10-py_0.tar.bz2
46 | https://conda.anaconda.org/conda-forge/noarch/attrs-21.2.0-pyhd8ed1ab_0.tar.bz2
47 | https://conda.anaconda.org/conda-forge/noarch/backcall-0.2.0-pyh9f0ad1d_0.tar.bz2
48 | https://conda.anaconda.org/conda-forge/noarch/backports-1.0-py_2.tar.bz2
49 | https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.0.0-pyhd8ed1ab_0.tar.bz2
50 | https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2
51 | https://conda.anaconda.org/conda-forge/noarch/decorator-5.0.9-pyhd8ed1ab_0.tar.bz2
52 | https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2
53 | https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.3-pyhd8ed1ab_1003.tar.bz2
54 | https://conda.anaconda.org/conda-forge/win-64/freetype-2.10.4-h546665d_1.tar.bz2
55 | https://conda.anaconda.org/conda-forge/noarch/idna-3.1-pyhd3deb0d_0.tar.bz2
56 | https://conda.anaconda.org/conda-forge/noarch/ipython_genutils-0.2.0-py_1.tar.bz2
57 | https://conda.anaconda.org/conda-forge/noarch/jupyterlab_widgets-1.0.0-pyhd8ed1ab_1.tar.bz2
58 | https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-11_win64_mkl.tar.bz2
59 | https://conda.anaconda.org/conda-forge/win-64/libcurl-7.78.0-h789b8ee_0.tar.bz2
60 | https://conda.anaconda.org/conda-forge/win-64/libtiff-4.3.0-h0c97f57_1.tar.bz2
61 | https://conda.anaconda.org/conda-forge/noarch/nest-asyncio-1.5.1-pyhd8ed1ab_0.tar.bz2
62 | https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2
63 | https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2
64 | https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.4.2-py_1.tar.bz2
65 | https://conda.anaconda.org/conda-forge/noarch/parso-0.8.2-pyhd8ed1ab_0.tar.bz2
66 | https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2
67 | https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2
68 | https://conda.anaconda.org/conda-forge/noarch/prometheus_client-0.11.0-pyhd8ed1ab_0.tar.bz2
69 | https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2
70 | https://conda.anaconda.org/conda-forge/win-64/pyephem-3.7.6.0-py37hfa6e2cd_1000.tar.bz2
71 | https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2
72 | https://conda.anaconda.org/conda-forge/win-64/python_abi-3.7-2_cp37m.tar.bz2
73 | https://conda.anaconda.org/conda-forge/noarch/pytz-2021.1-pyhd8ed1ab_0.tar.bz2
74 | https://conda.anaconda.org/conda-forge/win-64/qt-5.12.9-h5909a2a_4.tar.bz2
75 | https://conda.anaconda.org/conda-forge/noarch/qtpy-1.10.0-pyhd8ed1ab_0.tar.bz2
76 | https://conda.anaconda.org/conda-forge/noarch/send2trash-1.8.0-pyhd8ed1ab_0.tar.bz2
77 | https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
78 | https://conda.anaconda.org/conda-forge/noarch/testpath-0.5.0-pyhd8ed1ab_0.tar.bz2
79 | https://conda.anaconda.org/conda-forge/noarch/typing_extensions-3.10.0.0-pyha770c72_0.tar.bz2
80 | https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-py_1.tar.bz2
81 | https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.0-pyhd8ed1ab_1.tar.bz2
82 | https://conda.anaconda.org/conda-forge/noarch/zipp-3.5.0-pyhd8ed1ab_0.tar.bz2
83 | https://conda.anaconda.org/conda-forge/win-64/certifi-2021.5.30-py37h03978a9_0.tar.bz2
84 | https://conda.anaconda.org/conda-forge/win-64/cffi-1.14.6-py37hd8e9650_0.tar.bz2
85 | https://conda.anaconda.org/conda-forge/win-64/chardet-4.0.0-py37h03978a9_1.tar.bz2
86 | https://conda.anaconda.org/conda-forge/win-64/curl-7.78.0-h789b8ee_0.tar.bz2
87 | https://conda.anaconda.org/conda-forge/noarch/cycler-0.10.0-py_2.tar.bz2
88 | https://conda.anaconda.org/conda-forge/win-64/ephem-4.0.0.2-py37hcc03f2d_0.tar.bz2
89 | https://conda.anaconda.org/conda-forge/win-64/hdf5-1.10.6-nompi_h5268f04_1114.tar.bz2
90 | https://conda.anaconda.org/conda-forge/win-64/importlib-metadata-4.6.4-py37h03978a9_0.tar.bz2
91 | https://conda.anaconda.org/conda-forge/win-64/jedi-0.18.0-py37h03978a9_2.tar.bz2
92 | https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.3.1-py37h8c56517_1.tar.bz2
93 | https://conda.anaconda.org/conda-forge/win-64/lcms2-2.12-h2a16943_0.tar.bz2
94 | https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-11_win64_mkl.tar.bz2
95 | https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-11_win64_mkl.tar.bz2
96 | https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.36.0-py37habb0c8c_0.tar.bz2
97 | https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.0.1-py37hcc03f2d_0.tar.bz2
98 | https://conda.anaconda.org/conda-forge/win-64/mistune-0.8.4-py37hcc03f2d_1004.tar.bz2
99 | https://conda.anaconda.org/conda-forge/win-64/mock-4.0.3-py37h03978a9_1.tar.bz2
100 | https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.4.0-hb211442_1.tar.bz2
101 | https://conda.anaconda.org/conda-forge/noarch/packaging-21.0-pyhd8ed1ab_0.tar.bz2
102 | https://conda.anaconda.org/conda-forge/win-64/pyqt5-sip-4.19.18-py37hf2a7229_7.tar.bz2
103 | https://conda.anaconda.org/conda-forge/win-64/pyrsistent-0.17.3-py37hcc03f2d_2.tar.bz2
104 | https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
105 | https://conda.anaconda.org/conda-forge/win-64/pywin32-301-py37hcc03f2d_0.tar.bz2
106 | https://conda.anaconda.org/conda-forge/win-64/pywinpty-1.1.3-py37h7f67f24_0.tar.bz2
107 | https://conda.anaconda.org/conda-forge/win-64/pyzmq-22.2.1-py37hcce574b_0.tar.bz2
108 | https://conda.anaconda.org/conda-forge/win-64/setuptools-57.4.0-py37h03978a9_0.tar.bz2
109 | https://conda.anaconda.org/conda-forge/win-64/tornado-6.1-py37hcc03f2d_1.tar.bz2
110 | https://conda.anaconda.org/conda-forge/noarch/traitlets-5.0.5-py_0.tar.bz2
111 | https://conda.anaconda.org/conda-forge/win-64/win_inet_pton-1.1.0-py37h03978a9_2.tar.bz2
112 | https://conda.anaconda.org/conda-forge/win-64/argon2-cffi-20.1.0-py37hcc03f2d_2.tar.bz2
113 | https://conda.anaconda.org/conda-forge/noarch/backports.functools_lru_cache-1.6.4-pyhd8ed1ab_0.tar.bz2
114 | https://conda.anaconda.org/conda-forge/noarch/bleach-4.0.0-pyhd8ed1ab_0.tar.bz2
115 | https://conda.anaconda.org/conda-forge/win-64/brotlipy-0.7.0-py37hcc03f2d_1001.tar.bz2
116 | https://conda.anaconda.org/conda-forge/win-64/cryptography-3.4.7-py37h20c650d_0.tar.bz2
117 | https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2
118 | https://conda.anaconda.org/conda-forge/noarch/joblib-1.0.1-pyhd8ed1ab_0.tar.bz2
119 | https://conda.anaconda.org/conda-forge/noarch/jsonschema-3.2.0-pyhd8ed1ab_3.tar.bz2
120 | https://conda.anaconda.org/conda-forge/win-64/jupyter_core-4.7.1-py37h03978a9_0.tar.bz2
121 | https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.8.0-nompi_hf689e7d_103.tar.bz2
122 | https://conda.anaconda.org/conda-forge/win-64/numpy-1.19.5-py37hcbcd69c_2.tar.bz2
123 | https://conda.anaconda.org/conda-forge/win-64/pillow-8.3.1-py37hd7d9ad0_0.tar.bz2
124 | https://conda.anaconda.org/conda-forge/noarch/pip-21.2.4-pyhd8ed1ab_0.tar.bz2
125 | https://conda.anaconda.org/conda-forge/noarch/pygments-2.10.0-pyhd8ed1ab_0.tar.bz2
126 | https://conda.anaconda.org/conda-forge/win-64/pyqt-impl-5.12.3-py37hf2a7229_7.tar.bz2
127 | https://conda.anaconda.org/conda-forge/win-64/pysocks-1.7.1-py37h03978a9_3.tar.bz2
128 | https://conda.anaconda.org/conda-forge/noarch/pyutilib-6.0.0-pyh9f0ad1d_0.tar.bz2
129 | https://conda.anaconda.org/conda-forge/win-64/terminado-0.11.1-py37h03978a9_0.tar.bz2
130 | https://conda.anaconda.org/conda-forge/win-64/cftime-1.5.0-py37hebb4d16_0.tar.bz2
131 | https://conda.anaconda.org/conda-forge/noarch/jupyter_client-6.1.12-pyhd8ed1ab_0.tar.bz2
132 | https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.1.2-pyh9f0ad1d_0.tar.bz2
133 | https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.3.3-py37h3379fd5_0.tar.bz2
134 | https://conda.anaconda.org/conda-forge/noarch/nbformat-5.1.3-pyhd8ed1ab_0.tar.bz2
135 | https://conda.anaconda.org/conda-forge/win-64/numba-0.53.0-py37h49d82ab_0.tar.bz2
136 | https://conda.anaconda.org/conda-forge/win-64/numexpr-2.7.3-py37h08fd248_0.tar.bz2
137 | https://conda.anaconda.org/conda-forge/win-64/pandas-0.24.0-py37h6538335_0.tar.bz2
138 | https://conda.anaconda.org/conda-forge/win-64/pyomo-5.7.3-py37hf2a7229_0.tar.bz2
139 | https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2
140 | https://conda.anaconda.org/conda-forge/win-64/pyqtchart-5.12-py37hf2a7229_7.tar.bz2
141 | https://conda.anaconda.org/conda-forge/win-64/pyqtwebengine-5.12.1-py37hf2a7229_7.tar.bz2
142 | https://conda.anaconda.org/conda-forge/win-64/scipy-1.7.1-py37hb6553fb_0.tar.bz2
143 | https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.5-pyh9f0ad1d_2.tar.bz2
144 | https://conda.anaconda.org/conda-forge/noarch/nbclient-0.5.4-pyhd8ed1ab_0.tar.bz2
145 | https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.5.7-nompi_py37h5c66228_100.tar.bz2
146 | https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.1-py_0.tar.bz2
147 | https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.19-pyha770c72_0.tar.bz2
148 | https://conda.anaconda.org/conda-forge/win-64/pyqt-5.12.3-py37h03978a9_7.tar.bz2
149 | https://conda.anaconda.org/conda-forge/win-64/pytables-3.6.1-py37hdc91d43_3.tar.bz2
150 | https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.11.1-pyhd8ed1ab_1.tar.bz2
151 | https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.6-pyhd8ed1ab_0.tar.bz2
152 | https://conda.anaconda.org/conda-forge/win-64/xarray-0.11.0-py37_1000.tar.bz2
153 | https://conda.anaconda.org/conda-forge/win-64/ipython-7.22.0-py37h7813e69_0.tar.bz2
154 | https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.3.3-py37h03978a9_0.tar.bz2
155 | https://conda.anaconda.org/conda-forge/win-64/nbconvert-6.1.0-py37h03978a9_0.tar.bz2
156 | https://conda.anaconda.org/conda-forge/noarch/prompt_toolkit-3.0.19-hd8ed1ab_0.tar.bz2
157 | https://conda.anaconda.org/conda-forge/noarch/requests-2.26.0-pyhd8ed1ab_0.tar.bz2
158 | https://conda.anaconda.org/conda-forge/win-64/statsmodels-0.12.2-py37hda49f71_0.tar.bz2
159 | https://conda.anaconda.org/conda-forge/win-64/ipykernel-5.5.5-py37h7813e69_0.tar.bz2
160 | https://conda.anaconda.org/conda-forge/noarch/pvlib-python-0.8.1-pyhd8ed1ab_0.tar.bz2
161 | https://conda.anaconda.org/conda-forge/noarch/seaborn-0.11.1-hd8ed1ab_1.tar.bz2
162 | https://conda.anaconda.org/conda-forge/win-64/gsee-0.3.1-py37hfa6e2cd_0.tar.bz2
163 | https://conda.anaconda.org/conda-forge/noarch/jupyter_console-6.4.0-pyhd8ed1ab_0.tar.bz2
164 | https://conda.anaconda.org/conda-forge/noarch/notebook-6.4.3-pyha770c72_0.tar.bz2
165 | https://conda.anaconda.org/conda-forge/noarch/qtconsole-5.1.1-pyhd8ed1ab_0.tar.bz2
166 | https://conda.anaconda.org/conda-forge/win-64/widgetsnbextension-3.5.1-py37h03978a9_4.tar.bz2
167 | https://conda.anaconda.org/conda-forge/noarch/ipywidgets-7.6.3-pyhd3deb0d_0.tar.bz2
168 | https://conda.anaconda.org/conda-forge/win-64/jupyter-1.0.0-py37h03978a9_6.tar.bz2
169 |
--------------------------------------------------------------------------------
/environments/environment_py37general.yml:
--------------------------------------------------------------------------------
1 | name: draf
2 | channels:
3 | - conda-forge
4 | dependencies: # NOTE: here '=' (single)
5 | - glpk
6 | - gsee
7 | - ipython
8 | - ipywidgets
9 | - jupyter
10 | - matplotlib
11 | - numpy<1.20 # https://github.com/numpy/numpy/issues/18355
12 | - pandas
13 | - pip
14 | - plotly
15 | - pyomo>=5.7
16 | - python=3.7
17 | - seaborn
18 | - pip: # NOTE: here '==' (double)
19 | - holidays
20 | - elmada
21 | - '--editable=.[dev,jupyter]' # installs a full editable version of draf
22 |
--------------------------------------------------------------------------------
/environments/environment_py39all_mac.yml:
--------------------------------------------------------------------------------
1 | name: draf39
2 | channels:
3 | - gurobi
4 | - conda-forge
5 | - https://conda.anaconda.org/gurobi
6 | - defaults
7 | dependencies:
8 | - anyio=3.5.0=py39hecd8cb5_0
9 | - appnope=0.1.2=py39hecd8cb5_1001
10 | - argon2-cffi=21.3.0=pyhd8ed1ab_0
11 | - argon2-cffi-bindings=21.2.0=py39h89e85a6_1
12 | - asttokens=2.0.5=pyhd3eb1b0_0
13 | - attrs=22.1.0=py39hecd8cb5_0
14 | - backcall=0.2.0=pyhd3eb1b0_0
15 | - backports=1.1=pyhd3eb1b0_0
16 | - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0
17 | - beautifulsoup4=4.11.1=py39hecd8cb5_0
18 | - bleach=4.1.0=pyhd3eb1b0_0
19 | - brotli=1.0.9=hca72f7f_7
20 | - brotli-bin=1.0.9=hca72f7f_7
21 | - brotlipy=0.7.0=py39h89e85a6_1003
22 | - bzip2=1.0.8=h0d85af4_4
23 | - c-ares=1.18.1=hca72f7f_0
24 | - ca-certificates=2023.01.10=hecd8cb5_0
25 | - cached-property=1.5.2=hd8ed1ab_1
26 | - cached_property=1.5.2=pyha770c72_1
27 | - certifi=2022.12.7=py39hecd8cb5_0
28 | - cffi=1.15.1=py39h6c40b1e_3
29 | - cftime=1.6.0=py39h86b5767_0
30 | - charset-normalizer=2.0.12=pyhd8ed1ab_0
31 | - comm=0.1.2=py39hecd8cb5_0
32 | - cryptography=38.0.4=py39hf6deb26_0
33 | - curl=7.82.0=h9f20792_0
34 | - cycler=0.11.0=pyhd3eb1b0_0
35 | - dataclasses=0.8=pyh6d0b6a4_7
36 | - dbus=1.13.6=h811a1a6_3
37 | - debugpy=1.5.1=py39he9d5cce_0
38 | - decorator=5.1.1=pyhd3eb1b0_0
39 | - defusedxml=0.7.1=pyhd8ed1ab_0
40 | - entrypoints=0.4=py39hecd8cb5_0
41 | - ephem=4.1.3=py39h89e85a6_2
42 | - et_xmlfile=1.1.0=py39hecd8cb5_0
43 | - executing=0.8.3=pyhd3eb1b0_0
44 | - expat=2.4.9=he9d5cce_0
45 | - flit-core=3.7.1=pyhd8ed1ab_0
46 | - fonttools=4.28.5=py39h89e85a6_0
47 | - freetype=2.11.0=hd8bbffd_0
48 | - geographiclib=1.52=pyhd8ed1ab_0
49 | - geopy=2.2.0=pyhd8ed1ab_0
50 | - gettext=0.19.8.1=hd1a6beb_1008
51 | - giflib=5.2.1=hbcb3906_2
52 | - glpk=5.0=h3cb5acd_0
53 | - gmp=6.2.1=he9d5cce_3
54 | - gurobi=10.0.0=py39_0
55 | - h5py=3.6.0=nompi_py39hbc6cb89_100
56 | - hdf4=4.2.15=hefd3b78_3
57 | - hdf5=1.12.1=nompi_ha60fbc9_104
58 | - icu=69.1=he49afe7_0
59 | - idna=3.4=py39hecd8cb5_0
60 | - ipykernel=6.19.2=py39h01d92e1_0
61 | - ipython=8.2.0=py39h6e9494a_0
62 | - ipython_genutils=0.2.0=pyhd3eb1b0_1
63 | - ipywidgets=7.7.0=pyhd8ed1ab_0
64 | - jbig=2.1=h0d85af4_2003
65 | - jedi=0.18.1=py39hecd8cb5_1
66 | - jinja2=3.1.2=py39hecd8cb5_0
67 | - jpeg=9e=hca72f7f_0
68 | - jsonschema=4.16.0=py39hecd8cb5_0
69 | - jupyter=1.0.0=py39h6e9494a_7
70 | - jupyter_client=7.4.8=py39hecd8cb5_0
71 | - jupyter_console=6.4.4=py39hecd8cb5_0
72 | - jupyter_core=5.1.1=py39hecd8cb5_0
73 | - jupyter_server=1.23.4=py39hecd8cb5_0
74 | - jupyterlab_pygments=0.1.2=py_0
75 | - jupyterlab_widgets=1.1.0=pyhd8ed1ab_0
76 | - kiwisolver=1.4.4=py39hcec6c5f_0
77 | - krb5=1.19.4=hdba6334_0
78 | - lcms2=2.12=hf1fd2bf_0
79 | - lerc=3.0=he9d5cce_0
80 | - libblas=3.9.0=13_osx64_openblas
81 | - libbrotlicommon=1.0.9=hca72f7f_7
82 | - libbrotlidec=1.0.9=hca72f7f_7
83 | - libbrotlienc=1.0.9=hca72f7f_7
84 | - libcblas=3.9.0=13_osx64_openblas
85 | - libclang=13.0.1=default_he082bbe_0
86 | - libcurl=7.82.0=h9f20792_0
87 | - libcxx=14.0.6=h9765a3e_0
88 | - libdeflate=1.10=h0d85af4_0
89 | - libedit=3.1.20221030=h6c40b1e_0
90 | - libev=4.33=h9ed2024_1
91 | - libffi=3.4.2=hecd8cb5_6
92 | - libgfortran=5.0.0=11_3_0_hecd8cb5_28
93 | - libgfortran5=11.3.0=h9dfd629_28
94 | - libglib=2.70.2=hf1fb8c0_4
95 | - libiconv=1.16=hca72f7f_2
96 | - liblapack=3.9.0=13_osx64_openblas
97 | - libllvm11=11.1.0=h46f1229_6
98 | - libllvm13=13.0.1=h64f94b2_2
99 | - libnetcdf=4.8.1=nompi_h6609ca0_101
100 | - libnghttp2=1.47.0=h942079c_0
101 | - libopenblas=0.3.18=openmp_h3351f45_0
102 | - libpng=1.6.37=h7cec526_2
103 | - libpq=14.2=hea3049e_0
104 | - libsodium=1.0.18=hbcb3906_1
105 | - libssh2=1.10.0=h52ee1ee_2
106 | - libtiff=4.3.0=h17f2ce3_3
107 | - libwebp=1.2.4=h56c3ce4_0
108 | - libwebp-base=1.2.4=hca72f7f_0
109 | - libxcb=1.13=h0d85af4_1004
110 | - libzip=1.8.0=h8b0c345_1
111 | - libzlib=1.2.11=h6c3fc93_1014
112 | - llvm-openmp=14.0.6=h0dcd299_0
113 | - llvmlite=0.39.1=py39h8346a28_0
114 | - lz4-c=1.9.3=he49afe7_1
115 | - markupsafe=2.1.1=py39hca72f7f_0
116 | - matplotlib=3.5.1=py39h6e9494a_0
117 | - matplotlib-base=3.5.1=py39hb07454d_0
118 | - matplotlib-inline=0.1.6=py39hecd8cb5_0
119 | - mistune=0.8.4=py39h89e85a6_1005
120 | - munkres=1.1.4=py_0
121 | - mysql-common=8.0.28=hab7e275_2
122 | - mysql-libs=8.0.28=h2a44bf5_2
123 | - nbclassic=0.4.8=py39hecd8cb5_0
124 | - nbclient=0.5.13=py39hecd8cb5_0
125 | - nbconvert=6.4.5
126 | - nbconvert-core=6.4.5
127 | - nbconvert-pandoc=6.4.5
128 | - nbformat=5.7.0=py39hecd8cb5_0
129 | - ncurses=6.4=hcec6c5f_0
130 | - nest-asyncio=1.5.6=py39hecd8cb5_0
131 | - netcdf4=1.6.2=py39hd243f81_0
132 | - notebook=6.5.2=py39hecd8cb5_0
133 | - notebook-shim=0.2.2=py39hecd8cb5_0
134 | - nspr=4.33=he9d5cce_0
135 | - nss=3.76=hfce436b_0
136 | - numba=0.56.4=py39h07fba90_0
137 | - numpy=1.22.3=py39hf56e92f_0
138 | - numpy-financial=1.0.0=pyhd8ed1ab_0
139 | - openjpeg=2.4.0=h6e7aa92_1
140 | - openpyxl=3.0.9=pyhd3eb1b0_0
141 | - openssl=1.1.1s=hca72f7f_0
142 | - packaging=22.0=py39hecd8cb5_0
143 | - pandas=1.4.1=py39h4d6be9b_0
144 | - pandoc=2.17.1.1=h694c41f_0
145 | - pandocfilters=1.5.0=pyhd3eb1b0_0
146 | - parso=0.8.3=pyhd3eb1b0_0
147 | - patsy=0.5.3=py39hecd8cb5_0
148 | - pcre=8.45=h23ab428_0
149 | - pexpect=4.8.0=pyhd3eb1b0_3
150 | - pickleshare=0.7.5=pyhd3eb1b0_1003
151 | - pillow=9.0.1=py39hd2c7aa1_2
152 | - pip=22.0.4=pyhd8ed1ab_0
153 | - plotly=5.6.0=pyhd8ed1ab_0
154 | - ply=3.11=py_1
155 | - prometheus_client=0.14.1=py39hecd8cb5_0
156 | - prompt-toolkit=3.0.36=py39hecd8cb5_0
157 | - prompt_toolkit=3.0.36=hd3eb1b0_0
158 | - psutil=5.9.0=py39hca72f7f_0
159 | - pthread-stubs=0.4=hc929b4f_1001
160 | - ptyprocess=0.7.0=pyhd3eb1b0_2
161 | - pure_eval=0.2.2=pyhd3eb1b0_0
162 | - pvlib-python=0.9.1=pyhd8ed1ab_0
163 | - pycodestyle=2.8.0=pyhd3eb1b0_0
164 | - pycparser=2.21=pyhd3eb1b0_0
165 | - pygments=2.11.2=pyhd3eb1b0_0
166 | - pyopenssl=22.0.0=pyhd3eb1b0_0
167 | - pyparsing=3.0.9=py39hecd8cb5_0
168 | - pyqt=5.12.3=py39h9d385e7_4
169 | - pyrsistent=0.18.1=py39h89e85a6_0
170 | - pysocks=1.7.1=py39h6e9494a_4
171 | - python=3.9.12=h8b4d769_1_cpython
172 | - python-dateutil=2.8.2=pyhd3eb1b0_0
173 | - python-fastjsonschema=2.16.2=py39hecd8cb5_0
174 | - python_abi=3.9=2_cp39
175 | - pytz=2022.7=py39hecd8cb5_0
176 | - pyzmq=23.2.0=py39he9d5cce_0
177 | - qt=5.12.9=h2a607e2_5
178 | - qtconsole=5.4.0=py39hecd8cb5_0
179 | - qtpy=2.2.0=py39hecd8cb5_0
180 | - readline=8.2=hca72f7f_0
181 | - requests=2.28.1=py39hecd8cb5_0
182 | - scipy=1.8.0=py39h056f1c0_1
183 | - seaborn=0.11.2=hd8ed1ab_0
184 | - seaborn-base=0.11.2=pyhd8ed1ab_0
185 | - send2trash=1.8.0=pyhd3eb1b0_1
186 | - setuptools=65.6.3=py39hecd8cb5_0
187 | - six=1.16.0=pyhd3eb1b0_1
188 | - sniffio=1.2.0=py39hecd8cb5_1
189 | - soupsieve=2.3.2.post1=py39hecd8cb5_0
190 | - sqlite=3.37.1=hb516253_0
191 | - stack_data=0.2.0=pyhd3eb1b0_0
192 | - statsmodels=0.13.5=py39hacda100_0
193 | - tbb=2021.6.0=ha357a0b_1
194 | - tenacity=8.0.1=py39hecd8cb5_1
195 | - terminado=0.17.1=py39hecd8cb5_0
196 | - testpath=0.6.0=py39hecd8cb5_0
197 | - tk=8.6.12=h5dbffcc_0
198 | - tornado=6.2=py39hca72f7f_0
199 | - traitlets=5.7.1=py39hecd8cb5_0
200 | - tzdata=2022g=h04d1e81_0
201 | - urllib3=1.26.14=py39hecd8cb5_0
202 | - wcwidth=0.2.5=pyh9f0ad1d_2
203 | - webencodings=0.5.1=py39hecd8cb5_1
204 | - websocket-client=0.58.0=py39hecd8cb5_4
205 | - wheel=0.37.1=pyhd3eb1b0_0
206 | - widgetsnbextension=3.6.0=py39h6e9494a_0
207 | - xorg-libxau=1.0.9=h35c211d_0
208 | - xorg-libxdmcp=1.1.3=h35c211d_0
209 | - xz=5.2.10=h6c40b1e_1
210 | - zeromq=4.3.4=he49afe7_1
211 | - zlib=1.2.11=h6c3fc93_1014
212 | - zstd=1.5.2=h582d3a0_0
213 | - pip:
214 | - absl-py==1.0.0
215 | - aiohttp==3.8.3
216 | - aiosignal==1.3.1
217 | - appdirs==1.4.4
218 | - arxiv-latex-cleaner==0.1.27
219 | - async-timeout==4.0.2
220 | - black==22.3.0
221 | - bump2version==1.0.1
222 | - click==8.1.0
223 | - convertdate==2.4.0
224 | - coverage==6.3.2
225 | - deprecated==1.2.13
226 | - dotmap==1.3.30
227 | - elmada==0.1.0
228 | - entsoe-py==0.2.10
229 | - execnet==1.9.0
230 | - filelock==3.6.0
231 | - frozenlist==1.3.3
232 | - grpcio==1.43.0
233 | - gurobipy==10.0.0
234 | - highspy==1.5.0.dev0
235 | - hijri-converter==2.2.3
236 | - holidays==0.13
237 | - inflection==0.5.1
238 | - iniconfig==1.1.1
239 | - isort==5.10.1
240 | - joblib==1.2.0
241 | - jupyter-contrib-core==0.3.3
242 | - jupyter-contrib-nbextensions==0.5.1
243 | - jupyter-highlight-selected-word==0.2.0
244 | - jupyter-latex-envs==1.4.6
245 | - jupyter-nbextensions-configurator==0.4.1
246 | - jupyternotify==0.1.15
247 | - jupytext==1.13.7
248 | - kaleido==0.2.1
249 | - korean-lunar-calendar==0.2.1
250 | - lxml==4.8.0
251 | - markdown-it-py==1.1.0
252 | - mdit-py-plugins==0.3.0
253 | - meteostat==1.6.1
254 | - more-itertools==8.12.0
255 | - mpmath==1.3.0
256 | - msgpack==1.0.3
257 | - multidict==6.0.4
258 | - mypy==0.942
259 | - mypy-extensions==0.4.3
260 | - networkx==3.0
261 | - pathspec==0.9.0
262 | - pdfkit==1.0.0
263 | - platformdirs==2.5.1
264 | - pluggy==1.0.0
265 | - protobuf==3.20.0
266 | - py==1.11.0
267 | - pyarrow==7.0.0
268 | - pybind11==2.10.3
269 | - pymeeus==0.5.11
270 | - pyomo==6.4.4
271 | - pypdf2==3.0.1
272 | - pyqt5-sip==4.19.18
273 | - pyqtchart==5.12
274 | - pyqtwebengine==5.12.1
275 | - pytest==7.1.1
276 | - pytest-cov==3.0.0
277 | - pytest-forked==1.4.0
278 | - pytest-mock==3.7.0
279 | - pytest-xdist==2.5.0
280 | - pyyaml==6.0
281 | - quandl==3.7.0
282 | - ray==1.11.0
283 | - redis==4.2.2
284 | - ruamel-yaml==0.17.21
285 | - ruamel-yaml-clib==0.2.7
286 | - scikit-learn==1.2.1
287 | - sympy==1.11.1
288 | - tabulate==0.8.9
289 | - threadpoolctl==3.1.0
290 | - timebudget==0.7.1
291 | - toml==0.10.2
292 | - tomli==2.0.1
293 | - tqdm==4.63.1
294 | - tsam==2.2.2
295 | - typing-extensions==4.1.1
296 | - windpowerlib==0.2.1
297 | - wrapt==1.14.0
298 | - xlrd==2.0.1
299 | - yarl==1.8.2
300 | - '--editable=.[dev,jupyter]' # installs a full editable version of draf
301 |
--------------------------------------------------------------------------------
/environments/environment_py39all_win64.yml:
--------------------------------------------------------------------------------
1 | name: draf39
2 | channels:
3 | - conda-forge
4 | - http://conda.anaconda.org/gurobi
5 | - defaults
6 | dependencies:
7 | - argon2-cffi=21.1.0=py39hb82d6ee_0
8 | - async_generator=1.10=py_0
9 | - attrs=21.2.0=pyhd8ed1ab_0
10 | - backcall=0.2.0=pyh9f0ad1d_0
11 | - backports=1.0=py_2
12 | - backports.functools_lru_cache=1.6.4=pyhd8ed1ab_0
13 | - bleach=4.1.0=pyhd8ed1ab_0
14 | - brotlipy=0.7.0=py39hb82d6ee_1001
15 | - bzip2=1.0.8=h8ffe710_4
16 | - ca-certificates=2021.10.8=h5b45459_0
17 | - cached-property=1.5.2=hd8ed1ab_1
18 | - cached_property=1.5.2=pyha770c72_1
19 | - certifi=2021.10.8=py39hcbf5309_0
20 | - cffi=1.14.6=py39h0878f49_1
21 | - cftime=1.5.1=py39h5d4886f_0
22 | - chardet=4.0.0=py39hcbf5309_1
23 | - colorama=0.4.4=pyh9f0ad1d_0
24 | - cryptography=35.0.0=py39hd8d06c1_0
25 | - curl=7.79.1=h789b8ee_1
26 | - cycler=0.10.0=py_2
27 | - dataclasses=0.8=pyhc8e2a94_3
28 | - debugpy=1.4.1=py39h415ef7b_0
29 | - decorator=5.1.0=pyhd8ed1ab_0
30 | - defusedxml=0.7.1=pyhd8ed1ab_0
31 | - entrypoints=0.3=pyhd8ed1ab_1003
32 | - ephem=4.1=py39hb82d6ee_0
33 | - freetype=2.10.4=h546665d_1
34 | - glpk=5.0=h8ffe710_0
35 | - h5py=3.4.0=nompi_py39hd4deaf1_101
36 | - hdf4=4.2.15=h0e5069d_3
37 | - hdf5=1.12.1=nompi_h2a0e4a3_101
38 | - icu=68.1=h0e60522_0
39 | - importlib-metadata=4.8.1=py39hcbf5309_0
40 | - intel-openmp=2021.4.0=h57928b3_3556
41 | - ipykernel=6.4.2=py39h832f523_0
42 | - ipython=7.28.0=py39h832f523_0
43 | - ipython_genutils=0.2.0=py_1
44 | - ipywidgets=7.6.5=pyhd8ed1ab_0
45 | - jbig=2.1=h8d14728_2003
46 | - jedi=0.18.0=py39hcbf5309_2
47 | - jinja2=3.0.2=pyhd8ed1ab_0
48 | - jpeg=9d=h8ffe710_0
49 | - jsonschema=4.1.2=pyhd8ed1ab_0
50 | - jupyter=1.0.0=py39hcbf5309_6
51 | - jupyter_client=7.0.6=pyhd8ed1ab_0
52 | - jupyter_console=6.4.0=pyhd8ed1ab_0
53 | - jupyter_core=4.8.1=py39hcbf5309_0
54 | - jupyterlab_pygments=0.1.2=pyh9f0ad1d_0
55 | - jupyterlab_widgets=1.0.2=pyhd8ed1ab_0
56 | - kiwisolver=1.3.2=py39h2e07f2f_0
57 | - krb5=1.19.2=hbae68bd_2
58 | - lcms2=2.12=h2a16943_0
59 | - lerc=3.0=h0e60522_0
60 | - libblas=3.9.0=8_mkl
61 | - libcblas=3.9.0=8_mkl
62 | - libclang=11.1.0=default_h5c34c98_1
63 | - libcurl=7.79.1=h789b8ee_1
64 | - libdeflate=1.8=h8ffe710_0
65 | - liblapack=3.9.0=8_mkl
66 | - libnetcdf=4.8.1=nompi_h1cc8e9d_101
67 | - libpng=1.6.37=h1d00b33_2
68 | - libsodium=1.0.18=h8d14728_1
69 | - libssh2=1.10.0=h680486a_2
70 | - libtiff=4.3.0=hd413186_2
71 | - libzip=1.8.0=hfed4ece_1
72 | - libzlib=1.2.11=h8ffe710_1013
73 | - llvmlite=0.36.0=py39h34b8924_4
74 | - lz4-c=1.9.3=h8ffe710_1
75 | - m2w64-gcc-libgfortran=5.3.0=6
76 | - m2w64-gcc-libs=5.3.0=7
77 | - m2w64-gcc-libs-core=5.3.0=7
78 | - m2w64-gmp=6.1.0=2
79 | - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
80 | - markupsafe=2.0.1=py39hb82d6ee_0
81 | - matplotlib=3.4.3=py39hcbf5309_1
82 | - matplotlib-base=3.4.3=py39h581301d_1
83 | - matplotlib-inline=0.1.3=pyhd8ed1ab_0
84 | - mistune=0.8.4=py39hb82d6ee_1004
85 | - mkl=2020.4=hb70f87d_311
86 | - msys2-conda-epoch=20160418=1
87 | - nbclient=0.5.4=pyhd8ed1ab_0
88 | - nbconvert=6.2.0=py39hcbf5309_0
89 | - nbformat=5.1.3=pyhd8ed1ab_0
90 | - nest-asyncio=1.5.1=pyhd8ed1ab_0
91 | - netcdf4=1.5.7=nompi_py39hf113b1f_103
92 | - notebook=6.4.5=pyha770c72_0
93 | - numba=0.53.1=py39hf11a4ad_0
94 | - numpy=1.21.3=py39h6635163_0
95 | - olefile=0.46=pyh9f0ad1d_1
96 | - openjpeg=2.4.0=hb211442_1
97 | - openssl=1.1.1l=h8ffe710_0
98 | - packaging=21.0=pyhd8ed1ab_0
99 | - pandas=1.3.4=py39h2e25243_0
100 | - pandoc=2.14.2=h8ffe710_0
101 | - pandocfilters=1.5.0=pyhd8ed1ab_0
102 | - parso=0.8.2=pyhd8ed1ab_0
103 | - patsy=0.5.2=pyhd8ed1ab_0
104 | - pickleshare=0.7.5=py_1003
105 | - pillow=8.3.2=py39h916092e_0
106 | - pip=21.3=pyhd8ed1ab_0
107 | - plotly=5.3.1=pyhd8ed1ab_0
108 | - ply=3.11=py_1
109 | - prometheus_client=0.11.0=pyhd8ed1ab_0
110 | - prompt-toolkit=3.0.20=pyha770c72_0
111 | - prompt_toolkit=3.0.20=hd8ed1ab_0
112 | - pvlib-python=0.9.0=pyhd8ed1ab_1
113 | - pycparser=2.20=pyh9f0ad1d_2
114 | - pygments=2.10.0=pyhd8ed1ab_0
115 | - pyomo=6.1.2=py39h415ef7b_0
116 | - pyopenssl=21.0.0=pyhd8ed1ab_0
117 | - pyparsing=2.4.7=pyh9f0ad1d_0
118 | - pyqt=5.12.3=py39hcbf5309_7
119 | - pyqt-impl=5.12.3=py39h415ef7b_7
120 | - pyqt5-sip=4.19.18=py39h415ef7b_7
121 | - pyqtchart=5.12=py39h415ef7b_7
122 | - pyqtwebengine=5.12.1=py39h415ef7b_7
123 | - pyrsistent=0.17.3=py39hb82d6ee_2
124 | - pysocks=1.7.1=py39hcbf5309_3
125 | - python=3.9.7=h7840368_3_cpython
126 | - python-dateutil=2.8.2=pyhd8ed1ab_0
127 | - python_abi=3.9=2_cp39
128 | - pytz=2021.3=pyhd8ed1ab_0
129 | - pywin32=301=py39hb82d6ee_0
130 | - pywinpty=1.1.4=py39h99910a6_0
131 | - pyzmq=22.3.0=py39he46f08e_0
132 | - qt=5.12.9=h5909a2a_4
133 | - qtconsole=5.1.1=pyhd8ed1ab_0
134 | - qtpy=1.11.2=pyhd8ed1ab_0
135 | - requests=2.26.0=pyhd8ed1ab_0
136 | - scipy=1.7.1=py39hc0c34ad_0
137 | - seaborn=0.11.2=hd8ed1ab_0
138 | - seaborn-base=0.11.2=pyhd8ed1ab_0
139 | - send2trash=1.8.0=pyhd8ed1ab_0
140 | - setuptools=58.2.0=py39hcbf5309_0
141 | - six=1.16.0=pyh6c4a22f_0
142 | - sqlite=3.36.0=h8ffe710_2
143 | - statsmodels=0.13.0=py39h5d4886f_0
144 | - tbb=2020.2=h2d74725_4
145 | - tenacity=8.0.1=pyhd8ed1ab_0
146 | - terminado=0.12.1=py39hcbf5309_0
147 | - testpath=0.5.0=pyhd8ed1ab_0
148 | - tk=8.6.11=h8ffe710_1
149 | - tornado=6.1=py39hb82d6ee_1
150 | - traitlets=5.1.0=pyhd8ed1ab_0
151 | - tzdata=2021d=he74cb21_0
152 | - ucrt=10.0.20348.0=h57928b3_0
153 | - urllib3=1.26.7=pyhd8ed1ab_0
154 | - vc=14.2=hb210afc_5
155 | - vs2015_runtime=14.29.30037=h902a5da_5
156 | - wcwidth=0.2.5=pyh9f0ad1d_2
157 | - webencodings=0.5.1=py_1
158 | - wheel=0.37.0=pyhd8ed1ab_1
159 | - widgetsnbextension=3.5.1=py39hcbf5309_4
160 | - win_inet_pton=1.1.0=py39hcbf5309_2
161 | - winpty=0.4.3=4
162 | - xz=5.2.5=h62dcd97_1
163 | - zeromq=4.3.4=h0e60522_1
164 | - zipp=3.6.0=pyhd8ed1ab_0
165 | - zlib=1.2.11=h8ffe710_1013
166 | - zstd=1.5.0=h6255e5f_0
167 | - pip:
168 | - appdirs==1.4.4
169 | - atomicwrites==1.4.0
170 | - beautifulsoup4==4.10.0
171 | - black==21.9b0
172 | - bump2version==1.0.1
173 | - charset-normalizer==2.0.7
174 | - click==8.0.3
175 | - convertdate==2.3.2
176 | - coverage==6.0.2
177 | - elmada==0.1.0
178 | - entsoe-py==0.2.10
179 | - execnet==1.9.0
180 | - gurobipy==9.1.2
181 | - hijri-converter==2.2.2
182 | - holidays==0.11.3.1
183 | - idna==3.3
184 | - inflection==0.5.1
185 | - iniconfig==1.1.1
186 | - isort==5.9.3
187 | - jupytext==1.13.0
188 | - korean-lunar-calendar==0.2.1
189 | - lxml==4.6.3
190 | - markdown-it-py==1.1.0
191 | - mdit-py-plugins==0.2.8
192 | - more-itertools==8.10.0
193 | - mpu==0.23.1
194 | - mypy==0.910
195 | - mypy-extensions==0.4.3
196 | - pathspec==0.9.0
197 | - platformdirs==2.4.0
198 | - pluggy==1.0.0
199 | - py==1.10.0
200 | - pyarrow==5.0.0
201 | - pymeeus==0.5.11
202 | - pytest==6.2.5
203 | - pytest-cov==3.0.0
204 | - pytest-forked==1.3.0
205 | - pytest-mock==3.6.1
206 | - pytest-xdist==2.4.0
207 | - pyyaml==6.0
208 | - quandl==3.6.1
209 | - regex==2021.10.21
210 | - soupsieve==2.2.1
211 | - tabulate==0.8.9
212 | - toml==0.10.2
213 | - tomli==1.2.1
214 | - tqdm==4.62.3
215 | - typing-extensions==3.10.0.2
216 | - xlrd==2.0.1
217 |
--------------------------------------------------------------------------------
/examples/bev.py:
--------------------------------------------------------------------------------
1 | """This example optimizes the charging of battery electric vehicles (BEV).
2 |
3 | Two time series are considered:
4 | `P_BEV_drive_TB`: The discharging power of the BEV while it is driven.
5 | `y_BEV_avail_TB`: The availability (0 or 1) of the BEV for charging.
6 | Here, we assume that the BEV is available for charging whenever it is not driven.
7 | """
8 |
9 | import draf
10 | from draf.components import *
11 |
12 |
13 | def main():
14 | cs = draf.CaseStudy("bev", year=2019, freq="60min", coords=(49.01, 8.39))
15 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 30)
16 | sc = cs.add_REF_scen(components=[eDem, EG, PV, BEV, Main]) # fmt: skip
17 | p_drive = sc.params.P_BEV_drive_TB.where(
18 | sc.params.P_BEV_drive_TB.index.get_level_values(0) % 24 < 12, 20
19 | )
20 | y_avail = (p_drive == 0).astype(int)
21 | sc.update_params(P_BEV_drive_TB=p_drive, y_BEV_avail_TB=y_avail)
22 | cs.optimize(logToConsole=False)
23 | return cs
24 |
--------------------------------------------------------------------------------
/examples/der_hut.py:
--------------------------------------------------------------------------------
1 | """Example model for Distributed Energy Resources (DER) and Heat Upgrading Technologies (HUT)"""
2 |
3 |
4 | import draf
5 | from draf.components import *
6 |
7 |
8 | def main():
9 | cs = draf.CaseStudy(
10 | "der_hut", year=2019, freq="60min", coords=(49.01, 8.39), consider_invest=True
11 | )
12 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
13 | sc = cs.add_REF_scen(components=[cDem, eDem, hDem, BES, CHP, EG, Fuel, HD,
14 | HOB, HP, P2H, PV, TES, Main]) # fmt: skip
15 | cs.optimize(logToConsole=False)
16 | return cs
17 |
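18 | # Component key (illustrative; abbreviations inferred from the other examples and
19 | # docstrings rather than defined here): cDem/eDem/hDem = cooling/electricity/heating
20 | # demand, BES = battery energy storage, CHP = combined heat and power, EG =
21 | # electricity grid, HOB = heat-only boiler, HP = heat pump, P2H = power-to-heat,
22 | # TES = thermal energy storage.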
--------------------------------------------------------------------------------
/examples/minimal.py:
--------------------------------------------------------------------------------
1 | """Minimal example with only one user-defined component.
2 |
3 | Notes:
4 | - The model just determines the total costs `C_TOT_`. There is nothing to "optimize".
5 | - The param_func uses DRAF syntax; the model_func uses GurobiPy syntax.
6 | - Most of the CaseStudy functions can be chained.
7 | """
8 |
9 | from gurobipy import GRB, Model, quicksum
10 |
11 | import draf
12 | from draf import Collectors, Dimensions, Params, Scenario, Vars # only used for type hinting
13 | from draf.abstract_component import Component # only used for type hinting
14 |
15 |
16 | class Minimal(Component):
17 |
18 | def param_func(self, sc: Scenario):
19 |
20 | # Define the optimization variable C_TOT_:
21 | sc.var("C_TOT_", doc="Total costs", unit="€/a")
22 |
23 | # Prepare time-dependent day-ahead market prices as parameter c_EG_RTP_T:
24 | sc.prep.c_EG_RTP_T()
25 |
26 | # Prepare a time-dependent G1 standard load profile (Business on weekdays 08:00 - 18:00)
27 |         # with an annual energy of 5 GWh:
28 | sc.prep.P_eDem_T(profile="G1", annual_energy=5e6)
29 |
30 | def model_func(self, sc: Scenario, m: Model, d: Dimensions, p: Params, v: Vars, c: Collectors):
31 |
32 | # Set the objective function:
33 | m.setObjective(v.C_TOT_, GRB.MINIMIZE)
34 |
35 | # Add a constraint to the model
36 | m.addConstr(v.C_TOT_ == p.k__dT_ * quicksum(p.P_eDem_T[t] * p.c_EG_RTP_T[t] for t in d.T))
37 |
38 |
39 | def main():
40 | cs = draf.CaseStudy()
41 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
42 | cs.add_REF_scen(components=[Minimal])
43 | cs.optimize()
44 | return cs
45 |
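46 | # Chaining sketch (illustrative; which methods return the CaseStudy instance is an
47 | # assumption based on the docstring note that most CaseStudy functions can be chained):
48 | #     cs = draf.CaseStudy()
49 | #     cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2).add_REF_scen(
50 | #         components=[Minimal]
51 | #     ).optimize()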
--------------------------------------------------------------------------------
/examples/prod.py:
--------------------------------------------------------------------------------
1 | """Flexible operation of an industrial process."""
2 | import draf
3 | from draf.components import eDem, EG, PP, PS, pDem, Main
4 |
5 |
6 | def main():
7 | cs = draf.CaseStudy(
8 | "prod", year=2019, freq="60min", coords=(49.01, 8.39), consider_invest=False
9 | )
10 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
11 | sc = cs.add_REF_scen(components=[eDem, EG, PP, PS, pDem, Main]) # fmt: skip
12 | cs.optimize(logToConsole=False)
13 | return cs
14 |
--------------------------------------------------------------------------------
/examples/pv.py:
--------------------------------------------------------------------------------
1 | """A small example with a fixed electricity demand (eDem), an electricity grid (EG), a
2 | photovoltaic system (PV), and the Main component which contains the objective function and the
3 | energy balances. There is no controllable component, so the operation is fully determined and
4 | cannot be optimized. That means the solver already resolves the problem during presolve.
5 | """
6 |
7 | import draf
8 | from draf.components import *
9 |
10 |
11 | def main():
12 | cs = draf.CaseStudy(name="min_imp", year=2019, freq="60min", country="DE", coords=(49.01, 8.39))
13 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
14 | sc = cs.add_REF_scen(doc="no BES", components=[eDem, EG(c_buyPeak=50), PV, Main])
15 | cs.add_scens(nParetoPoints=2)
16 | cs.optimize()
17 | return cs
18 |
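19 | # Note (illustrative): nParetoPoints=2 presumably adds two scenarios along the
20 | # cost/emission Pareto front, e.g. by varying a weighting factor such as the
21 | # k_PTO_alpha_ parameter shown in examples/pyomo_pv.py.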
--------------------------------------------------------------------------------
/examples/pv_bes.py:
--------------------------------------------------------------------------------
1 | """Optimizing the operation of a 100 kWh Battery Energy Storage (BES) considering a Photovoltaic
2 | (PV), a fixed electricity demand (eDem), and an existing 1 MWp Photovoltaic system.
3 |
4 | The example is used in the DRAF showcase, see https://mfleschutz.github.io/draf-showcase/#/2
5 | """
6 |
7 | import draf
8 | from draf.components import *
9 |
10 |
11 | def main():
12 | cs = draf.CaseStudy("pv_bes", year=2019, freq="60min", coords=(49.01, 8.39))
13 | cs.set_time_horizon(start="Aug-01 00:00", steps=24)
14 | sc = cs.add_REF_scen(components=[eDem, BES(E_CAPx=100), PV(P_CAPx=1e3), EG, Main])
15 | cs.add_scens([("c_EG_T", "t", ["c_EG_TOU_T", "c_EG_FLAT_T"])])
16 | cs.optimize(logToConsole=False)
17 | return cs
18 |
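19 | # Note (illustrative): each add_scens tuple (entity name, short name, [values])
20 | # appears to spawn one scenario per value; here, two extra scenarios replace the
21 | # electricity price series c_EG_T with a time-of-use (c_EG_TOU_T) and a flat
22 | # (c_EG_FLAT_T) tariff.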
--------------------------------------------------------------------------------
/examples/pyomo_pv.py:
--------------------------------------------------------------------------------
1 | """This minimal example simulates the electricity purchase for a given demand and PV production.
2 | All variables are determined in the presolve. The aim of this file is to show the syntax of the
3 | parameter and model definitions when using Pyomo. The main function uses them to create a case
4 | study with two scenarios: `REF` and `REF_PV`.
5 | """
6 |
7 | import pyomo.environ as pyo
8 | from gurobipy import GRB
9 |
10 | import draf
11 | from draf import Collectors, Dimensions, Params, Scenario, Vars
12 | from draf.abstract_component import Component
13 |
14 |
15 | class TEST_COMP(Component):
16 |
17 | def param_func(_, sc: draf.Scenario):
18 |
19 | # Total
20 | sc.var("C_TOT_", doc="Total costs", unit="k€/a", lb=-GRB.INFINITY)
21 | sc.var("CE_TOT_", doc="Total emissions", unit="kgCO2eq/a", lb=-GRB.INFINITY)
22 |
23 | # Pareto
24 | sc.param("k_PTO_alpha_", 0, "Pareto weighting factor", "")
25 |
26 | # EG
27 | sc.prep.c_EG_RTP_T()
28 | sc.prep.ce_EG_T()
29 | sc.var("P_EG_buy_T", doc="Purchasing electrical power", unit="kW_el", lb=-GRB.INFINITY)
30 |
31 | # Demand
32 | sc.prep.P_eDem_T(profile="G1", annual_energy=5e6)
33 |
34 | # PV
35 | sc.param("P_PV_CAPx_", 0, "existing capacity", "kW_peak")
36 | sc.prep.P_PV_profile_T(use_coords=True)
37 |
38 | def model_func(_, sc: Scenario, m: pyo.Model, d: Dimensions, p: Params, v: Vars, c: Collectors):
39 |
40 | m.obj = pyo.Objective(
41 | expr=(1 - p.k_PTO_alpha_) * v.C_TOT_ + p.k_PTO_alpha_ * v.CE_TOT_, sense=pyo.minimize
42 | )
43 |
44 | # C
45 | m.DEF_C_ = pyo.Constraint(
46 | expr=(
47 | v.C_TOT_
48 | == p.k__dT_ * pyo.quicksum(v.P_EG_buy_T[t] * p.c_EG_RTP_T[t] / 1e3 for t in d.T)
49 | )
50 | )
51 |
52 | # CE
53 | m.DEF_CE_ = pyo.Constraint(
54 | expr=(v.CE_TOT_ == p.k__dT_ * pyo.quicksum(v.P_EG_buy_T[t] * p.ce_EG_T[t] for t in d.T))
55 | )
56 |
57 | # Electricity
58 | m.BAL_pur = pyo.Constraint(
59 | d.T,
60 | rule=lambda v, t: v.P_EG_buy_T[t] + p.P_PV_CAPx_ * p.P_PV_profile_T[t] == p.P_eDem_T[t],
61 | )
62 |
63 |
64 | def main():
65 | cs = draf.CaseStudy("pv_pyo", year=2019, freq="60min", coords=(49.01, 8.39), mdl_language="pyo")
66 | cs.set_time_horizon(start="Apr-01 00:00", steps=24 * 2)
67 | cs.add_REF_scen(components=[TEST_COMP])
68 | cs.add_scen("REF_PV", doc="REF plus PV").update_params(P_PV_CAPx_=100)
69 | cs.optimize(which_solver="glpk")
70 | return cs
71 |
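72 | # Comparison sketch (illustrative): the DEF_C_ constraint above, expressed in the
73 | # GurobiPy syntax of examples/minimal.py, would use m.addConstr and
74 | # gurobipy.quicksum instead of pyo.Constraint and pyo.quicksum:
75 | #     m.addConstr(
76 | #         v.C_TOT_
77 | #         == p.k__dT_ * quicksum(v.P_EG_buy_T[t] * p.c_EG_RTP_T[t] / 1e3 for t in d.T)
78 | #     )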
--------------------------------------------------------------------------------
/fmt.bat:
--------------------------------------------------------------------------------
1 | @REM format
2 | black draf tests
3 | isort draf tests
4 | python draf/sort_sections.py
--------------------------------------------------------------------------------
/fmt.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | # format and sort section
4 |
5 | black draf tests
6 | isort draf tests
7 | python draf/sort_sections.py
8 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.black]
2 | line-length = 100
3 | preview = true
4 | skip-magic-trailing-comma = true
5 | exclude = '''
6 | /(
7 | \.eggs
8 | | \.git
9 | | \.github
10 | | \.mypy_cache
11 | | \.pytest_cache
12 | | \.vscode
13 | | .*\.egg-info
14 | | doc
15 | | local
16 | )/
17 | '''
18 |
19 | [tool.isort]
20 | line_length = 100
21 | include_trailing_comma = true
22 | profile = "black"
23 |
24 | [tool.pytest.ini_options]
25 | addopts = [
26 | "--strict-markers",
27 | "--cov=draf",
28 | # "--cov-report=term:skip-covered",
29 | # "--cov-report=html",
30 | "--durations=3",
31 | # "-m=not slow",
32 | # "--disable-pytest-warnings",
33 | ]
34 | markers = [
35 |     "slow: marks slow tests (>1 s) that are not important model tests",
36 | "gurobi: marks tests that rely on a valid gurobi license"
37 | ]
38 | python_files = "test_*.py"
39 | python_functions = "test_*"
40 | testpaths = ["tests"]
41 |
42 | # [tool.coverage.run]
43 | # omit = []
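44 |
45 | # Example invocations for the markers defined above (illustrative):
46 | #   pytest -m "not slow"     # skip slow tests
47 | #   pytest -m "not gurobi"   # skip tests that require a valid Gurobi license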
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | from setuptools import find_packages, setup
4 |
5 | long_description = Path("README.md").read_text().strip()
6 |
7 | setup(
8 | name="draf",
9 | version="0.3.1",
10 | author="Markus Fleschutz",
11 | author_email="mfleschutz@gmail.com",
12 | description="Demand Response Analysis Framework",
13 | long_description=long_description,
14 | long_description_content_type="text/markdown",
15 | url="https://github.com/mfleschutz/draf",
16 | license="LGPLv3",
17 | packages=find_packages(exclude=["tests"]),
18 | python_requires=">=3.6",
19 | install_requires=[
20 | "appdirs",
21 | "elmada",
22 | "geopy",
23 | "gurobipy",
24 | "holidays",
25 | "matplotlib",
26 | "numpy",
27 | "pandas",
28 | "pyomo>=5.7",
29 | "ray",
30 | "seaborn",
31 | "tabulate",
32 | "tqdm",
33 | "pvlib",
34 | "ephem",
35 | "plotly",
36 | "numpy_financial",
37 | "ipywidgets",
38 | ],
39 | extras_require={
40 | "dev": [
41 | "black",
42 | "bump2version",
43 | "isort",
44 | "mypy",
45 | "pytest-cov",
46 | "pytest-mock",
47 | "pytest-xdist",
48 | "pytest",
49 | ],
50 | "jupyter": ["jupyter", "jupytext"]
51 | },
52 | classifiers=[
53 | "Development Status :: 4 - Beta",
54 | "Intended Audience :: Developers",
55 | "Intended Audience :: Science/Research",
56 | "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
57 | "Natural Language :: English",
58 | "Operating System :: OS Independent",
59 | "Programming Language :: Python :: 3 :: Only",
60 | "Programming Language :: Python :: 3",
61 | "Topic :: Scientific/Engineering :: Information Analysis",
62 | "Topic :: Scientific/Engineering",
63 | "Topic :: Software Development :: Libraries :: Python Modules",
64 | ],
65 | keywords=[
66 | "energy systems",
67 | "optimization",
68 | "decarbonization",
69 | "mathematical programming",
70 | "demand response",
71 | "energy hubs",
72 | "distributed energy resources",
73 | ],
74 | )
75 |
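76 | # Editable install including the extras defined above (the same form appears in
77 | # the environment files):
78 | #     pip install --editable ".[dev,jupyter]"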
--------------------------------------------------------------------------------
/tests/core/test_case_study.py:
--------------------------------------------------------------------------------
1 | import collections
2 | import time
3 |
4 | import pandas as pd
5 | import pytest
6 |
7 | import draf
8 |
9 |
10 | @pytest.fixture
11 | def case() -> draf.CaseStudy:
12 | return draf.CaseStudy()
13 |
14 |
15 | def test___init__(case):
16 |     assert case.name == "test"
17 |     # An unsupported frequency should be rejected at construction time:
18 |     with pytest.raises(AssertionError):
19 |         draf.CaseStudy(freq="1min")
20 |
21 |
22 | @pytest.mark.parametrize("freq, expected", [["15min", 0.25], ["30min", 0.5], ["60min", 1.0]])
23 | def test_step_width(freq: str, expected: float, case):
24 | case.freq = freq
25 | assert case.step_width == expected
26 |
27 |
28 | @pytest.mark.parametrize(
29 | "freq, steps, unit", [["15min", 96, "1/4 h"], ["30min", 48, "1/2 h"], ["60min", 24, "h"]]
30 | )
31 | def test__set_dtindex(freq: str, steps: int, unit: str, case):
32 | case._set_dtindex(year=2019, freq=freq)
33 | assert case.steps_per_day == steps
34 | assert case.freq_unit == unit
35 |
36 |
37 | @pytest.mark.parametrize(
38 | "start, steps, end, t1, t2",
39 | [
40 | ["May1 00:00", None, "Jun1 23:00", 2880, 3647],
41 | ["May1 00:00", 24 * 30, None, 2880, 3599],
42 | ["Oct3 20:00", None, None, 6620, 8759],
43 | ["Dec15 00:00", None, "Dec15 15:00", 8352, 8367],
44 | ],
45 | )
46 | def test_set_time_horizon(start: str, steps: int, end: str, t1: int, t2: int, case):
47 | case.set_time_horizon(start=start, steps=steps, end=end)
48 | assert case._t1 == t1
49 | assert case._t2 == t2
50 |
51 |
52 | @pytest.mark.parametrize(
53 | "string, expected",
54 | [["1", slice(0, 744, None)], ["2", slice(744, 1416, None)], ["10", slice(6552, 7296, None)]],
55 | )
56 | def test__get_int_loc_from_dtstring(string: str, expected: slice, case):
57 | assert case._get_int_loc_from_dtstring(s=string) == expected
58 |
59 |
60 | def test_scens_list(case):
61 |     assert case.scens_list == []
62 |
63 |
64 | def test_scens_dic(case):
65 |     assert case.scens_dic == {}
66 |
67 |
68 | def test_valid_scens(case):
69 |     assert case.valid_scens == {}
70 |
71 |
72 | def test_ordered_valid_scens(case):
73 |     assert case.ordered_valid_scens == collections.OrderedDict()
74 |
75 |
76 | def test_pareto(case):
77 |     assert isinstance(case.pareto, pd.DataFrame)
78 |
79 |
80 | def test_dt_info(case):
81 |     assert isinstance(case.dt_info, str)
82 |
--------------------------------------------------------------------------------
/tests/core/test_scenario.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import pytest
3 |
4 | from draf import CaseStudy, Scenario
5 | from draf.prep.data_base import DataBase as db
6 |
7 |
8 | @pytest.fixture
9 | def cs() -> CaseStudy:
10 | return CaseStudy(year=2019)
11 |
12 |
13 | @pytest.fixture
14 | def empty_sc(cs) -> Scenario:
15 | sc = cs.add_REF_scen()
16 | return sc
17 |
18 |
19 | @pytest.fixture
20 | def sc(cs) -> Scenario:
21 | sc = cs.add_REF_scen()
22 | sc.param("eta_test_", data=5)
23 | return sc
24 |
25 |
26 | def test_get_var_par_dic_with_param(empty_sc):
27 | assert isinstance(empty_sc.get_var_par_dic(what="params"), dict)
28 |
29 | with pytest.raises(RuntimeError) as e:
30 | empty_sc.get_var_par_dic(what="res")
31 |     assert "Scenario has no res_dic." in str(e.value)
32 |
33 |
34 | def test__activate_vars(sc):
35 | assert isinstance(sc._activate_vars(), Scenario)
36 | assert sc._activate_vars().year == 2019
37 |
38 |
39 | def test_match_dtindex(cs):
40 | cs.set_time_horizon(start="Jan-02 00", steps=24 * 2)
41 | sc = cs.add_REF_scen()
42 | ser = pd.Series(index=range(8760), dtype="float64")
43 | df = ser.to_frame()
44 | for input in (ser, df):
45 | new = sc.match_dtindex(input)
46 | assert new.index[0] == 24
47 | assert new.index[-1] == 71
48 |
49 |
50 | def test_get_entity(sc):
51 | assert sc.get_entity("eta_test_") == 5
52 |
53 |
54 | def test_param(sc):
55 | sc.param(name="x_HP_test_", data=4, doc="test doc", unit="test_unit", src="test_source")
56 | sc.param(from_db=db.funcs.c_CHP_inv_())
57 | sc.param(from_db=db.eta_HP_)
58 | sc.param(name="c_Fuel_other-name_", from_db=db.c_Fuel_co2_)
59 | for ent in ["c_CHP_inv_", "eta_HP_", "c_Fuel_other-name_"]:
60 |         assert isinstance(sc.params.get(ent), float)
61 |
--------------------------------------------------------------------------------
/tests/plotting/test_cs_plotting.py:
--------------------------------------------------------------------------------
1 | from draf.plotting import cs_plotting
2 |
3 |
4 | def test_float_to_x():
5 | assert cs_plotting.float_to_int_to_string(2.4) == "2"
6 | assert cs_plotting.float_to_int_to_string(2.6) == "3"
7 | assert cs_plotting.float_to_string_with_precision_1(2.44) == "2.4"
8 | assert cs_plotting.float_to_string_with_precision_2(2.444) == "2.44"
9 |
--------------------------------------------------------------------------------
/tests/prep/test_demand.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import pytest
3 |
4 | from draf import prep
5 |
6 |
7 | @pytest.mark.slow
8 | def test_get_el_SLP():
9 | ser = prep.get_el_SLP(year=2019, country="DE", freq="60min", profile="G1", annual_energy=1e6)
10 | assert isinstance(ser, pd.Series)
11 | assert ser.sum() == pytest.approx(1e6)
12 |
13 | ser = prep.get_el_SLP(
14 | year=2019, country="DE", freq="60min", profile="G1", offset=1e3, peak_load=5e3
15 | )
16 | assert isinstance(ser, pd.Series)
17 | assert ser.max() == pytest.approx(5e3)
18 |
19 |
20 | def test_get_heating_demand():
21 | t_amb = pd.Series(12, index=range(8760))
22 | ser = prep.get_heating_demand(ser_amb_temp=t_amb, year=2019)
23 | assert isinstance(ser, pd.Series)
24 | assert ser.min() > 0
25 |
26 |
27 | def test_get_cooling_demand():
28 | t_amb = pd.Series(30, index=range(8760))
29 | t_amb[2] = 15
30 | ser = prep.get_cooling_demand(ser_amb_temp=t_amb, year=2019)
31 | assert isinstance(ser, pd.Series)
32 | assert ser.min() >= 0.0
33 | assert ser[2] == 0.0
34 |
--------------------------------------------------------------------------------
/tests/prep/test_pv.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from draf import prep
4 |
5 |
6 | @pytest.mark.slow
7 | def test_get_pv_power():
8 | ser = prep.get_pv_power(year=2019, coords=(49.01, 8.39))
9 | assert ser.max() == pytest.approx(0.837, rel=1e-2)
10 |
--------------------------------------------------------------------------------
/tests/prep/test_weather.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import pytest
3 |
4 | from draf.prep import weather
5 |
6 |
7 | @pytest.mark.slow
8 | def test_get_data_for_gsee():
9 | df = weather.get_data_for_gsee(stations_id_air=4063, stations_id_solar=691, year=2019)
10 | mean = df["global_horizontal"].mean()
11 | assert 90 < mean < 150, f"Average global radiation data (={mean:.3f}) are unrealistic."
12 |
13 |
14 | def test_get_nearest_stations():
15 | df = weather.get_nearest_stations(year=2019, coords=(49.01, 8.39))
16 | assert isinstance(df, pd.DataFrame)
17 | assert tuple(df.iloc[6].values) == ("Mannheim", "Rheinstetten")
18 |
19 |
20 | def test_read_stations():
21 | assert isinstance(weather.read_stations("solar"), pd.DataFrame)
22 | assert isinstance(weather.read_stations("air_temperature"), pd.DataFrame)
23 |
24 |
25 | @pytest.mark.slow
26 | def test_read_stations_table():
27 | assert isinstance(weather.read_stations_table("solar"), str)
28 | assert isinstance(weather.read_stations_table("air_temperature"), str)
29 |
30 |
31 | def test_get_air_temp():
32 | assert weather.get_air_temp(year=2019, coords=(49.01, 8.39)).mean() == pytest.approx(11.766061)
33 |
--------------------------------------------------------------------------------
/tests/test_examples.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from examples import bev, der_hut, minimal, prod, pv, pv_bes, pyomo_pv
4 |
5 |
6 | @pytest.mark.gurobi
7 | @pytest.mark.parametrize("mdl", [bev, der_hut, minimal, prod, pv, pv_bes, pyomo_pv])
8 | def test_examples(mdl):
9 | c = mdl.main().REF_scen.res.C_TOT_
10 | assert isinstance(c, float)
11 |
--------------------------------------------------------------------------------
/tests/test_helper.py:
--------------------------------------------------------------------------------
1 | import pandas as pd
2 | import pytest
3 |
4 | from draf import helper as hp
5 |
6 |
7 | def test_get_etype():
8 | assert hp.get_etype("c_") == "c"
9 | assert hp.get_etype("c_comp") == "c"
10 | assert hp.get_etype("c_comp_DIMS") == "c"
11 | assert hp.get_etype("c_comp_desc_DIMS") == "c"
12 |
13 |
14 | def test_get_component():
15 | assert hp.get_component("c_") == ""
16 | assert hp.get_component("c_DIMS") == "DIMS"
17 | assert hp.get_component("c_COMP_DIMS") == "DIMS"
18 | assert hp.get_component("c_COMP_desc_DIMS") == "DIMS"
19 |
20 |
21 | def test_get_desc():
22 | assert hp.get_desc("c_") == ""
23 | assert hp.get_desc("c_DIMS") == ""
24 | assert hp.get_desc("c_COMP_DIMS") == ""
25 | assert hp.get_desc("c_COMP_desc_DIMS") == "desc"
26 | assert hp.get_desc("c_COMP_desc_addon_DIMS") == "desc"
27 | assert hp.get_desc("c__desc_") == "desc"
28 |
29 |
30 | def test_get_dims():
31 | assert hp.get_dims("x_") == ""
32 | assert hp.get_dims("x_DIMS") == "DIMS"
33 | assert hp.get_dims("x_comp_DIMS") == "DIMS"
34 | assert hp.get_dims("x_comp_desc_DIMS") == "DIMS"
35 |
36 |
37 | def test_datetime_to_int():
38 | assert hp.datetime_to_int(freq="60min", year=2019, month=12, day=31) == 8760 - 24
39 |
40 |
41 | def test_int_to_datetime():
42 | assert isinstance(hp.int_to_datetime(freq="60min", year=2019, pos=8760 - 24), pd.Timestamp)
43 |
44 |
45 | def test_auto_fmt():
46 | num, unit = hp.auto_fmt(num=2e4, unit="kW")
47 | assert (num, unit) == (20.0, "MW")
48 |
49 | num, unit = hp.auto_fmt(num=2e6, unit="€")
50 | assert (num, unit) == (2.0, "M€")
51 |
52 | num, unit = hp.auto_fmt(num=2e6, unit="€", target_unit="k€")
53 | assert (num, unit) == (2000.0, "k€")
54 |
55 | num, unit = hp.auto_fmt(num=2e4, unit="gCO2eq")
56 | assert (num, unit) == (20.0, "kgCO2eq")
57 |
58 |
59 | def test_wrap_and_border():
60 | assert hp.wrap_and_border("spam and eggs", 6) == "┌────┐\n│spam│\n│and │\n│eggs│\n└────┘"
61 |
62 |
63 | def test_bordered():
64 | assert hp.bordered("spam") == "┌────┐\n│spam│\n└────┘"
65 |
66 |
67 | def test_ser_to_df_for_latex_table():
68 | ser = pd.Series(dict(a=3, b=2, c=4))
69 | result = hp.ser_to_df_for_latex_table(ser, ncols=2)
70 | assert result.__repr__() == " index 0 index 0\n0 a 3 b 2"
71 |
72 |
73 | def test_human_readable_size():
74 | assert hp.human_readable_size(size=1400, decimal_places=2) == "1.40 KB"
75 |
76 |
77 | def test_topological_sort():
78 | order_restriction_with_cyclic_dependency = [("A", {"B"}), ("B", {"C"}), ("C", {"A"})]
79 | with pytest.raises(ValueError):
80 | list(hp.topological_sort(order_restriction_with_cyclic_dependency))
81 |
--------------------------------------------------------------------------------
/tests/test_sort_sections.py:
--------------------------------------------------------------------------------
1 | from draf.sort_sections import sort_sections
2 |
3 | a = """\
4 | a
5 |
6 | b
7 | # SORTING_START
8 | ASUE_2011 = "https://asue.de/sites"
9 | BMWI_2020 = "https://www.bmwi-energiewende.de/EWD"
10 | # SORTING_END
11 |
12 | c:
13 | d
14 |
15 | # SORTING_START
16 | """
17 |
18 | b = """\
19 | aa_EEG_ = ParDat(name="c_EG_EEG_", data=0.065)
20 | """
21 |
22 |
23 | c = """\
24 | zz_PV_ = ParDat(name="ol_PV_", data=25)
25 | """
26 |
27 | d = """\
28 | # SORTING_END
29 |
30 | e
31 | """
32 |
33 |
34 | def test_sort_sections():
35 | assert a + b + c + d == sort_sections(a + c + b + d)
36 |
--------------------------------------------------------------------------------
/tests/test_tsa.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from draf.prep import get_el_SLP
4 | from draf.tsa import DemandAnalyzer
5 |
6 |
7 | @pytest.mark.slow
8 | def test_tsa():
9 | year, freq = 2020, "60min"
10 |     p_el = get_el_SLP(year=year, freq=freq, annual_energy=10e6, profile="G0")
11 |
12 | da = DemandAnalyzer(p_el, year, freq)
13 | da.show_stats()
14 |
15 | pla = da.get_peak_load_analyzer()
16 | pla.histo(target_percentile=95)
17 | pla.simulate_BES(e_bes_capa=2.0, p_bes_max=2.0)
18 |
--------------------------------------------------------------------------------