├── .gitignore
├── LICENSE.md
├── README.md
├── doc
├── _static
│ ├── basic.css
│ ├── classic.css
│ ├── doctools.js
│ ├── documentation_options.js
│ ├── file.png
│ ├── jquery-3.5.1.js
│ ├── jquery.js
│ ├── language_data.js
│ ├── minus.png
│ ├── plus.png
│ ├── pygments.css
│ ├── searchtools.js
│ ├── sidebar.js
│ ├── underscore-1.3.1.js
│ └── underscore.js
├── genindex.html
├── index.html
├── modules.html
├── pub_data_visualization.auctions.html
├── pub_data_visualization.auctions.load.entsoe.html
├── pub_data_visualization.auctions.load.entsoe.transcode.html
├── pub_data_visualization.auctions.load.html
├── pub_data_visualization.auctions.plot.html
├── pub_data_visualization.auctions.plot.subplot.html
├── pub_data_visualization.capacity.aggregated.html
├── pub_data_visualization.capacity.aggregated.load.entsoe.html
├── pub_data_visualization.capacity.aggregated.load.entsoe.transcode.html
├── pub_data_visualization.capacity.aggregated.load.html
├── pub_data_visualization.capacity.aggregated.load.rte.html
├── pub_data_visualization.capacity.aggregated.load.rte.transcode.html
├── pub_data_visualization.capacity.html
├── pub_data_visualization.capacity.unit.html
├── pub_data_visualization.capacity.unit.load.html
├── pub_data_visualization.capacity.unit.load.rte.html
├── pub_data_visualization.capacity.unit.load.rte.transcode.html
├── pub_data_visualization.global_tools.html
├── pub_data_visualization.global_var.html
├── pub_data_visualization.html
├── pub_data_visualization.load.html
├── pub_data_visualization.load.load.eco2mix.html
├── pub_data_visualization.load.load.eco2mix.load_raw.html
├── pub_data_visualization.load.load.eco2mix.transcode.html
├── pub_data_visualization.load.load.entsoe.html
├── pub_data_visualization.load.load.entsoe.transcode.html
├── pub_data_visualization.load.load.html
├── pub_data_visualization.load.plot.html
├── pub_data_visualization.load.plot.subplot.html
├── pub_data_visualization.load.tools.html
├── pub_data_visualization.multiplots.html
├── pub_data_visualization.outages.html
├── pub_data_visualization.outages.load.entsoe.html
├── pub_data_visualization.outages.load.entsoe.transcode.html
├── pub_data_visualization.outages.load.html
├── pub_data_visualization.outages.load.rte.html
├── pub_data_visualization.outages.load.rte.transcode.html
├── pub_data_visualization.outages.plot.html
├── pub_data_visualization.outages.plot.subplot.html
├── pub_data_visualization.outages.tools.html
├── pub_data_visualization.production.html
├── pub_data_visualization.production.load.eco2mix.html
├── pub_data_visualization.production.load.eco2mix.transcode.html
├── pub_data_visualization.production.load.entsoe.html
├── pub_data_visualization.production.load.entsoe.transcode.html
├── pub_data_visualization.production.load.html
├── pub_data_visualization.production.load.rte.html
├── pub_data_visualization.production.load.rte.transcode.html
├── pub_data_visualization.production.plot.html
├── pub_data_visualization.production.plot.subplot.html
├── pub_data_visualization.weather.html
├── pub_data_visualization.weather.load.html
├── pub_data_visualization.weather.load.meteofrance.html
├── pub_data_visualization.weather.load.meteofrance.transcode.html
├── pub_data_visualization.weather.plot.html
├── pub_data_visualization.weather.plot.subplot.html
├── py-modindex.html
└── search.html
├── examples
├── indices
│ └── prices.png
├── load
│ ├── forecasting_error.png
│ └── power.png
├── multiplots
│ ├── scatter_price_load.png
│ ├── scatter_price_production.png
│ ├── scatter_price_weather.png
│ ├── spot_report.png
│ └── transparent_production.png
├── outages
│ ├── animated_availability.png
│ ├── evolution_mean_availability.png
│ ├── expected_program.png
│ ├── incremental_programs.png
│ └── regression_delays.png
├── production
│ └── power.png
└── weather
│ ├── curve.png
│ └── distribution_temperature.png
├── pub_data_visualization
├── __init__.py
├── global_tools
│ ├── __init__.py
│ ├── compute_delivery_dates.py
│ ├── compute_delivery_period_index.py
│ ├── compute_delivery_windows.py
│ ├── compute_maturity.py
│ ├── compute_nb_hours.py
│ ├── dt_exists_in_tz.py
│ ├── format_contract_name.py
│ ├── format_latex.py
│ ├── format_unit_name.py
│ ├── heatmap.py
│ ├── piecewise_constant_interpolators.py
│ └── set_mpl.py
├── global_var
│ ├── __init__.py
│ ├── cascade_frequencies.py
│ ├── cascade_frequencies_gas.py
│ ├── cascade_maturities.py
│ ├── dikt_tz.py
│ ├── months.py
│ ├── path_folders.py
│ ├── plot_variables.py
│ └── user_defined_names.py
├── indices
│ ├── __init__.py
│ ├── load
│ │ ├── __init__.py
│ │ ├── entsoe_da
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ └── columns.py
│ │ └── load.py
│ └── plot
│ │ ├── __init__.py
│ │ ├── price.py
│ │ └── subplot
│ │ ├── __init__.py
│ │ └── price.py
├── load
│ ├── __init__.py
│ ├── load
│ │ ├── __init__.py
│ │ ├── eco2mix
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── load_raw
│ │ │ │ ├── __init__.py
│ │ │ │ ├── load_raw.py
│ │ │ │ └── url.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ └── columns.py
│ │ ├── entsoe
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ └── columns.py
│ │ └── load.py
│ ├── plot
│ │ ├── __init__.py
│ │ ├── forecasting_error.py
│ │ ├── power.py
│ │ └── subplot
│ │ │ ├── __init__.py
│ │ │ ├── forecasting_error.py
│ │ │ └── power.py
│ └── tools
│ │ ├── __init__.py
│ │ └── mean_load_delivery_period.py
├── multiplots
│ ├── __init__.py
│ ├── cloud_2d.py
│ ├── spot_report.py
│ ├── subplot
│ │ ├── __init__.py
│ │ └── kernel.py
│ └── transparent_production.py
├── outages
│ ├── __init__.py
│ ├── load
│ │ ├── __init__.py
│ │ ├── entsoe
│ │ │ ├── __init__.py
│ │ │ ├── assemble.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ ├── columns.py
│ │ │ │ ├── map_code.py
│ │ │ │ ├── outage_status.py
│ │ │ │ ├── outage_type.py
│ │ │ │ └── production_source.py
│ │ ├── load.py
│ │ └── rte
│ │ │ ├── __init__.py
│ │ │ ├── assemble.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ ├── __init__.py
│ │ │ ├── capacity.py
│ │ │ ├── columns.py
│ │ │ ├── eic_code.py
│ │ │ ├── outage_status.py
│ │ │ ├── outage_type.py
│ │ │ ├── producer_name.py
│ │ │ ├── production_source.py
│ │ │ └── unit_type.py
│ ├── plot
│ │ ├── __init__.py
│ │ ├── animated_availability.py
│ │ ├── evolution_mean_availability.py
│ │ ├── expected_program.py
│ │ ├── incremental_programs.py
│ │ ├── regression_delays.py
│ │ └── subplot
│ │ │ ├── __init__.py
│ │ │ ├── evolution_mean_availability.py
│ │ │ ├── expected_program.py
│ │ │ ├── incremental_programs.py
│ │ │ ├── nameplate_capacity.py
│ │ │ └── regression_delays.py
│ └── tools
│ │ ├── __init__.py
│ │ ├── compute_all_programs.py
│ │ ├── compute_missing_energy.py
│ │ ├── cross_section_view.py
│ │ ├── extrapolate_programs.py
│ │ └── sum_programs.py
├── production
│ ├── __init__.py
│ ├── load
│ │ ├── __init__.py
│ │ ├── eco2mix
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ └── columns.py
│ │ ├── entsoe
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ ├── columns.py
│ │ │ │ └── map_code.py
│ │ ├── load.py
│ │ └── rte
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ ├── __init__.py
│ │ │ ├── format_str_date.py
│ │ │ └── production_source.py
│ └── plot
│ │ ├── __init__.py
│ │ ├── power.py
│ │ └── subplot
│ │ ├── __init__.py
│ │ └── power.py
├── production_capacity
│ ├── __init__.py
│ ├── aggregated
│ │ ├── __init__.py
│ │ └── load
│ │ │ ├── __init__.py
│ │ │ ├── entsoe
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ ├── columns.py
│ │ │ │ └── production_source.py
│ │ │ ├── load.py
│ │ │ └── rte
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ └── transcode
│ │ │ ├── __init__.py
│ │ │ └── production_source.py
│ └── unit
│ │ ├── __init__.py
│ │ └── load
│ │ ├── __init__.py
│ │ ├── load.py
│ │ └── rte
│ │ ├── __init__.py
│ │ ├── load.py
│ │ ├── paths.py
│ │ └── transcode
│ │ ├── __init__.py
│ │ ├── columns.py
│ │ └── production_source.py
├── transmission
│ ├── __init__.py
│ ├── load
│ │ ├── __init__.py
│ │ ├── entsog_nominations
│ │ │ ├── __init__.py
│ │ │ ├── load.py
│ │ │ ├── paths.py
│ │ │ ├── query.py
│ │ │ └── transcode
│ │ │ │ ├── __init__.py
│ │ │ │ ├── columns.py
│ │ │ │ └── columns_dropped.py
│ │ └── load.py
│ └── plot
│ │ └── __init__.py
└── weather
│ ├── __init__.py
│ ├── load
│ ├── __init__.py
│ ├── load.py
│ └── meteofrance
│ │ ├── __init__.py
│ │ ├── geography.py
│ │ ├── load.py
│ │ ├── paths.py
│ │ ├── transcode
│ │ ├── __init__.py
│ │ └── columns.py
│ │ └── url.py
│ └── plot
│ ├── __init__.py
│ ├── curve.py
│ ├── distribution.py
│ └── subplot
│ ├── __init__.py
│ ├── curve.py
│ └── distribution.py
├── requirements.txt
├── scripts
├── indices
│ └── main_price.py
├── load
│ ├── main_forecasting_error.py
│ └── main_power.py
├── multiplots
│ ├── main_scatter_price_load.py
│ ├── main_scatter_price_production.py
│ ├── main_scatter_price_weather.py
│ ├── main_spot_report.py
│ └── main_transparent_production.py
├── outages
│ ├── main_animated_availability.py
│ ├── main_evolution_mean_availability.py
│ ├── main_expected_program.py
│ ├── main_incremental_programs.py
│ └── main_regression_delays.py
├── production
│ └── main_power.py
├── transmission
│ └── main_draft.py
└── weather
│ ├── main_curve.py
│ └── main_distribution.py
└── setup.py
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 |
3 | # gitignore
4 | #.gitignore
5 | venv/
6 | .idea/
7 |
8 |
9 | # Files
10 | #*.txt
11 | *.pdf
12 | *.xls*
13 | *.doc*
14 | *.db
15 |
16 |
17 | # Personal ignore
18 | IIU_*
19 | TBC_*
20 | TBD_*
21 | NUA_*
22 | pers_var.py
23 | doc_source/
24 |
25 |
26 | # Generated files
27 | *.DS_Store
28 | __pycache__/
29 | *.py[cod]
30 | *.log
31 | *.egg-info/
32 | .___init__.py
33 |
34 | # Sphinx
35 | doc/.doctrees/
36 | doc/_sources/
37 | doc/.buildinfo
38 | doc/objects.inv
39 | doc/searchindex.js
40 | doc_source/
41 |
42 |
43 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Commission de Régulation de l'Énergie
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/doc/_static/documentation_options.js:
--------------------------------------------------------------------------------
1 | var DOCUMENTATION_OPTIONS = {
2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
3 | VERSION: '0.1',
4 | LANGUAGE: 'None',
5 | COLLAPSE_INDEX: false,
6 | BUILDER: 'html',
7 | FILE_SUFFIX: '.html',
8 | LINK_SUFFIX: '.html',
9 | HAS_SOURCE: true,
10 | SOURCELINK_SUFFIX: '.txt',
11 | NAVIGATION_WITH_KEYS: false
12 | };
--------------------------------------------------------------------------------
/doc/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/doc/_static/file.png
--------------------------------------------------------------------------------
/doc/_static/minus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/doc/_static/minus.png
--------------------------------------------------------------------------------
/doc/_static/plus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/doc/_static/plus.png
--------------------------------------------------------------------------------
/doc/search.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Search — energy-data-visualization 0.1 documentation
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
Search
46 |
47 |
48 |
49 | Please activate JavaScript to enable the search
50 | functionality.
51 |
52 |
53 |
54 | Searching for multiple words only shows matches that contain
55 | all words.
56 |
57 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
75 |
76 |
77 |
90 |
94 |
95 |
--------------------------------------------------------------------------------
/examples/indices/prices.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/indices/prices.png
--------------------------------------------------------------------------------
/examples/load/forecasting_error.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/load/forecasting_error.png
--------------------------------------------------------------------------------
/examples/load/power.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/load/power.png
--------------------------------------------------------------------------------
/examples/multiplots/scatter_price_load.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/multiplots/scatter_price_load.png
--------------------------------------------------------------------------------
/examples/multiplots/scatter_price_production.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/multiplots/scatter_price_production.png
--------------------------------------------------------------------------------
/examples/multiplots/scatter_price_weather.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/multiplots/scatter_price_weather.png
--------------------------------------------------------------------------------
/examples/multiplots/spot_report.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/multiplots/spot_report.png
--------------------------------------------------------------------------------
/examples/multiplots/transparent_production.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/multiplots/transparent_production.png
--------------------------------------------------------------------------------
/examples/outages/animated_availability.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/outages/animated_availability.png
--------------------------------------------------------------------------------
/examples/outages/evolution_mean_availability.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/outages/evolution_mean_availability.png
--------------------------------------------------------------------------------
/examples/outages/expected_program.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/outages/expected_program.png
--------------------------------------------------------------------------------
/examples/outages/incremental_programs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/outages/incremental_programs.png
--------------------------------------------------------------------------------
/examples/outages/regression_delays.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/outages/regression_delays.png
--------------------------------------------------------------------------------
/examples/production/power.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/production/power.png
--------------------------------------------------------------------------------
/examples/weather/curve.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/weather/curve.png
--------------------------------------------------------------------------------
/examples/weather/distribution_temperature.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cre-dev/pub-data-visualization/f99d4fa14daefb9eaf63cc4d9ac62330e6702533/examples/weather/distribution_temperature.png
--------------------------------------------------------------------------------
/pub_data_visualization/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Visualization module for public energy data with Python.
4 |
See https://github.com/cre-dev/pub-data-visualization for a presentation of this project.
6 |
7 | All the subpackages have a similar structure :
8 |
9 | | TypeOfData/
10 | | __init__.py
11 | | load/
12 | | __init__.py
13 | | dataSource1/
14 | | dataSource2/
15 | | ...
16 | | tools/
17 | | __init__.py
18 | | tool1.py
19 | | tool2.py
20 | | ...
21 | | plot/
22 | | __init__.py
23 | | plotFunction1.py
24 | | plotFunction2.py
25 | | ...
26 | | subplot/
27 | | subplotFunction1.py
28 | | subplotFunction2.py
29 | | ...
30 |
31 | """
32 |
33 |
34 | from . import global_tools
35 | from . import global_var
36 | from . import indices
37 | from . import load
38 | from . import multiplots
39 | from . import outages
40 | from . import production
41 | from . import production_capacity
42 | from . import weather
43 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Generic set of tools to manipulate the data.
4 |
5 | This module contains a set of tools used by
6 | the different subpackages of this project.
7 |
8 | """
9 |
10 | from .compute_delivery_dates import *
11 | from .compute_delivery_period_index import *
12 | from .compute_delivery_windows import *
13 | from .compute_maturity import *
14 | from .compute_nb_hours import *
15 | from .dt_exists_in_tz import *
16 | from .format_contract_name import *
17 | from .format_latex import *
18 | from .format_unit_name import *
19 | from .heatmap import *
20 | from .piecewise_constant_interpolators import *
21 | from .set_mpl import *
22 |
23 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/compute_nb_hours.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 | import pandas as pd
4 | #
5 | from .. import global_var
6 |
7 |
def compute_nb_hours(product_delivery_windows,
                     frequency = None,
                     ):
    """
    Computes the number of hours of a given delivery contract.

    :param product_delivery_windows: The delivery windows
    :param frequency: The type of the delivery contract ; used to allow
                      durations that are not a whole number of half hours
                      (e.g. balance-of-week contracts)
    :type product_delivery_windows: list of pairs of pd.Timestamp
    :type frequency: string
    :return: The number of hours of the contract,
             or None if no delivery windows are given
    :rtype: float
    """
    if product_delivery_windows is None:
        return None

    # Validate the windows before summing their durations.
    for beginning, end in product_delivery_windows:
        assert isinstance(beginning, pd.Timestamp)
        assert isinstance(end,       pd.Timestamp)
    nb_seconds = np.sum([end - beginning
                         for beginning, end in product_delivery_windows
                         ]).total_seconds()
    # Durations are expected to be a whole number of half hours,
    # except for balance-of-week contracts whose remaining delivery
    # period can end at an arbitrary time.
    assert (   nb_seconds % 1800 == 0
            or frequency in [global_var.contract_frequency_bow,
                             ]
            )
    return float(nb_seconds / 3600)
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/dt_exists_in_tz.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | from pytz.exceptions import NonExistentTimeError
4 |
5 |
def dt_exists_in_tz(x, tz):
    """
    Tests the existence of a timestamp in a given timezone.

    A naive timestamp does not exist in a timezone when it falls in the
    hole created by the spring daylight saving time shift.

    :param x: The time to test (pd.NaT is accepted and considered existent)
    :param tz: The local timezone
    :type x: pd.Timestamp
    :type tz: pytz.tzfile
    :return: True if the timestamp exists in the timezone
    :rtype: bool
    """
    assert (   isinstance(x, pd.Timestamp)
            or x is pd.NaT
            )
    try:
        # ambiguous = True so that times repeated by the autumn DST shift
        # do not raise an AmbiguousTimeError : only non-existent times
        # (spring shift) must be rejected.
        x.tz_localize(tz, ambiguous = True)
        return True
    except NonExistentTimeError:
        return False
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/format_contract_name.py:
--------------------------------------------------------------------------------
1 |
2 | import re
3 | #
4 | from .. import global_var
5 |
def format_contract_name(commodity = None,
                         map_code = None,
                         year = None,
                         frequency = None,
                         delivery_period = None,
                         profile = None,
                         ):
    """
    Returns a generic name for a given contract.

    :param commodity: The traded commodity
    :param map_code: The code of the delivery zone
    :param year: The year of the delivery
    :param frequency: The type of delivery contract (year, month, etc.)
    :param delivery_period: The index of the delivery contract
    :param profile: The profile of the delivery contract
    :type commodity: string
    :type map_code: string
    :type year: int
    :type frequency: string
    :type delivery_period: int
    :type profile: string
    :return: The formatted name of the contract
    :rtype: string
    """
    assert len(str(year)) == 4
    assert str(year).isdigit()
    assert type(frequency) == str
    # The profile must either match the bloc-profile pattern or be one of
    # the named profiles declared in global_var.
    assert ( re.compile(global_var.contract_profile_bloc_pattern).match(profile)
             or profile in {global_var.contract_profile_gas,
                            global_var.contract_profile_base,
                            global_var.contract_profile_ofpk,
                            global_var.contract_profile_peak,
                            global_var.contract_profile_hour,
                            global_var.contract_profile_half_hour,
                            }
             )

    # filter(None, ...) drops the missing fields ; profiles that carry no
    # extra information (gas, hour, half-hour, bloc) are not repeated in
    # the name.
    return '.'.join(filter(None, [map_code,
                                  commodity,
                                  '{year}{frequency}{delivery_period}'.format(year = year,
                                                                              frequency = frequency,
                                                                              delivery_period = delivery_period,
                                                                              ),
                                  (profile
                                   if profile not in [global_var.contract_profile_gas,
                                                      global_var.contract_profile_hour,
                                                      global_var.contract_profile_half_hour,
                                                      global_var.contract_profile_bloc,
                                                      ]
                                   else
                                   None
                                   ),
                                  ]))
56 |
57 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/format_latex.py:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
def format_latex(ss):
    """
    Formats a string so that it is compatible with Latex.

    :param ss: The string to format (non-strings are converted with str)
    :type ss: string
    :return: The formatted string
    :rtype: string
    """
    # Underscores would be read as subscript markers and a bare '%' would
    # start a Latex comment, so both are made Latex-safe.
    # r'\%' : a raw string avoids the invalid escape sequence '\%'
    # (a SyntaxWarning since Python 3.12).
    tt = (str(ss).replace('_', ' ')
                 .replace('%', r'\%')
          )
    return tt
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/format_unit_name.py:
--------------------------------------------------------------------------------
1 |
2 |
3 |
def format_unit_name(unit):
    """
    Formats the name of a production unit
    so that different date sources coincide.

    :param unit: The name of the unit
    :type unit: string
    :return: The formatted name of the unit
    :rtype: string
    """
    # Upper case and collapse surrounding / repeated whitespace.
    name = ' '.join(unit.upper().split())
    # Apostrophes, dashes and underscores all become plain spaces.
    for separator in ("'", '-', '_'):
        name = name.replace(separator, ' ')
    # Finally apply the source-specific renaming table, in order.
    for local_name, standard_name in dikt.items():
        name = name.replace(local_name, standard_name)
    return name


dikt = {

        # Country
        # locally_used_name : standardized_name,

        # FR
        "AIGLE (L )"          : "AIGLE",
        "AMFARD14"            : "AMFARD 14",
        "AMFARD15"            : "AMFARD 15",
        "BAYET MORANT 1"      : "BAYET",
        "BLAYAIS (LE)"        : "BLAYAIS",
        "BUGEY (LE)"          : "BUGEY",
        "CHASTANG (LE)"       : "CHASTANG",
        "CHEYLAS (LE)"        : "CHEYLAS",
        "CHINON B"            : "CHINON",
        "CHOOZ B"             : "CHOOZ",
        "COMBIGOLFE CCG"      : "COMBIGOLFE",
        "CYCOFOS PL"          : "CYCOFOS ",
        "DAMPIERRE EN BURLY"  : "DAMPIERRE",
        "FR GA MORANT1"       : "BAYET",
        "FR CPCU COGEVITRY"   : "COGEVITRY",
        "FR CPCU SAINT OUEN"  : "ST OUEN",
        "FR LA STPIERRE G"    : "ST PIERRE",
        "FR MAREGES"          : "MAREGES",
        "FR SAINT PIERRE"     : "ST PIERRE",
        "HAVRE (LE)"          : "HAVRE",
        "LUCY 3"              : "LUCY",
        "MARTIGUES PONTEAU"   : "MARTIGUES",
        "MAXE (LA)"           : "MAXE",
        "NOGENT SUR SEINE"    : "NOGENT",
        "PORCHEVILLE B"       : "PORCHEVILLE",
        "POUGET (LE)"         : "POUGET",
        "PROVENCE 4 BIOMASSE" : "PROVENCE 4",
        "SPEM CCG"            : "SPEM",
        "SAINT CHAMAS"        : "ST CHAMAS",
        "SAINT ESTEVE"        : "ST ESTEVE",
        "SAINT GUILLERME"     : "ST GUILLERME",
        "SAINT PIERRE"        : "ST PIERRE",
        "SAINT PIERRE COGNET" : "ST PIERRE",
        "SAINTE CROIX"        : "STE CROIX",
        "SPEM POINTE TG"      : "SPEM POINTE",
        "ST ALBAN ST MAURICE" : "ST ALBAN",
        "ST LAURENT DES EAUX" : "ST LAURENT",
        "ST LAURENT B"        : "ST LAURENT",
        "TRICASTIN (LE)"      : "TRICASTIN",
        "VITRY ( SUR SEINE)"  : "VITRY",

        }
77 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/piecewise_constant_interpolators.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import numpy as np
4 | import numbers
5 | import pandas as pd
6 |
def piecewise_constant_interpolators(x,
                                     y,
                                     smoother = None,
                                     ):
    """
    Returns the points to interpolate
    to draw a piecewise constant function.

    x and y are expected to have the same length ; the last value of y
    is ignored (y[k] is the level of the step between x[k] and x[k+1]).

    :param x: The x-coordinates of the steps
    :param y: The y-coordinates of the steps
    :param smoother: The smoothing strategy : 'basic' draws oblique
                     instead of vertical steps, any other value only
                     removes the redundant points
    :type x: list of floats
    :type y: list of floats
    :type smoother: string
    :return: The coordinates of the points to interpolate
    :rtype: (list of floats, list of floats)
    """
    # Duplicate the levels into the pairs of points delimiting each step.
    X, Y = levels_to_points(x, y)

    # Simplify X and Y
    if smoother == 'basic':
        # Turn each vertical jump into an oblique segment : the two points
        # of the jump are moved apart by at most 6 hours and by at most a
        # quarter of the widths of the two neighboring steps.
        # NOTE(review): this branch assumes the x-coordinates are
        # timestamps (pd.Timedelta is compared against their differences)
        # — confirm against the callers.
        X = np.array([X[0],
                      *[e
                        for ii in range(int(X[1:-1].shape[0]/2))
                        for e in [X[2*ii + 1] - min(pd.Timedelta(hours = 6),
                                                    (X[2*ii + 1] - X[2*ii])/4,
                                                    (X[2*ii + 3] - X[2*ii + 2])/4,
                                                    ),
                                  X[2*ii + 2] + min(pd.Timedelta(hours = 6),
                                                    (X[2*ii + 1] - X[2*ii])/4,
                                                    (X[2*ii + 3] - X[2*ii + 2])/4,
                                                    ),
                                  ]
                        ],
                      X[-1],
                      ])
    else:
        # Keep only the points where the function actually jumps.
        X, Y = remove_redundant_interpolators(X, Y)

    return X, Y
48 |
49 |
50 |
def levels_to_points(x, y):
    """
    Duplicates the step levels into pairs of interpolation points.

    The last value of y is ignored ; each remaining level appears twice
    in Y, and each inner x-coordinate appears twice in X (the first and
    last ones once), so that (X, Y) traces the horizontal segments of a
    piecewise constant function.
    """
    doubled_levels = [level
                      for level in y[:-1]
                      for _ in (0, 1)
                      ]
    # Every level must be numeric before it is handed to matplotlib.
    for value in doubled_levels:
        assert isinstance(value, numbers.Number), '{0}\n{1}'.format(type(value), value)

    doubled_abscissae = [abscissa
                         for abscissa in x
                         for _ in (0, 1)
                         ][1:-1]

    return np.array(doubled_abscissae), np.array(doubled_levels)
69 |
70 |
71 |
def remove_redundant_interpolators(X, Y):
    """
    Drops the interpolation points lying strictly inside a flat segment,
    i.e. those equal to both of their neighbors ; the two endpoints are
    always kept.
    """
    last_index = len(X) - 1
    keep = []
    for idx in range(len(X)):
        is_endpoint = (idx == 0 or idx == last_index)
        keep.append(   is_endpoint
                    or Y[idx - 1] != Y[idx]
                    or Y[idx + 1] != Y[idx]
                    )
    mask = np.array(keep)
    return X[mask], Y[mask]
82 |
83 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_tools/set_mpl.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from termcolor import colored
4 | from distutils.spawn import find_executable
5 |
6 |
# Default text size used for legends and fonts.
size_txt = 12
# Latex packages loaded when latex rendering is available.
latex_preamble = r"""
\usepackage{amsmath}
\usepackage{amssymb}
\usepackage{accents}
\usepackage{bm}
"""

# Verbose flag for debugging messages.
verb = False

def set_mpl(mpl,
            plt,
            fontP,
            ):
    """
    Sets matplotlib so that all plots look alike with Latex strings.

    Latex rendering is only activated when a latex executable is found
    on the system ; otherwise matplotlib's defaults are kept.

    :param mpl: The matplotlib module
    :param plt: The matplotlib.pyplot module
    :param fontP: The font properties object to configure
                  (any object exposing set_size)
    :type mpl: module
    :type plt: module
    :type fontP: matplotlib.font_manager.FontProperties
    :return: None
    :rtype: None
    """
    import shutil
    try:
        # Old matplotlib debugging switch ; recent versions no longer
        # expose it, hence the AttributeError guard.
        mpl.verbose.level = 'debug-annoying'
    except AttributeError as e:
        if verb:
            print(colored(e, 'red'))
    # shutil.which replaces distutils.spawn.find_executable :
    # distutils is deprecated and removed from Python 3.12.
    if shutil.which('latex'):
        ### Latex
        plt.rc('text', usetex=True)
        plt.rc('text.latex', preamble = latex_preamble)
        plt.rc('legend',**{'fontsize' : size_txt})
        ### Font
        plt.rc('font', **{'family' : 'serif',
                          'serif' : ['Computer Modern'],
                          'size' : size_txt
                          })
        fontP.set_size('small')
    else:
        if verb:
            print("latex not found")
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to homogenize and format the data coming from different data sources.
4 |
5 | """
6 |
7 | from .cascade_frequencies import *
8 | from .cascade_frequencies_gas import *
9 | from .cascade_maturities import *
10 | from .dikt_tz import *
11 | from .months import *
12 | from .path_folders import *
13 | from .plot_variables import *
14 | from .user_defined_names import *
15 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/cascade_frequencies.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Definition of the cascade effect between different WEPs.
4 |
5 | """
6 |
7 | from . import user_defined_names as global_var
8 |
# Suffixes of the global_var.contract_frequency_* constants, ordered from
# the longest delivery period to the shortest (the cascade order).
_FREQUENCY_SUFFIXES = (
    'years', 'year', 'gas_year', 'boy',
    'season', 'bos',
    'quarter', 'boq',
    'months', 'month', 'bom',
    'week', 'bow', 'weekend',
    'days', 'day',
    'within_day', 'bloc', 'hour', 'half_hour',
)

# Contract frequencies ordered by the cascade effect (long to short WEPs).
cascade_frequencies = [getattr(global_var, 'contract_frequency_' + suffix)
                       for suffix in _FREQUENCY_SUFFIXES]
30 |
31 |
32 |
33 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/cascade_frequencies_gas.py:
--------------------------------------------------------------------------------
1 | """
2 | Definition of the cascade effect between different WEPs.
3 |
4 | """
5 |
6 | from . import user_defined_names as global_var
7 |
# Gas contract frequencies ordered by the cascade effect
# (longest delivery period first).
_GAS_FREQUENCY_SUFFIXES = ('gas_year', 'season', 'quarter',
                           'month', 'week', 'day', 'within_day')

cascade_frequencies_gas = [getattr(global_var, 'contract_frequency_' + suffix)
                           for suffix in _GAS_FREQUENCY_SUFFIXES]
17 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/cascade_maturities.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Definition of the cascade effect between different maturities.
4 |
5 | """
6 |
7 | from . import user_defined_names as global_var
8 |
# Maximum number of delivery periods ahead considered for every product type.
h_max = 50

# (format template, name of its format argument) for each product type,
# ordered from the longest delivery period to the shortest.
_MATURITY_TEMPLATES = (
    (global_var.maturity_gas_year, 'nb_years'),
    (global_var.maturity_year,     'nb_years'),
    (global_var.maturity_season,   'nb_seasons'),
    (global_var.maturity_quarter,  'nb_quarters'),
    (global_var.maturity_month,    'nb_months'),
    (global_var.maturity_week,     'nb_weeks'),
    (global_var.maturity_weekend,  'nb_weeks'),  # NOTE(review): weekend horizons counted in weeks — kept as-is
    (global_var.maturity_day,      'nb_days'),
)

# Maturities ordered by the cascade effect: for each product type in turn,
# from the furthest horizon (h_max) down to 0.
cascade_maturities = [template.format(**{argname: horizon})
                      for template, argname in _MATURITY_TEMPLATES
                      for horizon in range(h_max, -1, -1)]
21 |
22 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/dikt_tz.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Dictionary of timezones.
4 |
5 | Dictionary of pairs
6 | map_code : corresponding timezone
7 |
8 | """
9 |
10 | import pytz
11 |
12 |
# (map_code, IANA timezone name) pairs; declaration order preserved from the
# original so that the iteration order of dikt_tz is unchanged.
_TZ_NAMES = [
    ('AT',          'Europe/Vienna'),
    ('BE',          'Europe/Brussels'),
    ('BG',          'Europe/Sofia'),
    ('CH',          'Europe/Zurich'),
    ('DE_AT_LU',    'Europe/Berlin'),
    ('DE_LU',       'Europe/Berlin'),
    ('CZ',          'Europe/Prague'),
    ('DK1',         'Europe/Copenhagen'),
    ('DK2',         'Europe/Copenhagen'),
    ('EE',          'Europe/Tallinn'),
    ('ES',          'Europe/Madrid'),
    ('FI',          'Europe/Helsinki'),
    ('French',      'CET'),
    ('FR',          'CET'),
    ('GB',          'Europe/London'),
    ('GR',          'Europe/Athens'),
    ('HR',          'Europe/Zagreb'),
    ('HU',          'Europe/Budapest'),
    ('IE_SEM',      'Europe/Dublin'),
    ('IT_BRNN',     'Europe/Rome'),
    ('IT_CNOR',     'Europe/Rome'),
    ('IT_CSUD',     'Europe/Rome'),
    ('IT_FOGN',     'Europe/Rome'),
    ('IT_GR',       'Europe/Rome'),
    ('IT_NORD',     'Europe/Rome'),
    ('IT_NORD_AT',  'Europe/Rome'),
    ('IT_NORD_CH',  'Europe/Rome'),
    ('IT_NORD_FR',  'Europe/Rome'),
    ('IT_NORD_SI',  'Europe/Rome'),
    ('IT_North',    'Europe/Rome'),
    ('IT_PRGP',     'Europe/Rome'),
    ('IT_ROSN',     'Europe/Rome'),
    ('IT_SACO_AC',  'Europe/Rome'),
    ('IT_SACO_DC',  'Europe/Rome'),
    ('IT_SARD',     'Europe/Rome'),
    ('IT_SICI',     'Europe/Rome'),
    ('IT_SUD',      'Europe/Rome'),
    ('LT',          'Europe/Vilnius'),
    ('LV',          'Europe/Riga'),
    ('ME',          'Europe/Podgorica'),
    ('NL',          'Europe/Amsterdam'),
    ('NO1',         'Europe/Oslo'),
    ('NO2',         'Europe/Oslo'),
    ('NO3',         'Europe/Oslo'),
    ('NO4',         'Europe/Oslo'),
    ('NO5',         'Europe/Oslo'),
    ('PL',          'Europe/Warsaw'),
    ('PT',          'Europe/Lisbon'),
    ('RO',          'Europe/Bucharest'),
    ('RS',          'Europe/Belgrade'),
    ('SE1',         'Europe/Stockholm'),
    ('SE2',         'Europe/Stockholm'),
    ('SE3',         'Europe/Stockholm'),
    ('SE4',         'Europe/Stockholm'),
    ('SI',          'Europe/Rome'),  # NOTE(review): Rome rather than Ljubljana — same UTC offsets, kept as-is
    ('SK',          'Europe/Bratislava'),
]

# map_code -> pytz timezone object
dikt_tz = {code: pytz.timezone(name) for code, name in _TZ_NAMES}
70 |
71 |
72 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/months.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between dates and written months.
4 |
5 | Correspondances between the months written in different sources
6 | and their indices (from 1 to 12).
7 |
8 | """
9 |
10 | import calendar
11 |
# English three-letter abbreviations ('JAN' .. 'DEC') taken from the
# calendar module; index 0 of month_abbr is the empty string and is skipped.
month_str_int = {abbr.upper(): index
                 for index, abbr in enumerate(calendar.month_abbr)
                 if index}

# Abbreviations found in (mostly French) data sources, layered on top of
# the English ones.
month_str_int.update({'JANV' : 1,
                      'FÉVR' : 2,
                      'MARS' : 3,
                      'AVR'  : 4,
                      'MAI'  : 5,
                      'JUNE' : 6,
                      'JUIN' : 6,
                      'JULY' : 7,
                      'JUIL' : 7,
                      'AOÛT' : 8,
                      'SEPT' : 9,
                      'DÉC'  : 12,
                      })
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/path_folders.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Definition of the paths to the data.
4 |
5 | Definition of
6 | the folders to read and save the data
7 | and the folders where the plots should be saved.
8 | Although a default configuration is proposed,
9 | the user can decide her own folders by creating
10 | a module pers_var.py in this folder.
11 | """
12 |
13 | import os
14 |
# Prefer user-specific paths declared in an optional, untracked module
# pers_var.py located in this folder.
try: # Local non-synchronized file for personal folders path
    from .pers_var import path_public_data
    from .pers_var import path_transformed
    from .pers_var import path_outputs
    from .pers_var import path_plots

# Fall back to default folders in the user's home directory when pers_var
# is absent (ModuleNotFoundError) or rejects the setup (AssertionError —
# presumably raised inside pers_var itself; confirm).
except (ModuleNotFoundError, AssertionError):
    path_home = r'{0}'.format(os.path.expanduser('~'))
    # Raw public data downloaded from the providers.
    path_public_data = os.path.join(path_home,
                                    r'_energy_public_data',
                                    )
    # Intermediate transformed dataframes (local cache).
    path_transformed = os.path.join(path_home,
                                    r'_energy_tmp_data',
                                    )
    # Computation outputs.
    path_outputs = os.path.join(path_home,
                                r'_energy_outputs',
                                )
    # Saved figures.
    path_plots = os.path.join(path_home,
                              r'_energy_plots',
                              )
35 |
36 |
--------------------------------------------------------------------------------
/pub_data_visualization/global_var/plot_variables.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | User defined variables for plotting.
4 |
5 | """
6 | from distutils.spawn import find_executable
7 | import matplotlib.cm as cm
8 |
# Timestamp format used in output file names.
dt_formatter_file = '%Y%m%d_%H%M'

# Date/time tick formats. When a LaTeX toolchain is available the figures
# are rendered with usetex (see global_tools.set_mpl), so spaces must be
# escaped ('\ ') and ':' written as '{:}' to survive LaTeX processing.
# Raw strings are used because '\ ' is not a valid Python escape sequence
# and raises a SyntaxWarning on recent Python versions; the string values
# are unchanged.
if find_executable('latex'):
    dt_formatter = r"%a\ %d/%m/%Y\ %H{:}%M"
    dt_formatter_tz = r'%d/%m/%Y\ %H{:}%M %Z'
    date_formatter = r"%a\ %d/%m/%Y"
else:
    dt_formatter = "%a %d/%m/%Y %H:%M"
    dt_formatter_tz = '%d/%m/%Y %H:%M %Z'
    date_formatter = "%a %d/%m/%Y"

# Default categorical style cycles for the plots.
colors = cm.tab10.colors
linestyles = ['-', '--', '-.', ':']
markers = ["o", "^", ">", "<", "v", 'd', 'x']

# Figure sizes as (width, height) in inches.
figsize_horizontal = (18,10)
figsize_vertical = (10,18)
figsize_horizontal_ppt = (11.5,6.5)
figsize_vertical_ppt = (6,10)
figsize_square_ppt = (10,10)
28 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load and plot public auctions data.
4 |
5 | """
6 |
7 | from .load import *
8 | from . import plot
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load public auctions data.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/entsoe_da/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load auctions data coming from ENTSO-E.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/entsoe_da/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw auctions data provided by ENTSO-E
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
12 |
# Folder containing the raw day-ahead price files downloaded from the
# ENTSO-E transparency platform.
folder_raw = os.path.join(global_var.path_public_data,
                          '11_ENTSOE',
                          'DayAheadPrices_12.1.D',
                          )
# Path template of the transformed dataframe; format with map_code = <zone>.
fpath_tmp = os.path.join(global_var.path_transformed,
                         'ENTSOE',
                         'DayAheadPrices',
                         'DayAheadPrices_12.1.D_{map_code}.csv',
                         )
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/entsoe_da/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by ENTSO-E.
4 |
5 | This module establishes the correspondances between the user defined names
6 | and the names used by ENTSO-E in the files containing the auctions data.
7 | """
8 |
9 |
10 | from .columns import *
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/entsoe_da/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names used by ENTSO-E
4 | for the columns and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping: column name in the raw ENTSO-E day-ahead price csv files
# -> user defined column name.
columns = {'AreaCode' : global_var.geography_area_code,
           'AreaTypeCode' : global_var.geography_area_type_code,
           'AreaName' : global_var.geography_area_name,
           'Currency' : global_var.currency,
           'Day' : global_var.contract_delivery_begin_day_utc,
           'DateTime' : global_var.contract_delivery_begin_dt_utc,
           'Month' : global_var.contract_delivery_begin_month_utc,
           'Year' : global_var.contract_delivery_begin_year_utc,
           'MapCode' : global_var.geography_map_code,
           'Price' : global_var.auction_price_euro_mwh,
           'UpdateTime' : global_var.publication_dt_utc,
           'ResolutionCode' : global_var.time_resolution_code,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/indices/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import entsoe_da
6 |
7 |
def load(source = None,
         date_min = None,
         date_max = None,
         **kwargs,
         ):
    """
    Calls the appropriate loader of indices from the given data source.

    :param source: The data source
    :param date_min: The left bound
    :param date_max: The right bound
    :param kwargs: Additional kwargs passed to the source-specific loader
    :type source: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :type kwargs: dict
    :return: The selected indices
    :rtype: pd.DataFrame
    """

    # Guard clause: only the ENTSO-E auctions source is supported.
    if source != global_var.data_source_auctions_entsoe:
        raise ValueError('Incorrect data source : {0}'.format(source))
    dg = entsoe_da.load(**kwargs)

    # Restrict to [date_min, date_max) when the bounds are provided.
    mask = pd.Series(True, index = dg.index)
    if date_min:
        mask &= (dg[global_var.auction_dt_utc] >= date_min)
    if date_max:
        mask &= (dg[global_var.auction_dt_utc] < date_max)
    dh = dg.loc[mask]

    assert dh.shape[0] > 0
    assert dh.index.is_unique

    return dh
43 |
44 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to plot auctions data.
4 |
5 | """
6 |
7 |
8 | from .price import *
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/plot/price.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 | import os
4 | #
5 | from ... import global_tools, global_var
6 | from . import subplot
7 | #
8 | import seaborn as sns
9 | import matplotlib as mpl
10 | import matplotlib.pyplot as plt
11 | import matplotlib.dates as mdates
12 | from pandas.plotting import register_matplotlib_converters; register_matplotlib_converters()
13 | from matplotlib.font_manager import FontProperties
14 | global_tools.set_mpl(mpl, plt, FontProperties())
15 | #
16 |
17 |
def price(dg,
          source = None,
          map_code = None,
          date_min = None,
          date_max = None,
          folder_out = None,
          close = None,
          figsize = global_var.figsize_horizontal,
          ):
    """
    Plot the auction prices by creating a figure and
    calling the function to fill the subplot.

    The figure is always saved as a png file under
    folder_out/auctions_price/; the interactive window is only kept
    open when close is falsy.

    :param dg: The auction prices
    :param source: The data source
    :param map_code: The bidding zone
    :param date_min: The left bound
    :param date_max: The right bound
    :param folder_out: The folder where the figure is saved
    :param close: Boolean to close the figure after it is saved
    :param figsize: Desired size of the figure
    :type dg: pd.DataFrame
    :type source: string
    :type map_code: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :type folder_out: string
    :type close: bool
    :type figsize: (int,int)
    :return: None
    :rtype: None
    """
    ### Interactive mode
    # Disable interactive display when the figure is to be closed after
    # saving; enable it otherwise so the window shows up.
    if close:
        plt.ioff()
    else:
        plt.ion()


    ### Figure
    fig, ax = plt.subplots(figsize = figsize,
                           nrows = 1,
                           ncols = 1,
                           )

    ### Subplot
    subplot.price(ax,
                  dg,
                  )

    ### Ticks
    ax.xaxis.set_major_formatter(mdates.DateFormatter(global_var.dt_formatter))
    fig.autofmt_xdate()
    if date_min and date_max:
        ax.set_xlim(date_min, date_max)

    ### labels
    ax.set_xlabel(global_tools.format_latex(global_var.contract_delivery_begin_dt_utc))
    ax.set_ylabel(global_var.auction_price_euro_mwh)

    ### Add legend
    # Deduplicate legend entries: labels can repeat across lines, and the
    # dict keeps one handle per label.
    lns01, labs01 = ax.get_legend_handles_labels()
    by_label0 = dict(zip(labs01, lns01))
    ax.legend(by_label0.values(),
              by_label0.keys(),
              loc = 0,
              )

    ### Finalize
    # The title is first assembled with '{...}' placeholders (empty parts
    # filtered out) and then formatted in a single pass.
    title = ' - '.join(filter(None, ['source = {source}' if source else '',
                                     'map_code = {map_code}'if map_code else '',
                                     ])).format(source = source,
                                                map_code = map_code,
                                                )
    fig.suptitle(global_tools.format_latex(title))
    plt.tight_layout(rect = [0, 0.01, 1, 0.95])

    # Save
    # The period subfolder is only added when both bounds are given.
    full_path = os.path.join(folder_out,
                             "auctions_price",
                             "period_{begin}_{end}".format(begin = date_min.strftime(global_var.dt_formatter_file),
                                                           end = date_max.strftime(global_var.dt_formatter_file),
                                                           ) if date_min and date_max else '',
                             title,
                             )
    os.makedirs(os.path.dirname(full_path),
                exist_ok = True,
                )
    plt.savefig(full_path + ".png",
                format = "png",
                bbox_inches = "tight",
                )
    if close:
        plt.close()
112 |
113 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/plot/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module for the subplots of auctions data.
4 |
5 | """
6 |
7 |
8 | from .price import *
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/indices/plot/subplot/price.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #
4 | from .... import global_tools, global_var
5 | #
6 | import seaborn as sns
7 | import matplotlib as mpl
8 | import matplotlib.pyplot as plt
9 | import matplotlib.dates as mdates
10 | from pandas.plotting import register_matplotlib_converters; register_matplotlib_converters()
11 | from matplotlib.font_manager import FontProperties
12 | global_tools.set_mpl(mpl, plt, FontProperties())
13 | #
14 |
15 |
def price(ax,
          dg,
          **kwargs,
          ):
    """
    Draws in a subplot the auction prices.

    One line is drawn per column of dg, all sharing the same x values
    (the delivery begin timestamps taken from the dataframe index).

    :param ax: The ax to fill
    :param dg: The auction prices
    :param kwargs: additional parameter for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type dg: pd.DataFrame
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    # Hoisted out of the loop: the x values are identical for every column.
    dates = dg.index.get_level_values(global_var.contract_delivery_begin_dt_utc)
    for col in dg.columns:
        ax.plot(dates,
                dg[col],
                label = global_tools.format_latex(col),
                **kwargs,
                )
40 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Visualization module to load and plot the load data.
4 |
5 | """
6 |
7 | from .load import *
8 | from . import plot
9 | from . import tools
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the load data.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the load data provided by eCO2mix.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 |
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/load_raw/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load or download the load data from eCO2mix.
4 |
5 | """
6 |
7 |
8 | from .load_raw import *
9 |
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/load_raw/load_raw.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import urllib
4 | import zipfile
5 | import errno
6 | import pandas as pd
7 | #
8 | from . import url
9 |
def load_raw(year):
    """
    Downloads the data from eCO2mix if it is not saved locally
    then reads the downloaded dataframes.

    :param year: The year of the data to load
    :type year: int
    :return: The selected eCO2mix data
    :rtype: pd.DataFrame
    :raises FileNotFoundError: if the file is neither present locally
                               nor downloadable
    """
    # 'import urllib' alone does not guarantee the request/parse submodules
    # are loaded, so import them explicitly before use.
    import urllib.request
    import urllib.parse

    fname_xls = url.fname_xls.format(year = year)
    fpath_xls = os.path.join(url.folder_raw,
                             fname_xls,
                             )
    try:
        if not os.path.isfile(fpath_xls): # try to download
            os.makedirs(url.folder_raw, exist_ok = True)
            fname_zip = url.fname_zip.format(year = year)
            fpath_zip = os.path.join(url.folder_raw,
                                     fname_zip,
                                     )
            # Build the URL with urljoin instead of os.path.join:
            # os.path.join would insert backslashes on Windows.
            url_zip = urllib.parse.urljoin(url.website, fname_zip)
            urllib.request.urlretrieve(url_zip,
                                       fpath_zip,
                                       )
            with zipfile.ZipFile(fpath_zip, 'r') as zipObj:
                zipObj.extractall(os.path.dirname(fpath_xls))
            assert os.path.isfile(fpath_xls)
        # Despite the .xls extension, the file is a tab-separated text file.
        df = pd.read_csv(fpath_xls,
                         header = 0,
                         index_col = False,
                         sep = '\t',
                         encoding = 'latin-1',
                         na_values = ['ND'],
                         skipinitialspace = True,
                         low_memory = False,
                         )
    except FileNotFoundError:
        raise FileNotFoundError(
                errno.ENOENT,
                '\nFile not found : {0}\n'
                'It can be downloaded from \n'
                '{1}\n'
                'and stored in\n'
                '{2}'.format(fname_xls,
                             url.website,
                             url.folder_raw,
                             ))
    return df
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/load_raw/url.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | URLs and filenames to download the data provided by eCO2mix
4 | and the folder where it should be saved locally.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from ..... import global_var
11 |
# Base URL of the public eCO2mix download area.
website = 'https://eco2mix.rte-france.com/download/eco2mix/'
# Archive and spreadsheet file name patterns; format with year = <int>.
fname_zip = 'eCO2mix_RTE_Annuel-Definitif_{year}.zip'
fname_xls = 'eCO2mix_RTE_Annuel-Definitif_{year}.xls'

# Local folder where the downloaded raw files are stored.
folder_raw = os.path.join(global_var.path_public_data,
                          '24_RTE',
                          'eCO2mix_RTE',
                          )
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw auctions data provided by eCO2mix
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
# Path template of the transformed load dataframe.
# NOTE(review): the two positional fields are filled by the eco2mix loader —
# presumably map_code and a date marker; confirm against the caller.
fpath_load_tmp = os.path.join(global_var.path_transformed,
                              'eCO2mix',
                              'load_{0}_{1}.csv',
                              )
16 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by eCO2mix.
4 |
5 | """
6 |
7 | from .columns import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/eco2mix/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names used by eCO2mix
4 | and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping: column name in the raw eCO2mix files (French headers)
# -> user defined column name.
columns = {'Périmètre' : global_var.geography_area_name,
           'Nature' : global_var.file_info,
           'AreaName' : global_var.geography_area_name,
           'Date' : global_var.load_date_local,
           'Heures' : global_var.load_time_local,
           'Consommation' : global_var.load_nature_observation,
           'Prévision J-1' : global_var.load_nature_forecast_day1,
           'Prévision J' : global_var.load_nature_forecast_day0,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/entsoe/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the load data provided by ENTSO-E.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 |
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/entsoe/load.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import pandas as pd
4 | import os
5 | #
6 | from .... import global_var
7 | from . import transcode, paths
8 |
def load(map_code = None):
    """
    Loads the load data provided by ENTSO-E.

    Tries to read a previously transformed csv first; on any failure it
    rebuilds the dataframe from the raw ENTSO-E files and caches the
    result before returning it.

    :param map_code: The delivery zone
    :type map_code: string
    :return: The load data
    :rtype: pd.DataFrame
    """

    # Path of the cached, already-transformed dataframe for this zone.
    df_path = paths.fpath_tmp.format(map_code = map_code) + '.csv'
    try:
        print('Load load/entsoe - ', end = '')
        df = pd.read_csv(df_path,
                         header = [0],
                         sep = ';',
                         )
        df.loc[:,global_var.load_dt_utc] = pd.to_datetime(df[global_var.load_dt_utc])
        print('Loaded')
    # Broad except on purpose: any problem with the cache (missing file,
    # bad format, ...) triggers a full rebuild from the raw files.
    except Exception as e:
        print('fail')
        print(e)
        dikt_load = {}
        try:
            list_files = sorted(os.listdir(paths.folder_raw))
            assert len(list_files) > 0
        except Exception as e:
            # The raw files are not distributed with the code; point the
            # user to the ENTSO-E SFTP share and re-raise.
            print('Files not found.\n'
                  'They can be downloaded with the SFTP share proposed by ENTSOE at \n'
                  'https://transparency.entsoe.eu/content/static_content/Static%20content/knowledge%20base/SFTP-Transparency_Docs.html\n'
                  'and stored in\n'
                  '{0}'.format(paths.folder_raw)
                  )
            raise e
        # Read every monthly raw csv, normalize it and keep only the rows
        # of the requested map_code.
        for ii, fname in enumerate(list_files):
            if os.path.splitext(fname)[1] == '.csv':
                # Progress indicator, overwritten in place with '\r'.
                print('\r{0:3}/{1:3} - {2:<28}'.format(ii+1,
                                                       len(list_files),
                                                       fname,
                                                       ),
                      end = '',
                      )
                df = pd.read_csv(os.path.join(paths.folder_raw,
                                              fname,
                                              ),
                                 encoding = 'UTF-8',
                                 sep = '\t',
                                 decimal = '.',
                                 )
                # Rename the raw columns to the user defined names.
                df = df.rename(transcode.columns,
                               axis = 1,
                               )
                # Raw timestamps are naive UTC; make them timezone-aware.
                df[global_var.load_dt_utc] = pd.to_datetime(df[global_var.load_dt_utc]).dt.tz_localize('UTC')
                df = df[df[global_var.geography_map_code] == map_code]
                df = df[[global_var.load_dt_utc,
                         global_var.geography_map_code,
                         global_var.load_power_mw,
                         ]]
                df = df.set_index([global_var.load_dt_utc,
                                   global_var.geography_map_code,
                                   ])
                #df.columns.name = global_var.load_nature
                # Drop rows where every value is missing.
                df = df.dropna(axis = 0,
                               how = 'all',
                               )
                #df = df.stack(0)
                #df.name = global_var.quantity_value
                df = df.reset_index()
                dikt_load[fname] = df
        print()
        # Stack the per-file dataframes and add the constant descriptors.
        df = pd.concat([dikt_load[key]
                        for key in dikt_load.keys()
                        ],
                       axis = 0,
                       )
        df[global_var.commodity] = global_var.commodity_electricity
        df[global_var.load_nature] = global_var.load_nature_observation
        df[global_var.load_power_gw] = df[global_var.load_power_mw]/1e3
        # Files can overlap in time: keep the first occurrence of each timestamp.
        df = df[~df[global_var.load_dt_utc].duplicated(keep='first')]

        # Save
        print('Save')
        os.makedirs(os.path.dirname(df_path),
                    exist_ok = True,
                    )
        df.to_csv(df_path,
                  sep = ';',
                  index = False,
                  )

    print('done')
    return df
101 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/entsoe/paths.py:
--------------------------------------------------------------------------------
1 | """
2 | Folders where the raw load data provided by ENTSO-E
3 | and the transformed dataframes are saved.
4 |
5 | """
6 |
7 | import os
8 | #
9 | from .... import global_var
10 |
# Folder containing the raw total-load files downloaded from the ENTSO-E
# transparency platform.
folder_raw = os.path.join(global_var.path_public_data,
                          '11_ENTSOE',
                          'ActualTotalLoad_6.1.A',
                          )

# Path template of the transformed dataframe (extension added by the
# loader); format with map_code = <zone>.
fpath_tmp = os.path.join(global_var.path_transformed,
                         'ENTSOE',
                         'ActualTotalLoad_6.1.A',
                         'ActualTotalLoad_6.1.A_{map_code}',
                         )
21 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/entsoe/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by ENTSO-E.
4 |
5 | """
6 |
7 | from .columns import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/entsoe/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names used by ENTSO-E
4 | and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping: column name in the raw ENTSO-E total-load csv files
# -> user defined column name.
columns = {'AreaCode' : global_var.geography_area_code,
           'AreaName' : global_var.geography_area_name,
           'AreaTypeCode' : global_var.geography_area_type_code,
           'DateTime' : global_var.load_dt_utc,
           'Day' : global_var.load_day_utc,
           'GenerationUnitEIC' : global_var.unit_eic,
           'InstalledGenCapacity' : global_var.capacity_nominal_mw,
           'MapCode' : global_var.geography_map_code,
           'Month' : global_var.load_month_utc,
           'PowerSystemResourceName' : global_var.unit_name,
           'ResolutionCode' : global_var.time_resolution_code,
           'TotalLoadValue' : global_var.load_power_mw,
           'UpdateTime' : global_var.publication_dt_utc,
           'Year' : global_var.load_year_utc,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/load/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import eco2mix, entsoe
6 |
7 |
def load(source = None,
         map_code = None,
         date_min = None,
         date_max = None,
         ):
    """
    Calls the appropriate loader of the load data
    from the given data source,
    in a given area,
    and between two dates.

    :param source: The data source
    :param map_code: The delivery zone
    :param date_min: The left bound
    :param date_max: The right bound
    :type source: string
    :type map_code: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The selected load data
    :rtype: pd.DataFrame
    """

    # Dispatch to the source-specific loader.
    if source == global_var.data_source_load_eco2mix:
        df = eco2mix.load(map_code = map_code,
                          date_min = date_min,
                          date_max = date_max,
                          )
    elif source == global_var.data_source_load_entsoe:
        df = entsoe.load(map_code = map_code)
    else:
        raise ValueError('Incorrect source : {0}'.format(source))

    # Every loader must return exactly this set of columns.
    expected_columns = {global_var.commodity,
                        global_var.load_dt_utc,
                        global_var.load_nature,
                        global_var.load_power_gw,
                        global_var.load_power_mw,
                        global_var.geography_map_code,
                        }
    assert set(df.columns) == expected_columns

    # Index by timestamp, sort rows chronologically and columns by name.
    dg = df.set_index(global_var.load_dt_utc).sort_index()
    dg = dg.reindex(sorted(dg.columns), axis = 1)

    # Restrict to [date_min, date_max) when the bounds are provided.
    mask = pd.Series(True, index = dg.index)
    if date_min:
        mask &= (dg.index >= date_min)
    if date_max:
        mask &= (dg.index < date_max)
    dh = dg.loc[mask]

    assert dh.shape[0] > 0
    assert not dh.reset_index()[[global_var.load_dt_utc,global_var.load_nature]].duplicated().sum()

    return dh
75 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to plot the load data.
4 |
5 | """
6 |
7 |
8 | from .power import *
9 | from .forecasting_error import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/plot/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to fill subplots with load data.
4 |
5 | """
6 |
7 |
8 | from .power import *
9 | from .forecasting_error import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/plot/subplot/forecasting_error.py:
--------------------------------------------------------------------------------
1 |
2 | import matplotlib as mpl
3 | #
4 | from .... import global_var
5 |
6 |
def forecasting_error(ax,
                      df,
                      load_unit = None,
                      load_observation_nature = None,
                      load_forecast_nature = None,
                      **kwargs
                      ):
    """
    Draws in a subplot the forecasting error,
    i.e. the difference between the observed load
    and the forecast load.

    :param ax: The ax to fill
    :param df: The load data
    :param load_unit: The column of df containing the power values to compare
    :param load_observation_nature: The nature of the observation data to plot
    :param load_forecast_nature: The nature of the forecasts to plot
    :param kwargs: additional parameter for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :type load_unit: string
    :type load_observation_nature: string
    :type load_forecast_nature: string
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    # Observations minus forecasts, aligned on their index;
    # timestamps present in only one of the two series are dropped
    observation = df.loc[df[global_var.load_nature] == load_observation_nature][load_unit]
    forecast    = df.loc[df[global_var.load_nature] == load_forecast_nature][load_unit]
    forecasting_error = (observation - forecast).squeeze().dropna()

    # Error curve
    ax.plot(forecasting_error.index,
            forecasting_error,
            **kwargs,
            )
    # Highlight the over- and under-forecasting areas
    ax.fill_between(forecasting_error.index,
                    forecasting_error,
                    where = forecasting_error > 0,
                    label = 'Positive errors',
                    color = mpl.colors.cnames['deepskyblue'],
                    )
    ax.fill_between(forecasting_error.index,
                    forecasting_error,
                    where = forecasting_error < 0,
                    label = 'Negative errors',
                    color = mpl.colors.cnames['firebrick'],
                    )
    # Zero reference line
    ax.plot([forecasting_error.index.min(),
             forecasting_error.index.max(),
             ],
            [0, 0],
            color = 'k',
            ls = ':',
            )
62 |
63 |
64 |
65 |
66 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/plot/subplot/power.py:
--------------------------------------------------------------------------------
1 |
2 | #
3 | from .... import global_var
4 |
5 |
def power(ax,
          df,
          map_code = None,
          load_nature = None,
          load_unit = None,
          **kwargs,
          ):
    """
    Draws in a subplot the load data.

    :param ax: The ax to fill
    :param df: The load data
    :param map_code: The delivery zone
    :param load_nature: The nature of the data to plot
    :param load_unit: The column of df containing the power values to plot
    :param kwargs: additional parameter for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :type map_code: string
    :type load_nature: string
    :type load_unit: string
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    # Keep only the rows of the wanted zone and data nature
    dg = df.loc[ (df[global_var.geography_map_code] == map_code)
                & (df[global_var.load_nature] == load_nature)
                ]

    # Drop missing values before plotting
    dg = dg.dropna()
    ax.plot(dg.index,
            dg[load_unit],
            **kwargs,
            )
47 |
--------------------------------------------------------------------------------
/pub_data_visualization/load/tools/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to transform the load data.
4 |
5 | """
6 |
7 | from .mean_load_delivery_period import *
--------------------------------------------------------------------------------
/pub_data_visualization/load/tools/mean_load_delivery_period.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import pandas as pd
4 | #
5 | from ... import global_var
6 |
7 |
def mean_load_delivery_period(df,
                              product_delivery_windows,
                              ):
    """
    Computes the average load during
    the delivery windows of
    a given contract.

    :param df: The load data
    :param product_delivery_windows: The delivery windows of the contract
    :type df: pd.DataFrame
    :type product_delivery_windows: list of pairs of pd.Timestamp
    :return: The average load
    :rtype: float
    """

    # Slice the load data on each delivery window and stack the slices
    window_slices = [df.loc[begin:end]
                     for begin, end in product_delivery_windows
                     ]
    load_windows  = pd.concat(window_slices, axis = 0)

    # Average power over all the retained timestamps
    mean_load = load_windows[global_var.load_mw].mean()
    return mean_load
32 |
33 |
--------------------------------------------------------------------------------
/pub_data_visualization/multiplots/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Visualization module to plot data from different sources in one figure.
4 |
5 | """
6 |
7 | from .cloud_2d import *
8 | from .transparent_production import *
9 | from .spot_report import *
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/multiplots/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from . import kernel
--------------------------------------------------------------------------------
/pub_data_visualization/multiplots/subplot/kernel.py:
--------------------------------------------------------------------------------
1 |
2 | import numpy as np
3 | import matplotlib.pyplot as plt
4 | from scipy.stats import kde
5 | #
6 |
7 |
def kernel(ax,
           X,
           Y,
           nbins = 100,
           ):
    """
    Draws in a subplot the 2d kernel density estimate of a set of points.

    :param ax: The ax to fill
    :param X: X-coordinates of the points
    :param Y: Y-coordinates of the points
    :param nbins: The number of grid nodes per axis
                  used to evaluate the density
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type X: np.array
    :type Y: np.array
    :type nbins: int
    :return: None
    :rtype: None
    """

    x = X.values.reshape(-1)
    y = Y.values
    # gaussian_kde expects the samples with shape (ndim, npoints)
    k = kde.gaussian_kde(np.vstack([x, y]))

    # Evaluate the density on a regular grid covering the points
    xi, yi = np.mgrid[x.min():x.max():nbins*1j, y.min():y.max():nbins*1j]
    zi = k(np.vstack([xi.flatten(), yi.flatten()]))

    ax.pcolormesh(xi, yi, zi.reshape(xi.shape), shading='gouraud', cmap=plt.cm.BuGn_r)
    ax.contour(xi,
               yi,
               zi.reshape(xi.shape),
               )
37 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load and plot public outages data.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 | from . import plot
10 | from . import tools
11 |
12 |
13 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load public outages data.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load outages data from ENTSO-E.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/assemble.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from .... import global_var
5 |
6 |
def assemble(df):
    """
    Merges the outages data provided by ENTSO-E
    in different files and discards duplicates.

    :param df: The outages data frame
    :type df: pd.DataFrame
    :return: The corrected outages data frame
    :rtype: pd.DataFrame
    """

    print('\nExtract')
    # Only keep the last row of each
    # (publication id, version, publication datetime) group.
    # The `axis` keyword of groupby is deprecated in pandas >= 2.1;
    # grouping rows (axis 0) is the default, so it is not passed.
    df = df.groupby([global_var.publication_id,
                     global_var.publication_version,
                     global_var.publication_dt_utc,
                     ],
                    ).tail(1)

    ### Sort rows
    print('Sort rows')
    # All keys ascending, except the outage status which is
    # sorted in descending order
    df = df.sort_values(by = [
                              global_var.publication_creation_dt_utc,
                              global_var.publication_id,
                              global_var.publication_version,
                              global_var.publication_dt_utc,
                              global_var.outage_status,
                              global_var.outage_begin_dt_utc,
                              ],
                        ascending = [True,
                                     True,
                                     True,
                                     True,
                                     False,
                                     True,
                                     ]
                        )

    ### Checks
    print('Checks')
    # No missing index values, and each finished outage appears only once
    assert not pd.isnull(df.index.values).any()
    assert df[df[global_var.outage_status] == global_var.outage_status_finished].index.is_unique

    return df
52 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/paths.py:
--------------------------------------------------------------------------------
1 | """
2 | Folders where the raw outages data provided by ENTSO-E
3 | and the transformed dataframes are saved.
4 |
5 | """
6 |
7 | import os
8 | #
9 | from .... import global_var
10 |
# Folder containing the raw ENTSO-E outages files
folder_raw = os.path.join(global_var.path_public_data,
                          '11_ENTSOE',
                          'Outages',
                          )
# Path template of the transformed dataframes,
# to be formatted with the map_code and the file name
fpath_tmp = os.path.join(global_var.path_transformed,
                         'ENTSOE',
                         'Outages',
                         'Outages_{map_code}_{file}',
                         )
20 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by ENTSO-E.
4 |
5 | """
6 |
7 | from .columns import *
8 | from .map_code import *
9 | from .outage_status import *
10 | from .outage_type import *
11 | from .production_source import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the columns used by ENTSO-E
4 | and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Raw ENTSO-E column name -> user defined column name.
# NOTE(review): both 'PowerResourceEIC' and the misspelled 'PowerRecourceEIC'
# are mapped to the same field — presumably the misspelling appears in some
# raw files; confirm against the data.
columns = {'areacode'                : global_var.geography_area_code,
           'AreaCode'                : global_var.geography_area_code,
           'AreaName'                : global_var.geography_area_name,
           'AreaTypeCode'            : global_var.geography_area_type_code,
           'MapCode'                 : global_var.geography_map_code,
           'AvailableCapacity'       : global_var.capacity_available_mw,
           'Day'                     : global_var.outage_begin_day_utc,
           'StartTS'                 : global_var.outage_begin_dt_utc,
           'Month'                   : global_var.outage_begin_month_utc,
           'Year'                    : global_var.outage_begin_year_utc,
           'Reason'                  : global_var.outage_cause,
           'ReasonCode'              : global_var.outage_cause_code,
           'ReasonText'              : global_var.outage_cause_comments,
           'EndTS'                   : global_var.outage_end_dt_utc,
           'Status'                  : global_var.outage_status,
           'Type'                    : global_var.outage_type,
           'ProductionType'          : global_var.production_source,
           'MRID'                    : global_var.publication_id,
           'UpdateTime'              : global_var.publication_dt_utc,
           'Version'                 : global_var.publication_version,
           'TimeZone'                : global_var.time_zone,
           'PowerResourceEIC'        : global_var.unit_eic,
           'PowerRecourceEIC'        : global_var.unit_eic,
           'UnitName'                : global_var.unit_name,
           'InstalledGenCapacity'    : global_var.capacity_nominal_mw,
           'InstalledCapacity'       : global_var.capacity_nominal_mw,
           'VoltageConnectionLevel'  : global_var.unit_voltage_connection,
           }
38 |
39 |
40 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/map_code.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the map_code
4 | used by ENTSO-E and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# ENTSO-E map code -> user defined zone name.
# NOTE(review): 'LT' is the ISO 3166 code of Lithuania but is mapped to
# geography_map_code_latvia — confirm which zone is actually intended.
dikt = {'AT': global_var.geography_map_code_austria,
        'BE': global_var.geography_map_code_belgium,
        'BG': global_var.geography_map_code_bulgaria,
        'CH': global_var.geography_map_code_swiss,
        'CZ': global_var.geography_map_code_czech,
        'DK': global_var.geography_map_code_denmark,
        'ES': global_var.geography_map_code_spain,
        'FI': global_var.geography_map_code_finland,
        'FR': global_var.geography_map_code_france,
        'GB': global_var.geography_map_code_great_britain,
        'HU': global_var.geography_map_code_hungary,
        'LT': global_var.geography_map_code_latvia,
        'NL': global_var.geography_map_code_netherlands,
        'PL': global_var.geography_map_code_poland,
        'PT': global_var.geography_map_code_portugal,
        'RO': global_var.geography_map_code_romania,
        'SK': global_var.geography_map_code_slovakia,
        }

def map_code(ss):
    """Return the user defined name of the zone with map code *ss*,
    or *ss* unchanged when it is not in the correspondence table."""
    ans = dikt.get(ss, ss)
    return ans
32 |
33 |
34 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/outage_status.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the statutes of outages
4 | used by ENTSO-E and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# ENTSO-E outage status -> user defined status.
# 'Cancelled' and 'Withdrawn' are both treated as cancelled outages.
outage_status = {'Active'    : global_var.outage_status_active,
                 'Cancelled' : global_var.outage_status_cancelled,
                 'Withdrawn' : global_var.outage_status_cancelled,
                 }
14 |
15 |
16 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/outage_type.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the types of outages
4 | used by ENTSO-E and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# ENTSO-E outage type -> user defined type
outage_type = {'Forced'  : global_var.outage_type_fortuitous,
               'Planned' : global_var.outage_type_planned,
               }
13 |
14 |
15 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/entsoe/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the energy production sources
4 | used by ENTSO-E and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# ENTSO-E production type -> user defined production source.
# The trailing space in each key is kept as is — presumably it matches
# the values found in the raw ENTSO-E files; confirm against the data.
production_source = {'Biomass '                         : global_var.production_source_biomass,
                     'Fossil Hard coal '                : global_var.production_source_fossil_coal,
                     'Fossil Gas '                      : global_var.production_source_fossil_gas,
                     'Fossil Oil '                      : global_var.production_source_fossil_oil,
                     'Hydro Pumped Storage '            : global_var.production_source_hydro_pumped_storage,
                     'Hydro Water Reservoir '           : global_var.production_source_hydro_reservoir,
                     'Hydro Run-of-river and poundage ' : global_var.production_source_hydro_run_of_river,
                     'Marine '                          : global_var.production_source_marine,
                     'Nuclear '                         : global_var.production_source_nuclear,
                     'Other '                           : global_var.production_source_other,
                     }
21 |
22 |
23 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import entsoe, rte
6 |
7 |
def load(source = None,
         map_code = None,
         producer = None,
         unit_name = None,
         production_source = None,
         publication_dt_min = None,
         publication_dt_max = None,
         ):
    """
    Calls the appropriate loader of the outages data.

    :param source: The data source
    :param map_code: The delivery zone
    :param producer: The operator of the production assets
    :param unit_name: The names of the production assets
    :param production_source: The energy source of the production assets
    :param publication_dt_min: The left bound of the publications
    :param publication_dt_max: The right bound of the publications
    :type source: string
    :type map_code: string
    :type producer: string
    :type unit_name: string
    :type production_source: string
    :type publication_dt_min: pd.Timestamp
    :type publication_dt_max: pd.Timestamp
    :return: The selected outages data
    :rtype: pd.DataFrame
    """

    if source == global_var.data_source_outages_rte:
        df, dikt_incoherences = rte.load(map_code = map_code)

    elif source == global_var.data_source_outages_entsoe:
        df = entsoe.load(map_code = map_code)

    else:
        raise ValueError('Incorrect source : {0}'.format(source))

    # Format
    df = df.set_index([global_var.publication_id,
                       global_var.publication_version,
                       global_var.publication_dt_utc,
                       ],
                      drop = True,
                      )

    # Filter
    df = df[col_order]
    # The publication datetime was moved into the (multi-)index above
    # (drop = True), so the date bounds must be applied on the
    # corresponding index level, not on a column.
    publication_dt = df.index.get_level_values(global_var.publication_dt_utc)
    dg = df.loc[ pd.Series(True, index = df.index)
                & ((df[global_var.producer_name]    .isin([producer] if type(producer) == str else producer)) if bool(producer) else True)
                & ((df[global_var.production_source].isin([production_source] if type(production_source) == str else production_source)) if bool(production_source) else True)
                & ((df[global_var.unit_name]        .isin([unit_name] if type(unit_name) == str else unit_name)) if bool(unit_name) else True)
                & ((publication_dt >= publication_dt_min) if bool(publication_dt_min) else True)
                & ((publication_dt <  publication_dt_max) if bool(publication_dt_max) else True)
                ]

    # Checks
    assert dg.shape[0] > 0

    return dg
68 |
69 |
# Columns kept (and their order) in the outages dataframe
# returned by load; all other columns are discarded.
col_order = [
             global_var.outage_begin_dt_utc,
             global_var.outage_end_dt_utc,
             global_var.unit_name,
             global_var.capacity_available_mw,
             global_var.capacity_nominal_mw,
             global_var.producer_name,
             global_var.geography_map_code,
             global_var.production_source,
             global_var.publication_creation_dt_utc,
             global_var.outage_type,
             global_var.outage_cause,
             global_var.outage_status,
             global_var.file_name,
             ]
85 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load outages data from RTE.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw outages data provided by RTE
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
# Folder containing the raw RTE outages files
folder_raw = os.path.join(global_var.path_public_data,
                          '24_RTE',
                          'DonneesIndisponibilitesProduction',
                          )
# Path template of the transformed dataframes,
# to be formatted with the map_code and the file name
fpath_tmp = os.path.join(global_var.path_transformed,
                         'RTE',
                         'DonneesIndisponibilitesProduction',
                         'DonneesIndisponibilitesProduction_{map_code}_{file}',
                         )
21 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by RTE.
4 |
5 | """
6 |
7 | from .capacity import *
8 | from .columns import *
9 | from .eic_code import *
10 | from .producer_name import *
11 | from .outage_status import *
12 | from .outage_type import *
13 | from .production_source import *
14 | from .unit_type import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/capacity.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Capacities of the production assets in the files provided by RTE
4 | when it is not detailed.
5 |
6 | """
7 |
8 |
# Nameplate capacities of the production assets whose capacity is not
# detailed in the RTE files (presumably in MW, like the capacity columns
# elsewhere in the package — TODO confirm).
# The '# ?' marks flag values that have not been confirmed.
capacity = {
            'BEAUCAIRE 5'  : 35, # ?
            'BEAUCHASTEL 1': 32, # ?
            'BEAUCHASTEL 3': 32, # ?
            'BOLLENE 1'    : 59, # ?
            'BOLLENE 5'    : 58, # ?
            'CHATEAUNEUF DU RHONE 4': 46, # ?
            'CHATEAUNEUF DU RHONE 5': 46, # ?
            'GENISSIAT 6'  : 70, # ?
            'SEYSSEL 1'    : 15, # ?
            }
20 |
21 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the columns
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | #
9 | from ..... import global_var
10 |
11 |
# Raw RTE column name -> user defined column name.
# The local-time columns (dt_local) are presumably converted to UTC
# later in the loading pipeline — TODO confirm.
columns = {"Nom du producteur"                 : global_var.producer_name,
           "Début Période"                     : global_var.outage_period_begin_dt_local,
           "Fin Période"                       : global_var.outage_period_end_dt_local,
           "Puissance disponible restante"     : global_var.capacity_available_mw,
           "Début indispo"                     : global_var.outage_begin_dt_local,
           "Cause"                             : global_var.outage_cause,
           "Fin Indispo"                       : global_var.outage_end_dt_local,
           "Statut"                            : global_var.outage_status,
           "Type d'indisponibilité"            : global_var.outage_type,
           "Filière"                           : global_var.production_source,
           "Création"                          : global_var.publication_creation_dt_local,
           "Mise à jour"                       : global_var.publication_dt_local,
           "ID Indisponibilité de production"  : global_var.publication_id,
           "Version"                           : global_var.publication_version,
           "Nom de l'unité"                    : global_var.unit_name,
           "Puissance nominale"                : global_var.capacity_nominal_mw,
           "Type de l'unité de production"     : global_var.unit_type,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/eic_code.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Connections between the EIC codes of the production assets
4 | used by RTE and the user defined names.
5 |
6 | See for more details
7 | https://www.entsoe.eu/data/energy-identification-codes-eic/eic-approved-codes/
8 |
9 | """
10 |
# EIC code -> RTE unit name.
# NOTE(review): 'EMILE HUCHET 7' and 'EMILE HUCHET 8' each appear under
# two different EIC codes — presumably the codes changed over time;
# confirm against the ENTSO-E EIC registry.
eic_code = {
            '17W100P100P0028N': 'ARAMON 1',
            '17W100P100P0029L': 'ARAMON 2',
            '17W100P100P0034S': 'CORDEMAIS 2',
            '17W100P100P0035Q': 'CORDEMAIS 3',
            '17W100P100P0042T': 'PORCHEVILLE B 1',
            '17W100P100P0043R': 'PORCHEVILLE B 2',
            '17W100P100P0044P': 'PORCHEVILLE B 3',
            '17W100P100P0045N': 'PORCHEVILLE B 4',
            '17W100P100P00105': 'EMILE HUCHET 7',
            '17W100P100P00113': 'EMILE HUCHET 8',
            '17W100P100P0336C': 'SPEM',
            '17W100P100P0344D': 'EMILE HUCHET 7',
            '17W100P100P0345B': 'EMILE HUCHET 8',
            '17W100P100P03582': 'BATHIE 3',
            '17W100P100P0361D': 'BATHIE 6',
            '17W100P100P20746': 'COCHE 5',
            '17W100P100P1334B': 'BEAUCAIRE 5',
            '17W100P100P13367': 'BEAUCHASTEL 1',
            '17W100P100P13383': 'BEAUCHASTEL 3',
            '17W100P100P1268Z': 'BOLLENE 1',
            '17W100P100P12727': 'BOLLENE 5',
            '17W100P100P1491W': 'CHATEAUNEUF DU RHONE 4',
            '17W100P100P1493S': 'CHATEAUNEUF DU RHONE 5',
            '17W100P100P1600G': 'GENISSIAT 6',
            '17W100P100P1932S': 'SEYSSEL 1',
            }
38 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/outage_status.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the statutes of the outages
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
10 |
# RTE outage status -> user defined status.
# The key "nan" presumably corresponds to missing statuses read as the
# string 'nan' — TODO confirm against the loader.
outage_status = {"Annulée"  : global_var.outage_status_cancelled,
                 "Terminée" : global_var.outage_status_finished,
                 "nan"      : global_var.outage_status_nan,
                 }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/outage_type.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the types of outages
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
10 |
# RTE outage type -> user defined type
outage_type = {"Indisponibilité fortuite"  : global_var.outage_type_fortuitous,
               "Indisponibilité planifiée" : global_var.outage_type_planned,
               }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/producer_name.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the operating companies
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | import numpy as np
9 | #
10 | from ..... import global_var
11 |
12 |
# RTE producer name -> user defined producer name.
# np.nan is used as a key to catch rows with a missing producer name.
# NOTE(review): the trailing space in 'TOTAL RAFFINAGE FRANCE ' is kept
# as is — presumably it matches the raw files; confirm against the data.
producer_name = {'EDF'                     : global_var.producer_name_edf,
                 'ENGIE'                   : global_var.producer_name_engie,
                 'UNIPER'                  : global_var.producer_name_eon,
                 'GDF'                     : global_var.producer_name_engie,
                 'PSS POWER'               : global_var.producer_name_pss,
                 'GAZEL ENERGIE'           : global_var.producer_name_gazel,
                 'CELEST POWER'            : global_var.producer_name_total,
                 'ALPIQ'                   : global_var.producer_name_alpiq,
                 'TOTAL RAFFINAGE FRANCE ' : global_var.producer_name_total,
                 'DIRECT ENERGIE'          : global_var.producer_name_total,
                 'TOTAL DIRECT ENERGIE'    : global_var.producer_name_total,
                 np.nan                    : global_var.producer_name_unknown,
                 }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the energy production sources
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
10 |
# RTE production source -> user defined production source.
# The key 'nan' presumably corresponds to missing values read as the
# string 'nan' — TODO confirm against the loader.
production_source = {'Autre'                              : global_var.production_source_other,
                     'Biomasse'                           : global_var.production_source_biomass,
                     'Charbon'                            : global_var.production_source_fossil_coal,
                     'Gaz'                                : global_var.production_source_fossil_gas,
                     'Fioul'                              : global_var.production_source_fossil_oil,
                     'Hydraulique STEP'                   : global_var.production_source_hydro_pumped_storage,
                     'Hydraulique lacs'                   : global_var.production_source_hydro_reservoir,
                     "Hydraulique fil de l'eau / éclusée" : global_var.production_source_hydro_run_of_river,
                     'Marin'                              : global_var.production_source_marine,
                     'nan'                                : global_var.production_source_unknown,
                     'Nucléaire'                          : global_var.production_source_nuclear,
                     }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/load/rte/transcode/unit_type.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Correspondances between the names of the types of outages
4 | used by RTE and the user defined names.
5 |
6 | """
7 | #
8 | from ..... import global_var
9 |
10 |
# RTE production unit type -> user defined type
unit_type = {"Groupe"   : global_var.unit_type_group,
             "Centrale" : global_var.unit_type_plant,
             }
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to plot outages data.
4 |
5 | """
6 |
7 |
8 | from .animated_availability import *
9 | from .evolution_mean_availability import *
10 | from .expected_program import *
11 | from .incremental_programs import *
12 | from .regression_delays import *
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to draw the subplots with outages data.
4 |
5 | """
6 |
7 |
8 | from .evolution_mean_availability import *
9 | from .expected_program import *
10 | from .incremental_programs import *
11 | from .nameplate_capacity import *
12 | from .regression_delays import *
13 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/evolution_mean_availability.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from .... import global_tools, global_var
4 |
5 |
def evolution_mean_availability(ax,
                                df,
                                unit = None,
                                color = None,
                                step = False,
                                diff_init = False,
                                ):
    """
    Draws in a subplot the evolution of the mean unavailability
    of a set of production assets.

    :param ax: The ax to fill
    :param df: The expected availability during the delivery
    :param unit: The power unit for the plot (MW or GW)
    :param color: The color to plot the series
    :param step: Boolean to interpolate linearly or piecewise constantly
    :param diff_init: Bool to plot relative differences
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.Series
    :type unit: string
    :type color: string
    :type step: bool
    :type diff_init: bool
    :return: None
    :rtype: None
    :raises ValueError: if unit is neither the MW nor the GW identifier
    """

    if diff_init:
        # Plot the differences with the first snapshot
        df = df - df.iloc[0]

    X, Y = global_tools.piecewise_constant_interpolators(df.index,
                                                         df.values,
                                                         )

    # Rescale the values according to the chosen unit
    # (the loaders work in MW)
    if unit == global_var.capacity_unavailable_gw:
        Y /= 1e3
    elif unit != global_var.capacity_unavailable_mw:
        raise ValueError('Incorrect unit : {0}'.format(unit))

    plot_color = global_var.colors[9] if color is None else color
    if step:
        ax.step(X,
                Y,
                where = 'post',
                label = '{0}mean {1}'.format(r'$\Delta$ ' if diff_init else '',
                                             unit,
                                             ),
                color = plot_color,
                )
    else:
        ax.plot(X,
                Y,
                label = 'mean {0}'.format(unit),
                color = plot_color,
                )
70 |
71 |
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/expected_program.py:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | from .... import global_tools
5 |
6 |
def expected_program(ax,
                     ds,
                     **kwargs,
                     ):
    """
    Draws in a subplot the expected availability program
    of a set of production assets.

    :param ax: The ax to fill
    :param ds: The expected availability program
    :param kwargs: Additional arguments for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type ds: pd.Series
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    # Piecewise-constant interpolation of the program
    X, Y = global_tools.piecewise_constant_interpolators(ds.index,
                                                         ds.values,
                                                         )

    # Plot the program as a dash-dotted line with a marker
    # at every interpolation node
    ax.plot(X,
            Y,
            markevery  = 1,
            markersize = 10,
            ls         = '-.',
            **kwargs,
            )
38 |
39 |
40 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/incremental_programs.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import itertools
4 | #
5 | from .... import global_tools, global_var
6 |
7 |
8 |
def incremental_programs(ax,
                         df_programs,
                         diff_init = False,
                         smoother = None,
                         vline_publication_dt = False,
                         ):
    """
    Draws in a subplot the successive expected availability programs
    of a set of production assets.

    :param ax: The ax to fill
    :param df_programs: The expected availability programs
                        (one column per publication date)
    :param diff_init: Boolean to plot relative differences
                      with the initial date
    :param smoother: Boolean to draw oblique instead of vertical steps
    :param vline_publication_dt: Boolean to draw a vertical line
                                 at the first publication date
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df_programs: pd.DataFrame
    :type diff_init: bool
    :type smoother: bool
    :type vline_publication_dt: bool
    :return: None
    :rtype: None
    """

    ### Reference program
    if diff_init:
        # Center every program on the first published one
        df_programs = df_programs - df_programs.iloc[:,[0]].values
        first_publication = df_programs.columns[0]
        ds_init = df_programs.loc[:,first_publication]
        X_init, Y_init = global_tools.piecewise_constant_interpolators(ds_init.index,
                                                                       ds_init,
                                                                       smoother = smoother,
                                                                       )
        ax.plot(X_init,
                Y_init,
                label = global_tools.format_latex('init - {0}'.format(first_publication.strftime(format = global_var.dt_formatter_tz))),
                color = 'k',
                ls = ':',
                )

    ### Successive programs
    nb_skipped = int(diff_init)
    for ii, (dd, ds_program) in enumerate(df_programs.items()):
        if ii < nb_skipped:
            # Already drawn above as the reference program
            continue
        X, Y = global_tools.piecewise_constant_interpolators(ds_program.index,
                                                             ds_program,
                                                             smoother = smoother,
                                                             )
        ax.plot(X,
                Y,
                label = global_tools.format_latex(dd.strftime(format = global_var.dt_formatter_tz)),
                color = global_var.colors[ii],
                )

    ### Nameplate capacity (meaningless for differences)
    if not diff_init:
        x_bounds = [df_programs.index.min(), df_programs.index.max()]
        ax.plot(x_bounds,
                [df_programs.values.max()]*2,
                ls = ':',
                linewidth = 0.5,
                color = 'k',
                label = 'nameplate capacity',
                )

    ### Vertical line at the first publication date
    if vline_publication_dt:
        first_publication = df_programs.columns[0]
        ax.plot([first_publication, first_publication],
                ax.get_ylim(),
                label = global_tools.format_latex(first_publication.strftime(format = global_var.dt_formatter_tz)),
                color = 'k',
                ls = '--',
                )
80 |
81 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/nameplate_capacity.py:
--------------------------------------------------------------------------------
1 |
2 |
def nameplate_capacity(ax,
                       df,
                       ):
    """
    Draws in a subplot the nameplate capacity
    of a set of production assets
    as a horizontal line.

    :param ax: The ax to fill
    :param df: The nameplate capacity at different dates
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.Series
    :return: None
    :rtype: None
    """

    # Horizontal line at the maximal observed capacity,
    # spanning the whole time window of the data
    x_bounds = [df.index.min(), df.index.max()]
    y_level = df.values.max()
    ax.plot(x_bounds,
            [y_level, y_level],
            ls = ':',
            linewidth = 0.5,
            color = 'k',
            label = 'nameplate capacity',
            )
26 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/plot/subplot/regression_delays.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | from .... import global_var
4 |
5 |
def regression_delays(ax,
                      df,
                      ):
    """
    Plots the announced and finally observed lengths of the outages
    of a set of units with a regression line in a subplot.

    Permanent plant shutdowns are a problem for the regression.

    :param ax: The ax to fill
    :param df: The outages dataframe
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :return: None
    :rtype: None
    """

    grouped = df.reset_index().groupby(global_var.publication_id)

    # First announced duration of each outage, in hours
    X = ( grouped[global_var.outage_end_dt_utc].head(1)
          - grouped[global_var.outage_begin_dt_utc].head(1)
        ).reset_index(drop = True).dt.total_seconds()/3600
    # Last published duration of each outage, in hours
    Y = ( grouped[global_var.outage_end_dt_utc].tail(1)
          - grouped[global_var.outage_begin_dt_utc].tail(1)
        ).reset_index(drop = True).dt.total_seconds()/3600

    ax.scatter(X,
               Y,
               )

    # Common bounds for the bisector and the regression line
    lower = min(min(X), min(Y))
    upper = max(max(X), max(Y))

    ax.plot([lower, upper],
            [lower, upper],
            color = 'k',
            ls = '--',
            label = 'first bisector'
            )

    ### Least-squares fit through the origin : a = <X,Y>/<X,X>
    a = (1/(X.T@X))*(X.T@Y)
    ax.plot([lower, upper],
            [a*lower, a*upper],
            color = 'g',
            ls = ':',
            label = 'linear fit a = {0:.2f}'.format(a),
            )
51 |
52 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/tools/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module containing a set of tools to deal with the outages data.
4 |
5 | """
6 |
7 |
8 | from .compute_all_programs import *
9 | from .compute_missing_energy import *
10 | from .cross_section_view import *
11 | from .extrapolate_programs import *
12 | from .sum_programs import *
13 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/tools/cross_section_view.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 |
def cross_section_view(df_program,
                       tolerated_delay = pd.Timedelta(minutes = 0),
                       ):
    """
    Computes the expected production at all times t given
    the information available at times t + tolerated_delay.

    :param df_program: The expected availability programs
                       (publication dates as index,
                       production steps as columns)
    :param tolerated_delay: The tolerated delay for publication,
                            or None to use the last publication
    :type df_program: pd.DataFrame
    :type tolerated_delay: pd.Timedelta
    :return: The expected availability given the publications
    :rtype: pd.DataFrame
    """

    # With tolerated_delay = None the last publication is selected in the
    # loop below, so the index shift reduces to zero.  (The previous code
    # added None to a DatetimeIndex and crashed in that case.)
    delay = (pd.Timedelta(minutes = 0)
             if tolerated_delay is None
             else
             tolerated_delay
             )
    publications_dt = df_program.index
    publications_minus_delay_dt = [d - delay
                                   for d in publications_dt
                                   ]

    # Time steps where the viewed program can change :
    # production steps and (shifted) publication dates
    new_production_steps = sorted(set(df_program.columns).union(publications_minus_delay_dt))
    new_production_steps = pd.Index(list(filter(lambda x : x >= df_program.columns.min(),
                                                new_production_steps,
                                                )),
                                    name = df_program.columns.name,
                                    )
    index_delays = pd.Index(['last publications'
                             if tolerated_delay is None
                             else
                             'expected_program (tolerated_delay = {0} min)'.format(int(tolerated_delay.total_seconds()/60))
                             ]
                            )

    viewed_series = pd.DataFrame(data = 0,
                                 index = new_production_steps,
                                 columns = index_delays,
                                 )

    # Align the publications on the production steps shifted by the delay
    # and the production steps on the new grid
    df_program = df_program.reindex(index = viewed_series.index + delay,
                                    method = 'ffill',
                                    )
    df_program = df_program.reindex(columns = viewed_series.index,
                                    method = 'ffill',
                                    )

    for timestamp in viewed_series.index:
        if tolerated_delay is None:
            # Information of the very last publication
            # (positional access — a label lookup with -1 would raise)
            viewed_series.loc[timestamp] = df_program.iloc[-1].loc[timestamp]
        else:
            # Information available at timestamp + tolerated_delay
            viewed_series.loc[timestamp] = df_program.loc[timestamp + delay, timestamp]

    return viewed_series
76 |
77 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/tools/extrapolate_programs.py:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | import pandas as pd
5 |
6 |
def extrapolate_programs(dikt_programs,
                         dates_to_extrapolate,
                         production_dt_min = None,
                         production_dt_max = None,
                         ):
    """
    Computes the total expected program from the availability programs
    at the different publication dates.

    :param dikt_programs: The expected availability programs
                          (one DataFrame per unit, publication dates
                          as index, production steps as columns)
    :param dates_to_extrapolate: The dates to compute
                                 the availability program
    :param production_dt_min: The left bound of the programs
    :param production_dt_max: The right bound of the programs
    :type dikt_programs: dict
    :type dates_to_extrapolate: list of pd.Timestamps
    :type production_dt_min: pd.Timestamp
    :type production_dt_max: pd.Timestamp
    :return: The expected availability at the dates to extrapolate
    :rtype: pd.DataFrame
    """

    # All production steps over all units
    all_production_steps = sorted(set([e
                                       for unit_name in dikt_programs
                                       for e in dikt_programs[unit_name].columns
                                       ]))
    # Keep the steps whose span intersects [production_dt_min, production_dt_max);
    # None bounds are ignored
    selected_production_steps = [e
                                 for ii, e in enumerate(all_production_steps)
                                 if ( ( ii+1 == len(all_production_steps)
                                        or production_dt_min is None
                                        or all_production_steps[ii+1] >= production_dt_min
                                        )
                                      and ( production_dt_max is None
                                            or all_production_steps[ii] < production_dt_max
                                            )
                                      )
                                 ]

    def _extract_program(programs, dd):
        # Last program published at or before dd, aligned on the selected
        # production steps.  get_indexer replaces get_loc(dd, 'ffill'),
        # whose positional method argument was removed in pandas 2.0.
        pos = programs.index.get_indexer([dd], method = 'ffill')[0]
        if pos == -1:
            # No publication at or before dd (get_loc used to raise KeyError)
            raise KeyError(dd)
        return programs.iloc[pos].reindex(index = selected_production_steps,
                                          method = 'ffill',
                                          )

    dikt_extrapolated_programs = {}
    for dd in dates_to_extrapolate:
        program_plants = pd.DataFrame({plant_name : _extract_program(programs, dd)
                                       for plant_name, programs in dikt_programs.items()
                                       })
        # Fill the steps before a unit's first known value from the next one
        # (bfill() replaces the deprecated fillna(method = 'bfill'))
        program_plants = program_plants.bfill(axis = 0)
        dikt_extrapolated_programs[dd] = program_plants.sum(axis = 1)

    return pd.DataFrame(dikt_extrapolated_programs)
58 |
59 |
--------------------------------------------------------------------------------
/pub_data_visualization/outages/tools/sum_programs.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import pandas as pd
4 |
5 |
def sum_programs(dikt_programs,
                 production_dt_min = None,
                 production_dt_max = None,
                 publication_dt_min = None,
                 publication_dt_max = None,
                 ):
    """
    Sums the availability programs of a set of production assets.

    :param dikt_programs: The expected availability programs
                          (one DataFrame per unit, publication dates
                          as index, production steps as columns)
    :param production_dt_min: The left bound of the programs
    :param production_dt_max: The right bound of the programs
    :param publication_dt_min: The left bound of the publications
    :param publication_dt_max: The right bound of the publications
    :type dikt_programs: dict
    :type production_dt_min: pd.Timestamp
    :type production_dt_max: pd.Timestamp
    :type publication_dt_min: pd.Timestamp
    :type publication_dt_max: pd.Timestamp
    :return: The total expected availability
    :rtype: pd.DataFrame
    """

    def _select(steps, dt_min, dt_max):
        # Keeps the steps whose span intersects [dt_min, dt_max).
        # None bounds are ignored — the previous code compared
        # Timestamps with the default None and raised a TypeError
        # (extrapolate_programs already guarded against None).
        return [e
                for ii, e in enumerate(steps)
                if ( ( ii+1 == len(steps)
                       or dt_min is None
                       or steps[ii+1] >= dt_min
                       )
                     and ( dt_max is None
                           or steps[ii] < dt_max
                           )
                     )
                ]

    # All production/publication steps over all units
    all_production_steps = sorted(set([e
                                       for unit_name in dikt_programs
                                       for e in dikt_programs[unit_name].columns
                                       ]))
    all_publication_steps = sorted(set([e
                                        for unit_name in dikt_programs
                                        for e in dikt_programs[unit_name].index
                                        ]))
    selected_production_steps = _select(all_production_steps,
                                        production_dt_min,
                                        production_dt_max,
                                        )
    selected_publication_steps = _select(all_publication_steps,
                                         publication_dt_min,
                                         publication_dt_max,
                                         )
    # Align every unit on the common publication/production steps
    dikt_programs = {k:v.reindex(index = selected_publication_steps,
                                 method = 'ffill',
                                 ).reindex(columns = selected_production_steps,
                                           method = 'ffill',
                                           )
                     for k, v in dikt_programs.items()
                     }

    # Sum over the units at each publication step
    dikt = {}
    for dd in selected_publication_steps:
        dikt[dd] = pd.DataFrame({plant_name : dikt_programs[plant_name].loc[dd]
                                 for plant_name in dikt_programs
                                 }).sum(axis = 1)
    dm = pd.DataFrame(dikt).T

    return dm
--------------------------------------------------------------------------------
/pub_data_visualization/production/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load and plot production data.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 | from . import plot
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load production data.
4 |
5 | """
6 |
7 |
8 |
9 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/eco2mix/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load production data provided by eCO2mix.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/eco2mix/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw production data provided by eCO2mix
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
# Template path of the transformed eCO2mix production csv files.
# The two positional placeholders are filled in by the eCO2mix loader
# (presumably the map code and a date tag — TODO confirm against the loader).
fpath_tmp = os.path.join(global_var.path_transformed,
                         'eCO2mix',
                         'production_{0}_{1}.csv',
                         )
16 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/eco2mix/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by eCO2mix.
4 |
5 | """
6 |
7 | from .columns import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/eco2mix/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the names used by eCO2mix
4 | and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping from the raw eCO2mix column names (in French)
# to the user defined names.
# NOTE(review): 'Consommation' and the two 'Prévision' columns are mapped
# to load_* names although this module lives in the production package
# — confirm this is intended.
columns = {'Périmètre'     : global_var.geography_area_name,
           'Nature'        : global_var.file_info,
           'Date'          : global_var.production_date_local,
           'Heures'        : global_var.production_time_local,
           'Consommation'  : global_var.load_nature_observation,
           'Prévision J-1' : global_var.load_nature_forecast_day1,
           'Prévision J'   : global_var.load_nature_forecast_day0,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/entsoe/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load production data provided by ENTSO-E.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/entsoe/paths.py:
--------------------------------------------------------------------------------
1 | """
2 | Folders where the raw production data provided by ENTSO-E
3 | and the transformed dataframes are saved.
4 |
5 | """
6 |
7 | import os
8 | #
9 | from .... import global_var
10 |
# Folder containing the raw ENTSO-E per-unit generation files
folder_raw = os.path.join(global_var.path_public_data,
                          '11_ENTSOE',
                          'ActualGenerationOutputPerGenerationUnit_16.1.A',
                          )
# Template path of the transformed dataframe;
# {map_code} is filled in by the loader
fpath_tmp = os.path.join(global_var.path_transformed,
                         'ENTSOE',
                         'ActualGenerationOutputPerGenerationUnit_16.1.A',
                         'ActualGenerationOutputPerGenerationUnit_16.1.A_{map_code}',
                         )
20 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/entsoe/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by ENTSO-E.
4 |
5 | """
6 |
7 | from .columns import *
8 | from .map_code import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/entsoe/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Connections between the names used by ENTSO-E
4 | and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping from the raw ENTSO-E column names
# to the user defined names.
columns = {'ActualConsumption'       : global_var.production_negative_part_mw,
           'ActualGenerationOutput'  : global_var.production_positive_part_mw,
           'AreaCode'                : global_var.geography_area_code,
           'AreaName'                : global_var.geography_area_name,
           'AreaTypeCode'            : global_var.geography_area_type_code,
           'DateTime'                : global_var.production_dt_utc,
           'Day'                     : global_var.production_day_utc,
           'GenerationUnitEIC'       : global_var.unit_eic,
           'InstalledGenCapacity'    : global_var.capacity_nominal_mw,
           'MapCode'                 : global_var.geography_map_code,
           'Month'                   : global_var.production_month_utc,
           'PowerSystemResourceName' : global_var.unit_name,
           'ProductionType'          : global_var.production_source,
           'ResolutionCode'          : global_var.time_resolution_code,
           'UpdateTime'              : global_var.publication_dt_utc,
           'Year'                    : global_var.production_year_utc,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/entsoe/transcode/map_code.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the map_code
4 | used by ENTSO-E and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping from the ENTSO-E map codes to the user defined names.
# NOTE(review): 'LT' is the ISO code for Lithuania but is mapped to
# geography_map_code_latvia ('LV' would be Latvia) — confirm intended.
dikt = {'AT': global_var.geography_map_code_austria,
        'BE': global_var.geography_map_code_belgium,
        'BG': global_var.geography_map_code_bulgaria,
        'CH': global_var.geography_map_code_swiss,
        'CZ': global_var.geography_map_code_czech,
        'DK': global_var.geography_map_code_denmark,
        'ES': global_var.geography_map_code_spain,
        'FI': global_var.geography_map_code_finland,
        'FR': global_var.geography_map_code_france,
        'GB': global_var.geography_map_code_great_britain,
        'HU': global_var.geography_map_code_hungary,
        'LT': global_var.geography_map_code_latvia,
        'NL': global_var.geography_map_code_netherlands,
        'PL': global_var.geography_map_code_poland,
        'PT': global_var.geography_map_code_portugal,
        'RO': global_var.geography_map_code_romania,
        'SK': global_var.geography_map_code_slovakia,
        }

def map_code(ss):
    """
    Translates an ENTSO-E map code into the user defined name.

    :param ss: The ENTSO-E map code
    :type ss: string
    :return: The user defined name, or ss unchanged if unknown
    :rtype: string
    """
    ans = dikt.get(ss, ss)
    return ans
32 |
33 |
34 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import eco2mix, entsoe, rte
6 |
7 |
def load(source = None,
         map_code = None,
         date_min = None,
         date_max = None,
         ):
    """
    Calls the appropriate loader of the production data
    from the given data source,
    in a given delivery zone,
    and between two dates.

    :param source: The data source
    :param map_code: The bidding zone
    :param date_min: The left bound
    :param date_max: The right bound
    :type source: string
    :type map_code: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The selected production data
    :rtype: pd.DataFrame
    """

    ### Dispatch to the source-specific loader
    if source == global_var.data_source_production_eco2mix:
        df = eco2mix.load(map_code = map_code,
                          date_min = date_min,
                          date_max = date_max,
                          )
    elif source == global_var.data_source_production_rte:
        df = rte.load()
    elif source == global_var.data_source_production_entsoe:
        df = entsoe.load(map_code = map_code)
    else:
        raise ValueError

    ### Every loader must return exactly this set of columns
    expected_columns = {global_var.commodity,
                        global_var.geography_map_code,
                        global_var.production_dt_utc,
                        global_var.production_nature,
                        global_var.production_power_mw,
                        global_var.production_source,
                        global_var.unit_name,
                        }
    assert set(df.columns) == expected_columns

    ### Sort columns alphabetically and index by production datetime
    df = df.reindex(sorted(df.columns), axis = 1)
    df = df.set_index(global_var.production_dt_utc).sort_index()
    df[global_var.production_power_gw] = df[global_var.production_power_mw]/1e3

    ### Restrict to the required time window
    mask = pd.Series(True, index = df.index)
    if date_min:
        mask &= (df.index >= date_min)
    if date_max:
        mask &= (df.index < date_max)
    df = df.loc[mask]

    ### Sanity checks : non empty, no duplicated (datetime, unit) pair
    assert df.shape[0] > 0
    assert not df.reset_index()[[global_var.production_dt_utc,global_var.unit_name]].duplicated().sum()

    return df
72 |
73 |
74 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/rte/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load production data provided by RTE.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/rte/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw production data provided by RTE
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
12 |
# Folder containing the raw RTE per-group production files
folder_production_rte_raw = os.path.join(global_var.path_public_data,
                                         '24_RTE',
                                         'ProductionGroupe',
                                         )
# Folder where the transformed dataframes are saved
fpath_production_rte_tmp = os.path.join(global_var.path_transformed,
                                        'RTE',
                                        'ProductionGroupe',
                                        )
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/rte/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to format the date strings used by RTE.
4 |
5 | """
6 |
7 | from .format_str_date import *
8 | from .production_source import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/rte/transcode/format_str_date.py:
--------------------------------------------------------------------------------
1 |
2 | import re
3 | import datetime as dt
4 | import pandas as pd
5 | #
6 | from ..... import global_tools
7 |
8 |
def format_str_date(str_dates):
    """
    Parses the dates provided by RTE in the production data frames.

    The expected format is "dd/mm/yyyy HH:00-HH:00", the two hours
    delimiting a one-hour window (the second hour may be 24).

    :param str_dates: The dates with the format chosen by RTE
    :type str_dates: string
    :return: The beginning of the window as a UTC timestamp,
             or None if it does not exist in the CET timezone
    :rtype: pd.Timestamp
    """

    # Parse
    dt_match = re.compile(r"^(\d{2})/(\d{2})/(\d{4}) (\d{2}):(\d{2})-(\d{2}):(\d{2})$").match(str_dates)
    assert dt_match, str_dates
    dt_begin = dt.datetime(year   = int(dt_match.group(3)),
                           month  = int(dt_match.group(2)),
                           day    = int(dt_match.group(1)),
                           hour   = int(dt_match.group(4)),
                           minute = int(dt_match.group(5)),
                           )
    # Check that the window lasts exactly one hour and is hour-aligned.
    # (The end of the window is fully determined by these checks, so it
    # is no longer constructed : dt_end/dt_mean were dead code.)
    assert (int(dt_match.group(6)) - int(dt_match.group(4))) % 24 == 1
    assert int(dt_match.group(5)) == 0
    assert int(dt_match.group(7)) == 0
    dt_begin = pd.Timestamp(dt_begin)
    # Format : keep only instants that exist in CET (skip the DST gap)
    if global_tools.dt_exists_in_tz(dt_begin, 'CET'):
        dt_begin = pd.to_datetime(dt_begin).tz_localize('CET', ambiguous = True)
        dt_begin = dt_begin.tz_convert('UTC')
        return dt_begin
    else:
        return None
48 |
--------------------------------------------------------------------------------
/pub_data_visualization/production/load/rte/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Matching between the names of the energy production sources
4 | used by RTE and the user defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
10 |
# Mapping from the French production source names used by RTE
# to the user defined names.  Several raw labels map to the same
# user defined source (spelling variants across RTE files).
production_source = {'Autre'                               : global_var.production_source_other,
                     'Biomasse'                            : global_var.production_source_biomass,
                     'Charbon'                             : global_var.production_source_fossil_coal,
                     'Fioul'                               : global_var.production_source_fossil_oil,
                     'Fioul et pointe'                     : global_var.production_source_fossil_oil,
                     'Gaz'                                 : global_var.production_source_fossil_gas,
                     'Hydraulique STEP'                    : global_var.production_source_hydro_pumped_storage,
                     "Hydraulique fil de l'eau / éclusée"  : global_var.production_source_hydro_run_of_river,
                     "Hydraulique fil et éclusée"          : global_var.production_source_hydro_run_of_river,
                     'Hydraulique lac'                     : global_var.production_source_hydro_reservoir,
                     'Hydraulique lacs'                    : global_var.production_source_hydro_reservoir,
                     'Nucléaire'                           : global_var.production_source_nuclear,
                     }
--------------------------------------------------------------------------------
/pub_data_visualization/production/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to plot production data.
4 |
5 | """
6 |
7 | from .power import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/plot/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to fill the subplots with production data.
4 |
5 | """
6 |
7 | from .power import *
--------------------------------------------------------------------------------
/pub_data_visualization/production/plot/subplot/power.py:
--------------------------------------------------------------------------------
1 |
2 | #
3 | from .... import global_var
4 | #
5 |
def power(ax,
          df,
          map_code = None,
          production_nature = None,
          production_unit = None,
          production_source = None,
          unit_name = None,
          **kwargs,
          ):
    """
    Draws in a subplot the production data.

    :param ax: The ax to fill
    :param df: The production data
    :param map_code: The delivery zone
    :param production_nature: The nature of the data to plot
    :param production_unit: The column of df to plot
    :param production_source: The energy source of the production
    :param unit_name: The name of the production asset
    :param kwargs: Additional parameters for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :type map_code: string
    :type production_nature: string
    :type production_unit: string
    :type production_source: string
    :type unit_name: string
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    ### Keep only the selected rows
    selectors = [(map_code,          global_var.geography_map_code),
                 (production_source, global_var.production_source),
                 (unit_name,         global_var.unit_name),
                 (production_nature, global_var.production_nature),
                 ]
    for value, column in selectors:
        if value:
            df = df.loc[df[column] == value]

    ### Aggregate over the remaining assets at each timestamp
    dg = df.groupby(df.index)[production_unit].sum()
    dg = dg.dropna()
    assert not dg.empty

    ax.plot(dg.index,
            dg,
            **kwargs,
            )
53 |
54 |
55 |
56 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the capacities.
4 |
5 | """
6 |
7 | from . import unit
8 | from . import aggregated
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
Module to load the data about the aggregated capacities.
4 |
5 | """
6 |
7 | from .load import *
8 |
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the aggregated capacities.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the aggregated capacities provided by ENTSO-E.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/load.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import pandas as pd
4 | import os
5 | #
6 | from ..... import global_var
7 | from . import paths, transcode
8 |
9 |
10 |
def load(map_code = None):
    """
    Loads the aggregated capacities provided by ENTSO-E.

    Tries to read a previously transformed csv file first and,
    on any failure, rebuilds it from the raw ENTSO-E files
    and saves it for the next call.

    :param map_code: The bidding zone
    :type map_code: string
    :return: The aggregated capacities
    :rtype: pd.DataFrame
    """
    df_path = paths.fpath_tmp.format(map_code = map_code) + '.csv'
    try:
        # Fast path : the transformed file already exists
        print('Load capacity/entsoe - ', end = '')
        df = pd.read_csv(df_path,
                         header = [0],
                         sep = ';',
                         )
        print('Loaded')
    except Exception as e:
        # Best-effort cache : any failure (missing file, bad format, ...)
        # triggers a rebuild from the raw files
        print('fail')
        print(e)
        dikt_capacity = {}
        list_files = sorted([fname
                             for fname in os.listdir(paths.folder_raw)
                             if os.path.splitext(fname)[1] == '.csv'
                             ])
        assert len(list_files) > 0, ('Files not found.\n'
                                     'They can be downloaded with the ENTSOE SFTP share\n'
                                     'and stored in\n'
                                     '{0}'.format(paths.folder_raw)
                                     )
        for ii, fname in enumerate(list_files):
            # Progress indicator rewritten on a single line
            print('\r{0:3}/{1:3} - {2}'.format(ii+1,
                                               len(list_files),
                                               fname,
                                               ),
                  end = '',
                  )
            df = pd.read_csv(os.path.join(paths.folder_raw,
                                          fname,
                                          ),
                             encoding = 'UTF-8',
                             sep = '\t',
                             decimal = '.',
                             )
            # Rename the raw columns with the user defined names
            df = df.rename(transcode.columns,
                           axis = 1,
                           )
            # Keep only the requested bidding zone
            df = df[df[global_var.geography_map_code] == map_code]
            dikt_capacity[fname] = df

        df = pd.concat([dikt_capacity[key]
                        for key in dikt_capacity.keys()
                        ],
                       axis = 0,
                       )
        # Harmonize the production source names
        df.loc[:,global_var.production_source] = df[global_var.production_source].astype(str).replace(transcode.production_source)
        df[global_var.commodity] = global_var.commodity_electricity

        # Save
        print('Save')
        os.makedirs(os.path.dirname(df_path),
                    exist_ok = True,
                    )
        df.to_csv(df_path,
                  sep = ';',
                  index = False,
                  )
        print('done')
    return df
80 |
81 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Paths where the raw aggregated capacities data and the
4 | transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from ..... import global_var
11 |
12 |
13 |
# Folder containing the raw ENTSO-E aggregated-capacity files.
folder_raw = os.path.join(
    global_var.path_public_data,
    '11_ENTSOE',
    'InstalledGenerationCapacityAggregated',
)

# Template (one file per map_code) of the path of the transformed
# dataframe; the loader formats it and appends the '.csv' extension.
fpath_tmp = os.path.join(
    global_var.path_transformed,
    'ENTSOE',
    'InstalledGenerationCapacityAggregated',
    'InstalledGenerationCapacityAggregated_{map_code}',
)
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by ENTSO-E.
4 |
5 | """
6 |
7 | from .columns import *
8 | from .production_source import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the column names used by ENTSO-E
and the user-defined names.
5 |
6 | """
7 |
8 | from ...... import global_var
9 |
# Mapping from the raw ENTSO-E column names to the user-defined names
# (used with pd.DataFrame.rename in the entsoe capacity loader).
columns = {
    'AggregatedInstalledCapacity': global_var.capacity_nominal_mw,
    'AreaCode':                    global_var.geography_area_code,
    'AreaName':                    global_var.geography_area_name,
    'AreaTypeCode':                global_var.geography_area_type_code,
    'MapCode':                     global_var.geography_map_code,
    'DateTime':                    global_var.capacity_dt_utc,
    'Day':                         global_var.capacity_day_utc,
    'DeletedFlag':                 global_var.capacity_flag_deleted,
    'Month':                       global_var.capacity_month_utc,
    'ProductionType':              global_var.production_source,
    'ResolutionCode':              global_var.time_resolution_code,
    'UpdateTime':                  global_var.publication_dt_utc,
    'Year':                        global_var.capacity_year_utc,
}
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/entsoe/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondence between the names used by ENTSO-E for the production sources
and the user-defined names.
5 |
6 | """
7 |
8 | from ...... import global_var
9 |
# Mapping from the ENTSO-E 'ProductionType' labels to the user-defined
# production-source names.  Applied with .replace() on a string column in
# the entsoe loader, so labels absent from this dict pass through unchanged.
# NOTE(review): this lists only part of the ENTSO-E production types
# (e.g. 'Fossil Brown coal/Lignite', 'Geothermal', 'Waste' are absent) --
# confirm whether the missing types should be mapped as well.
production_source = {'Biomass'                         : global_var.production_source_biomass,
                     'Fossil Hard coal'                : global_var.production_source_fossil_coal,
                     'Fossil Gas'                      : global_var.production_source_fossil_gas,
                     'Fossil Oil'                      : global_var.production_source_fossil_oil,
                     'Hydro Pumped Storage'            : global_var.production_source_hydro_pumped_storage,
                     'Hydro Water Reservoir'           : global_var.production_source_hydro_reservoir,
                     'Hydro Run-of-river and poundage' : global_var.production_source_hydro_run_of_river,
                     'Marine'                          : global_var.production_source_marine,
                     'Nuclear'                         : global_var.production_source_nuclear,
                     'Other'                           : global_var.production_source_other,
                     'Solar'                           : global_var.production_source_solar,
                     'Wind Offshore'                   : global_var.production_source_wind_offshore,
                     'Wind Onshore'                    : global_var.production_source_wind_onshore,
                     }
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/load.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #
4 | from .... import global_var
5 | from . import entsoe, rte
6 |
7 |
8 |
def load(source = None,
         map_code = None,
         ):
    """
    Dispatches to the loader of the aggregated capacities
    matching the given data source.

    :param source: The data source
    :param map_code: The zone
    :type source: string
    :type map_code: string
    :return: The selected capacities
    :rtype: pd.DataFrame
    :raises ValueError: If the source is not recognized
    """
    # Guard-clause dispatch: one early return per known source.
    if source == global_var.data_source_capacity_entsoe:
        return entsoe.load(map_code = map_code)

    if source == global_var.data_source_capacity_rte:
        return rte.load(map_code = map_code)

    raise ValueError('Incorrect source={0} '.format(source))
34 |
35 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/rte/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the aggregated capacities provided by RTE.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/rte/load.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import pandas as pd
4 | import os
5 | #
6 | from ..... import global_var
7 | from . import paths, transcode
8 |
def load(map_code = None):
    """
    Loads the aggregated capacities provided by RTE.

    Reads the transformed dataframe from the local cache if it exists;
    otherwise parses the raw RTE files, concatenates them, and writes
    the cache before returning.

    :param map_code: The zone (only France is supported)
    :type map_code: string
    :return: The aggregated capacities
    :rtype: pd.DataFrame
    """
    # RTE data only covers the French zone.
    assert map_code == global_var.geography_map_code_france
    # Path of the cached, transformed dataframe ('.csv' appended to the template).
    df_path = paths.fpath_tmp.format(map_code = map_code) + '.csv'
    try:
        # Fast path: reuse the cached dataframe if present.
        print('Load capacity/rte - ', end = '')
        df = pd.read_csv(df_path,
                         header = [0],
                         sep = ';',
                         )
        # Restore the datetime dtype lost by the CSV round-trip.
        for col in [global_var.capacity_dt_local]:
            df.loc[:,col] = pd.to_datetime(df[col])
        print('Loaded')
    except Exception as e:
        # Cache miss (or unreadable cache): rebuild from the raw files.
        print('fail - has to read raw data')
        print(e)
        dikt_capacity = {}
        # The raw files carry a '.xls' extension but are read as
        # tab-separated text below -- presumably plain-text exports.
        list_files = sorted([fname
                             for fname in os.listdir(paths.folder_raw)
                             if os.path.splitext(fname)[1] == '.xls'
                             ])
        for ii, fname in enumerate(list_files):
            # Progress indicator (overwrites the same console line).
            print('\r{0:3}/{1:3} - {2}'.format(ii+1,
                                               len(list_files),
                                               fname,
                                               ),
                  end = '',
                  )
            # '*' marks missing values in the raw files.
            df = pd.read_csv(os.path.join(paths.folder_raw,
                                          fname,
                                          ),
                             sep = '\t',
                             encoding = 'latin-1',
                             na_values = ["*"],
                             skipinitialspace = True,
                             low_memory = False,
                             )
            # Single data column: the installed capacity in MW.
            df.columns = [global_var.capacity_mw]
            df = df.dropna(axis = 0, how = 'all')
            # NOTE(review): the raw table apparently uses the production-source
            # label as index, with a row labelled 'Type' holding the year --
            # confirm against a raw file.
            df[global_var.capacity_year_local] = int(df.loc['Type'].item())
            df[global_var.geography_map_code] = map_code
            df = df.drop('Type',
                         axis = 0,
                         )
            df.index.name = global_var.production_source
            # astype(str) turns missing labels into 'nan', which the
            # transcode dict maps to the 'unknown' production source.
            df.index = df.index.astype(str).replace(transcode.production_source)
            dikt_capacity[fname] = df
        print()

        # One dataframe per raw file -> single concatenated dataframe.
        df = pd.concat([dikt_capacity[key]
                        for key in dikt_capacity.keys()
                        ],
                       axis = 0,
                       )
        df = df.reset_index()
        df[global_var.geography_map_code] = map_code
        df[global_var.commodity] = global_var.commodity_electricity

        # Save
        print('Save')
        os.makedirs(os.path.dirname(df_path),
                    exist_ok = True,
                    )
        df.to_csv(df_path,
                  sep = ';',
                  index = False,
                  )
    print('done')
    return df
85 |
86 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/rte/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw aggregated capacities data and the
4 | transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from ..... import global_var
11 |
12 |
# Folder containing the raw RTE installed-capacity files.
folder_raw = os.path.join(global_var.path_public_data,
                          '24_RTE',
                          'Capacite_installee_production',
                          )
# Template (one file per map_code) of the path of the transformed dataframe.
# The loader appends the '.csv' extension itself, so the template must not
# include it: the previous '...{map_code}.csv' value produced a double
# '.csv.csv' extension.  This also matches the ENTSO-E paths convention.
fpath_tmp = os.path.join(global_var.path_transformed,
                         'RTE',
                         'Capacite_installee_production',
                         'Capacite_installee_production_{map_code}',
                         )
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/rte/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by RTE.
4 |
5 | """
6 |
7 | from .production_source import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/aggregated/load/rte/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the names of the production sources used by RTE
and the user-defined names.
5 |
6 | """
7 |
8 | from ...... import global_var
9 |
# Mapping from the RTE production-source labels (French) to the
# user-defined names.  Applied to the dataframe index after it has been
# cast to str, hence the 'nan' key, which catches missing labels and maps
# them to the 'unknown' production source.
production_source = {'Biomasse'                            : global_var.production_source_biomass,
                     'Charbon'                             : global_var.production_source_fossil_coal,
                     'Gaz'                                 : global_var.production_source_fossil_gas,
                     'Fioul'                               : global_var.production_source_fossil_oil,
                     'Hydraulique STEP'                    : global_var.production_source_hydro_pumped_storage,
                     'Hydraulique lacs'                    : global_var.production_source_hydro_reservoir,
                     "Hydraulique fil de l'eau / éclusée"  : global_var.production_source_hydro_run_of_river,
                     'Marin'                               : global_var.production_source_marine,
                     'Nucléaire'                           : global_var.production_source_nuclear,
                     'Autre'                               : global_var.production_source_other,
                     'nan'                                 : global_var.production_source_unknown,
                     }
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the unit capacities.
4 |
5 | """
6 |
7 | from .load import *
8 |
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the unit capacities.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/load.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #
4 | from .... import global_var
5 | from . import rte
6 |
7 |
def load(source = None,
         map_code = None,
         ):
    """
    Dispatches to the loader of the unit capacities
    matching the given data source.

    :param source: The data source
    :param map_code: The zone
    :type source: string
    :type map_code: string
    :return: The selected unit capacities
    :rtype: pd.DataFrame
    :raises ValueError: If the source is not recognized
    """
    # Guard-clause dispatch: RTE is currently the only unit-level source.
    if source == global_var.data_source_capacity_rte:
        return rte.load(map_code = map_code)

    raise ValueError('Incorrect source={0} '.format(source))
30 |
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/rte/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the data about the unit capacities provided by RTE.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/rte/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw unit capacities data and the
4 | transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from ..... import global_var
11 |
12 |
# Folder containing the raw RTE unit-level ("centrales de production") files.
folder_raw = os.path.join(global_var.path_public_data,
                          '24_RTE',
                          'Centrales_production_reference',
                          )
# Template (one file per map_code) of the path of the transformed dataframe.
# No extension here: presumably the unit loader appends it, as the
# aggregated ENTSO-E loader does -- confirm against the unit rte loader.
fpath_tmp = os.path.join(global_var.path_transformed,
                         'RTE',
                         'Centrales_production_reference',
                         'Centrales_production_reference_{map_code}',
                         )
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/rte/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by RTE.
4 |
5 | """
6 |
7 | from .columns import *
8 | from .production_source import *
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/rte/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the names used by RTE for the columns
and the user-defined names.
5 |
6 | """
7 |
8 | from ...... import global_var
9 |
10 |
# Mapping from the RTE column names (French) to the user-defined names.
# The keys reproduce the raw headers verbatim, including their spelling
# (e.g. 'KVT') and capitalization.
columns = {'Capacité de production Installée (MW)'   : global_var.capacity_nominal_mw,
           'Date de suppression'                     : global_var.capacity_end_date_local,
           'Localisation'                            : global_var.geography_country,
           'Type'                                    : global_var.production_source,
           'Date de création'                        : global_var.publication_creation_dt_local,
           'Niveau de tension de connexion (KVT)'    : global_var.unit_voltage_connection,
           'Nom de la centrale de production'        : global_var.unit_name,
           }
--------------------------------------------------------------------------------
/pub_data_visualization/production_capacity/unit/load/rte/transcode/production_source.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the names used by RTE for the production sources
and the user-defined names.
5 |
6 | """
7 |
8 | from ...... import global_var
9 |
# Mapping from the RTE production-source labels (French) to the
# user-defined names.  The 'nan' key catches labels that were missing and
# stringified by an astype(str) cast.
# Note: this dict duplicates the one in
# production_capacity/aggregated/load/rte/transcode/production_source.py.
production_source = {'Biomasse'                            : global_var.production_source_biomass,
                     'Charbon'                             : global_var.production_source_fossil_coal,
                     'Gaz'                                 : global_var.production_source_fossil_gas,
                     'Fioul'                               : global_var.production_source_fossil_oil,
                     'Hydraulique STEP'                    : global_var.production_source_hydro_pumped_storage,
                     'Hydraulique lacs'                    : global_var.production_source_hydro_reservoir,
                     "Hydraulique fil de l'eau / éclusée"  : global_var.production_source_hydro_run_of_river,
                     'Marin'                               : global_var.production_source_marine,
                     'Nucléaire'                           : global_var.production_source_nuclear,
                     'Autre'                               : global_var.production_source_other,
                     'nan'                                 : global_var.production_source_unknown,
                     }
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load and plot transmission data.
4 |
5 | """
6 |
7 |
8 | from .load import *
9 | from . import plot
10 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load transmission data.
4 |
5 | """
6 |
7 |
8 |
9 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the allocations from V_ALLOCATION.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/load.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import numpy as np
4 | import pandas as pd
5 | import requests
6 | import io
7 | #
8 | from pub_data_visualization import global_tools, global_var
9 | from . import paths, query, transcode
10 |
11 |
def load(date_min = None,
         date_max = None,
         ):
    """
    Loads the nominated capacities from the ENTSOG API.

    Reads the transformed dataframe from the local cache if it exists;
    on a cache miss (FileNotFoundError only -- other read errors
    propagate), fetches the data from the ENTSOG transparency API,
    transforms it, and writes the cache before returning.

    :param date_min: The left bound
    :param date_max: The right bound
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The selected capacities
    :rtype: pd.DataFrame
    """

    # Cache file name encodes the date bounds; empty bounds are dropped
    # by filter(None, ...) so they leave no trailing underscore parts.
    df_path = os.path.join(paths.fpath_tmp,
                           '_'.join(filter(None, ['entsog_nominations',
                                                  date_min.strftime('%Y%m%d_%H%M') if bool(date_min) else '',
                                                  date_max.strftime('%Y%m%d_%H%M') if bool(date_max) else '',
                                                  ])))
    try:
        # Fast path: reuse the cached dataframe if present.
        print('Load df_nominations - ', end='')
        df = pd.read_csv(df_path,
                         sep=';',
                         )
        # Restore datetime dtypes lost by the CSV round-trip, then derive
        # the local (CET) columns from the UTC ones.
        df.loc[:,global_var.transmission_begin_dt_utc] = pd.to_datetime(df[global_var.transmission_begin_dt_utc])
        df.loc[:,global_var.transmission_end_dt_utc] = pd.to_datetime(df[global_var.transmission_end_dt_utc])
        df.loc[:, global_var.transmission_begin_dt_local] = df[global_var.transmission_begin_dt_utc].dt.tz_convert('CET')
        df.loc[:, global_var.transmission_end_dt_local] = df[global_var.transmission_end_dt_utc].dt.tz_convert('CET')
        print('Loaded')
    except FileNotFoundError:
        print('Not loaded')

        ### URL Connection
        api_query = query.entsog_nominations(date_min = date_min,
                                             date_max = date_max,
                                             )
        response = requests.get(api_query)
        response.raise_for_status()
        data = response.content
        df = pd.read_csv(io.StringIO(data.decode('utf-8')))

        ### Columns
        # Fail loudly if the API schema changed: the response must contain
        # exactly the known kept + dropped columns.
        assert set(df.columns) == set(transcode.columns).union(transcode.columns_dropped)
        df = df.drop(transcode.columns_dropped, axis=1)
        df = df.rename(transcode.columns, axis=1)
        if df.empty: return df

        ### Datetimes
        # The raw period bounds are naive CET timestamps.  The 1-second
        # shift turns the inclusive raw 'periodTo' into an exclusive
        # bound -- presumably (e.g. 23:59:59 -> next-day 00:00:00); confirm.
        df[global_var.transmission_begin_dt_local] = pd.to_datetime(df[global_var.transmission_begin_dt_local]).dt.tz_localize('CET')
        df[global_var.transmission_end_dt_local] = pd.to_datetime(df[global_var.transmission_end_dt_local]).dt.tz_localize('CET') + pd.Timedelta(seconds = 1)
        df[global_var.transmission_begin_dt_utc] = df[global_var.transmission_begin_dt_local].dt.tz_convert('UTC')
        df[global_var.transmission_end_dt_utc] = df[global_var.transmission_end_dt_local].dt.tz_convert('UTC')

        # Power units
        # All raw values are in kWh/d; convert to MWh/d.
        assert set(df[global_var.transmission_unit].unique()) == {'kWh/d'}
        df[global_var.transmission_power_mwh_d] = df[global_var.transmission_value]/1e3

        # Additional infos
        df[global_var.data_source_transmission] = global_var.data_source_transmission_entsog_nominations
        df[global_var.commodity] = global_var.commodity_gas

        # Drop
        # The unit and raw-value columns are now redundant.
        df.drop([global_var.transmission_unit,
                 global_var.transmission_value,
                 ],
                axis=1,
                inplace=True,
                )

        # Save
        print('Save')
        os.makedirs(os.path.dirname(df_path),
                    exist_ok=True,
                    )
        df.to_csv(df_path,
                  sep=';',
                  index=False,
                  )
    print('done : df.shape = {0}'.format(df.shape))
    return df
92 |
93 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/paths.py:
--------------------------------------------------------------------------------
1 | """
2 | Folders where transformed dataframes are saved.
3 |
4 | """
5 |
6 | import os
7 | #
8 | from pub_data_visualization import global_var
9 |
# Folder where the transformed/cached transmission dataframes are saved.
# load.py joins the cache file name onto this path
# (os.path.join(paths.fpath_tmp, <name>)), so it must be a directory:
# the previous trailing 'df_{0}_{1}.csv' component created a spurious
# directory of that literal name around every cache file.
fpath_tmp = os.path.join(global_var.path_transformed,
                         'transmission',
                         'sql_allocation',
                         )
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/query.py:
--------------------------------------------------------------------------------
1 |
2 | #from .... import global_var
3 | import pandas as pd
4 |
5 |
def entsog_nominations(date_min=None,
                       date_max=None,
                       indicator="Nomination",
                       periodType="day",
                       pointDirection="FR-TSO-0003ITP-00526exit",
                       operatorLabel = 'TERÉGA',
                       timeZone='CET',
                       limit=-1,
                       ):
    """
    Writes the API query to load nominations from the ENTSOG API.

    The date bounds default to a +/- 2 day window around the moment of
    the call.  (They were previously computed in the default arguments,
    i.e. once at import time, which froze the window for the whole life
    of the process.)

    :param date_min: The left bound (default: now - 2 days)
    :param date_max: The right bound (default: now + 2 days)
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The API query
    :rtype: string
    """

    # Evaluate the default window at call time, not at import time.
    if date_min is None:
        date_min = pd.Timestamp.now() - pd.Timedelta(days=2)
    if date_max is None:
        date_max = pd.Timestamp.now() + pd.Timedelta(days=2)

    # limit=-1 asks the API for all rows; spaces are percent-encoded at
    # the end so every parameter value is URL-safe.
    query = ("https://transparency.entsog.eu/api/v1/operationaldatas.csv?"
             + "&".join(["indicator={}".format(indicator),
                         "pointDirection={}".format(pointDirection),
                         #"operatorLabel={}".format(operatorLabel),
                         "from={}".format(date_min.strftime('%Y-%m-%d')),
                         "to={}".format(date_max.strftime('%Y-%m-%d')),
                         "periodType={}".format(periodType),
                         "timeZone={}".format(timeZone),
                         "limit={}".format(limit),
                         ])
             ).replace(' ', '%20')

    return query
39 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | """
4 | Module to match the names defined by the user with names
5 | used in V_ALLOCATION.
6 |
7 | """
8 |
9 | from .columns import *
10 | from .columns_dropped import *
11 |
12 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/transcode/columns.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to match the names of the columns.
3 |
4 | This module establishes the connections between the user defined names
5 | and the names used in V_ALLOCATION.
6 |
7 | """
8 |
9 | from pub_data_visualization import global_var
10 |
# Mapping from the raw ENTSOG CSV column names to the user-defined names
# (applied with pd.DataFrame.rename in the entsog_nominations loader,
# which also asserts that kept + dropped columns cover the response).
columns = {
    'id': global_var.transmission_id,
    'periodFrom': global_var.transmission_begin_dt_local,
    'periodTo': global_var.transmission_end_dt_local,
    'pointLabel': global_var.geography_point,
    'operatorLabel' : global_var.transmission_tso,
    'directionKey': global_var.transmission_direction,
    'unit': global_var.transmission_unit,
    'value': global_var.transmission_value,
    'pointKey' : global_var.geography_point_type,
    }
22 |
23 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/entsog_nominations/transcode/columns_dropped.py:
--------------------------------------------------------------------------------
1 | """
2 | Module to drop some of the columns that are not used.
3 |
4 | """
5 |
# Raw ENTSOG response columns that carry no information used downstream;
# they are removed right after the API response is parsed.
columns_dropped = [
    'bookingPlatformKey', 'bookingPlatformLabel', 'bookingPlatformURL',
    'capacityBookingStatus', 'capacityType', 'dataSet', 'flowStatus',
    'generalRemarks', 'indicator', 'interruptionCalculationRemark',
    'interruptionType', 'isArchived', 'isCamRelevant', 'isCmpRelevant',
    'isNA', 'isUnlimited', 'itemRemarks', 'lastUpdateDateTime',
    'operatorKey', 'originalPeriodFrom', 'periodType',
    'restorationInformation', 'tsoEicCode', 'tsoItemIdentifier',
]
32 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import entsog_nominations
6 |
7 |
def load(source = None,
         date_min = None,
         date_max = None,
         ):
    """
    Calls the appropriate loader
    of the transmission data
    from the given data source.

    :param source: The data source
    :param date_min: The left bound
    :param date_max: The right bound
    :type source: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The selected transmission data
    :rtype: pd.DataFrame
    :raises ValueError: If the source is not recognized
    """

    if source == global_var.data_source_transmission_entsog_nominations:
        df = entsog_nominations.load(date_min = date_min,
                                     date_max = date_max,
                                     )

    else:
        # Named message for consistency with the other loaders of the package.
        raise ValueError('Incorrect source={0} '.format(source))

    # Sort - put the known columns first, keep any extra columns at the end.
    assert not set(col_orders).difference(df.columns)
    dg = df.reindex(col_orders+[col for col in df.columns if col not in col_orders], axis = 1)
    dg = dg.sort_values([global_var.transmission_begin_dt_local,
                         global_var.transmission_end_dt_local,
                         #global_var.geography_zone,
                         global_var.geography_point_type,
                         global_var.geography_point,
                         global_var.transmission_direction,
                         ])
    dg = dg.reset_index(drop = True)

    # Filter - keep the rows intersecting the [date_min, date_max) window;
    # an empty bound disables the corresponding condition.
    dh = dg.loc[ pd.Series(True, index = dg.index)
                 & ((dg[global_var.transmission_end_dt_local] >= date_min) if bool(date_min) else True)
                 & ((dg[global_var.transmission_begin_dt_local] < date_max) if bool(date_max) else True)
                 ]

    # Checks
    assert dh.shape[0] > 0

    return dh
57 |
# Preferred left-to-right order of the columns in the dataframe returned
# by load(); columns not listed here are appended after these.
col_orders = [
              #global_var.geography_zone,
              global_var.geography_point_type,
              global_var.geography_point,
              global_var.transmission_direction,
              global_var.transmission_begin_dt_local,
              global_var.transmission_end_dt_local,
              global_var.transmission_power_mwh_d,
              global_var.transmission_tso,
              global_var.commodity,
              global_var.data_source_transmission,
              global_var.transmission_id,
              #global_var.geography_zone_info,
              ]
72 |
73 |
74 |
--------------------------------------------------------------------------------
/pub_data_visualization/transmission/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load and plot weather data.
4 |
5 | """
6 |
7 | from .load import *
8 | from . import plot
9 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the weather data.
4 |
5 | """
6 |
7 |
8 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/load.py:
--------------------------------------------------------------------------------
1 |
2 | import pandas as pd
3 | #
4 | from ... import global_var
5 | from . import meteofrance
6 |
7 |
def load(source = global_var.data_source_weather_meteofrance,
         zone = global_var.geography_zone_france,
         date_min = pd.Timestamp('2015').tz_localize('CET'),
         date_max = None,
         ):
    """
    Calls the appropriate loader of the weather data
    between two dates in the given zone.

    :param source: The data source
    :param zone: The selected zone
    :param date_min: The left bound
    :param date_max: The right bound (default: January 1st of the
                     current year, evaluated at call time)
    :type source: string
    :type zone: string
    :type date_min: pd.Timestamp
    :type date_max: pd.Timestamp
    :return: The selected weather data
    :rtype: pd.DataFrame
    :raises ValueError: If the source is not recognized
    """

    # The previous default (pd.Timestamp.now().year in the signature) was
    # evaluated once at import time, freezing the bound in long-running
    # processes; compute it at call time instead.
    if date_max is None:
        date_max = pd.Timestamp('{}'.format(pd.Timestamp.now().year)).tz_localize('CET')

    if source == global_var.data_source_weather_meteofrance:
        # The station coordinates and the discarded stations returned by
        # the loader are not used here.
        df, coordinates_weather, trash_weather = meteofrance.load(zone = zone,
                                                                  date_min = date_min,
                                                                  date_max = date_max,
                                                                  )

    else:
        raise ValueError('Incorrect source : {0}'.format(source))

    # Fail loudly if the loader's schema changed.
    assert set(df.columns) == {global_var.weather_physical_quantity,
                               global_var.weather_physical_quantity_value,
                               global_var.weather_dt_utc,
                               global_var.weather_nature,
                               global_var.weather_site_name,
                               }

    # Drop locations and average over the weather stations
    dg = df.drop(global_var.weather_site_name, axis = 1)
    dg = dg.groupby([global_var.weather_dt_utc,
                     global_var.weather_nature,
                     global_var.weather_physical_quantity,
                     ]).mean().reset_index()

    # Format
    dg = dg.set_index(global_var.weather_dt_utc)
    dg = dg.reindex(sorted(dg.columns), axis = 1)
    dg = dg.sort_index()

    # Filter - keep the rows in the [date_min, date_max) window;
    # an empty bound disables the corresponding condition.
    dh = dg.loc[ pd.Series(True, index = dg.index)
                 & ((dg.index >= date_min) if bool(date_min) else True)
                 & ((dg.index < date_max) if bool(date_max) else True)
                 ]

    # Checks
    assert dh.shape[0] > 0
    assert not dh.reset_index()[[global_var.weather_dt_utc,global_var.weather_physical_quantity]].duplicated().sum()

    return dh
69 |
70 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to load the weather data provided by Météo-France.
4 |
5 | """
6 |
7 | from .load import *
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/geography.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Geographical information about the weather data provided by Météo-France.
4 |
5 | """
6 |
7 |
# Bounding box, in degrees, of metropolitan France.
# NOTE(review): presumably used to keep only the Météo-France stations
# located in metropolitan France -- confirm against the loader.
metropolis_latitude_min = 41
metropolis_latitude_max = 51.2
metropolis_longitude_min = -5.2
metropolis_longitude_max = 8.3
12 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/paths.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Folders where the raw weather data provided by Météo-France
4 | and the transformed dataframes are saved.
5 |
6 | """
7 |
8 | import os
9 | #
10 | from .... import global_var
11 |
12 |
# Folder containing the raw Météo-France SYNOP files.
folder_weather_meteofrance_raw = os.path.join(global_var.path_public_data,
                                              '20_MeteoFrance',
                                              'synop',
                                              )
# Folder where the transformed weather dataframes are saved.
fpath_weather_meteofrance_tmp = os.path.join(global_var.path_transformed,
                                             'MeteoFrance',
                                             'synop',
                                             )


# File-name templates: one monthly SYNOP archive per (year, month), and
# the station description file ('postesSynop').
dikt_files = {'weather.file_year_month' : 'synop.{year:d}{month:02d}',
              'weather.description' : 'postesSynop',
              }
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/transcode/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to match the user defined names with the names used by Météo-France.
4 |
5 | """
6 |
7 | from .columns import *
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/transcode/columns.py:
--------------------------------------------------------------------------------
1 |
2 | """
Correspondences between the names used by Météo-France
and the user-defined names.
5 |
6 | """
7 |
8 | from ..... import global_var
9 |
# Mapping from the raw Météo-France column names to the user-defined names.
# Both 'numer_sta' and 'ID' map to the same site-id name -- presumably the
# observation files and the station description file use different headers
# for the same field; confirm against the loader.
columns = {'date'      : global_var.weather_dt_utc,
           't'         : global_var.weather_temperature_kelvin,
           'n'         : global_var.weather_nebulosity,
           'ff'        : global_var.weather_wind_speed,
           'numer_sta' : global_var.weather_site_id,
           'ID'        : global_var.weather_site_id,
           'Nom'       : global_var.weather_site_name,
           'Latitude'  : global_var.geography_latitude,
           'Longitude' : global_var.geography_longitude,
           }
20 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/load/meteofrance/url.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | """
4 | URLs and filenames to download the data provided by Météo-France.
5 |
6 | """
7 |
# Base URLs of the Météo-France public downloads: the monthly SYNOP
# archives and the SYNOP station list.
dikt = {
    'weather':          'https://donneespubliques.meteofrance.fr/donnees_libres/Txt/Synop/Archive/',
    'weather.stations': 'https://donneespubliques.meteofrance.fr/donnees_libres/Txt/Synop/',
}
--------------------------------------------------------------------------------
/pub_data_visualization/weather/plot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to plot the weather data.
4 |
5 | """
6 |
7 |
8 | from .curve import *
9 | from .distribution import *
--------------------------------------------------------------------------------
/pub_data_visualization/weather/plot/distribution.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import os
4 | #
5 | from ... import global_tools, global_var
6 | from . import subplot
7 | #
8 | import seaborn as sns
9 | import matplotlib as mpl
10 | import matplotlib.pyplot as plt
11 | from pandas.plotting import register_matplotlib_converters; register_matplotlib_converters()
12 | from matplotlib.font_manager import FontProperties
13 | global_tools.set_mpl(mpl, plt, FontProperties())
14 | #
15 |
16 |
def distribution(df,
                 nature = None,
                 source = None,
                 physical_quantity = None,
                 folder_out = None,
                 close = True,
                 figsize = global_var.figsize_vertical,
                 ):
    """
    Plots the boxplots of the weather data by creating a figure and
    calling the function to fill the subplot.

    :param df: The weather data
    :param nature: The nature of the weather data to plot
    :param source: The source of the weather data to plot
    :param physical_quantity: The weather quantity to plot
    :param folder_out: The folder where the figure is saved
    :param close: Boolean to close the figure after it is saved
    :param figsize: Desired size of the figure
    :type df: pd.DataFrame
    :type nature: string
    :type source: string
    :type physical_quantity: string
    :type folder_out: string
    :type close: bool
    :type figsize: (int,int)
    :return: None
    :rtype: None
    """

    ### Interactive mode
    if close:
        plt.ioff()
    else:
        plt.ion()

    ### Figure
    fig, ax = plt.subplots(figsize = figsize,
                           nrows = 1,
                           ncols = 1,
                           )

    ### Subplot
    subplot.distribution(ax,
                         df,
                         figsize,
                         nature = nature,
                         physical_quantity = physical_quantity,
                         )

    ### Finalize
    # The physical quantity is included in the title (which is also the
    # saved file path) : it was previously passed to .format without a
    # matching placeholder, so figures of different quantities for the
    # same source/nature overwrote each other.
    title = ' - '.join(filter(None, [
                'source = {source}' if source else '',
                'nature = {nature}' if nature else '',
                'weather_quantity = {weather_quantity}' if physical_quantity else '',
                ])).format(source = source,
                           nature = nature,
                           weather_quantity = physical_quantity,
                           )
    fig.suptitle(global_tools.format_latex(title))

    ### Save
    full_path = os.path.join(folder_out,
                             "weather_distribution",
                             title,
                             )
    os.makedirs(os.path.dirname(full_path),
                exist_ok = True,
                )
    plt.savefig(full_path + ".png",
                format = "png",
                bbox_inches = "tight",
                )
    if close:
        plt.close(fig)
--------------------------------------------------------------------------------
/pub_data_visualization/weather/plot/subplot/__init__.py:
--------------------------------------------------------------------------------
1 |
2 | """
3 | Module to fill subplots with weather data.
4 |
5 | """
6 |
7 | from .curve import *
8 | from .distribution import *
--------------------------------------------------------------------------------
/pub_data_visualization/weather/plot/subplot/curve.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | #
4 | from .... import global_tools, global_var
5 |
6 |
def curve(ax,
          df,
          nature = None,
          physical_quantity = None,
          **kwargs,
          ):
    """
    Draws in a subplot the curve of one weather quantity.

    :param ax: The ax to fill
    :param df: The weather data
    :param nature: The nature of the weather data to plot
    :param physical_quantity: The weather quantity to plot
    :param kwargs: Additional parameters for the plt.plot function
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :type nature: string
    :type physical_quantity: string
    :type kwargs: dict
    :return: None
    :rtype: None
    """

    # Restrict the data to the requested (nature, physical_quantity) pair.
    mask = (  (df[global_var.weather_nature] == nature)
            & (df[global_var.weather_physical_quantity] == physical_quantity)
            )
    series = df.loc[mask][global_var.weather_physical_quantity_value]
    assert not series.empty

    ax.plot(series.index,
            series,
            label = global_tools.format_latex(physical_quantity),
            **kwargs,
            )
39 |
40 |
41 |
42 |
--------------------------------------------------------------------------------
/pub_data_visualization/weather/plot/subplot/distribution.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | import calendar
4 | import numpy as np
5 | #
6 | from .... import global_var
7 | #
8 |
9 |
def distribution(ax,
                 df,
                 figsize,
                 nature = None,
                 physical_quantity = None,
                 ):
    """
    Draws in a subplot the boxplots of the weather data,
    one boxplot per (year, month) pair.

    :param ax: The ax to fill
    :param df: The weather data
    :param figsize: Desired size of the figure (its orientation decides
                    whether the boxplots are drawn vertically or horizontally)
    :param nature: The nature of the weather data to plot
    :param physical_quantity: The weather quantity to plot
    :type ax: matplotlib.axes._subplots.AxesSubplot
    :type df: pd.DataFrame
    :type figsize: (int,int)
    :type nature: string
    :type physical_quantity: string
    :return: None
    :rtype: None
    """

    # Series of the requested (nature, physical_quantity) pair.
    data = df.loc[ (df[global_var.weather_nature] == nature)
                  & (df[global_var.weather_physical_quantity] == physical_quantity)
                  ][global_var.weather_physical_quantity_value]

    # Distinct months and years present in the selection.
    MONTHS = np.unique(data.index.month)
    YEARS = np.unique(data.index.year)

    # One group (hence one boxplot) per (year, month) pair,
    # ordered year-major by groupby.
    df_grouped = data.groupby(by = [data.index.year, data.index.month])

    # Gap (in axis position units) between consecutive months; the years
    # of a given month sit at consecutive positions inside that gap.
    interspace_years = 8
    # Draw order : reversed when the figure is vertical (horizontal
    # boxplots), presumably so the first month ends up at the top.
    slice_order = slice(None, None, ( 1
                                      if figsize[0] > figsize[1]
                                      else
                                      -1
                                      ))

    # Axis position of each (year, month) boxplot.
    # NOTE(review): slice_order reverses the positions but neither the
    # plotted data nor the labels below — confirm this pairing is the
    # intended rendering in the vertical layout.
    positions = [
        (month - data.index.month.min())*interspace_years + (year - data.index.year.min())
        for year, month in df_grouped.groups.keys()
    ] [slice_order]

    # Month abbreviation shown only once per month group
    # (on the boxplot of the first year).
    labels = [
        calendar.month_abbr[month]
        if year == YEARS.min()
        else
        ''
        for year, month in df_grouped.groups.keys()
    ]


    widths = [0.9
              for key in df_grouped.groups.keys()
              ]

    # Outliers drawn as small black dots.
    flierprops = dict(marker = '.',
                      markerfacecolor = 'k',
                      markersize = 2,
                      linestyle = 'none',
                      markeredgecolor = 'k',
                      )

    # Whiskers at the 1st and 99th percentiles.
    b_plot = ax.boxplot([df_grouped.get_group(e) for e in df_grouped.groups.keys()],
                        vert = figsize[0] > figsize[1],
                        positions = positions,
                        labels = labels,
                        notch = True,
                        patch_artist = True,
                        widths = widths,
                        flierprops = flierprops,
                        whis = [1,99],
                        )


    # One face color per year : boxes come in runs of len(MONTHS) sharing
    # a year, so ii//len(MONTHS) indexes the year.
    # NOTE(review): only 5 colors and no modulo — a 6th year would raise
    # an IndexError.
    colors = ['pink',
              'lightblue',
              'lightgreen',
              'yellow',
              'orange',
              ]
    for ii, patch in enumerate(b_plot['boxes']):
        patch.set_facecolor(colors[ii//len(MONTHS)])
        patch.set(linewidth=0.25)

    # Grid along the value axis and limits along the position axis,
    # depending on the orientation.
    if figsize[0] > figsize[1]:
        ax.yaxis.grid(True)
        ax.set_xlim(-1, interspace_years*(len(MONTHS)-1) + len(YEARS) + 1)
        ax.set_ylabel(physical_quantity)
    else:
        ax.xaxis.grid(True)
        ax.set_ylim(-1, interspace_years*(len(MONTHS)-1) + len(YEARS) + 1)
        ax.set_xlabel(physical_quantity)


    # Legend : one representative box per year (every len(MONTHS)-th box).
    _ = ax.legend(
        [pp for pp in b_plot['boxes'][::len(MONTHS)]],
        YEARS,
        ncol = 1,
    )
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | alabaster==0.7.12
2 | arrow==1.2.2
3 | astroid==2.11.5
4 | atomicwrites==1.4.0
5 | attrs==21.4.0
6 | autopep8==1.6.0
7 | Babel==2.10.1
8 | backcall==0.2.0
9 | backports.entry-points-selectable==1.1.0
10 | bcrypt==3.2.2
11 | beautifulsoup4==4.11.1
12 | binaryornot==0.4.4
13 | black==22.3.0
14 | bleach==5.0.0
15 | certifi==2022.5.18.1
16 | cffi==1.15.0
17 | chardet==4.0.0
18 | charset-normalizer==2.0.12
19 | click==8.1.3
20 | cloudpickle==2.1.0
21 | colorama==0.4.4
22 | cookiecutter==2.1.0
23 | cryptography==37.0.2
24 | cycler==0.10.0
25 | debugpy==1.6.0
26 | decorator==5.1.0
27 | defusedxml==0.7.1
28 | diff-match-patch==20200713
29 | dill==0.3.5.1
30 | distlib==0.3.3
31 | docutils==0.18.1
32 | entrypoints==0.4
33 | fastjsonschema==2.15.3
34 | filelock==3.3.1
35 | flake8==4.0.1
36 | h5py==3.7.0
37 | idna==3.3
38 | imagesize==1.3.0
39 | importlib-metadata==4.11.4
40 | importlib-resources==5.7.1
41 | inflection==0.5.1
42 | intervaltree==3.1.0
43 | ipdb==0.13.9
44 | ipykernel==6.13.0
45 | ipython==7.34.0
46 | ipython-genutils==0.2.0
47 | isort==5.10.1
48 | jedi==0.18.0
49 | jellyfish==0.9.0
50 | Jinja2==3.1.2
51 | jinja2-time==0.2.0
52 | joblib==1.1.0
53 | jsonschema==4.5.1
54 | jupyter-client==7.3.1
55 | jupyter-core==4.10.0
56 | jupyterlab-pygments==0.2.2
57 | keyring==23.5.1
58 | kiwisolver==1.3.2
59 | lazy-object-proxy==1.7.1
60 | MarkupSafe==2.1.1
61 | matplotlib==3.4.3
62 | matplotlib-inline==0.1.3
63 | mccabe==0.6.1
64 | mistune==0.8.4
65 | mypy-extensions==0.4.3
66 | nbclient==0.6.4
67 | nbconvert==6.5.0
68 | nbformat==5.4.0
69 | nest-asyncio==1.5.5
70 | numpy==1.21.3
71 | numpydoc==1.3.1
72 | packaging==21.3
73 | pandas==1.3.4
74 | pandocfilters==1.5.0
75 | paramiko==2.11.0
76 | parso==0.8.2
77 | pathspec==0.9.0
78 | pexpect==4.8.0
79 | pickleshare==0.7.5
80 | Pillow==8.4.0
81 | platformdirs==2.4.0
82 | pluggy==1.0.0
83 | prompt-toolkit==3.0.21
84 | psutil==5.9.1
85 | ptyprocess==0.7.0
86 | pycodestyle==2.8.0
87 | pycparser==2.21
88 | pydocstyle==6.1.1
89 | pyflakes==2.4.0
90 | Pygments==2.10.0
91 | pylint==2.14.0
92 | pyls-spyder==0.4.0
93 | PyNaCl==1.5.0
94 | pyodbc==4.0.32
95 | pyparsing==3.0.3
96 | PyQt5==5.15.6
97 | PyQt5-Qt5==5.15.2
98 | PyQt5-sip==12.10.1
99 | PyQtWebEngine==5.15.5
100 | PyQtWebEngine-Qt5==5.15.2
101 | pyrsistent==0.18.1
102 | python-dateutil==2.8.2
103 | python-lsp-black==1.2.1
104 | python-lsp-jsonrpc==1.0.0
105 | python-lsp-server==1.4.1
106 | python-slugify==6.1.2
107 | pytz==2021.3
108 | pywin32==304
109 | pywin32-ctypes==0.2.0
110 | PyYAML==6.0
111 | pyzmq==23.0.0
112 | QDarkStyle==3.0.3
113 | qstylizer==0.2.1
114 | QtAwesome==1.1.1
115 | qtconsole==5.3.0
116 | QtPy==2.1.0
117 | requests==2.27.1
118 | rope==1.1.1
119 | Rtree==1.0.0
120 | scikit-learn==1.1.1
121 | scipy==1.7.1
122 | seaborn==0.11.2
123 | six==1.16.0
124 | snowballstemmer==2.2.0
125 | sortedcontainers==2.4.0
126 | soupsieve==2.3.2.post1
127 | Sphinx==5.0.0
128 | sphinxcontrib-applehelp==1.0.2
129 | sphinxcontrib-devhelp==1.0.2
130 | sphinxcontrib-htmlhelp==2.0.0
131 | sphinxcontrib-jsmath==1.0.1
132 | sphinxcontrib-qthelp==1.0.3
133 | sphinxcontrib-serializinghtml==1.1.5
134 | spyder==5.3.1
135 | spyder-kernels==2.3.1
136 | termcolor==1.1.0
137 | text-unidecode==1.3
138 | textdistance==4.2.2
139 | threadpoolctl==3.1.0
140 | three-merge==0.1.1
141 | tinycss2==1.1.1
142 | toml==0.10.2
143 | tomli==2.0.1
144 | tomlkit==0.11.0
145 | tornado==6.1
146 | traitlets==5.2.2.post1
147 | typing_extensions==4.2.0
148 | ujson==5.3.0
149 | Unidecode==1.3.4
150 | urllib3==1.26.9
151 | virtualenv==20.9.0
152 | watchdog==2.1.8
153 | wcwidth==0.2.5
154 | webencodings==0.5.1
155 | wrapt==1.14.1
156 | yapf==0.32.0
157 | zipp==3.8.0
158 |
--------------------------------------------------------------------------------
/scripts/indices/main_price.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw
the auction prices for a given bidding zone.
"""

import pandas as pd
#
from pub_data_visualization import indices, global_var

###############################################################################
# User settings : data source, bidding zones and delivery window.
data_source_auctions = global_var.data_source_auctions_entsoe
map_code = [global_var.geography_map_code_france,
            global_var.geography_map_code_belgium,
            ]
delivery_end_dt_min = None
delivery_begin_dt_max = None
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Load the auction prices.
df_prices = indices.load(date_min = delivery_end_dt_min,
                         date_max = delivery_begin_dt_max,
                         source = data_source_auctions,
                         map_code = map_code,
                         )

### Pivot : one column per bidding zone, one row per contract.
pivot_index = [global_var.contract_delivery_begin_year_local,
               global_var.contract_frequency,
               global_var.contract_delivery_begin_dt_utc,
               global_var.contract_profile,
               ]
df_pivot = df_prices.pivot_table(values = global_var.auction_price_euro_mwh,
                                 index = pivot_index,
                                 columns = [global_var.geography_map_code,
                                            ],
                                 ).sort_index()

### Plot the prices.
indices.plot.price(df_pivot,
                   source = data_source_auctions,
                   map_code = map_code,
                   date_min = delivery_end_dt_min,
                   date_max = delivery_begin_dt_max,
                   figsize = figsize,
                   folder_out = folder_out,
                   close = close,
                   )
--------------------------------------------------------------------------------
/scripts/load/main_forecasting_error.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the load
and the forecasting errors for a given zone.
"""

import pandas as pd
#
from pub_data_visualization import global_var, load

###############################################################################
# User settings.
data_source_load = global_var.data_source_load_eco2mix
load_power_unit = global_var.load_power_gw
load_nature_forecast = global_var.load_nature_forecast_day1
map_code = global_var.geography_map_code_france
local_tz = global_var.dikt_tz[map_code]
date_min = pd.Timestamp("2018-01-01 00:00").tz_localize(local_tz)
date_max = pd.Timestamp("2019-01-01 00:00").tz_localize(local_tz)
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

# Forecasting errors are only handled for the eco2mix data in France.
if ( data_source_load != global_var.data_source_load_eco2mix
     or map_code != global_var.geography_map_code_france
     ):
    raise NotImplementedError

### Load the consumption data.
df_load = load.load(source = data_source_load,
                    map_code = map_code,
                    date_min = date_min,
                    date_max = date_max,
                    )

### Plot the load and the forecasting errors.
load.plot.forecasting_error(df_load,
                            source_load = data_source_load,
                            load_unit = load_power_unit,
                            load_nature_forecast = load_nature_forecast,
                            map_code = map_code,
                            date_min = date_min,
                            date_max = date_max,
                            figsize = figsize,
                            folder_out = folder_out,
                            close = close,
                            )
--------------------------------------------------------------------------------
/scripts/load/main_power.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the load of a given zone.
"""

import pandas as pd
#
from pub_data_visualization import global_var, load

###############################################################################
# User settings.
data_source_load = global_var.data_source_load_entsoe
map_code = global_var.geography_map_code_france
load_nature = global_var.load_nature_observation
load_power_unit = global_var.load_power_gw
local_tz = global_var.dikt_tz[map_code]
date_min = pd.Timestamp("2016-01-01 00:00").tz_localize(local_tz)
date_max = pd.Timestamp("2022-01-01 00:00").tz_localize(local_tz)
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Load the consumption data.
df_load = load.load(source = data_source_load,
                    map_code = map_code,
                    date_min = date_min,
                    date_max = date_max,
                    )

### Plot the load.
load.plot.power(df_load,
                source_load = data_source_load,
                load_nature = load_nature,
                load_unit = load_power_unit,
                map_code = map_code,
                date_min = date_min,
                date_max = date_max,
                figsize = figsize,
                folder_out = folder_out,
                close = close,
                )
44 |
--------------------------------------------------------------------------------
/scripts/multiplots/main_scatter_price_weather.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw
a scatterplot of the weather data and the day-ahead prices.
"""

import os
import numpy as np
import pandas as pd
#
from pub_data_visualization import global_var, global_tools, weather, indices, multiplots
#
import seaborn as sns
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from pandas.plotting import register_matplotlib_converters; register_matplotlib_converters()
from matplotlib.font_manager import FontProperties
global_tools.set_mpl(mpl, plt, FontProperties())
import matplotlib.patches as mpatches

###############################################################################
# User settings.
map_code = global_var.geography_map_code_france
date_min = None
date_max = None
#
# Weather selection.
data_source_weather = global_var.data_source_weather_meteofrance
weather_nature = global_var.weather_nature_observation
physical_quantity = global_var.weather_temperature_celsius
#
# Auctions selection.
data_source_auctions = global_var.data_source_auctions_entsoe
map_code_auctions = global_var.geography_map_code_france
#
kernel_plot = False
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Weather : keep the series of the chosen quantity and nature.
df = weather.load(source = data_source_weather,
                  date_min = date_min,
                  date_max = date_max,
                  )
ds_weather = df.loc[ (df[global_var.weather_physical_quantity] == physical_quantity)
                     & (df[global_var.weather_nature] == weather_nature)
                     ][global_var.weather_physical_quantity_value]

### Auctions : one price column per bidding zone, 7-level row index.
df_auctions = indices.load(date_min = date_min,
                           date_max = date_max,
                           source = data_source_auctions,
                           map_code = map_code_auctions,
                           )
dg_auctions = df_auctions.pivot_table(values = global_var.auction_price_euro_mwh,
                                      index = [global_var.contract_delivery_begin_year_local,
                                               global_var.contract_frequency,
                                               global_var.contract_delivery_begin_date_local,
                                               global_var.contract_delivery_period_index,
                                               global_var.contract_delivery_begin_dt_local,
                                               global_var.contract_delivery_begin_dt_utc,
                                               global_var.contract_profile,
                                               ],
                                      columns = [global_var.geography_map_code,
                                                 ],
                                      )
dg_auctions = dg_auctions.sort_index()

### Plot
# Delivery timestamps (UTC) present in both datasets.
common_index = dg_auctions.index.get_level_values(global_var.contract_delivery_begin_dt_utc).intersection(ds_weather.index)
# Select the rows whose contract_delivery_begin_dt_utc (6th index level)
# belongs to common_index; the five slice(None) cover the preceding levels.
X = dg_auctions.loc[(slice(None), slice(None), slice(None), slice(None), slice(None), common_index),:]
Y = ds_weather.loc[common_index]
x_label = global_var.auction_price_euro_mwh
y_label = physical_quantity
plot_name = 'scatter_price_weather'

multiplots.cloud_2d(X,
                    Y,
                    x_label = x_label,
                    y_label = y_label,
                    kernel_plot = kernel_plot,
                    plot_name = plot_name,
                    date_min = date_min,
                    date_max = date_max,
                    map_code = map_code,
                    figsize = figsize,
                    folder_out = folder_out,
                    close = close,
                    )
--------------------------------------------------------------------------------
/scripts/multiplots/main_transparent_production.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to plot the observed production and
the production expected from the transparency publications
for a given set of production units.
"""

import pandas as pd
#
from pub_data_visualization import global_var, global_tools, outages, production, multiplots

###############################################################################
# User settings.
map_code = global_var.geography_map_code_france
unit_name = global_tools.format_unit_name('CORDEMAIS 2')
date_min = None
date_max = None
#
data_source_outages = global_var.data_source_outages_rte
producer_outages = None
#
data_source_production = global_var.data_source_production_rte
production_source = None
production_nature = global_var.production_nature_observation
production_unit = global_var.production_power_mw
local_tz = 'CET'
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Production data.
df_production = production.load(source = data_source_production,
                                map_code = map_code,
                                date_min = date_min,
                                date_max = date_max,
                                )

### Outages data and availability program of the chosen unit.
df_outages = outages.load(source = data_source_outages,
                          map_code = map_code,
                          producer = producer_outages,
                          unit_name = unit_name,
                          production_source = production_source,
                          )
dikt_programs, _ = outages.tools.compute_all_programs(df_outages)
# Program awaited at delivery time (cross section with zero delay).
df_awaited_program = outages.tools.cross_section_view(dikt_programs[unit_name],
                                                      pd.Timedelta(minutes = 0),
                                                      )

### Plot program and production.
multiplots.transparent_production(df_awaited_program,
                                  df_production,
                                  source_outages = data_source_outages,
                                  source_production = data_source_production,
                                  map_code = map_code,
                                  unit_name = unit_name,
                                  date_min = date_min,
                                  date_max = date_max,
                                  production_source = production_source,
                                  production_nature = production_nature,
                                  production_unit = production_unit,
                                  local_tz = local_tz,
                                  figsize = figsize,
                                  folder_out = folder_out,
                                  close = close,
                                  )
--------------------------------------------------------------------------------
/scripts/outages/main_animated_availability.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to plot interactively the expected availability
of a given set of production units from different temporal viewpoints.
"""

import pandas as pd
#
from pub_data_visualization import global_var, outages

###############################################################################
# User settings.
data_source_outages = global_var.data_source_outages_rte
map_code = global_var.geography_map_code_france
producer_outages = None
production_source = global_var.production_source_nuclear
unit_name = None
local_tz = global_var.dikt_tz[map_code]
production_dt_min = pd.Timestamp("2019-01-01").tz_localize(local_tz)
production_dt_max = pd.Timestamp("2020-01-01").tz_localize(local_tz)
publication_dt_min = pd.Timestamp("2019-01-01").tz_localize(local_tz)
publication_dt_max = pd.Timestamp("2020-01-01").tz_localize(local_tz)
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Load the outages.
df_outages = outages.load(source = data_source_outages,
                          map_code = map_code,
                          producer = producer_outages,
                          unit_name = unit_name,
                          production_source = production_source,
                          )

### Aggregate the availability programs of the selected units.
dikt_programs, _ = outages.tools.compute_all_programs(df_outages)
df_availability = outages.tools.sum_programs(dikt_programs,
                                             production_dt_min = production_dt_min,
                                             production_dt_max = production_dt_max,
                                             publication_dt_min = publication_dt_min,
                                             publication_dt_max = publication_dt_max,
                                             )

### Interactive plot.
outages.plot.animated_availability(df_availability,
                                   production_dt_min = production_dt_min,
                                   production_dt_max = production_dt_max,
                                   data_source = data_source_outages,
                                   map_code = map_code,
                                   producer = producer_outages,
                                   production_source = production_source,
                                   unit_name = unit_name,
                                   figsize = figsize,
                                   folder_out = folder_out,
                                   close = close,
                                   )
--------------------------------------------------------------------------------
/scripts/outages/main_evolution_mean_availability.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to plot the expected availability of
a given set of production unit during the delivery period
of a given contract.
"""

import pandas as pd
#
from pub_data_visualization import global_tools, global_var, outages

###############################################################################
# Contract defining the delivery period.
contract_delivery_begin_year = 2018
contract_frequency = global_var.contract_frequency_month
contract_delivery_period_index = 10
contract_profile = global_var.contract_profile_base
#
# Outages selection.
# Renamed from 'date_source_outages' : it identifies the DATA source,
# consistently with the other scripts of the repository.
data_source_outages = global_var.data_source_outages_rte
map_code = global_var.geography_map_code_france
producer_outages = None
production_source = None
unit_name = None
date_min = None
date_max = None
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Contract name
contract_name = global_tools.format_contract_name(year = contract_delivery_begin_year,
                                                  frequency = contract_frequency,
                                                  delivery_period = contract_delivery_period_index,
                                                  profile = contract_profile,
                                                  )

### Load
df = outages.load(source = data_source_outages,
                  map_code = map_code,
                  producer = producer_outages,
                  unit_name = unit_name,
                  production_source = production_source,
                  )

### Transform
dikt_programs, _ = outages.tools.compute_all_programs(df)
# Delivery windows of the contract, in the local timezone of the zone.
product_delivery_windows = global_tools.compute_delivery_windows(delivery_begin_year_local = contract_delivery_begin_year,
                                                                 frequency = contract_frequency,
                                                                 delivery_period_index = contract_delivery_period_index,
                                                                 profile = contract_profile,
                                                                 tz_local = global_var.dikt_tz[map_code],
                                                                 )
nb_hours = global_tools.compute_nb_hours(product_delivery_windows)
df_energy_tot = outages.tools.compute_missing_energy(product_delivery_windows,
                                                     dikt_programs,
                                                     )
# Mean missing power over the delivery period (energy / duration).
df_power_tot = df_energy_tot/nb_hours

### Plot
outages.plot.evolution_mean_availability(df_power_tot,
                                         contract_name = contract_name,
                                         nb_hours = nb_hours,
                                         date_min = date_min,
                                         date_max = date_max,
                                         source = data_source_outages,
                                         map_code = map_code,
                                         producer = producer_outages,
                                         production_source = production_source,
                                         unit_name = unit_name,
                                         figsize = figsize,
                                         folder_out = folder_out,
                                         close = close,
                                         )
--------------------------------------------------------------------------------
/scripts/outages/main_expected_program.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw
the expected availability program of
a given set of production units.
"""

import pandas as pd
#
from pub_data_visualization import global_var, outages

###############################################################################
# User settings.
data_source_outages = global_var.data_source_outages_entsoe
map_code = global_var.geography_map_code_france
producer_outages = None
production_source = global_var.production_source_nuclear
unit_name = 'BELLEVILLE 1'
date_min = None
date_max = None
###############################################################################
# Plot settings.
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

### Load the outages.
df_outages = outages.load(source = data_source_outages,
                          map_code = map_code,
                          producer = producer_outages,
                          unit_name = unit_name,
                          production_source = production_source,
                          )

### Compute the program currently expected for the chosen unit.
dikt_programs, _ = outages.tools.compute_all_programs(df_outages)
df_expected_program = outages.tools.cross_section_view(dikt_programs[unit_name])

### Plot the expected program.
outages.plot.expected_program(df_expected_program,
                              date_min = date_min,
                              date_max = date_max,
                              source = data_source_outages,
                              map_code = map_code,
                              producer = producer_outages,
                              production_source = production_source,
                              unit_name = unit_name,
                              figsize = figsize,
                              folder_out = folder_out,
                              close = close,
                              )
--------------------------------------------------------------------------------
/scripts/outages/main_incremental_programs.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the expected availability
of a given set of production units from several temporal viewpoints.
"""

import pandas as pd
#
from pub_data_visualization import global_var, outages

###############################################################################
# User settings.
data_source_outages = global_var.data_source_outages_rte
map_code = global_var.geography_map_code_france
producer_outages = None
production_source = global_var.production_source_nuclear
unit_name = None
tz_map_code = global_var.dikt_tz[map_code]
date_min = pd.Timestamp("2022-01-01 00:00").tz_localize(tz_map_code)
date_max = pd.Timestamp("2024-01-01 00:00").tz_localize(tz_map_code)
# Dates from which the programs are extrapolated.
viewpoint_dt_extrapolate = [pd.Timestamp(ss).tz_localize(tz_map_code)
                            for ss in ['2021-12-01 00:00',
                                       '2022-01-01 00:00',
                                       '2022-02-01 00:00',
                                       ]
                            ]
###############################################################################
# Plot settings.
local_tz = 'CET'
diff_init = False
smoother = 'basic'
figsize = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close = False
###############################################################################

# Load the outages.
df_outages = outages.load(source = data_source_outages,
                          map_code = map_code,
                          producer = producer_outages,
                          unit_name = unit_name,
                          production_source = production_source,
                          )

# Programs seen from each viewpoint date.
dikt_programs, _ = outages.tools.compute_all_programs(df_outages)
df_viewpoints = outages.tools.extrapolate_programs(dikt_programs,
                                                   viewpoint_dt_extrapolate,
                                                   production_dt_min = date_min,
                                                   production_dt_max = date_max,
                                                   )

# Plot the incremental programs.
outages.plot.incremental_programs(df_viewpoints,
                                  diff_init = diff_init,
                                  smoother = smoother,
                                  date_min = date_min,
                                  date_max = date_max,
                                  #
                                  source_outages = data_source_outages,
                                  map_code = map_code,
                                  producer = producer_outages,
                                  production_source = production_source,
                                  unit_name = unit_name,
                                  local_tz = local_tz,
                                  figsize = figsize,
                                  folder_out = folder_out,
                                  close = close,
                                  )
--------------------------------------------------------------------------------
/scripts/outages/main_regression_delays.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the comparison
between the initially announced length of the outages
and the finally observed length of the outages
of a given set of production units.
"""

#
from pub_data_visualization import global_var, outages

###############################################################################
# Selection of the outage publications to load.
data_source_outages = global_var.data_source_outages_rte
map_code            = global_var.geography_map_code_france
producer_outages    = None         # None -> all producers
production_source   = global_var.production_source_nuclear
unit_name           = 'CHINON 2'   # single production unit
date_min            = None         # None -> no lower publication-date bound
date_max            = None         # None -> no upper publication-date bound
###############################################################################
# Plot parameters.
figsize    = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close      = False
###############################################################################

### Load
df = outages.load(
    source             = data_source_outages,
    map_code           = map_code,
    producer           = producer_outages,
    unit_name          = unit_name,
    production_source  = production_source,
    publication_dt_min = date_min,
    publication_dt_max = date_max,
)

### Plot
outages.plot.regression_delays(
    df,
    source            = data_source_outages,
    producer          = producer_outages,
    production_source = production_source,
    unit_name         = unit_name,
    figsize           = figsize,
    close             = close,
    folder_out        = folder_out,
)
48 |
49 |
50 |
--------------------------------------------------------------------------------
/scripts/production/main_power.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the observed production
of a given set of production units.
"""

#
from pub_data_visualization import global_var, production

###############################################################################
# Selection of the production data to load.
data_source_production = global_var.data_source_production_entsoe
map_code               = global_var.geography_map_code_france
production_nature      = global_var.production_nature_observation
production_unit        = global_var.production_power_gw
production_source      = None   # None -> all production sources
unit_name              = None   # None -> all units
date_min               = None   # None -> no lower date bound
date_max               = None   # None -> no upper date bound
###############################################################################
# Plot parameters.
figsize    = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close      = False
###############################################################################

### Load
df = production.load(
    source   = data_source_production,
    map_code = map_code,
    date_min = date_min,
    date_max = date_max,
)

### plot
production.plot.power(
    df,
    map_code          = map_code,
    unit_name         = unit_name,
    production_source = production_source,
    production_nature = production_nature,
    production_unit   = production_unit,
    date_min          = date_min,
    date_max          = date_max,
    source            = data_source_production,
    figsize           = figsize,
    folder_out        = folder_out,
    close             = close,
)
48 |
49 |
50 |
--------------------------------------------------------------------------------
/scripts/transmission/main_draft.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to load
the transmission capacity.
"""

import pandas as pd
#
from pub_data_visualization import global_var, transmission

###############################################################################
# Selection of the transmission data to load (timezone-aware bounds).
data_source_transmission = global_var.data_source_transmission_entsog_nominations
transmission_dt_min      = pd.Timestamp('2019-01-10').tz_localize('CET')
transmission_dt_max      = pd.Timestamp('2019-01-14').tz_localize('CET')
###############################################################################
# Plot parameters (defined but unused: this draft script only loads the data).
figsize    = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close      = False
###############################################################################

### Load
df = transmission.load(
    date_min = transmission_dt_min,
    date_max = transmission_dt_max,
    source   = data_source_transmission,
)
28 |
29 |
--------------------------------------------------------------------------------
/scripts/weather/main_curve.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the weather conditions
observed in a given zone.
"""

# Fix: removed `import numpy as np` — numpy was never used anywhere in this
# script. pandas is kept so the date bounds below can be set to
# pd.Timestamp(...) values, as done in the sibling weather script.
import pandas as pd
#
from pub_data_visualization import global_var, weather

###############################################################################
# Selection of the weather data to load.
data_source_weather = global_var.data_source_weather_meteofrance
weather_nature      = global_var.weather_nature_observation
weather_quantity    = global_var.weather_wind_speed
date_min            = None   # None -> no lower bound (e.g. pd.Timestamp('2016').tz_localize('CET'))
date_max            = None   # None -> no upper bound
###############################################################################
# Plot parameters.
figsize    = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close      = False
###############################################################################

### Load
df = weather.load(source   = data_source_weather,
                  date_min = date_min,
                  date_max = date_max,
                  )

### Plot
weather.plot.curve(df,
                   date_min,
                   date_max,
                   source            = data_source_weather,
                   nature            = weather_nature,
                   physical_quantity = weather_quantity,
                   folder_out        = folder_out,
                   close             = close,
                   figsize           = figsize,
                   )
--------------------------------------------------------------------------------
/scripts/weather/main_distribution.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
The script allows the user to draw the empirical boxplots
of the weather conditions for the different months and different years.
"""

import pandas as pd
#
from pub_data_visualization import global_var, weather

###############################################################################
# Selection of the weather data to load (timezone-aware bounds).
data_source_weather = global_var.data_source_weather_meteofrance
weather_nature      = global_var.weather_nature_observation
physical_quantity   = global_var.weather_temperature_celsius
date_min            = pd.Timestamp('2016').tz_localize('CET')
date_max            = pd.Timestamp('2021').tz_localize('CET')
###############################################################################
# Plot parameters.
figsize    = global_var.figsize_horizontal_ppt
folder_out = global_var.path_plots
close      = False
###############################################################################

### Load
df = weather.load(
    source   = data_source_weather,
    date_min = date_min,
    date_max = date_max,
)

### Plot
weather.plot.distribution(
    df,
    source            = data_source_weather,
    nature            = weather_nature,
    physical_quantity = physical_quantity,
    folder_out        = folder_out,
    close             = close,
    figsize           = figsize,
)
40 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
"""Installation script for the pub-data-visualization package.

Fixes over the previous version:
* `distutils.core.setup` is deprecated (PEP 632) and removed in Python 3.12;
  use `setuptools.setup` instead (setuptools was already imported).
* Read README.md through a context manager with an explicit UTF-8 encoding
  (the bare `open(...).read()` leaked the file handle and used the locale
  encoding).
* Declare `long_description_content_type` so PyPI renders the markdown.
All metadata values are unchanged.
"""

from setuptools import find_packages, setup

# Load the markdown long description once, safely.
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()

setup(
    name                          = 'pub-data-visualization',
    version                       = '0.1.dev0',
    packages                      = find_packages(),
    scripts                       = [],
    author                        = 'CRE',
    author_email                  = 'opensource[at]cre.fr',
    license                       = 'MIT License',
    long_description              = long_description,
    long_description_content_type = 'text/markdown',
    python_requires               = ">= 3.8",
    install_requires              = ['ipython',
                                     'matplotlib',
                                     'numpy',
                                     'pandas',
                                     'requests',
                                     'seaborn',
                                     'termcolor',
                                     ],
)
--------------------------------------------------------------------------------