├── .github
│   └── CODEOWNERS
├── .gitignore
├── LICENSE.md
├── MTB.pfd
├── MTB.pslx
├── README.md
├── case_setup.py
├── config.ini
├── execute_pf.py
├── execute_pscad.py
├── interface.f
├── plotter
│   ├── Case.py
│   ├── Cursor.py
│   ├── Figure.py
│   ├── Result.py
│   ├── config.ini
│   ├── cursorSetup.csv
│   ├── cursor_image_logic.py
│   ├── cursor_type.py
│   ├── down_sampling_method.py
│   ├── figureSetup.csv
│   ├── plot_cursor_functions.py
│   ├── plotter.py
│   ├── process_psout.py
│   ├── psout_to_csv.py
│   ├── read_and_write_functions.py
│   ├── read_configs.py
│   └── sampling_functions.py
├── powerfactory.pyi
├── pscad_update_ums.py
├── recordings
│   ├── DK1_fault1.csv
│   ├── DK1_fault2.meas
│   ├── DK1_frekvens.meas
│   ├── KAS_emt_fault.out
│   └── slow_recovery.csv
├── requirements.txt
├── setup_examples
│   ├── MTB_Setup_Example.pfd
│   ├── MTB_Setup_Example.pswx
│   └── SimpleSolarFarm.pscx
├── sim_interface.py
├── testcases.xlsx
└── utility_scripts
    ├── Check PowerFactory Model.pfd
    ├── Get Component Data.pfd
    ├── Get DSL Checksums.pfd
    ├── Get Relay Data.pfd
    ├── check_powerfactory_model.py
    ├── compare_component_data_with_pscad.py
    ├── get_component_data_from_powerfactory.py
    ├── get_dsl_checksums_from_powerfactory.py
    └── get_relay_data_from_powerfactory.py
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @CVLenerginet @PRWenerginet
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .vscode/
2 | .idea/
3 | *.code-workspace
4 | emt*
5 | rms*
6 | results
7 | export
8 |
9 | # Byte-compiled / optimized / DLL files
10 | __pycache__/
11 | *.py[cod]
12 | *$py.class
13 |
14 | # C extensions
15 | *.so
16 |
17 | # Distribution / packaging
18 | .Python
19 | build/
20 | develop-eggs/
21 | dist/
22 | downloads/
23 | eggs/
24 | .eggs/
25 | lib/
26 | lib64/
27 | parts/
28 | sdist/
29 | var/
30 | wheels/
31 | share/python-wheels/
32 | *.egg-info/
33 | .installed.cfg
34 | *.egg
35 | MANIFEST
36 |
37 | # PyInstaller
38 | # Usually these files are written by a python script from a template
39 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
40 | *.manifest
41 | *.spec
42 |
43 | # Installer logs
44 | pip-log.txt
45 | pip-delete-this-directory.txt
46 |
47 | # Unit test / coverage reports
48 | htmlcov/
49 | .tox/
50 | .nox/
51 | .coverage
52 | .coverage.*
53 | .cache
54 | nosetests.xml
55 | coverage.xml
56 | *.cover
57 | *.py,cover
58 | .hypothesis/
59 | .pytest_cache/
60 | cover/
61 |
62 | # Translations
63 | *.mo
64 | *.pot
65 |
66 | # Django stuff:
67 | *.log
68 | local_settings.py
69 | db.sqlite3
70 | db.sqlite3-journal
71 |
72 | # Flask stuff:
73 | instance/
74 | .webassets-cache
75 |
76 | # Scrapy stuff:
77 | .scrapy
78 |
79 | # Sphinx documentation
80 | docs/_build/
81 |
82 | # PyBuilder
83 | .pybuilder/
84 | target/
85 |
86 | # Jupyter Notebook
87 | .ipynb_checkpoints
88 |
89 | # IPython
90 | profile_default/
91 | ipython_config.py
92 |
93 | # pyenv
94 | # For a library or package, you might want to ignore these files since the code is
95 | # intended to run in multiple environments; otherwise, check them in:
96 | # .python-version
97 |
98 | # pipenv
99 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
100 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
101 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
102 | # install all needed dependencies.
103 | #Pipfile.lock
104 |
105 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
106 | __pypackages__/
107 |
108 | # Celery stuff
109 | celerybeat-schedule
110 | celerybeat.pid
111 |
112 | # SageMath parsed files
113 | *.sage.py
114 |
115 | # Environments
116 | .env
117 | .venv
118 | env/
119 | venv*/
120 | ENV/
121 | env.bak/
122 | venv.bak/
123 |
124 | # Spyder project settings
125 | .spyderproject
126 | .spyproject
127 |
128 | # Rope project settings
129 | .ropeproject
130 |
131 | # mkdocs documentation
132 | /site
133 |
134 | # mypy
135 | .mypy_cache/
136 | .dmypy.json
137 | dmypy.json
138 |
139 | # Pyre type checker
140 | .pyre/
141 |
142 | # pytype static type analyzer
143 | .pytype/
144 |
145 | # Cython debug symbols
146 | cython_debug/
147 |
148 | _junk/
149 |
150 | #powerfactory.pyi
151 | *channels.xlsx
152 | test.py
153 | mhi/
154 | tests/
155 | MTB_*/
156 | E-Tran_V6/
157 | Gantner*
158 | Kasso*
159 | Parametrers List*
160 | Resources/
161 | *_x86/
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | ## Information concerning the use of the Model Test Bench.
2 |
3 | Energinet provides the Model Test Bench (MTB) as a prequalification test bench for production facility and simulation performance, which the facility owner may use in its own simulation environment to pre-test compliance with the applicable technical requirements for simulation models.
4 |
5 | The MTB is provided under the following considerations:
6 | 1) Use of the MTB and its results are indicative and for informational purposes only. Conclusive testing of the performance and compliance of the simulation models developed and supplied by the facility owner may only be performed by Energinet in its own simulation environment.
7 |
8 | 2) The facility owner should always use the latest version of the MTB from Energinet in order to obtain the most accurate results.
9 |
10 | 3) Energinet encourages the facility owner to report issues in the MTB and to propose amendments to Energinet.
11 |
12 | 4) Use of the MTB is at the facility owner's and the user's own risk. Energinet is not responsible for any damage to hardware or software, including simulation models or computers.
13 |
14 | 5) All intellectual property rights, including copyright, to the MTB remain with Energinet in accordance with applicable Danish law. Energinet does, however, grant a worldwide, non-exclusive, royalty-free right to use, modify and distribute the MTB in whole or in part. Energinet may withdraw or modify the right to use, modify and distribute.
15 |
--------------------------------------------------------------------------------
/MTB.pfd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/MTB.pfd
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # MTB (Model Test Bench)
3 |
4 | Connecting new electricity generation and demand facilities to Denmark's public transmission and distribution systems requires thorough grid compliance studies using both RMS/PDT and EMT plant-level models. The Danish TSO, Energinet, mandates that RMS/PDT models be created in [DIgSILENT PowerFactory](https://www.digsilent.de/en/powerfactory.html) and EMT models in [PSCAD](https://www.pscad.com/). Before any facility can begin operation, all electrically significant plants must have their RMS and EMT models reviewed and approved by Energinet to ensure both grid compliance and model quality. Conducting the necessary studies to demonstrate compliance and validate model quality through comparisons of RMS and EMT models can be both time-consuming and prone to error.
5 |
6 | The MTB (Model Test Bench) simplifies and automates this process by enabling seamless grid connection studies across PowerFactory and PSCAD environments. Energinet relies on the MTB for all grid connection studies and strongly recommends its use to all connecting parties. By using the MTB, developers can conduct studies under the exact same conditions as Energinet, ensuring they achieve the same results that Energinet will evaluate.
7 |
8 | The workflow is simple:
9 |
10 | 1. **Define the Required Studies** in the provided Excel sheet. The MTB is preconfigured for the studies required in most grid connection cases in Denmark but is also adaptable to all regions following the EU RfG. Modifying or extending the study case set is straightforward.
11 | 2. **Integrate the PSCAD MTB Component** into the plant's PSCAD model.
12 | 3. **Integrate the PowerFactory MTB Component** into the plant's PowerFactory model.
13 | 4. **Execute Simulations** using the MTB Python scripts (see the configuration sketch after this list).
14 | 5. **Visualize the Results** with the included plotter tool.
15 |
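The execution scripts in step 4 are configured through `config.ini` in the repository root. The snippet below is an abridged sketch of the shipped defaults; see the full `config.ini` for all options:

```ini
[config]
Casesheet path = testcases.xlsx
;Path to export result files
Export folder = export
;PSCAD volley size
Volley = 16
Parallel = True
```
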
16 | For the latest release notes, please visit the [Releases page](https://github.com/Energinet-AIG/MTB/releases). Learn more about the regulations for grid connection of new facilities in Denmark: [Danish](https://energinet.dk/regler/el/nettilslutning) or [English](https://en.energinet.dk/electricity/rules-and-regulations/regulations-for-new-facilities).
17 |
18 | 
19 | *Example comparative study between RMS (red) and EMT (blue) models.*
20 | ## Getting Started
21 |
22 | To start using the MTB, refer to the Quickstart Guides available on the [MTB wiki Home page](https://github.com/Energinet-AIG/MTB/wiki) on GitHub. These guides provide instructions on using the Casesheet, PowerFactory, PSCAD, and the plotter tool.
23 |
24 | ## Requirements
25 |
26 | To install all necessary dependencies, run:
27 |
28 | ```bash
29 | pip install -r requirements.txt
30 | ```
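
To keep the MTB dependencies separate from your system Python, the requirements can also be installed into a virtual environment first. A minimal sketch, assuming a Windows machine with Python on the PATH:

```bash
# optional: create and activate an isolated environment before installing
python -m venv .venv
.venv\Scripts\activate      # on Linux/macOS use: source .venv/bin/activate
pip install -r requirements.txt
```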
31 |
32 | ### Tested Environments
33 |
34 | - **PowerFactory**: Tested on version 2024 SP4 with Python versions >= 3.8.8.
35 | - **PSCAD**: Tested on version 5.0.2.0 with Python 3.7.2 (embedded Python). Compatibility is guaranteed only with Intel Fortran Compilers.
36 |
37 | ## Contributing
38 |
39 | We welcome contributions! To contribute, please file an issue via the MTB [Issues tab](https://github.com/Energinet-AIG/MTB/issues). You can report bugs, request features, or suggest improvements. Before submitting, please check for any known issues.
40 |
41 | ## Contact
42 |
43 | For inquiries, please contact the Energinet simulation model team: simuleringsmodeller@energinet.dk
44 |
--------------------------------------------------------------------------------
/case_setup.py:
--------------------------------------------------------------------------------
1 | '''
2 | Contains the specific setup for the testbench, connecting the waveforms to the PSCAD and PowerFactory interfaces.
3 | '''
4 | from __future__ import annotations
5 | from typing import Union, Tuple, List, Optional
6 | import pandas as pd
7 | import sim_interface as si
8 | from math import isnan, sqrt
9 | from warnings import warn
10 |
11 | FAULT_TYPES = {
12 | '3p fault' : 7.0,
13 | '2p-g fault' : 5.0,
14 | '2p fault' : 3.0,
15 | '1p fault' : 1.0,
16 | '3p fault (ohm)' : 8.0,
17 | '2p-g fault (ohm)' : 6.0,
18 | '2p fault (ohm)' : 4.0,
19 | '1p fault (ohm)' : 2.0
20 | }
21 |
22 | QMODES = {
23 | 'q': 0,
24 | 'q(u)': 1,
25 | 'pf': 2,
26 | 'qmode3': 3,
27 | 'qmode4': 4,
28 | 'qmode5': 5,
29 | 'qmode6': 6,
30 | }
31 |
32 | PMODES = {
33 | 'no p(f)': 0,
34 | 'lfsm': 1,
35 | 'fsm': 2,
36 | 'lfsm+fsm': 3,
37 | 'pmode4': 4,
38 | 'pmode5': 5,
39 | 'pmode6': 6,
40 | 'pmode7': 7
41 | }
42 |
43 | class PlantSettings:
44 | def __init__(self, path : str) -> None:
45 | df : pd.DataFrame = pd.read_excel(path, sheet_name='Settings', header=None) # type: ignore
46 |
47 | df.set_index(0, inplace = True) # type: ignore
48 | inputs : pd.Series[Union[str, float]] = df.iloc[1:, 0]
49 |
50 | self.Casegroup = str(inputs['Casegroup'])
51 | self.Run_custom_cases = bool(inputs['Run custom cases'])
52 | self.Projectname = str(inputs['Projectname']).replace(' ', '_')
53 | self.Pn = float(inputs['Pn'])
54 | self.Uc = float(inputs['Uc'])
55 | self.Un = float(inputs['Un'])
56 | self.Area = str(inputs['Area'])
57 | self.SCR_min = float(inputs['SCR min'])
58 | self.SCR_tuning = float(inputs['SCR tuning'])
59 | self.SCR_max = float(inputs['SCR max'])
60 | self.V_droop = float(inputs['V droop'])
61 | self.XR_SCR_min = float(inputs['X/R SCR min'])
62 | self.XR_SCR_tuning = float(inputs['X/R SCR tuning'])
63 | self.XR_SCR_max = float(inputs['X/R SCR max'])
64 | self.R0 = float(inputs['R0'])
65 | self.X0 = float(inputs['X0'])
66 | self.Default_Q_mode = str(inputs['Default Q mode'])
67 | self.PSCAD_Timestep = float(inputs['PSCAD Timestep'])
68 | self.PSCAD_init_time = float(inputs['PSCAD Initialization time'])
69 | self.PF_flat_time = float(inputs['PF flat time'])
70 | self.PF_variable_step = bool(inputs['PF variable step'])
71 | self.PF_enforced_sync = bool(inputs['PF enforced sync.'])
72 | self.PF_force_asymmetrical_sim = bool(inputs['PF force asymmetrical sim.'])
73 | self.PF_enforce_P_limits_in_LDF = bool(inputs['PF enforce P limits in LDF'])
74 | self.PF_enforce_Q_limits_in_LDF = bool(inputs['PF enforce Q limits in LDF'])
75 |
76 | class Case:
77 | def __init__(self, case: 'pd.Series[Union[str, int, float, bool]]') -> None:
78 | self.rank: int = int(case['Rank'])
79 | self.RMS: bool = bool(case['RMS'])
80 | self.EMT: bool = bool(case['EMT'])
81 | self.Name: str = str(case['Name'])
82 | self.U0: float = float(case['U0'])
83 | self.P0: float = float(case['P0'])
84 | self.Pmode: str = str(case['Pmode'])
85 | self.Qmode: str = str(case['Qmode'])
86 | self.Qref0: float = float(case['Qref0'])
87 | self.SCR0: float = float(case['SCR0'])
88 | self.XR0: float = float(case['XR0'])
89 | self.Simulationtime: float = float(case['Simulationtime'])
90 | self.Events : List[Tuple[str, float, Union[float, str], Union[float, str]]] = []
91 |
92 | index : pd.Index[str] = case.index # type: ignore
93 | i = 0
94 | while(True):
95 | typeLabel = f'type.{i}' if i > 0 else 'type'
96 | timeLabel = f'time.{i}' if i > 0 else 'time'
97 | x1Label = f'X1.{i}' if i > 0 else 'X1'
98 | x2Label = f'X2.{i}' if i > 0 else 'X2'
99 |
100 | if typeLabel in index and timeLabel in index and x1Label in index and x2Label in index:
101 | try:
102 | x1value = float(str(case[x1Label]).replace(' ',''))
103 | except ValueError:
104 | x1value = str(case[x1Label])
105 |
106 | try:
107 | x2value = float(str(case[x2Label]).replace(' ',''))
108 | except ValueError:
109 | x2value = str(case[x2Label])
110 |
111 | self.Events.append((str(case[typeLabel]), float(case[timeLabel]), x1value, x2value))
112 | i += 1
113 | else:
114 | break
115 |
116 | def setup(casesheetPath : str, pscad : bool, pfEncapsulation : Optional[si.PFinterface]) -> Tuple[PlantSettings, List[si.Channel], List[Case], int, List[Case]]:
117 | '''
118 | Sets up the simulation channels and cases from the given casesheet. Returns plant settings, channels, cases, max rank and emtCases.
119 | '''
120 | def impedance_uk_pcu(scr : float, xr : float, pn : float, un : float, uc : float) -> Tuple[float, float]:
121 | scr_ = max(scr, 0.001)
122 | pcu = (uc*uc)/(un*un)*pn/sqrt(xr*xr + 1)/scr_ if scr >= 0.0 else 0.0
123 | uk = (uc*uc)/(un*un)/scr_ if scr >= 0.0 else 0.0
124 | return 100.0 * uk, 1000.0 * pcu
125 |
126 | def signal(name : str, pscad : bool = True, defaultConnection : bool = True, measFile : bool = False) -> si.Signal:
127 | newSignal = si.Signal(name, pscad, pfEncapsulation)
128 |
129 | if defaultConnection:
130 | newSignal.addPFsub_S(f'{name}.ElmDsl', 's:x')
131 | newSignal.addPFsub_R(f'{name}.ElmDsl', 'slope')
132 | newSignal.addPFsub_S0(f'{name}.ElmDsl', 'x0')
133 | newSignal.addPFsub_T(f'{name}.ElmDsl', 'mode')
134 | if measFile:
135 | newSignal.setElmFile(f'{name}_meas.ElmFile')
136 |
137 | channels.append(newSignal)
138 | return newSignal
139 |
140 | def constant(name : str, value : float, pscad : bool = True) -> si.Constant:
141 | newConstant = si.Constant(name, value, pscad, pfEncapsulation)
142 | channels.append(newConstant)
143 | return newConstant
144 |
145 | def pfObjRefer(name : str) -> si.PfObjRefer:
146 | newPfObjRefer = si.PfObjRefer(name, pfEncapsulation)
147 | channels.append(newPfObjRefer)
148 | return newPfObjRefer
149 |
150 | def string(name : str) -> si.String:
151 | newString = si.String(name, pfEncapsulation)
152 | channels.append(newString)
153 | return newString
154 |
155 | pf = pfEncapsulation is not None
156 |
157 | channels : List[si.Channel] = []
158 | plantSettings = PlantSettings(casesheetPath)
159 |
160 | si.pf_time_offset = plantSettings.PF_flat_time
161 | si.pscad_time_offset = plantSettings.PSCAD_init_time
162 |
163 | # Voltage source control
164 | mtb_t_vmode = signal('mtb_t_vmode', defaultConnection = False) # only to be used in PSCAD
165 | mtb_s_vref_pu = signal('mtb_s_vref_pu', measFile = True)
166 | mtb_s_vref_pu.addPFsub_S0('vac.ElmVac', 'usetp', lambda _, x : abs(x))
167 | mtb_s_vref_pu.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:5', lambda _, x : abs(x))
168 | mtb_s_vref_pu.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:5', lambda _, x : abs(x))
169 | mtb_s_vref_pu.addPFsub_T('initializer_script.ComDpl', 'IntExpr:4', lambda _, x : abs(x))
170 | mtb_s_vref_pu.addPFsub_T('initializer_qdsl.ElmQdsl', 'initVals:4', lambda _, x : abs(x))
171 |
172 | mtb_s_dvref_pu = signal('mtb_s_dvref_pu')
173 | mtb_s_phref_deg = signal('mtb_s_phref_deg', measFile = True)
174 | mtb_s_phref_deg.addPFsub_S0('vac.ElmVac', 'phisetp')
175 | mtb_s_fref_hz = signal('mtb_s_fref_hz', measFile = True)
176 |
177 | mtb_s_varef_pu = signal('mtb_s_varef_pu', defaultConnection = False)
178 | mtb_s_vbref_pu = signal('mtb_s_vbref_pu', defaultConnection = False)
179 | mtb_s_vcref_pu = signal('mtb_s_vcref_pu', defaultConnection = False)
180 |
181 | # Grid impedance
182 | mtb_s_scr = signal('mtb_s_scr')
183 | mtb_s_scr.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:11')
184 | mtb_s_scr.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:11')
185 |
186 | mtb_s_xr = signal('mtb_s_xr')
187 | mtb_s_xr.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:12')
188 | mtb_s_xr.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:12')
189 |
190 | ldf_t_uk = signal('ldf_t_uk', pscad = False, defaultConnection = False)
191 | ldf_t_uk.addPFsub_S0('z.ElmSind', 'uk')
192 | ldf_t_pcu_kw = signal('ldf_t_pcu_kw', pscad = False, defaultConnection = False)
193 | ldf_t_pcu_kw.addPFsub_S0('z.ElmSind', 'Pcu')
194 |
195 | # Zero sequence impedance
196 | mtb_t_r0_ohm = signal('mtb_t_r0_ohm', defaultConnection = False)
197 | mtb_t_r0_ohm.addPFsub_S0('vac.ElmVac', 'R0')
198 | mtb_t_r0_ohm.addPFsub_S0('fault_ctrl.ElmDsl', 'r0')
199 |
200 | mtb_t_x0_ohm = signal('mtb_t_x0_ohm', defaultConnection = False)
201 | mtb_t_x0_ohm.addPFsub_S0('vac.ElmVac', 'X0')
202 | mtb_t_x0_ohm.addPFsub_S0('fault_ctrl.ElmDsl', 'x0')
203 |
204 | # Standard plant references and outputs
205 | mtb_s_pref_pu = signal('mtb_s_pref_pu', measFile = True)
206 | mtb_s_pref_pu.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:6')
207 | mtb_s_pref_pu.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:6')
208 | mtb_s_pref_pu.addPFsub_S0('powerf_ctrl.ElmSecctrl', 'psetp', lambda _, x : x * plantSettings.Pn)
209 |
210 | mtb_s_qref = signal('mtb_s_qref', measFile = True)
211 | mtb_s_qref.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:9')
212 | mtb_s_qref.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:9')
213 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'usetp', lambda _, x: 1.0 if x <= 0.0 else x)
214 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'qsetp', lambda _, x : -x * plantSettings.Pn)
215 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'pfsetp', lambda _, x: min(abs(x), 1.0))
216 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'pf_recap', lambda _, x: 0 if x > 0 else 1)
217 |
218 | mtb_s_qref_q_pu = signal('mtb_s_qref_q_pu', measFile = True)
219 | mtb_s_qref_qu_pu = signal('mtb_s_qref_qu_pu', measFile = True)
220 | mtb_s_qref_pf = signal('mtb_s_qref_pf', measFile = True)
221 | mtb_s_qref_3 = signal('mtb_s_qref_3', measFile = True)
222 | mtb_s_qref_4 = signal('mtb_s_qref_4', measFile = True)
223 | mtb_s_qref_5 = signal('mtb_s_qref_5', measFile = True)
224 | mtb_s_qref_6 = signal('mtb_s_qref_6', measFile = True)
225 |
226 | mtb_t_qmode = signal('mtb_t_qmode')
227 | mtb_t_qmode.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:8')
228 | mtb_t_qmode.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:8')
229 |
230 | def stactrl_mode_switch(self : si.Signal, qmode : float):
231 | if qmode == 1:
232 | return 0
233 | elif qmode == 2:
234 | return 2
235 | else:
236 | return 1
237 |
238 | mtb_t_qmode.addPFsub_S0('station_ctrl.ElmStactrl', 'i_ctrl', stactrl_mode_switch)
239 |
240 | mtb_t_pmode = signal('mtb_t_pmode')
241 | mtb_t_pmode.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:7')
242 | mtb_t_pmode.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:7')
243 |
244 | # Constants
245 | mtb_c_pn = constant('mtb_c_pn', plantSettings.Pn)
246 | mtb_c_pn.addPFsub('initializer_script.ComDpl', 'IntExpr:0')
247 | mtb_c_pn.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:0')
248 | mtb_c_pn.addPFsub('measurements.ElmDsl', 'pn')
249 | mtb_c_pn.addPFsub('rx_calc.ElmDsl', 'pn')
250 | mtb_c_pn.addPFsub('z.ElmSind', 'Sn')
251 |
252 | mtb_c_qn = constant('mtb_c_qn', 0.33 * plantSettings.Pn, pscad = False)
253 | mtb_c_qn.addPFsub('station_ctrl.ElmStactrl', 'Srated')
254 |
255 | mtb_c_vbase = constant('mtb_c_vbase', plantSettings.Un)
256 | mtb_c_vbase.addPFsub('initializer_script.ComDpl', 'IntExpr:1')
257 | mtb_c_vbase.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:1')
258 | mtb_c_vbase.addPFsub('measurements.ElmDsl', 'vbase')
259 | mtb_c_vbase.addPFsub('pcc.ElmTerm', 'uknom')
260 | mtb_c_vbase.addPFsub('ext.ElmTerm', 'uknom')
261 | mtb_c_vbase.addPFsub('fault_node.ElmTerm', 'uknom')
262 | mtb_c_vbase.addPFsub('z.ElmSind', 'ucn')
263 | mtb_c_vbase.addPFsub('fz.ElmSind', 'ucn')
264 | mtb_c_vbase.addPFsub('connector.ElmSind', 'ucn')
265 | mtb_c_vbase.addPFsub('vac.ElmVac', 'Unom')
266 |
267 | mtb_c_vc = constant('mtb_c_vc', plantSettings.Uc)
268 | mtb_c_vc.addPFsub('initializer_script.ComDpl', 'IntExpr:2')
269 | mtb_c_vc.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:2')
270 | mtb_c_vc.addPFsub('rx_calc.ElmDsl', 'vc')
271 |
272 | constant('mtb_c_inittime_s', plantSettings.PSCAD_init_time)
273 |
274 | mtb_c_flattime_s = constant('mtb_c_flattime_s', plantSettings.PF_flat_time, pscad = False)
275 | mtb_c_flattime_s.addPFsub('initializer_script.ComDpl', 'IntExpr:3')
276 | mtb_c_flattime_s.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:3')
277 |
278 | mtb_c_vdroop = constant('mtb_c_vdroop', plantSettings.V_droop, pscad = False)
279 | mtb_c_vdroop.addPFsub('initializer_script.ComDpl', 'IntExpr:10')
280 | mtb_c_vdroop.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:10')
281 | mtb_c_vdroop.addPFsub('station_ctrl.ElmStactrl', 'ddroop')
282 |
283 | # Time and rank control
284 | mtb_t_simtimePscad_s = signal('mtb_t_simtimePscad_s', defaultConnection = False)
285 | mtb_t_simtimePf_s = signal('mtb_t_simtimePf_s', defaultConnection = False)
286 | mtb_t_simtimePf_s.addPFsub_S0('$studycase$\\ComSim', 'tstop')
287 |
288 | # From rank to PSCAD task ID
289 | mtb_s_task = signal('mtb_s_task', defaultConnection = False)
290 |
291 | # Fault
292 | flt_s_type = signal('flt_s_type')
293 | flt_s_rf_ohm = signal('flt_s_rf_ohm')
294 | flt_s_resxf = signal('flt_s_resxf')
295 |
296 | mtb_s : List[si.Signal] = []
297 | # Custom signals
298 | mtb_s.append(signal('mtb_s_1', measFile = True))
299 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:13')
300 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:13')
301 | mtb_s.append(signal('mtb_s_2', measFile = True))
302 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:14')
303 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:14')
304 | mtb_s.append(signal('mtb_s_3', measFile = True))
305 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:15')
306 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:15')
307 | mtb_s.append(signal('mtb_s_4', measFile = True))
308 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:16')
309 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:16')
310 | mtb_s.append(signal('mtb_s_5', measFile = True))
311 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:17')
312 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:17')
313 | mtb_s.append(signal('mtb_s_6', measFile = True))
314 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:18')
315 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:18')
316 | mtb_s.append(signal('mtb_s_7', measFile = True))
317 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:19')
318 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:19')
319 | mtb_s.append(signal('mtb_s_8', measFile = True))
320 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:20')
321 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:20')
322 | mtb_s.append(signal('mtb_s_9', measFile = True))
323 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:21')
324 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:21')
325 | mtb_s.append(signal('mtb_s_10', measFile = True))
326 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:22')
327 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:22')
328 |
329 | # Powerfactory references
330 | ldf_r_vcNode = pfObjRefer('mtb_r_vcNode')
331 | ldf_r_vcNode.addPFsub('vac.ElmVac', 'contbar')
332 |
333 |     # References outserv time invariants
334 | ldf_t_refOOS = signal('ldf_t_refOOS', pscad = False, defaultConnection = False)
335 | ldf_t_refOOS.addPFsub_S0('mtb_s_pref_pu.ElmDsl', 'outserv')
336 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_q_pu.ElmDsl', 'outserv')
337 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_qu_pu.ElmDsl', 'outserv')
338 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_pf.ElmDsl', 'outserv')
339 | ldf_t_refOOS.addPFsub_S0('mtb_t_qmode.ElmDsl', 'outserv')
340 | ldf_t_refOOS.addPFsub_S0('mtb_t_pmode.ElmDsl', 'outserv')
341 | ldf_t_refOOS.addPFsub_S0('mtb_s_1.ElmDsl', 'outserv')
342 | ldf_t_refOOS.addPFsub_S0('mtb_s_2.ElmDsl', 'outserv')
343 | ldf_t_refOOS.addPFsub_S0('mtb_s_3.ElmDsl', 'outserv')
344 | ldf_t_refOOS.addPFsub_S0('mtb_s_4.ElmDsl', 'outserv')
345 | ldf_t_refOOS.addPFsub_S0('mtb_s_5.ElmDsl', 'outserv')
346 | ldf_t_refOOS.addPFsub_S0('mtb_s_6.ElmDsl', 'outserv')
347 | ldf_t_refOOS.addPFsub_S0('mtb_s_7.ElmDsl', 'outserv')
348 | ldf_t_refOOS.addPFsub_S0('mtb_s_8.ElmDsl', 'outserv')
349 | ldf_t_refOOS.addPFsub_S0('mtb_s_9.ElmDsl', 'outserv')
350 | ldf_t_refOOS.addPFsub_S0('mtb_s_10.ElmDsl', 'outserv')
351 |
352 | # Calculation settings constants and timeVariants
353 | ldf_c_iopt_lim = constant('ldf_c_iopt_lim', int(plantSettings.PF_enforce_Q_limits_in_LDF), pscad = False)
354 | ldf_c_iopt_lim.addPFsub('$studycase$\\ComLdf', 'iopt_lim')
355 |
356 | ldf_c_iopt_apdist = constant('ldf_c_iopt_apdist', 1, pscad = False)
357 | ldf_c_iopt_apdist.addPFsub('$studycase$\\ComLdf', 'iopt_apdist')
358 |
359 | ldf_c_iPST_at = constant('ldf_c_iPST_at', 1, pscad = False)
360 | ldf_c_iPST_at.addPFsub('$studycase$\\ComLdf', 'iPST_at')
361 |
362 | ldf_c_iopt_at = constant('ldf_c_iopt_at', 1, pscad = False)
363 | ldf_c_iopt_at.addPFsub('$studycase$\\ComLdf', 'iopt_at')
364 |
365 | ldf_c_iopt_asht = constant('ldf_c_iopt_asht', 1, pscad = False)
366 | ldf_c_iopt_asht.addPFsub('$studycase$\\ComLdf', 'iopt_asht')
367 |
368 | ldf_c_iopt_plim = constant('ldf_c_iopt_plim', int(plantSettings.PF_enforce_P_limits_in_LDF), pscad = False)
369 | ldf_c_iopt_plim.addPFsub('$studycase$\\ComLdf', 'iopt_plim')
370 |
371 | ldf_c_iopt_net = signal('ldf_c_iopt_net', pscad = False, defaultConnection = False) # ldf asymmetrical option boolean
372 | ldf_c_iopt_net.addPFsub_S0('$studycase$\\ComLdf', 'iopt_net')
373 |
374 | inc_c_iopt_net = string('inc_c_iopt_net') # inc asymmetrical option
375 | inc_c_iopt_net.addPFsub('$studycase$\\ComInc', 'iopt_net')
376 |
377 | inc_c_iopt_show = constant('inc_c_iopt_show', 1, pscad = False)
378 | inc_c_iopt_show.addPFsub('$studycase$\\ComInc', 'iopt_show')
379 |
380 | inc_c_dtgrd = constant('inc_c_dtgrd', 0.001, pscad = False)
381 | inc_c_dtgrd.addPFsub('$studycase$\\ComInc', 'dtgrd')
382 |
383 | inc_c_dtgrd_max = constant('inc_c_dtgrd_max', 0.01, pscad = False)
384 | inc_c_dtgrd_max.addPFsub('$studycase$\\ComInc', 'dtgrd_max')
385 |
386 | inc_c_tstart = constant('inc_c_tstart', 0, pscad = False)
387 | inc_c_tstart.addPFsub('$studycase$\\ComInc', 'tstart')
388 |
389 | inc_c_iopt_sync = constant('inc_c_iopt_sync', plantSettings.PF_enforced_sync, pscad = False) # enforced sync. option
390 | inc_c_iopt_sync.addPFsub('$studycase$\\ComInc', 'iopt_sync')
391 |
392 | inc_c_syncperiod = constant('inc_c_syncperiod', 0.001, pscad = False)
393 | inc_c_syncperiod.addPFsub('$studycase$\\ComInc', 'syncperiod')
394 |
395 | inc_c_iopt_adapt = constant('inc_c_iopt_adapt', plantSettings.PF_variable_step, pscad = False) # variable step option
396 | inc_c_iopt_adapt.addPFsub('$studycase$\\ComInc', 'iopt_adapt')
397 |
398 | inc_c_iopt_lt = constant('inc_c_iopt_lt', 0, pscad = False)
399 | inc_c_iopt_lt.addPFsub('$studycase$\\ComInc', 'iopt_lt')
400 |
401 | inc_c_errseq = constant('inc_c_errseq', 0.01, pscad = False)
402 | inc_c_errseq.addPFsub('$studycase$\\ComInc', 'errseq')
403 |
404 | inc_c_autocomp = constant('inc_c_autocomp', 0, pscad = False)
405 | inc_c_autocomp.addPFsub('$studycase$\\ComInc', 'automaticCompilation')
406 |
407 | df = pd.read_excel(casesheetPath, sheet_name=f'{plantSettings.Casegroup} cases', header=1) # type: ignore
408 |
409 | maxRank = 0
410 | cases : List[Case] = []
411 | emtCases : List[Case] = []
412 |
413 | for _, case in df.iterrows(): # type: ignore
414 | cases.append(Case(case)) # type: ignore
415 | maxRank = max(maxRank, cases[-1].rank)
416 |
417 | if plantSettings.Run_custom_cases and plantSettings.Casegroup != 'Custom':
418 | dfc = pd.read_excel(casesheetPath, sheet_name='Custom cases', header=1) # type: ignore
419 | for _, case in dfc.iterrows(): # type: ignore
420 | cases.append(Case(case)) # type: ignore
421 | maxRank = max(maxRank, cases[-1].rank)
422 |
423 | for case in cases:
424 | # Simulation time
425 | pf_lonRec = pscad_lonRec = 0.0
426 |
427 | # PF: Default symmetrical simulation
428 | ldf_c_iopt_net[case.rank] = 0
429 | inc_c_iopt_net[case.rank] = 'sym'
430 |
431 | # Voltage source control default setup
432 | mtb_t_vmode[case.rank] = 0
433 | mtb_s_vref_pu[case.rank] = -case.U0
434 | mtb_s_phref_deg[case.rank] = 0.0
435 | mtb_s_dvref_pu[case.rank] = 0.0
436 | mtb_s_fref_hz[case.rank] = 50.0
437 |
438 | mtb_s_varef_pu[case.rank] = 0.0
439 | mtb_s_vbref_pu[case.rank] = 0.0
440 | mtb_s_vcref_pu[case.rank] = 0.0
441 |
442 | mtb_s_scr[case.rank] = case.SCR0
443 | mtb_s_xr[case.rank] = case.XR0
444 |
445 | ldf_t_uk[case.rank], ldf_t_pcu_kw[case.rank] = impedance_uk_pcu(case.SCR0, case.XR0, plantSettings.Pn, plantSettings.Un, plantSettings.Uc)
446 |
447 | mtb_t_r0_ohm[case.rank] = plantSettings.R0
448 | mtb_t_x0_ohm[case.rank] = plantSettings.X0
449 |
450 | # Standard plant references and outputs default setup
451 | mtb_s_pref_pu[case.rank] = case.P0
452 |
453 | # Set Qmode
454 | if case.Qmode.lower() == 'default':
455 | case.Qmode = plantSettings.Default_Q_mode
456 |
457 | mtb_t_qmode[case.rank] = QMODES[case.Qmode.lower()]
458 |
459 | mtb_s_qref[case.rank] = case.Qref0
460 | mtb_s_qref_q_pu[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 0 else 0.0
461 | mtb_s_qref_qu_pu[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 1 else 0.0
462 | mtb_s_qref_pf[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 2 else 0.0
463 | mtb_s_qref_3[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 3 else 0.0
464 | mtb_s_qref_4[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 4 else 0.0
465 | mtb_s_qref_5[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 5 else 0.0
466 | mtb_s_qref_6[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 6 else 0.0
467 |
468 | mtb_t_pmode[case.rank] = PMODES[case.Pmode.lower()]
469 |
470 | # Fault signals
471 | flt_s_type[case.rank] = 0.0
472 | flt_s_rf_ohm[case.rank] = 0.0
473 | flt_s_resxf[case.rank] = 0.0
474 |
475 | # Default custom signal values
476 | mtb_s[0][case.rank] = 0.0
477 | mtb_s[1][case.rank] = 0.0
478 | mtb_s[2][case.rank] = 0.0
479 | mtb_s[3][case.rank] = 0.0
480 | mtb_s[4][case.rank] = 0.0
481 | mtb_s[5][case.rank] = 0.0
482 | mtb_s[6][case.rank] = 0.0
483 | mtb_s[7][case.rank] = 0.0
484 | mtb_s[8][case.rank] = 0.0
485 | mtb_s[9][case.rank] = 0.0
486 |
487 | # Default OOS references
488 | ldf_t_refOOS[case.rank] = 0
489 |
490 | # Parse events
491 | for event in case.Events:
492 | eventType = event[0]
493 | eventTime = event[1]
494 | eventX1 = event[2]
495 | eventX2 = event[3]
496 |
497 | if eventType == 'Pref':
498 | assert isinstance(eventX1, float)
499 | assert isinstance(eventX2, float)
500 | mtb_s_pref_pu[case.rank].add(eventTime, eventX1, eventX2)
501 |
502 | elif eventType == 'Qref':
503 | assert isinstance(eventX1, float)
504 | assert isinstance(eventX2, float)
505 | mtb_s_qref[case.rank].add(eventTime, eventX1, eventX2)
506 |
507 | if mtb_t_qmode[case.rank].s0 == 0:
508 | mtb_s_qref_q_pu[case.rank].add(eventTime, eventX1, eventX2)
509 | elif mtb_t_qmode[case.rank].s0 == 1:
510 | mtb_s_qref_qu_pu[case.rank].add(eventTime, eventX1, eventX2)
511 | elif mtb_t_qmode[case.rank].s0 == 2:
512 | mtb_s_qref_pf[case.rank].add(eventTime, eventX1, eventX2)
513 | elif mtb_t_qmode[case.rank].s0 == 3:
514 | mtb_s_qref_3[case.rank].add(eventTime, eventX1, eventX2)
515 | elif mtb_t_qmode[case.rank].s0 == 4:
516 | mtb_s_qref_4[case.rank].add(eventTime, eventX1, eventX2)
517 | elif mtb_t_qmode[case.rank].s0 == 5:
518 | mtb_s_qref_5[case.rank].add(eventTime, eventX1, eventX2)
519 | elif mtb_t_qmode[case.rank].s0 == 6:
520 | mtb_s_qref_6[case.rank].add(eventTime, eventX1, eventX2)
521 | else:
522 | raise ValueError('Invalid Q mode')
523 |
524 | elif eventType == 'Voltage':
525 | assert isinstance(eventX1, float)
526 | assert isinstance(eventX2, float)
527 | mtb_s_vref_pu[case.rank].add(eventTime, eventX1, eventX2)
528 |
529 | elif eventType == 'dVoltage':
530 | assert isinstance(eventX1, float)
531 | assert isinstance(eventX2, float)
532 | mtb_s_dvref_pu[case.rank].add(eventTime, eventX1, eventX2)
533 |
534 | elif eventType == 'Phase':
535 | assert isinstance(eventX1, float)
536 | assert isinstance(eventX2, float)
537 | mtb_s_phref_deg[case.rank].add(eventTime, eventX1, eventX2)
538 |
539 | elif eventType == 'Frequency':
540 | assert isinstance(eventX1, float)
541 | assert isinstance(eventX2, float)
542 | mtb_s_fref_hz[case.rank].add(eventTime, eventX1, eventX2)
543 |
544 | elif eventType == 'SCR':
545 | assert isinstance(eventX1, float)
546 | assert isinstance(eventX2, float)
547 | mtb_s_scr[case.rank].add(eventTime, eventX1, 0.0)
548 | mtb_s_xr[case.rank].add(eventTime, eventX2, 0.0)
549 |
550 | elif eventType.count('fault') > 0 and eventType != 'Clear fault':
551 | assert isinstance(eventX1, float)
552 | assert isinstance(eventX2, float)
553 |
554 | flt_s_type[case.rank].add(eventTime, FAULT_TYPES[eventType], 0.0)
555 | flt_s_type[case.rank].add(eventTime + eventX2, 0.0, 0.0)
556 | flt_s_resxf[case.rank].add(eventTime, eventX1, 0.0)
557 | if FAULT_TYPES[eventType] < 7:
558 | ldf_c_iopt_net[case.rank] = 1
559 | inc_c_iopt_net[case.rank] = 'rst'
560 |
561 | elif eventType == 'Clear fault':
562 | flt_s_type[case.rank].add(eventTime, 0.0, 0.0)
563 |
564 | elif eventType == 'Pref recording':
565 | assert isinstance(eventX1, str)
566 | assert isinstance(eventX2, float)
567 | wf = mtb_s_pref_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
568 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
569 | pf_lonRec = max(wf.pfLen, pf_lonRec)
570 |
571 | elif eventType == 'Qref recording':
572 | assert isinstance(eventX1, str)
573 | assert isinstance(eventX2, float)
574 | wf = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
575 |
576 | mtb_s_qref[case.rank] = wf
577 | mtb_s_qref_q_pu[case.rank] = 0
578 | mtb_s_qref_qu_pu[case.rank] = 0
579 | mtb_s_qref_pf[case.rank] = 0
580 | mtb_s_qref_3[case.rank] = 0
581 | mtb_s_qref_4[case.rank] = 0
582 | mtb_s_qref_5[case.rank] = 0
583 | mtb_s_qref_6[case.rank] = 0
584 |
585 | if mtb_t_qmode[case.rank].s0 == 0:
586 | mtb_s_qref_q_pu[case.rank] = wf
587 | elif mtb_t_qmode[case.rank].s0 == 1:
588 | mtb_s_qref_qu_pu[case.rank] = wf
589 | elif mtb_t_qmode[case.rank].s0 == 2:
590 | mtb_s_qref_pf[case.rank] = wf
591 | elif mtb_t_qmode[case.rank].s0 == 3:
592 | mtb_s_qref_3[case.rank] = wf
593 | elif mtb_t_qmode[case.rank].s0 == 4:
594 | mtb_s_qref_4[case.rank] = wf
595 | elif mtb_t_qmode[case.rank].s0 == 5:
596 | mtb_s_qref_5[case.rank] = wf
597 | elif mtb_t_qmode[case.rank].s0 == 6:
598 | mtb_s_qref_6[case.rank] = wf
599 | else:
600 | raise ValueError('Invalid Q mode')
601 |
602 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
603 | pf_lonRec = max(wf.pfLen, pf_lonRec)
604 |
605 | elif eventType == 'Voltage recording':
606 | assert isinstance(eventX1, str)
607 | assert isinstance(eventX2, float)
608 | if mtb_t_vmode[case.rank].s0 != 2:
609 | mtb_t_vmode[case.rank] = 1
610 | wf = mtb_s_vref_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
611 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
612 | pf_lonRec = max(wf.pfLen, pf_lonRec)
613 |
614 | elif eventType == 'Inst. Voltage recording':
615 | assert isinstance(eventX1, str)
616 | assert isinstance(eventX2, float)
617 | mtb_t_vmode[case.rank] = 2
618 | mtb_s_varef_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=False, pscad=pscad)
619 | mtb_s_vbref_pu[case.rank] = si.Recorded(path=eventX1, column=2, scale=eventX2, pf=False, pscad=pscad)
620 | wf = mtb_s_vcref_pu[case.rank] = si.Recorded(path=eventX1, column=3, scale=eventX2, pf=False, pscad=pscad)
621 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
622 |
623 | elif eventType == 'Phase recording':
624 | assert isinstance(eventX1, str)
625 | assert isinstance(eventX2, float)
626 | wf = mtb_s_phref_deg[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
627 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
628 | pf_lonRec = max(wf.pfLen, pf_lonRec)
629 |
630 | elif eventType == 'Frequency recording':
631 | assert isinstance(eventX1, str)
632 | assert isinstance(eventX2, float)
633 | wf = mtb_s_fref_hz[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
634 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
635 | pf_lonRec = max(wf.pfLen, pf_lonRec)
636 |
637 | elif eventType.lower().startswith('signal'):
638 | eventNr = int(eventType.lower().replace('signal','').replace('recording',''))
639 | customSignal = mtb_s[eventNr - 1]
640 | assert isinstance(customSignal, si.Signal)
641 |
642 | if eventType.lower().endswith('recording'):
643 | assert isinstance(eventX1, str)
644 | assert isinstance(eventX2, float)
645 | wf = customSignal[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad)
646 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec)
647 | pf_lonRec = max(wf.pfLen, pf_lonRec)
648 | else:
649 | assert isinstance(eventX1, float)
650 | assert isinstance(eventX2, float)
651 | customSignal[case.rank].add(eventTime, eventX1, eventX2)
652 |
653 | elif eventType == 'PF disconnect all ref.':
654 | ldf_t_refOOS[case.rank] = 1
655 |
656 | elif eventType == 'PF force asymmetrical':
657 | ldf_c_iopt_net[case.rank] = 1
658 | inc_c_iopt_net[case.rank] = 'rst'
659 |
660 | if isnan(case.Simulationtime) or case.Simulationtime == 0:
661 | mtb_t_simtimePf_s[case.rank] = pf_lonRec
662 | mtb_t_simtimePscad_s[case.rank] = pscad_lonRec
663 |
664 | if pf_lonRec == 0 and case.RMS:
665 | warn(f'Rank: {case.rank}. Powerfactory simulationtime set to 0.0s.')
666 | if pscad_lonRec == 0 and case.EMT:
667 | warn(f'Rank: {case.rank}. PSCAD simulationtime set to 0.0s.')
668 | else:
669 | mtb_t_simtimePscad_s[case.rank] = case.Simulationtime + plantSettings.PSCAD_init_time
670 | mtb_t_simtimePf_s[case.rank] = case.Simulationtime + plantSettings.PF_flat_time
671 |
672 | if not case.EMT:
673 | mtb_t_simtimePscad_s[case.rank] = -1.0
674 | else:
675 | emtCases.append(case)
676 |
677 | if isinstance(mtb_s_vref_pu[case.rank], si.Recorded):
678 | ldf_r_vcNode[case.rank] = ''
679 | else:
680 | ldf_r_vcNode[case.rank] = '$nochange$'
681 |
682 | emtCases.sort(key = lambda x: x.Simulationtime)
683 |
684 | taskId = 1
685 | for emtCase in emtCases:
686 | mtb_s_task[taskId] = emtCase.rank
687 | taskId += 1
688 | mtb_s_task.__pfInterface__ = None
689 | return plantSettings, channels, cases, maxRank, emtCases
--------------------------------------------------------------------------------
/config.ini:
--------------------------------------------------------------------------------
1 | [config]
2 | Casesheet path = testcases.xlsx
3 | ;Optional path to append to the python path
4 | Python path =
5 | ;Path to export result files (relative to execute.py)
6 | Export folder = export
7 | ;PSCAD volley size
8 | Volley = 16
9 | ;Powerfactory parallel task automation. As of 2023 SP5 there is a bug in PF that causes QDSL blocks to be ignored in parallel simulations.
10 | Parallel = True
11 | ;Powerfactory temporary workaround: QDSL controller sometimes fails when not in same grid as calc. relevant statgens.
12 | ;The QDSL controller will be copied to the following grid. Disable by setting to empty string:
13 | QDSL copy grid =
--------------------------------------------------------------------------------
/execute_pf.py:
--------------------------------------------------------------------------------
1 | '''
2 | Executes the power plant model testbench in PowerFactory.
3 | '''
4 | from __future__ import annotations
5 | DEBUG = True
6 | import os
7 | #Ensure right working directory
8 | executePath = os.path.abspath(__file__)
9 | executeFolder = os.path.dirname(executePath)
10 | os.chdir(executeFolder)
11 |
12 | from configparser import ConfigParser
13 |
14 | class readConfig:
15 | def __init__(self) -> None:
16 | self.cp = ConfigParser(allow_no_value=True)
17 | self.cp.read('config.ini')
18 | self.parsedConf = self.cp['config']
19 | self.sheetPath = str(self.parsedConf['Casesheet path'])
20 | self.pythonPath = str(self.parsedConf['Python path'])
21 | self.volley = int(self.parsedConf['Volley'])
22 |         self.parallel = self.parsedConf.getboolean('Parallel')  # parse as boolean; bool(str) would treat 'False' as True
23 | self.exportPath = str(self.parsedConf['Export folder'])
24 | self.QDSLcopyGrid = str(self.parsedConf['QDSL copy grid'])
25 |
26 | config = readConfig()
27 | import sys
28 | sys.path.append(config.pythonPath)
29 |
30 | from typing import Optional, Tuple, List, Union
31 | if getattr(sys, 'gettrace', None) is not None:
32 | sys.path.append('C:\\Program Files\\DIgSILENT\\PowerFactory 2024 SP4\\Python\\3.8')
33 | import powerfactory as pf #type: ignore
34 |
35 | import re
36 | import time
37 | from datetime import datetime
38 | import case_setup as cs
39 | import sim_interface as si
40 |
41 | def script_GetExtObj(script : pf.ComPython, name : str) -> Optional[pf.DataObject]:
42 | '''
43 | Get script external object.
44 | '''
45 | retVal : List[Union[int, pf.DataObject, None]] = script.GetExternalObject(name)
46 | assert isinstance(retVal[1], (pf.DataObject, type(None)))
47 | return retVal[1]
48 |
49 | def script_GetStr(script : pf.ComPython, name : str) -> Optional[str]:
50 | '''
51 | Get script string parameter.
52 | '''
53 | retVal : List[Union[int, str]] = script.GetInputParameterString(name)
54 | if retVal[0] == 0:
55 | assert isinstance(retVal[1], str)
56 | return retVal[1]
57 | else:
58 | return None
59 |
60 | def script_GetDouble(script : pf.ComPython, name : str) -> Optional[float]:
61 | '''
62 | Get script double parameter.
63 | '''
64 | retVal : List[Union[int, float]] = script.GetInputParameterDouble(name)
65 | if retVal[0] == 0:
66 | assert isinstance(retVal[1], float)
67 | return retVal[1]
68 | else:
69 | return None
70 |
71 | def script_GetInt(script : pf.ComPython, name : str) -> Optional[int]:
72 | '''
73 | Get script integer parameter.
74 | '''
75 |     retVal : List[int] = script.GetInputParameterInt(name)
76 | if retVal[0] == 0:
77 | assert isinstance(retVal[1], int)
78 | return retVal[1]
79 | else:
80 | return None
81 |
82 | def connectPF() -> Tuple[pf.Application, pf.IntPrj, pf.ComPython, int]:
83 | '''
84 |     Connects to the PowerFactory application and returns the application, project, this script object and the detected PowerFactory version.
85 | '''
86 | app : Optional[pf.Application] = pf.GetApplicationExt()
87 | if not app:
88 | raise RuntimeError('No connection to powerfactory application')
89 | app.Show()
90 | app.ClearOutputWindow()
91 | app.PrintInfo(f'Powerfactory application connected externally. Executable: {sys.executable}')
92 | app.PrintInfo(f'Imported powerfactory module from {pf.__file__}')
93 |
94 | version : str = pf.__version__
95 | pfVersion = 2000 + int(version.split('.')[0])
96 |     app.PrintInfo(f'Powerfactory version registered: {pfVersion}')
97 |
98 | project : Optional[pf.IntPrj] = app.GetActiveProject() #type: ignore
99 |
100 | if DEBUG:
101 | while project is None:
102 | time.sleep(1)
103 | project = app.GetActiveProject() #type: ignore
104 |
105 | assert project is not None
106 |
107 | networkData = app.GetProjectFolder('netdat')
108 | assert networkData is not None
109 |
110 | thisScript : pf.ComPython = networkData.SearchObject('MTB\\MTB\\execute.ComPython') #type: ignore
111 | assert thisScript is not None
112 |
113 | return app, project, thisScript, pfVersion
114 |
115 | def resetProjectUnits(project : pf.IntPrj) -> None:
116 | '''
117 | Resets the project units to the default units.
118 | '''
119 | SetPrj = project.SearchObject('Settings.SetFold')
120 | if SetPrj:
121 | SetPrj.Delete()
122 |
123 | project.Deactivate()
124 | project.Activate()
125 |
126 | def setupResFiles(app : pf.Application, script : pf.ComPython, root : pf.DataObject):
127 | '''
128 | Setup the result files for the studycase.
129 | '''
130 | elmRes : pf.ElmRes = app.GetFromStudyCase('ElmRes') #type: ignore
131 | assert elmRes is not None
132 |
133 | measurementBlock = root.SearchObject('measurements.ElmDsl')
134 | assert measurementBlock is not None
135 |
136 | elmRes.AddVariable(measurementBlock, 's:Ia_pu')
137 | elmRes.AddVariable(measurementBlock, 's:Ib_pu')
138 | elmRes.AddVariable(measurementBlock, 's:Ic_pu')
139 | elmRes.AddVariable(measurementBlock, 's:Vab_pu')
140 | elmRes.AddVariable(measurementBlock, 's:Vag_pu')
141 | elmRes.AddVariable(measurementBlock, 's:Vbc_pu')
142 | elmRes.AddVariable(measurementBlock, 's:Vbg_pu')
143 | elmRes.AddVariable(measurementBlock, 's:Vca_pu')
144 | elmRes.AddVariable(measurementBlock, 's:Vcg_pu')
145 | elmRes.AddVariable(measurementBlock, 's:f_hz')
146 | elmRes.AddVariable(measurementBlock, 's:neg_Id_pu')
147 | elmRes.AddVariable(measurementBlock, 's:neg_Imag_pu')
148 | elmRes.AddVariable(measurementBlock, 's:neg_Iq_pu')
149 | elmRes.AddVariable(measurementBlock, 's:neg_Vmag_pu')
150 | elmRes.AddVariable(measurementBlock, 's:pos_Id_pu')
151 | elmRes.AddVariable(measurementBlock, 's:pos_Imag_pu')
152 | elmRes.AddVariable(measurementBlock, 's:pos_Iq_pu')
153 | elmRes.AddVariable(measurementBlock, 's:pos_Vmag_pu')
154 | elmRes.AddVariable(measurementBlock, 's:ppoc_pu')
155 | elmRes.AddVariable(measurementBlock, 's:qpoc_pu')
156 |
157 | signals = [
158 | 'mtb_s_pref_pu.ElmDsl',
159 | 'mtb_s_qref.ElmDsl',
160 | 'mtb_s_qref_q_pu.ElmDsl',
161 | 'mtb_s_qref_qu_pu.ElmDsl',
162 | 'mtb_s_qref_pf.ElmDsl',
163 | 'mtb_s_qref_3.ElmDsl',
164 | 'mtb_s_qref_4.ElmDsl',
165 | 'mtb_s_qref_5.ElmDsl',
166 | 'mtb_s_qref_6.ElmDsl',
167 | 'mtb_s_1.ElmDsl',
168 | 'mtb_s_2.ElmDsl',
169 | 'mtb_s_3.ElmDsl',
170 | 'mtb_s_4.ElmDsl',
171 | 'mtb_s_5.ElmDsl',
172 | 'mtb_s_6.ElmDsl',
173 | 'mtb_s_7.ElmDsl',
174 | 'mtb_s_8.ElmDsl',
175 | 'mtb_s_9.ElmDsl',
176 | 'mtb_s_10.ElmDsl'
177 | ]
178 |
179 | for signal in signals:
180 | signalObj = root.SearchObject(signal)
181 | assert signalObj is not None
182 | elmRes.AddVariable(signalObj, 's:yo')
183 |
184 | # Include measurement objects and set alias
185 | for i in range(1, 100):
186 | Meas_obj_n = script_GetExtObj(script, f'Meas_obj_{i}')
187 | if Meas_obj_n is not None:
188 | Meas_obj_n_signals = script_GetStr(script, f'Meas_obj_{i}_signals')
189 | assert Meas_obj_n_signals is not None
190 | Meas_obj_n_signals = Meas_obj_n_signals.split(';')
191 |
192 | for signal in Meas_obj_n_signals:
193 | if signal != '':
194 | elmRes.AddVariable(Meas_obj_n, signal)
195 |
196 | Meas_obj_n_alias = script_GetStr(script, f'Meas_obj_{i}_alias')
197 | assert Meas_obj_n_alias is not None
198 | Meas_obj_n.SetAttribute('for_name', Meas_obj_n_alias)
199 |
200 | def setupExport(app : pf.Application, filename : str):
201 | '''
202 | Setup the export component for the studycase.
203 | '''
204 | comRes : pf.ComRes = app.GetFromStudyCase('ComRes') #type: ignore
205 | elmRes : pf.ElmRes = app.GetFromStudyCase('ElmRes') #type: ignore
206 | assert comRes is not None
207 | assert elmRes is not None
208 |
209 | csvFileName = f'{filename}.csv'
210 | comRes.SetAttribute('pResult', elmRes)
211 | comRes.SetAttribute('iopt_exp', 6)
212 | comRes.SetAttribute('iopt_sep', 0)
213 | comRes.SetAttribute('ciopt_head', 1)
214 | comRes.SetAttribute('iopt_locn', 4)
215 | comRes.SetAttribute('dec_Sep', ',')
216 | comRes.SetAttribute('col_Sep', ';')
217 | comRes.SetAttribute('f_name', csvFileName)
218 |
219 | def setupPlots(app : pf.Application, root : pf.DataObject):
220 | '''
221 | Setup the plots for the studycase.
222 | '''
223 | measurementBlock = root.SearchObject('measurements.ElmDsl')
224 | assert measurementBlock is not None
225 |
226 | board : pf.SetDesktop = app.GetFromStudyCase('SetDesktop') #type: ignore
227 | assert board is not None
228 |
229 | plots : List[pf.GrpPage]= board.GetContents('*.GrpPage',1) #type: ignore
230 |
231 | for p in plots:
232 | p.RemovePage()
233 |
234 | # Create pages
235 | plotPage : pf.GrpPage = board.GetPage('Plot', 1, 'GrpPage') #type: ignore
236 | assert plotPage is not None
237 |
238 | # PQ plot
239 | pqPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('PQ', 1) #type: ignore
240 | assert pqPlot is not None
241 | pqPlotDS : pf.PltDataseries = pqPlot.GetDataSeries() #type: ignore
242 | assert pqPlotDS is not None
243 | pqPlotDS.AddCurve(measurementBlock, 's:ppoc_pu')
244 | pqPlotDS.AddCurve(measurementBlock, 's:qpoc_pu')
245 | pqPlot.DoAutoScale()
246 |
247 | # U plot
248 | uPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('U', 1) #type: ignore
249 | assert uPlot is not None
250 | uPlotDS : pf.PltDataseries = uPlot.GetDataSeries() #type: ignore
251 | assert uPlotDS is not None
252 | uPlotDS.AddCurve(measurementBlock, 's:pos_Vmag_pu')
253 | uPlotDS.AddCurve(measurementBlock, 's:neg_Vmag_pu')
254 | uPlot.DoAutoScale()
255 |
256 | # I plot
257 | iPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('I', 1) #type: ignore
258 | assert iPlot is not None
259 | iPlotDS : pf.PltDataseries = iPlot.GetDataSeries() #type: ignore
260 | assert iPlotDS is not None
261 | iPlotDS.AddCurve(measurementBlock, 's:pos_Id_pu')
262 | iPlotDS.AddCurve(measurementBlock, 's:pos_Iq_pu')
263 | iPlotDS.AddCurve(measurementBlock, 's:neg_Id_pu')
264 | iPlotDS.AddCurve(measurementBlock, 's:neg_Iq_pu')
265 | iPlot.DoAutoScale()
266 |
267 | # F plot
268 | fPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('F', 1) #type: ignore
269 | assert fPlot is not None
270 | fPlotDS : pf.PltDataseries = fPlot.GetDataSeries() #type: ignore
271 | assert fPlotDS is not None
272 | fPlotDS.AddCurve(measurementBlock, 's:f_hz')
273 | fPlot.DoAutoScale()
274 |
275 | app.WriteChangesToDb()
276 |
277 | def addCustomSubscribers(thisScript : pf.ComPython, channels : List[si.Channel]) -> None:
278 | '''
279 | Add custom subscribers to the channels. For example, references applied as parameter events directly to control blocks.
280 | '''
281 | def getChnlByName(name : str) -> si.Channel:
282 | for ch in channels:
283 | if ch.name == name:
284 | return ch
285 | raise RuntimeError(f'Channel {name} not found.')
286 |
287 | custConfStr = script_GetStr(thisScript, 'sub_conf_str')
288 | assert isinstance(custConfStr, str)
289 |
290 | def convertToConfStr(param : str, signal : str) -> str:
291 | sub_obj = script_GetExtObj(thisScript, f'{param}_sub')
292 | sub_attrib = script_GetStr(thisScript, f'{param}_sub_attrib')
293 | assert isinstance(sub_attrib, str)
294 | if sub_obj is not None and sub_attrib != '':
295 | sub_scale = script_GetDouble(thisScript, f'{param}_sub_scale')
296 | assert isinstance(sub_scale, float)
297 | sub_signal = getChnlByName(f'{signal}')
298 | assert isinstance(sub_signal, si.Signal)
299 | return f'\\{sub_obj.GetFullName()}:{sub_attrib}={signal}:S~{sub_scale} * x'
300 | return ''
301 |
302 | pref_conf = convertToConfStr('Pref', 'mtb_s_pref_pu')
303 | qref1_conf = convertToConfStr('Qref_q', 'mtb_s_qref_q_pu')
304 | qref2_conf = convertToConfStr('Qref_qu', 'mtb_s_qref_qu_pu')
305 | qref3_conf = convertToConfStr('Qref_pf', 'mtb_s_qref_pf')
306 | custom1_conf = convertToConfStr('Custom1', 'mtb_s_1')
307 | custom2_conf = convertToConfStr('Custom2', 'mtb_s_2')
308 | custom3_conf = convertToConfStr('Custom3', 'mtb_s_3')
309 |
310 | configs = custConfStr.split(';') + [pref_conf, qref1_conf, qref2_conf, qref3_conf, custom1_conf, custom2_conf, custom3_conf]
311 |
312 | confFilterStr = r"^([^:*?=\",~|\n\r]+):((?:\w:)?\w+(?::\d+)?)=(\w+):(S|s|S0|s0|R|r|T|t|C|c)~(.*)"
313 | confFilter = re.compile(confFilterStr)
314 |
315 | for config in configs:
316 | confFilterMatch = confFilter.match(config)
317 | if confFilterMatch is not None:
318 | obj = confFilterMatch.group(1)
319 | attrib = confFilterMatch.group(2)
320 | sub = confFilterMatch.group(3)
321 | typ = confFilterMatch.group(4)
322 | lamb = confFilterMatch.group(5)
323 |
324 | chnl = getChnlByName(sub)
325 | if isinstance(chnl, si.Signal):
326 | if typ.lower() == 's' or typ.lower() == 'c':
327 | chnl.addPFsub_S(obj, attrib, lambda _,x,l=lamb : eval(l))
328 | elif typ.lower() == 's0':
329 | chnl.addPFsub_S0(obj, attrib, lambda _,x,l=lamb : eval(l)) #Not exactly safe
330 | elif typ.lower() == 'r':
331 | chnl.addPFsub_R(obj, attrib, lambda _,x,l=lamb : eval(l))
332 | elif typ.lower() == 't':
333 | chnl.addPFsub_T(obj, attrib, lambda _,x,l=lamb : eval(l))
334 | elif isinstance(chnl, si.Constant) or isinstance(chnl, si.PfObjRefer) or isinstance(chnl, si.String):
335 | chnl.addPFsub(obj, attrib)
336 |
337 | def main():
338 | # Connect to Powerfactory
339 | app, project, thisScript, pfVersion = connectPF()
340 |
341 | # Check if any studycase is active
342 | currentStudyCase : Optional[pf.IntCase] = app.GetActiveStudyCase() #type: ignore
343 |
344 | if currentStudyCase is None:
345 | raise RuntimeError('Please activate a studycase.')
346 |
347 | studyTime : int = currentStudyCase.GetAttribute('iStudyTime')
348 |
349 | # Get and check for active grids
350 | networkData = app.GetProjectFolder('netdat')
351 | assert networkData is not None
352 | grids : List[pf.ElmNet] = networkData.GetContents('.ElmNet', 1) #type: ignore
353 | activeGrids = list(filter(lambda x : x.IsCalcRelevant(), grids))
354 |
355 | if len(activeGrids) == 0:
356 | raise RuntimeError('No active grids.')
357 |
358 | # Make project backup
359 | project.CreateVersion(f'PRE_MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}')
360 |
361 | resetProjectUnits(project)
362 | currentStudyCase.Consolidate()
363 |
364 | netFolder = app.GetProjectFolder('netmod')
365 | assert netFolder is not None
366 | varFolder = app.GetProjectFolder('scheme')
367 |
368 | # Create variation folder
369 | if varFolder is None:
370 | varFolder = netFolder.CreateObject('IntPrjfolder', 'Variations')
371 | varFolder.SetAttribute('iopt_typ', 'scheme')
372 |
373 | # Create studycase folder
374 | studyCaseFolder = app.GetProjectFolder('study')
375 | if studyCaseFolder is None:
376 | studyCaseFolder = project.CreateObject('IntPrjfolder', 'Study Cases')
377 | studyCaseFolder.SetAttribute('iopt_typ', 'study')
378 |
379 | # Create task automation
380 | taskAuto : pf.ComTasks = studyCaseFolder.CreateObject('ComTasks') #type: ignore
381 | taskAuto.SetAttribute('iEnableParal', int(config.parallel))
382 | taskAuto.SetAttribute('parMethod', 0)
383 | (taskAuto.GetAttribute('parallelSetting')).SetAttribute('procTimeOut', 3600)
384 |
385 | # Find root object
386 | root = thisScript.GetParent()
387 |
388 | # Read and setup cases from sheet
389 | pfInterface = si.PFencapsulation(app, root)
390 | plantSettings, channels, cases, maxRank, ___ = cs.setup(casesheetPath = config.sheetPath,
391 | pscad = False,
392 | pfEncapsulation = pfInterface)
393 |
394 | # Add user channel subscribers
395 | addCustomSubscribers(thisScript, channels)
396 |
397 | #Create export folder if it does not exist
398 | if not os.path.exists(config.exportPath):
399 | os.makedirs(config.exportPath)
400 |
401 | #Creating a datetime stamped subfolder
402 | datetimeFolder = f'MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}'
403 |
404 | #Create the folder for the PowerFactory CSV results
405 | csvFolder = os.path.join(config.exportPath, datetimeFolder)
406 | os.mkdir(csvFolder)
407 |
408 | # Find initializer script object
409 | initScript : pf.ComDpl = root.SearchObject('initializer_script.ComDpl') #type: ignore
410 | assert initScript is not None
411 |
412 | # List of created studycases for later activation
413 | studycases : List[pf.IntCase] = []
414 |
415 | currentStudyCase.Deactivate()
416 |
417 | # Filter cases if Only_setup > 0
418 | onlySetup = script_GetInt(thisScript, 'Only_setup')
419 | assert isinstance(onlySetup, int)
420 |
421 | if onlySetup > 0:
422 | cases = list(filter(lambda x : x.rank == onlySetup, cases))
423 |
424 | app.EchoOff()
425 | for case in cases:
426 | if case.RMS:
427 | # Set-up studycase, variation and balance
428 | caseName = f'{str(case.rank).zfill(len(str(maxRank)))}_{case.Name}'.replace('.', '')
429 | exportName = os.path.join(os.path.abspath(csvFolder), f'{plantSettings.Projectname}_{case.rank}')
430 | newStudycase : pf.IntCase = studyCaseFolder.CreateObject('IntCase', caseName) #type: ignore
431 | assert newStudycase is not None
432 | studycases.append(newStudycase)
433 | newStudycase.Activate()
434 | newStudycase.SetStudyTime(studyTime)
435 |
436 | # Activate the relevant networks
437 | for g in activeGrids:
438 | g.Activate()
439 |
440 | newVar : pf.IntScheme = varFolder.CreateObject('IntScheme', caseName) #type: ignore
441 | assert newVar is not None
442 | newStage : pf.IntSstage = newVar.CreateObject('IntSstage', caseName) #type: ignore
443 | assert newStage is not None
444 | newStage.SetAttribute('e:tAcTime', studyTime)
445 | newVar.Activate()
446 | newStage.Activate()
447 |
448 | si.applyToPowerfactory(channels, case.rank)
449 |
450 | initScript.Execute()
451 |
452 | ### WORKAROUND FOR QDSL FAILING WHEN IN MTB-GRID ###
453 | #TODO: REMOVE WHEN FIXED
454 | if config.QDSLcopyGrid != '':
455 | qdslInitializer = root.SearchObject('initializer_qdsl.ElmQdsl')
456 | assert qdslInitializer is not None
457 | for g in activeGrids:
458 | gridName = g.GetFullName()
459 | assert isinstance(gridName, str)
460 | if gridName.lower().endswith(f'{config.QDSLcopyGrid.lower()}.elmnet'):
461 | g.AddCopy(qdslInitializer) #type: ignore
462 |
463 | qdslInitializer.SetAttribute('outserv', 1)
464 | ### END WORKAROUND ###
465 |
466 | inc = app.GetFromStudyCase('ComInc')
467 | assert inc is not None
468 | sim = app.GetFromStudyCase('ComSim')
469 | assert sim is not None
470 | comRes : pf.ComRes = app.GetFromStudyCase('ComRes') #type: ignore
471 | assert comRes is not None
472 |
473 | taskAuto.AppendStudyCase(newStudycase)
474 | taskAuto.AppendCommand(inc, -1)
475 | taskAuto.AppendCommand(sim, -1)
476 | taskAuto.AppendCommand(comRes, -1)
477 | setupResFiles(app, thisScript, root)
478 | app.WriteChangesToDb()
479 | setupExport(app, exportName)
480 | app.WriteChangesToDb()
481 | newStudycase.Deactivate()
482 | app.WriteChangesToDb()
483 |
484 | app.EchoOn()
485 |
486 | if onlySetup == 0:
487 | taskAuto.Execute()
488 |
489 | if pfVersion >= 2024:
490 | for studycase in studycases:
491 | studycase.Activate()
492 | setupPlots(app, root)
493 | app.WriteChangesToDb()
494 | studycase.Deactivate()
495 | app.WriteChangesToDb()
496 | else:
497 | app.PrintWarn('Plot setup not supported for PowerFactory versions older than 2024.')
498 |
499 | # Create post run backup
500 | postBackup = script_GetInt(thisScript, 'Post_run_backup')
501 | assert isinstance(postBackup, int)
502 | if postBackup > 0:
503 | project.CreateVersion(f'POST_MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}')
504 |
505 | if __name__ == "__main__":
506 | main()
--------------------------------------------------------------------------------
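Note: the custom subscriber lines parsed by the filter in addCustomSubscribers above follow the pattern object:attribute=channel:type~expression. A minimal, self-contained sketch of a matching line (the object, attribute and channel names below are purely illustrative, not taken from the project):

    import re

    confFilterStr = r"^([^:*?=\",~|\n\r]+):((?:\w:)?\w+(?::\d+)?)=(\w+):(S|s|S0|s0|R|r|T|t|C|c)~(.*)"
    # Hypothetical line: object "converter.ElmGenstat", attribute "c:pgini",
    # channel "Pref", subscription type "S", expression body "x*0.5".
    match = re.match(confFilterStr, 'converter.ElmGenstat:c:pgini=Pref:S~x*0.5')
    assert match is not None
    print(match.groups())  # ('converter.ElmGenstat', 'c:pgini', 'Pref', 'S', 'x*0.5')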
/execute_pscad.py:
--------------------------------------------------------------------------------
1 | '''
2 | Executes the Powerplant model testbench in PSCAD.
3 | '''
4 | from __future__ import annotations
5 | import os
6 | import sys
7 |
8 | try:
9 | LOG_FILE = open('execute_pscad.log', 'w')
10 | except:
11 | print('Failed to open log file. Logging to file disabled.')
12 | LOG_FILE = None #type: ignore
13 |
14 | def print(*args): #type: ignore
15 | '''
16 | Overwrites the print function to also write to a log file.
17 | '''
18 | outputString = ''.join(map(str, args)) + '\n' #type: ignore
19 | sys.stdout.write(outputString)
20 | if LOG_FILE:
21 | LOG_FILE.write(outputString)
22 | LOG_FILE.flush()
23 |
24 | if __name__ == '__main__':
25 | print(sys.version)
26 | #Ensure right working directory
27 | executePath = os.path.abspath(__file__)
28 | executeFolder = os.path.dirname(executePath)
29 | os.chdir(executeFolder)
30 | if not executeFolder in sys.path:
31 | sys.path.append(executeFolder)
32 | print(f'CWD: {executeFolder}')
33 | print('sys.path:')
34 | for path in sys.path:
35 | if path != '':
36 | print(f'\t{path}')
37 |
38 | from configparser import ConfigParser
39 |
40 | class readConfig:
41 | def __init__(self) -> None:
42 | self.cp = ConfigParser(allow_no_value=True)
43 | self.cp.read('config.ini')
44 | self.parsedConf = self.cp['config']
45 | self.sheetPath = str(self.parsedConf['Casesheet path'])
46 | self.pythonPath = str(self.parsedConf['Python path'])
47 | self.volley = int(self.parsedConf['Volley'])
48 | self.exportPath = str(self.parsedConf['Export folder'])
49 |
50 | config = readConfig()
51 | sys.path.append(config.pythonPath)
52 |
53 | from datetime import datetime
54 | import shutil
55 | import psutil #type: ignore
56 | from typing import List, Optional
57 | import sim_interface as si
58 | import case_setup as cs
59 | from pscad_update_ums import updateUMs
60 |
61 | try:
62 | import mhi.pscad
63 | except ImportError:
64 | print("Could not import mhi.pscad. Make sure PSCAD Automation Library is installed and available in your Python environment.")
65 | sys.exit(1)
66 |
67 | def connectPSCAD() -> mhi.pscad.PSCAD:
68 | pid = os.getpid()
69 | ports = [con.laddr.port for con in psutil.net_connections() if con.status == psutil.CONN_LISTEN and con.pid == pid] #type: ignore
70 |
71 | if len(ports) == 0: #type: ignore
72 | exit('No PSCAD listening ports found')
73 | elif len(ports) > 1: #type: ignore
74 | print('WARNING: Multiple PSCAD listening ports found. Using the first one.')
75 |
76 | return mhi.pscad.connect(port = ports[0]) #type: ignore
77 |
78 | def outToCsv(srcPath : str, dstPath : str):
79 | """
80 | Converts PSCAD .out file into .csv file
81 | """
82 | with open(srcPath) as out, \
83 | open(dstPath, 'w') as csv:
84 | csv.writelines(','.join(line.split()) +'\n' for line in out)
85 |
86 | def moveFiles(srcPath : str, dstPath : str, types : List[str], suffix : str = '') -> None:
87 | '''
88 | Moves files of the specified types from srcPath to dstPath.
89 | '''
90 | for file in os.listdir(srcPath):
91 | _, typ = os.path.splitext(file)
92 | if typ in types:
93 | shutil.move(os.path.join(srcPath, file), os.path.join(dstPath, file + suffix))
94 |
95 | def taskIdToRank(psoutFolder : str, projectName : str, emtCases : List[cs.Case], rank: Optional[int]):
96 |     '''
97 |     Renames the .psout files in psoutFolder, replacing the task ID in the file name with the case rank.
98 |     '''
99 | for file in os.listdir(psoutFolder):
100 | _, fileName = os.path.split(file)
101 | root, typ = os.path.splitext(fileName)
102 | if rank is None:
103 | if typ == '.psout_taskid' and root.startswith(projectName + '_'):
104 | suffix = root[len(projectName) + 1:]
105 | parts = suffix.split('_')
106 | if len(parts) > 0 and parts[0].isnumeric():
107 | taskId = int(parts[0])
108 | if taskId - 1 < len(emtCases):
109 | parts[0] = str(emtCases[taskId - 1].rank)
110 | newName = projectName + '_' + '_'.join(parts) + typ.replace('_taskid', '')
111 | print(f'Renaming {fileName} to {newName}')
112 | os.rename(os.path.join(psoutFolder, fileName), os.path.join(psoutFolder, newName))
113 | else:
114 | print(f'WARNING: {fileName} has a task ID that is out of bounds. Ignoring file.')
115 | else:
116 | print(f'WARNING: {fileName} has an invalid task ID. Ignoring file.')
117 | else:
118 | if typ == '.psout_taskid':
119 | newName = f'{projectName}_{rank}.psout'
120 | else:
121 | print(f'WARNING: {fileName} is of unknown type. Ignoring file.')
122 | continue
123 | print(f'Renaming {fileName} to {newName}')
124 | os.rename(os.path.join(psoutFolder, fileName), os.path.join(psoutFolder, newName))
125 |
126 | def cleanUpPsoutFiles(buildPath : str, exportPath : str, projectName : str) -> str:
127 | '''
128 |     Cleans up the build folder by moving .psout files to a time-stamped results folder in the export path.
129 |     Returns the path to the .psout folder.
130 | '''
131 |     # Create the exportPath if required
132 | if not os.path.exists(exportPath):
133 | os.mkdir(exportPath)
134 | else:
135 | for dir in os.listdir(exportPath):
136 | _dir = os.path.join(exportPath, dir)
137 | if os.path.isdir(_dir) and dir.startswith('MTB_'):
138 | if os.listdir(_dir) == []:
139 | shutil.rmtree(_dir)
140 |
141 | #Creating a datetime stamped results subfolder
142 | resultsFolder = f'MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}'
143 |
144 | #Move .psout files away from build folder into results subfolder in the export folder
145 | psoutFolder = os.path.join(exportPath, resultsFolder)
146 | os.mkdir(psoutFolder)
147 | moveFiles(buildPath, psoutFolder, ['.psout'], '_taskid')
148 |
149 | return psoutFolder
150 |
151 | def cleanBuildfolder(buildPath : str):
152 | '''
153 | "Cleans" the build folder by trying to delete it.
154 | '''
155 | try:
156 | shutil.rmtree(buildPath)
157 | except FileNotFoundError:
158 | pass
159 |
160 | def findMTB(pscad : mhi.pscad.PSCAD) -> mhi.pscad.UserCmp:
161 | '''
162 | Finds the MTB block in the project.
163 | '''
164 | projectLst = pscad.projects()
165 | MTBcand : Optional[mhi.pscad.UserCmp] = None
166 | for prjDic in projectLst:
167 | if prjDic['type'].lower() == 'case':
168 | project = pscad.project(prjDic['name'])
169 | MTBs : List[mhi.pscad.UserCmp]= project.find_all(Name_='$MTB_9124$') #type: ignore
170 | if len(MTBs) > 0:
171 | if MTBcand or len(MTBs) > 1:
172 | exit('Multiple MTB blocks found in workspace.')
173 | else:
174 | MTBcand = MTBs[0]
175 |
176 | if not MTBcand:
177 | exit('No MTB block found in workspace.')
178 | return MTBcand
179 |
180 | def addInterfaceFile(project : mhi.pscad.Project):
181 | '''
182 | Adds the interface file to the project.
183 | '''
184 | resList = project.resources()
185 | for res in resList:
186 | if res.path == r'.\interface.f' or res.name == 'interface.f':
187 | return
188 |
189 | print('Adding interface.f to project')
190 | project.create_resource(r'.\interface.f')
191 |
192 | def main():
193 | print()
194 | print('execute_pscad.py started at:', datetime.now().strftime('%Y-%m-%d %H:%M:%S'), '\n')
195 | pscad = connectPSCAD()
196 |
197 | plantSettings, channels, _, _, emtCases = cs.setup(config.sheetPath, pscad = True, pfEncapsulation = None)
198 |
199 | #Print plant settings from casesheet
200 | print('Plant settings:')
201 | for setting in plantSettings.__dict__:
202 | print(f'{setting} : {plantSettings.__dict__[setting]}')
203 | print()
204 |
205 | #Prepare MTB based on execution mode
206 | MTB = findMTB(pscad)
207 | project = pscad.project(MTB.project_name)
208 | caseList = []
209 | for case in emtCases:
210 | caseList.append(case.rank)
211 |
212 | if MTB.parameters()['par_mode'] == 'VOLLEY':
213 | #Output ranks in relation to task
214 | print('---------EXECUTING VOLLEY MODE---------')
215 | print('Rank / Task ID / Casename:')
216 | for case in emtCases:
217 | print(f'{case.rank} / {emtCases.index(case) + 1} / {case.Name}')
218 | singleRank = None
219 | elif MTB.parameters()['par_mode'] == 'MANUAL' and MTB.parameters()['par_manualrank'] in caseList:
220 | #Output rank in relation to task id
221 | singleRank = MTB.parameters()['par_manualrank']
222 | singleName = emtCases[caseList.index(MTB.parameters()['par_manualrank'])].Name
223 | print('---------EXECUTING MANUAL MODE---------')
224 |         print(f'Executing only Rank {singleRank}: {singleName}')
225 | else:
226 | raise ValueError('Invalid rank selected for par_manualrank in MTB block.')
227 |
228 | print()
229 | si.renderFortran('interface.f', channels)
230 |
231 | #Set executed flag
232 | MTB.parameters(executed = 1) #type: ignore
233 |
234 | #Update pgb names for all unit measurement components
235 | updateUMs(pscad)
236 |
237 | #Add interface file to project
238 | addInterfaceFile(project)
239 |
240 | buildFolder : str = project.temp_folder #type: ignore
241 | cleanBuildfolder(buildFolder) #type: ignore
242 |
243 | project.parameters(time_duration = 999, time_step = plantSettings.PSCAD_Timestep, sample_step = '1000') #type: ignore
244 | project.parameters(PlotType = '2', output_filename = f'{plantSettings.Projectname}.psout') #type: ignore
245 | project.parameters(SnapType='0', SnapTime='2', snapshot_filename='pannatest5us.snp') #type: ignore
246 |
247 | pscad.remove_all_simulation_sets()
248 | pmr = pscad.create_simulation_set('MTB')
249 | pmr.add_tasks(MTB.project_name)
250 | project_pmr = pmr.task(MTB.project_name)
251 | project_pmr.parameters(ammunition = len(emtCases) if MTB.parameters()['par_mode'] == 'VOLLEY' else 1 , volley = config.volley, affinity_type = '2') #type: ignore
252 |
253 |     pscad.run_simulation_sets('MTB') #type: ignore # NOTE: as a side effect, this changes the current working directory
254 | os.chdir(executeFolder)
255 |
256 | psoutFolder = cleanUpPsoutFiles(buildFolder, config.exportPath, plantSettings.Projectname)
257 | print()
258 | taskIdToRank(psoutFolder, plantSettings.Projectname, emtCases, singleRank)
259 |
260 |     print('execute_pscad.py finished at: ', datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
261 |
262 | if __name__ == '__main__':
263 | main()
264 |
265 | if LOG_FILE:
266 | LOG_FILE.close()
--------------------------------------------------------------------------------
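As a quick illustration of the .out-to-.csv conversion performed by outToCsv() above: the whole transformation is a whitespace-to-comma rewrite per line. A minimal, standalone sketch (the file names are hypothetical):

    # Each whitespace-separated line of the PSCAD .out file, e.g.
    # "0.001  0.95  1.02", becomes the comma-separated line "0.001,0.95,1.02".
    with open('MyPlant_01.out') as out, open('MyPlant_01.csv', 'w') as csv:
        csv.writelines(','.join(line.split()) + '\n' for line in out)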
/interface.f:
--------------------------------------------------------------------------------
1 | !
--------------------------------------------------------------------------------
/plotter/Case.py:
--------------------------------------------------------------------------------
1 | from typing import List, Tuple, Union
2 | import pandas as pd
3 |
4 | class Case:
5 | def __init__(self, case: 'pd.Series[Union[str, int, float, bool]]') -> None:
6 | self.rank: int = int(case['Rank'])
7 | self.RMS: bool = bool(case['RMS'])
8 | self.EMT: bool = bool(case['EMT'])
9 | self.Name: str = str(case['Name'])
10 | self.U0: float = float(case['U0'])
11 | self.P0: float = float(case['P0'])
12 | self.Pmode: str = str(case['Pmode'])
13 | self.Qmode: str = str(case['Qmode'])
14 | self.Qref0: float = float(case['Qref0'])
15 | self.SCR0: float = float(case['SCR0'])
16 | self.XR0: float = float(case['XR0'])
17 | self.Simulationtime: float = float(case['Simulationtime'])
18 | self.Events : List[Tuple[str, float, Union[float, str], Union[float, str]]] = []
19 |
20 | index : pd.Index[str] = case.index # type: ignore
21 | i = 0
22 | while(True):
23 | typeLabel = f'type.{i}' if i > 0 else 'type'
24 | timeLabel = f'time.{i}' if i > 0 else 'time'
25 | x1Label = f'X1.{i}' if i > 0 else 'X1'
26 | x2Label = f'X2.{i}' if i > 0 else 'X2'
27 |
28 | if typeLabel in index and timeLabel in index and x1Label in index and x2Label in index:
29 | try:
30 | x1value = float(str(case[x1Label]).replace(' ',''))
31 | except ValueError:
32 | x1value = str(case[x1Label])
33 |
34 | try:
35 | x2value = float(str(case[x2Label]).replace(' ',''))
36 | except ValueError:
37 | x2value = str(case[x2Label])
38 |
39 | self.Events.append((str(case[typeLabel]), float(case[timeLabel]), x1value, x2value))
40 | i += 1
41 | else:
42 | break
--------------------------------------------------------------------------------
/plotter/Cursor.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 |
3 |
4 | class Cursor:
5 | def __init__(self,
6 | id: int,
7 | title: str,
8 | cursor_options: List[str],
9 | emt_signals: List[str],
10 | rms_signals: List[str],
11 | time_ranges: List[int]) -> None:
12 | self.id = id
13 | self.title = title
14 | self.cursor_options = cursor_options
15 | self.emt_signals = emt_signals
16 | self.rms_signals = rms_signals
17 | self.time_ranges = time_ranges
--------------------------------------------------------------------------------
/plotter/Figure.py:
--------------------------------------------------------------------------------
1 | from down_sampling_method import DownSamplingMethod
2 | from typing import List
3 |
4 |
5 | class Figure:
6 | def __init__(self,
7 | id: int,
8 | title: str,
9 | units: str,
10 | emt_signal_1: str,
11 | emt_signal_2: str,
12 | emt_signal_3: str,
13 | rms_signal_1: str,
14 | rms_signal_2: str,
15 | rms_signal_3: str,
16 | gradient_threshold: float,
17 | down_sampling_method: DownSamplingMethod,
18 | include_in_case: List[int],
19 | exclude_in_case: List[int]) -> None:
20 | self.id = id
21 | self.title = title
22 | self.units = units
23 | self.emt_signal_1 = emt_signal_1
24 | self.emt_signal_2 = emt_signal_2
25 | self.emt_signal_3 = emt_signal_3
26 | self.rms_signal_1 = rms_signal_1
27 | self.rms_signal_2 = rms_signal_2
28 | self.rms_signal_3 = rms_signal_3
29 | self.gradient_threshold = float(gradient_threshold)
30 | self.down_sampling_method = down_sampling_method
31 | self.include_in_case: List[int] = include_in_case
32 | self.exclude_in_case: List[int] = exclude_in_case
--------------------------------------------------------------------------------
/plotter/Result.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 |
4 | class ResultType(Enum):
5 | RMS = 0 #PowerFactory standard output
6 | EMT_INF = 1 #PSCAD legacy .inf/.csv support
7 | EMT_PSOUT = 2 #PSCAD .psout
8 | EMT_CSV = 3 #PSCAD .psout -> .csv support
9 | EMT_ZIP = 4 #PSCAD .psout -> .zip, .gz, .bz2 and .xz support
10 |
11 |
12 | class Result:
13 | def __init__(self, typ : ResultType, rank : int, projectName : str, bulkname : str, fullpath : str, group : str) -> None:
14 | self.typ = typ
15 | self.rank = rank
16 | self.projectName = projectName
17 | self.bulkname = bulkname
18 | self.fullpath = fullpath
19 | self.group = group
20 | self.shorthand = f'{group}\\{projectName}'
21 |
--------------------------------------------------------------------------------
/plotter/config.ini:
--------------------------------------------------------------------------------
1 | [config]
2 | resultsDir = results
3 | genHTML = True
4 | genImage = True
5 | imageFormat = png
6 | htmlColumns = 1
7 | imageColumns = 3
8 | htmlCursorColumns = 1
9 | imageCursorColumns = 1
10 | threads = 10
11 | pfFlatTime = 0.1
12 | pscadInitTime = 3.5
13 | optionalCasesheet = ..\testcases.xlsx
14 |
15 | [Simulation data paths]
16 | Path1LegendName = ..\export\MTB_26052025142441
17 | Path2LegendName = ..\export\MTB_28052025100949
--------------------------------------------------------------------------------
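The keys under [Simulation data paths] act as legend names and the values as result directories; the plotter iterates them as (legend, path) pairs. A rough, assumed sketch of that mapping using only the standard library (the real parsing lives in read_configs.py, which is not shown here), run from the plotter folder:

    from configparser import ConfigParser

    cp = ConfigParser()
    cp.read('config.ini')
    # Note: ConfigParser lower-cases option names by default.
    simDataDirs = list(cp['Simulation data paths'].items())
    print(simDataDirs)  # e.g. [('path1legendname', '..\\export\\MTB_26052025142441'), ...]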
/plotter/cursor_image_logic.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | import pandas as pd
3 | from plotly.subplots import make_subplots # type: ignore
4 | import plotly.graph_objects as go # type: ignore
5 | from typing import List, Tuple
6 | import plot_cursor_functions
7 | from Result import ResultType
8 | from Cursor import Cursor
9 | from math import ceil
10 | from Result import Result
11 | from read_and_write_functions import loadEMT
12 | from process_psout import getSignals
13 |
14 | def addCursors(htmlPlots: List[go.Figure],
15 | resultList: List[Result],
16 | cursorDict: List[Cursor],
17 | pfFlatTIme: float,
18 | pscadInitTime: float,
19 | rank: int,
20 | nColumns: int,
21 | emtRankSignalnamesList: List):
22 |
23 | cursor_settings = [i for i in cursorDict if i.id == rank]
24 | if len(cursor_settings) == 0:
25 | return list()
26 |
27 | # Initialize subplot positions
28 | fi = -1 # Start index from -1 as it is incremented before use
29 | for cursor_setting in cursor_settings:
30 | # Loop through rank settings
31 | totalRawSigNames = []
32 | time_ranges = getattr(cursor_setting, 'time_ranges')
33 | cursor_options = getattr(cursor_setting, 'cursor_options')
34 | # Increment plot index
35 | fi += 1
36 |
37 | # Select the correct plot
38 | plot = htmlPlots[fi] if nColumns == 1 else htmlPlots[0]
39 |
40 | x = []
41 | y = []
42 | for result in resultList:
43 | signalKey = result.typ.name.lower().split('_')[0]
44 | rawSigNames = getattr(cursor_setting, f'{signalKey}_signals')
45 | totalRawSigNames.extend(rawSigNames)
46 | data = None
47 | if result.typ == ResultType.RMS:
48 | data: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',', header=[0, 1]) # type: ignore
49 | elif result.typ == ResultType.EMT_INF:
50 | data: pd.DataFrame = loadEMT(result.fullpath)
51 | elif result.typ == ResultType.EMT_PSOUT:
52 | data: pd.DataFrame = getSignals(result.fullpath, emtRankSignalnamesList)
53 | elif result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP:
54 | data: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',') # type: ignore
55 | if len(rawSigNames) == 0:
56 | continue
57 | for rawSigName in rawSigNames:
58 | if result.typ == ResultType.RMS:
59 | # Remove hash and split signal name
60 | while rawSigName.startswith('#'):
61 | rawSigName = rawSigName[1:]
62 | splitSigName = rawSigName.split('\\')
63 |
64 | if len(splitSigName) == 2:
65 | sigColumn = ('##' + splitSigName[0], splitSigName[1])
66 | else:
67 | sigColumn = rawSigName
68 | else:
69 | sigColumn = rawSigName
70 |
71 | # Determine the time column and offset based on the type
72 | timeColName = 'time' if result.typ == ResultType.EMT_INF or result.typ == ResultType.EMT_PSOUT or result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP else data.columns[0]
73 | timeoffset = pfFlatTIme if result.typ == ResultType.RMS else pscadInitTime
74 |
75 | if sigColumn in data.columns:
76 | # Get the signal data and time values
77 | x.extend(data[timeColName] - timeoffset) # type: ignore
78 | y.extend(data[sigColumn]) # type: ignore
79 |
80 | # Filter the data based on the time_ranges
81 | if len(y) != 0:
82 | x = pd.Series(x)
83 | y = pd.Series(y)
84 | index_number = fi if nColumns != 1 else 0
85 | plot_cursor_functions.add_text_subplot(plot, x, y, cursor_options, index_number, time_ranges, totalRawSigNames)
86 |
87 | return htmlPlots
88 |
89 |
90 | def setupPlotLayoutCursors(config, ranksCursor: List, htmlPlots: List[go.Figure],
91 | imagePlots: List[go.Figure]):
92 | lst: List[Tuple[int, List[go.Figure]]] = []
93 |
94 | if config.genHTML:
95 | lst.append((config.htmlCursorColumns, htmlPlots))
96 | if config.genImage:
97 | lst.append((config.imageCursorColumns, imagePlots))
98 |
99 | for columnNr, plotList in lst:
100 | if columnNr == 1:
101 | for rankCursor in ranksCursor:
102 | # Prepare cursor data for the table
103 | table = create_cursor_table()
104 |
105 | # Create a figure to contain the table
106 | fig_table = go.Figure(data=[table])
107 | fig_table.update_layout(title=rankCursor.title, height=140*max(len(rankCursor.cursor_options), 1))
108 | plotList.append(fig_table)
109 |
110 | elif columnNr > 1:
111 | num_rows = ceil(len(ranksCursor) / columnNr)
112 | titles = [rankCursor.title for rankCursor in ranksCursor] # Gather titles for each table
113 |
114 | # Create subplots specifically for tables
115 | fig_subplots = make_subplots(rows=num_rows, cols=columnNr,
116 | subplot_titles=titles,
117 | specs=[[{'type': 'table'} for _ in range(columnNr)] for _ in
118 | range(num_rows)]) # Define all as table subplots
119 | height_to_use = 500
120 | for i, rankCursor in enumerate(ranksCursor):
121 | # Prepare cursor data for the table
122 | table = create_cursor_table()
123 |
124 | # Add table to the subplot layout
125 | fig_subplots.add_trace(table, row=i // columnNr + 1, col=i % columnNr + 1)
126 |
127 | # Update the layout of the subplot figure
128 | height_to_use = max(500*len(rankCursor.cursor_options), height_to_use)
129 | fig_subplots.update_layout(height=height_to_use)
130 |
131 | plotList.append(fig_subplots)
132 |
133 |
134 | def create_cursor_table():
135 | cursor_data = [{'type': 'None', 'signals': 'None', 'time_values': 'None', 'value': 'None'}]
136 | # Prepare data for the table, including two additional placeholder columns
137 | table_data = [
138 | [cursor['type'], cursor['signals'], cursor['time_values'], cursor['value']] for cursor in cursor_data
139 | ]
140 | # Create the table with additional columns in the header and cells
141 | table = go.Table(
142 | header=dict(values=["Cursor type", "Signals", "Cursor time points", "Values"],
143 | fill_color='paleturquoise', align='left'),
144 | cells=dict(values=list(zip(*table_data)), fill_color='lavender', align='left')
145 | )
146 | return table
147 |
--------------------------------------------------------------------------------
/plotter/cursor_type.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 | class CursorType(Enum):
4 | MIN_MAX = 1
5 | AVERAGE = 2
6 |
7 | @classmethod
8 | def from_string(cls, string : str):
9 | try:
10 | return cls[string.upper()]
11 | except KeyError:
12 | raise ValueError(f"{string} is not a valid {cls.__name__}")
--------------------------------------------------------------------------------
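A small usage note: from_string() upper-cases its input before the enum lookup, so the cursor type names (presumably those found in cursorSetup.csv) are matched case-insensitively. A tiny check, assuming it is run from the plotter folder:

    from cursor_type import CursorType

    assert CursorType.from_string('min_max') is CursorType.MIN_MAX
    assert CursorType.from_string('Average') is CursorType.AVERAGE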
/plotter/down_sampling_method.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 | class DownSamplingMethod(Enum):
4 | GRADIENT = 1
5 | AMOUNT = 2
6 | NO_DOWN_SAMPLING = 3
7 |
8 | @classmethod
9 | def from_string(cls, string : str):
10 | try:
11 | return cls[string.upper()]
12 | except KeyError:
13 | raise ValueError(f"{string} is not a valid {cls.__name__}")
--------------------------------------------------------------------------------
/plotter/figureSetup.csv:
--------------------------------------------------------------------------------
1 | figure;title;units;emt_signal_1;emt_signal_2;emt_signal_3;rms_signal_1;rms_signal_2;rms_signal_3;down_sampling_method;gradient_threshold;include_in_case;exclude_in_case
2 | 1;Vpp;pu;MTB\meas_Vab_pu;MTB\meas_Vbc_pu;MTB\meas_Vca_pu;meas\s:Vab_pu;meas\s:Vbc_pu;meas\s:Vca_pu;gradient;0.5;;
3 | 2;Vpg;pu;MTB\meas_Vag_pu;MTB\meas_Vbg_pu;MTB\meas_Vcg_pu;meas\s:Vag_pu;meas\s:Vbg_pu;meas\s:Vcg_pu;gradient;0.5;;
4 | 3;Vseq;pu;MTB\fft_pos_Vmag_pu;MTB\fft_neg_Vmag_pu;;meas\s:pos_Vmag_pu;meas\s:neg_Vmag_pu;;gradient;0.5;;
5 | 4;Itotal;pu;MTB\meas_Ia_pu;MTB\meas_Ib_pu;MTB\meas_Ic_pu;meas\s:Ia_pu;meas\s:Ib_pu;meas\s:Ic_pu;gradient;0.5;;
6 | 5;Iactive;pu;MTB\fft_pos_Id_pu;MTB\fft_neg_Id_pu;;meas\s:pos_Id_pu;meas\s:neg_Id_pu;;gradient;0.5;;
7 | 6;Ireactive;pu;MTB\fft_pos_Iq_pu;MTB\fft_neg_Iq_pu;;meas\s:pos_Iq_pu;meas\s:neg_Iq_pu;;gradient;0.5;;
8 | 7;Ppoc;pu;MTB\P_pu_PoC;MTB\mtb_s_pref_pu;;meas\s:ppoc_pu;;;gradient;0.5;;
9 | 8;Qpoc;pu;MTB\Q_pu_PoC;MTB\mtb_s_qref;;meas\s:qpoc_pu;;;gradient;0.5;;
10 | 9;F;Hz;MTB\pll_f_hz;;;meas\s:f_hz;;;gradient;0.5;;
11 | 10;Id_pll;pu;MTB\pll_pos_Id_pu;MTB\pll_neg_Id_pu;;;;;gradient;0.5;;
12 | 11;Iq_pll;pu;MTB\pll_pos_Iq_pu;MTB\pll_neg_Iq_pu;;;;;gradient;0.5;;
13 | 12;Terminal;pu;Unit_1\unit_fft_pos_Id_pu;Unit_1\unit_fft_pos_Iq_pu;Unit_1\unit_fft_pos_Vmag_pu;Unit_1\m:i1P:bus1 in p.u.;Unit_1\m:i1Q:bus1 in p.u.;Unit_1\m:u1:bus1 in p.u.;gradient;0.5;;
14 | 13;Instantaneous Voltage (pg);kV;MTB\meas_Vag_kV;MTB\meas_Vbg_kV;MTB\meas_Vcg_kV;;;;gradient;0.5;1,2,3,4,5,6,7,8,9,10,98;
15 | 14;Instantaneous Current (kA);kA;MTB\meas_Ia_kA;MTB\meas_Ib_kA;MTB\meas_Ic_kA;;;;gradient;0.5;1,2,3,4,5,6,7,8,9,10,98;
16 |
--------------------------------------------------------------------------------
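For orientation, each row of figureSetup.csv carries the constructor arguments of the Figure class shown earlier. A rough sketch of that mapping (this is not the project's readFigureSetup implementation, which lives in read_configs.py; run from the plotter folder):

    import pandas as pd
    from Figure import Figure
    from down_sampling_method import DownSamplingMethod

    row = pd.read_csv('figureSetup.csv', sep=';').iloc[0]
    fig = Figure(id=int(row['figure']),
                 title=str(row['title']),
                 units=str(row['units']),
                 emt_signal_1=str(row['emt_signal_1']),
                 emt_signal_2=str(row['emt_signal_2']),
                 emt_signal_3=str(row['emt_signal_3']),
                 rms_signal_1=str(row['rms_signal_1']),
                 rms_signal_2=str(row['rms_signal_2']),
                 rms_signal_3=str(row['rms_signal_3']),
                 gradient_threshold=float(row['gradient_threshold']),
                 down_sampling_method=DownSamplingMethod.from_string(str(row['down_sampling_method'])),
                 include_in_case=[],   # parsing of the two case-filter columns is omitted in this sketch
                 exclude_in_case=[])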
/plotter/plot_cursor_functions.py:
--------------------------------------------------------------------------------
1 | import plotly.graph_objects as go
2 | from typing import List
3 | from cursor_type import CursorType
4 |
5 |
6 | def min_max_value_text(x, y, time_ranges):
7 | if len(time_ranges) > 0:
8 | mask = (x >= time_ranges[0]) & (x < time_ranges[1]) if len(time_ranges) == 2 else (x >= time_ranges[0])
9 | y = y[mask]
10 | x = x[mask]
11 | # Find the min and max of y
12 | min_y = y.min()
13 | max_y = y.max()
14 |
15 | # Find the corresponding x-values
16 | min_x = x[y.idxmin()] # x-value where y is minimum
17 | max_x = x[y.idxmax()] # x-value where y is maximum
18 |
19 | # Construct the text
20 |     annotation_text = (f"Max: {max_y:.2f} at t = {max_x}<br>"
21 |                        f"Min: {min_y:.2f} at t = {min_x}<br>")
22 | return annotation_text
23 |
24 |
25 | def mean_value_text(x, y, time_ranges):
26 | if len(time_ranges) > 0:
27 | mask = (x >= time_ranges[0]) & (x < time_ranges[1]) if len(time_ranges) == 2 else (x >= time_ranges[0])
28 | y = y[mask]
29 | x = x[mask]
30 | mean_y = sum(y) / len(y)
31 |     annotation_text = f"Mean: {mean_y:.2f}<br>"
32 | return annotation_text
33 |
34 |
35 | def signals_text(rawSigNames):
36 | rawSigNames_text = ""
37 | for i, rawSigName in enumerate(rawSigNames):
38 | if i > 0:
39 | rawSigNames_text += "; "
40 | rawSigNames_text += f"{rawSigName}"
41 | return rawSigNames_text
42 |
43 |
44 | def time_ranges_text(time_ranges):
45 | time_ranges_text = ""
46 | for i in range(len(time_ranges)):
47 | if i > 0:
48 | time_ranges_text += ", "
49 | time_ranges_text += f"t{i}={time_ranges[i]}"
50 | return time_ranges_text
51 |
52 |
53 | # Function to append the text as a scatter trace to the provided figure
54 | def add_text_subplot(fig: go.Figure, x, y, cursor_types: List[CursorType], index_number, time_ranges, rawSigNames):
55 | table_data = fig.data[index_number].cells.values
56 |
57 | # Access the values for the specific cell in the table
58 | # The values are arranged in a way that we can access them based on rowPos and colPos
59 | cursor_type = table_data[0]
60 | signals = table_data[1]
61 | time_values = table_data[2]
62 |     values = table_data[3]  # The fourth column holds the cursor values
63 |
64 | # Append the annotation text to the corresponding value
65 | updated_values = values[:]
66 | updated_signals = signals[:]
67 | updated_time_values = time_values[:]
68 | updated_cursor_type = cursor_type[:]
69 | index = 0
70 | if CursorType.MIN_MAX in cursor_types:
71 | set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index,
72 | rawSigNames, time_ranges, "Min and Max values", min_max_value_text(x, y, time_ranges))
73 | index += 1
74 |
75 | if CursorType.AVERAGE in cursor_types:
76 | set_or_append_value(updated_cursor_type, index, "Average values")
77 | set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index,
78 | rawSigNames, time_ranges, "Average values", mean_value_text(x, y, time_ranges))
79 | index += 1
80 |
81 | # Update the table with the modified values
82 | fig.data[index_number].cells.values = [updated_cursor_type, updated_signals, updated_time_values, updated_values]
83 |
84 | return fig
85 |
86 |
87 | def set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index,
88 | rawSigNames, time_ranges, cursor_type_text, cursor_value_text):
89 | set_or_append_value(updated_cursor_type, index, cursor_type_text)
90 | set_or_append_value(updated_signals, index, signals_text(rawSigNames))
91 | set_or_append_value(updated_time_values, index, time_ranges_text(time_ranges))
92 | set_or_append_value(updated_values, index, cursor_value_text)
93 |
94 |
95 | def set_or_append_value(list_to_update, index, value):
96 | """
97 | Update a list by setting the value at a specific index if within bounds,
98 | or appending the value if the index is out of bounds.
99 |
100 | Args:
101 | list_to_update (list): The list to be updated.
102 | index (int): The index at which the value should be set or appended.
103 | value: The value to be set or appended.
104 | """
105 | if index >= len(list_to_update):
106 | # Append if index is out of bounds
107 | list_to_update.append(value)
108 | else:
109 | # Set the value at the specified index
110 | list_to_update[index] = value
111 |
--------------------------------------------------------------------------------
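To make the time-windowing behaviour above concrete, here is a tiny, self-contained check with invented values (assuming it is run from the plotter folder): with time_ranges = [1.0, 2.0] only samples with 1.0 <= t < 2.0 are considered.

    import pandas as pd
    from plot_cursor_functions import min_max_value_text

    x = pd.Series([0.5, 1.0, 1.5, 2.5])
    y = pd.Series([0.9, 1.2, 0.8, 2.0])
    # Only the samples at t = 1.0 and t = 1.5 fall inside [1.0, 2.0), so the
    # returned text reports a maximum of 1.20 at t = 1.0 and a minimum of 0.80 at t = 1.5.
    print(min_max_value_text(x, y, [1.0, 2.0]))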
/plotter/plotter.py:
--------------------------------------------------------------------------------
1 | '''
2 | Minimal script to plot simulation results from PSCAD and PowerFactory.
3 | '''
4 | from __future__ import annotations
5 | from os import listdir, makedirs
6 | from os.path import join, split, exists
7 | import re
8 | import pandas as pd
9 | from plotly.subplots import make_subplots # type: ignore
10 | import plotly.graph_objects as go # type: ignore
11 | from typing import List, Dict, Union, Tuple, Set
12 | import sampling_functions
13 | from down_sampling_method import DownSamplingMethod
14 | from threading import Thread, Lock
15 | import time
16 | import sys
17 | from math import ceil
18 | from collections import defaultdict
19 | from cursor_image_logic import addCursors, setupPlotLayoutCursors
20 | from read_configs import ReadConfig, readFigureSetup, readCursorSetup
21 | from Figure import Figure
22 | from Result import ResultType, Result
23 | from Case import Case
24 | from Cursor import Cursor
25 | from read_and_write_functions import loadEMT
26 | from process_psout import getAllSignalnames, getCaseSignalnames, getSignals
27 |
28 | try:
29 | LOG_FILE = open('plotter.log', 'w')
30 | except:
31 | print('Failed to open log file. Logging to file disabled.')
32 | LOG_FILE = None # type: ignore
33 |
34 | gLock = Lock()
35 |
36 |
37 | def print(*args): # type: ignore
38 | '''
39 | Overwrites the print function to also write to a log file.
40 | '''
41 | gLock.acquire()
42 | outputString = ''.join(map(str, args)) + '\n' # type: ignore
43 | sys.stdout.write(outputString)
44 | if LOG_FILE:
45 | try:
46 | LOG_FILE.write(outputString)
47 | LOG_FILE.flush()
48 | except:
49 | pass
50 | gLock.release()
51 |
52 |
53 | def idFile(filePath: str) -> Tuple[
54 | Union[ResultType, None], Union[int, None], Union[str, None], Union[str, None], Union[str, None]]:
55 | '''
56 |     Identifies the type (EMT or RMS), root and case id of a given file. If the file is not recognized, a tuple of None values is returned.
57 | '''
58 | path, fileName = split(filePath)
59 |     match = re.match(r'^(\w+?)_([0-9]+)\.(inf|csv|psout|zip|gz|bz2|xz)$', fileName.lower())
60 | if match:
61 | rank = int(match.group(2))
62 | projectName = match.group(1)
63 | bulkName = join(path, match.group(1))
64 | fullpath = filePath
65 | if match.group(3) == 'psout':
66 | fileType = ResultType.EMT_PSOUT
67 | return (fileType, rank, projectName, bulkName, fullpath)
68 | elif match.group(3) == 'zip' or match.group(3) == 'gz' or match.group(3) == 'bz2' or match.group(3) == 'xz':
69 | fileType = ResultType.EMT_ZIP
70 | return (fileType, rank, projectName, bulkName, fullpath)
71 | else:
72 | with open(filePath, 'r') as file:
73 | firstLine = file.readline()
74 | if match.group(3) == 'inf' and firstLine.startswith('PGB(1)'):
75 | fileType = ResultType.EMT_INF
76 | return (fileType, rank, projectName, bulkName, fullpath)
77 | elif match.group(3) == 'csv' and firstLine.startswith('time;'):
78 | fileType = ResultType.EMT_CSV
79 | return (fileType, rank, projectName, bulkName, fullpath)
80 | elif match.group(3) == 'csv':
81 | secondLine = file.readline()
82 | if secondLine.startswith(r'"b:tnow in s"'):
83 | fileType = ResultType.RMS
84 | return (fileType, rank, projectName, bulkName, fullpath)
85 |
86 | return (None, None, None, None, None)
87 |
88 |
89 | def mapResultFiles(config: ReadConfig) -> Dict[int, List[Result]]:
90 | '''
91 | Goes through all files in the given directories and maps them to a dictionary of cases.
92 | '''
93 | files: List[Tuple[str, str]] = list()
94 | for dir_ in config.simDataDirs:
95 | for file_ in listdir(dir_[1]):
96 | files.append((dir_[0], join(dir_[1], file_)))
97 |
98 | results: Dict[int, List[Result]] = dict()
99 |
100 | for file in files:
101 | group = file[0]
102 | fullpath = file[1]
103 | typ, rank, projectName, bulkName, fullpath = idFile(fullpath)
104 |
105 | if typ is None:
106 | continue
107 | assert rank is not None
108 | assert projectName is not None
109 | assert bulkName is not None
110 | assert fullpath is not None
111 |
112 | newResult = Result(typ, rank, projectName, bulkName, fullpath, group)
113 |
114 | if rank in results.keys():
115 | results[rank].append(newResult)
116 | else:
117 | results[rank] = [newResult]
118 |
119 | return results
120 |
121 | def colorMap(results: Dict[int, List[Result]]) -> Dict[str, List[str]]:
122 | '''
123 | Select colors for the given projects. Return a dictionary with the project name as key and a list of colors as value.
124 | '''
125 | colors = ['#e6194B', '#3cb44b', '#ffe119', '#4363d8', '#f58231', '#911eb4', '#42d4f4', '#f032e6', '#bfef45',
126 | '#fabed4', '#469990', '#dcbeff', '#9A6324', '#fffac8', '#800000', '#aaffc3', '#808000', '#ffd8b1',
127 | '#000075', '#a9a9a9', '#000000']
128 |
129 | projects: Set[str] = set()
130 |
131 | for rank in results.keys():
132 | for result in results[rank]:
133 | projects.add(result.shorthand)
134 |
135 | cMap: Dict[str, List[str]] = dict()
136 |
137 | if len(list(projects)) > 2:
138 | i = 0
139 | for p in list(projects):
140 | cMap[p] = [colors[i % len(colors)]] * 3
141 | i += 1
142 | return cMap
143 | else:
144 | i = 0
145 | for p in list(projects):
146 | cMap[p] = colors[i:i + 3]
147 | i += 3
148 | return cMap
149 |
150 |
151 | def addResults(plots: List[go.Figure],
152 | typ: ResultType,
153 | data: pd.DataFrame,
154 | figures: List[Figure],
155 | resultName: str,
156 | file: str, # Only for error messages
157 | colors: Dict[str, List[str]],
158 | nColumns: int,
159 | pfFlatTIme: float,
160 | pscadInitTime: float) -> None:
161 | '''
162 | Add result to plot.
163 | '''
164 |
165 | assert nColumns > 0
166 |
167 | if nColumns > 1:
168 | plotlyFigure = plots[0]
169 | else:
170 | assert len(plots) == len(figures)
171 |
172 | rowPos = 1
173 | colPos = 1
174 | fi = -1
175 | for figure in figures:
176 | fi += 1
177 |
178 | if nColumns == 1:
179 | plotlyFigure = plots[fi]
180 | else:
181 | rowPos = (fi // nColumns) + 1
182 | colPos = (fi % nColumns) + 1
183 |
184 | downsampling_method = figure.down_sampling_method
185 | traces = 0
186 | for sig in range(1, 4):
187 | signalKey = typ.name.lower().split('_')[0]
188 | rawSigName: str = getattr(figure, f'{signalKey}_signal_{sig}')
189 |
190 | if typ == ResultType.RMS:
191 | while rawSigName.startswith('#'):
192 | rawSigName = rawSigName[1:]
193 | splitSigName = rawSigName.split('\\')
194 |
195 | if len(splitSigName) == 2:
196 | sigColumn = ('##' + splitSigName[0], splitSigName[1])
197 | else:
198 | sigColumn = rawSigName
199 | elif typ == ResultType.EMT_INF or typ == ResultType.EMT_CSV or typ == ResultType.EMT_ZIP:
200 | # uses only the signal name - last part of the hierarchical signal name
201 | rawSigName = rawSigName.split('\\')[-1]
202 | sigColumn = rawSigName
203 | elif typ == ResultType.EMT_PSOUT:
204 | # uses the full hierarchical signal name
205 | sigColumn = rawSigName
206 | else:
207 | print(f'File type: {typ} unknown')
208 |
209 |
210 | displayName = f'{resultName}:{rawSigName.split(" ")[0]}'
211 |
212 | timeColName = 'time' if typ == ResultType.EMT_INF or typ == ResultType.EMT_PSOUT or typ == ResultType.EMT_CSV or typ == ResultType.EMT_ZIP else data.columns[0]
213 | timeoffset = pfFlatTIme if typ == ResultType.RMS else pscadInitTime
214 |
215 | if sigColumn in data.columns:
216 | x_value = data[timeColName] - timeoffset # type: ignore
217 | y_value = data[sigColumn] # type: ignore
218 | if downsampling_method == DownSamplingMethod.GRADIENT:
219 | x_value, y_value = sampling_functions.downsample_based_on_gradient(x_value, y_value,
220 | figure.gradient_threshold) # type: ignore
221 | elif downsampling_method == DownSamplingMethod.AMOUNT:
222 | x_value, y_value = sampling_functions.down_sample(x_value, y_value) # type: ignore
223 |
224 | add_scatterplot_for_result(colPos, colors, displayName, nColumns, plotlyFigure, resultName, rowPos,
225 | traces, x_value, y_value)
226 |
227 | # plot_cursor_functions.add_annotations(x_value, y_value, plotlyFigure)
228 | traces += 1
229 | elif sigColumn != '':
230 | print(f'Signal "{rawSigName}" not recognized in resultfile: {file}')
231 | add_scatterplot_for_result(colPos, colors, f'{displayName} (Unknown)', nColumns, plotlyFigure, resultName, rowPos,
232 | traces, None, None)
233 | traces += 1
234 |
235 | update_y_and_x_axis(colPos, figure, nColumns, plotlyFigure, rowPos)
236 |
237 |
238 | def update_y_and_x_axis(colPos, figure, nColumns, plotlyFigure, rowPos):
239 | if nColumns == 1:
240 | yaxisTitle = f'[{figure.units}]'
241 | else:
242 | yaxisTitle = f'{figure.title}[{figure.units}]'
243 | if nColumns == 1:
244 | plotlyFigure.update_xaxes( # type: ignore
245 | title_text='Time[s]'
246 | )
247 | plotlyFigure.update_yaxes( # type: ignore
248 | title_text=yaxisTitle
249 | )
250 | else:
251 | plotlyFigure.update_xaxes( # type: ignore
252 | title_text='Time[s]',
253 | row=rowPos, col=colPos
254 | )
255 | plotlyFigure.update_yaxes( # type: ignore
256 | title_text=yaxisTitle,
257 | row=rowPos, col=colPos
258 | )
259 |
260 |
261 | def add_scatterplot_for_result(colPos, colors, displayName, nColumns, plotlyFigure, resultName, rowPos, traces, x_value,
262 | y_value):
263 | if nColumns == 1:
264 | plotlyFigure.add_trace( # type: ignore
265 | go.Scatter(
266 | x=x_value,
267 | y=y_value,
268 | #line_color=colors[resultName][traces],
269 | name=displayName,
270 | legendgroup=displayName,
271 | showlegend=True
272 | )
273 | )
274 | else:
275 | plotlyFigure.add_trace( # type: ignore
276 | go.Scatter(
277 | x=x_value,
278 | y=y_value,
279 | line_color=colors[resultName][traces],
280 | name=displayName,
281 | legendgroup=resultName,
282 | showlegend=True
283 | ),
284 | row=rowPos, col=colPos
285 | )
286 |
287 |
288 | def drawPlot(rank: int,
289 | resultDict: Dict[int, List[Result]],
290 | figureDict: Dict[int, List[Figure]],
291 | caseDict: Dict[int, str],
292 | colorMap: Dict[str, List[str]],
293 | cursorDict: List[Cursor],
294 | config: ReadConfig,
295 | emtRankSignalnamesList: List):
296 | '''
297 | Draws plots for html and static image export.
298 | '''
299 |
300 | print(f'Drawing plot for rank {rank}.')
301 |
302 | resultList = resultDict.get(rank, [])
303 | rankList = list(resultDict.keys())
304 | rankList.sort()
305 | figureList = figureDict[rank]
306 | ranksCursor = [i for i in cursorDict if i.id == rank]
307 |
308 | if resultList == [] or figureList == []:
309 | return
310 |
311 | figurePath = join(config.resultsDir, str(rank))
312 |
313 | htmlPlots: List[go.Figure] = list()
314 | imagePlots: List[go.Figure] = list()
315 | htmlPlotsCursors: List[go.Figure] = list()
316 | imagePlotsCursors: List[go.Figure] = list()
317 |
318 | columnNr = setupPlotLayout(caseDict, config, figureList, htmlPlots, imagePlots, rank)
319 | if len(ranksCursor) > 0:
320 | setupPlotLayoutCursors(config, ranksCursor, htmlPlotsCursors, imagePlotsCursors)
321 | for result in resultList:
322 | print(result.typ)
323 | if result.typ == ResultType.RMS:
324 | resultData: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',', header=[0, 1]) # type: ignore
325 | elif result.typ == ResultType.EMT_INF:
326 | resultData: pd.DataFrame = loadEMT(result.fullpath)
327 | elif result.typ == ResultType.EMT_PSOUT:
328 | resultData: pd.DataFrame = getSignals(result.fullpath, emtRankSignalnamesList)
329 | elif result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP:
330 | resultData: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',') # type: ignore
331 | else:
332 | continue
333 | if config.genHTML:
334 | addResults(htmlPlots, result.typ, resultData, figureList, result.shorthand, result.fullpath, colorMap,
335 | config.htmlColumns, config.pfFlatTIme, config.pscadInitTime)
336 | if config.genImage:
337 | addResults(imagePlots, result.typ, resultData, figureList, result.shorthand, result.fullpath, colorMap,
338 | config.imageColumns, config.pfFlatTIme, config.pscadInitTime)
339 |
340 | if config.genHTML:
341 | addCursors(htmlPlotsCursors, resultList, cursorDict, config.pfFlatTIme, config.pscadInitTime,
342 | rank, config.htmlCursorColumns, emtRankSignalnamesList)
343 | create_html(htmlPlots, htmlPlotsCursors, figurePath, caseDict[rank] if caseDict is not None else "", rank, config, rankList)
344 | print(f'Exported plot for rank {rank} to {figurePath}.html')
345 |
346 | if config.genImage:
347 | # Cursor plots are not currently supported for image export and commented out
348 | # addCursors(imagePlotsCursors, resultList, cursorDict, config.pfFlatTIme, config.pscadInitTime,
349 | # rank, config.imageCursorColumns)
350 | create_image_plots(columnNr, config, figureList, figurePath, imagePlots)
351 | # create_cursor_plots(config.htmlCursorColumns, config, figurePath, imagePlotsCursors, ranksCursor)
352 | print(f'Exported plot for rank {rank} to {figurePath}.{config.imageFormat}')
353 |
354 | print(f'Plot for rank {rank} done.')
355 |
356 |
357 | def create_image_plots(columnNr, config, figureList, figurePath, imagePlots):
358 | if columnNr == 1:
359 | # Combine all figures into a single plot, same as for nColumns > 1 but no grid needed
360 | combined_plot = make_subplots(rows=len(imagePlots), cols=1,
361 | subplot_titles=[fig.layout.title.text for fig in imagePlots])
362 |
363 | for i, plot in enumerate(imagePlots):
364 | for trace in plot['data']: # Add each trace to the combined plot
365 | combined_plot.add_trace(trace, row=i + 1, col=1)
366 |
367 | # Copy over the x and y axis titles from the original plot
368 | combined_plot.update_xaxes(title_text=plot.layout.xaxis.title.text, row=i + 1, col=1)
369 | combined_plot.update_yaxes(title_text=plot.layout.yaxis.title.text, row=i + 1, col=1)
370 |
371 | # Explicitly set the width and height in the layout
372 | combined_plot.update_layout(
373 | height=500 * len(imagePlots), # Height adjusted based on number of plots
374 | width=2000, # Set the desired width here, adjust as needed
375 | showlegend=True,
376 | )
377 |
378 | # Save the combined plot as a single image
379 | combined_plot.write_image(f'{figurePath}.{config.imageFormat}', height=500 * len(imagePlots), width=2000)
380 |
381 | else:
382 | # Combine all figures into a grid when nColumns > 1
383 | imagePlots[0].update_layout(
384 | height=500 * ceil(len(figureList) / columnNr),
385 | width=500 * config.imageColumns, # Adjust width based on column number
386 | showlegend=True,
387 | )
388 | imagePlots[0].write_image(f'{figurePath}.{config.imageFormat}', height=500 * ceil(len(figureList) / columnNr),
389 | width=500 * config.imageColumns) # type: ignore
390 |
391 |
392 | def create_cursor_plots(columnNr, config, figurePath, imagePlotsCursors, ranksCursor):
393 | # Handle the cursor plots (which are tables)
394 | if len(ranksCursor) > 0:
395 | cursor_path = figurePath + "_cursor"
396 | if columnNr == 1:
397 | # Create a combined plot for tables using the 'table' spec type
398 | combined_cursor_plot = make_subplots(rows=len(imagePlotsCursors), cols=1,
399 | specs=[[{"type": "table"}]] * len(imagePlotsCursors),
400 | # 'table' type for each subplot
401 | subplot_titles=[fig.layout.title.text for fig in imagePlotsCursors])
402 | for i, cursor_plot in enumerate(imagePlotsCursors):
403 | for trace in cursor_plot['data']: # Add each trace (table) to the combined cursor plot
404 | combined_cursor_plot.add_trace(trace, row=i + 1, col=1)
405 |
406 | # Explicitly set width and height in the layout for table plots
407 | combined_cursor_plot.update_layout(
408 | height=500 * len(imagePlotsCursors),
409 | width=600, # Set the desired width for tables
410 | showlegend=False,
411 | )
412 |
413 | # Save the combined table plot as a single image
414 | combined_cursor_plot.write_image(f'{cursor_path}.{config.imageFormat}', height=500 * len(imagePlotsCursors),
415 | width=600)
416 | else:
417 | imagePlotsCursors[0].update_layout(
418 | height=500 * ceil(len(ranksCursor) / columnNr),
419 | width=500 * config.imageColumns, # Adjust width for multiple columns
420 | showlegend=False,
421 | )
422 | imagePlotsCursors[0].write_image(f'{cursor_path}.{config.imageFormat}',
423 | height=500 * ceil(len(ranksCursor) / columnNr),
424 | width=500 * config.imageColumns)
425 |
426 |
427 | def setupPlotLayout(caseDict, config, figureList, htmlPlots, imagePlots, rank):
428 | lst: List[Tuple[int, List[go.Figure]]] = []
429 | if config.genHTML:
430 | lst.append((config.htmlColumns, htmlPlots))
431 | if config.genImage:
432 | lst.append((config.imageColumns, imagePlots))
433 |
434 | for columnNr, plotList in lst:
435 | if columnNr == 1:
436 | for fig in figureList:
437 | # Create a direct Figure instead of subplots when there's only 1 column
438 | plotList.append(go.Figure()) # Normal figure, no subplots
439 | plotList[-1].update_layout(
440 | title=fig.title, # Add the figure title directly
441 | height=500, # Set height for the plot
442 | legend=dict(
443 | orientation="h",
444 | yanchor="top",
445 | y=1.22,
446 | xanchor="left",
447 | x=0.12,
448 | )
449 | )
450 | elif columnNr > 1:
451 | plotList.append(make_subplots(rows=ceil(len(figureList) / columnNr), cols=columnNr))
452 | plotList[-1].update_layout(height=500 * ceil(len(figureList) / columnNr)) # type: ignore
453 | if plotList == imagePlots and caseDict is not None:
454 | plotList[-1].update_layout(title_text=caseDict[rank]) # type: ignore
455 | return columnNr
456 |
457 |
458 | def create_css(resultsDir):
459 |
460 | css_path = join(resultsDir, "mtb.css")
461 |
462 | css_content = r'''body {
463 | font-family: Arial, Helvetica, sans-serif;
464 | }
465 |
466 | .navbar {
467 | overflow: hidden;
468 | background-color: #028B76;
469 | font-family: Arial, Helvetica, sans-serif;
470 | }
471 |
472 | .navbar {
473 | overflow: hidden;
474 | background-color: #028B76;
475 | font-family: Arial, Helvetica, sans-serif;
476 | }
477 |
478 | .navbar a {
479 | float: left;
480 | font-size: 16px;
481 | color: white;
482 | text-align: center;
483 | padding: 14px 16px;
484 | text-decoration: none;
485 | }
486 |
487 | .dropdown {
488 | float: left;
489 | overflow: hidden;
490 | }
491 |
492 | .dropdown .dropbtn {
493 | font-size: 16px;
494 | border: none;
495 | outline: none;
496 | color: white;
497 | padding: 14px 16px;
498 | background-color: inherit;
499 | font-family: inherit;
500 | margin: 0;
501 | }
502 |
503 | .navbar a:hover, .dropdown:hover .dropbtn {
504 | background-color: #ddd;
505 | color: black;
506 | }
507 |
508 | .dropdown-content {
509 | display: none;
510 | position: absolute;
511 | background-color: #f9f9f9;
512 | min-width: 160px;
513 | box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2);
514 | z-index: 1;
515 | }
516 |
517 | .dropdown-content a {
518 | float: none;
519 | color: black;
520 | padding: 12px 16px;
521 | text-decoration: none;
522 | display: block;
523 | text-align: left;
524 | }
525 |
526 | .dropdown-content a:hover {
527 | background-color: #ddd;
528 | }
529 |
530 | .dropdown:hover .dropdown-content {
531 | display: block;'''
532 |
533 | with open(f'{css_path}', 'w') as file:
534 | file.write(css_content)
535 |
536 |
537 | def create_html(plots: List[go.Figure], cursor_plots: List[go.Figure], path: str, title: str, rank: int,
538 | config: ReadConfig, rankList) -> None:
539 |
540 |     source_list = ''
541 |     for group in config.simDataDirs:
542 |         source_list += f'{group[0]} = {group[1]}'
543 |
544 |
545 |     source_list += '