├── .github └── CODEOWNERS ├── .gitignore ├── LICENSE.md ├── MTB.pfd ├── MTB.pslx ├── README.md ├── case_setup.py ├── config.ini ├── execute_pf.py ├── execute_pscad.py ├── interface.f ├── plotter ├── Case.py ├── Cursor.py ├── Figure.py ├── Result.py ├── config.ini ├── cursorSetup.csv ├── cursor_image_logic.py ├── cursor_type.py ├── down_sampling_method.py ├── figureSetup.csv ├── plot_cursor_functions.py ├── plotter.py ├── process_psout.py ├── psout_to_csv.py ├── read_and_write_functions.py ├── read_configs.py └── sampling_functions.py ├── powerfactory.pyi ├── pscad_update_ums.py ├── recordings ├── DK1_fault1.csv ├── DK1_fault2.meas ├── DK1_frekvens.meas ├── KAS_emt_fault.out └── slow_recovery.csv ├── requirements.txt ├── setup_examples ├── MTB_Setup_Example.pfd ├── MTB_Setup_Example.pswx └── SimpleSolarFarm.pscx ├── sim_interface.py ├── testcases.xlsx └── utility_scripts ├── Check PowerFactory Model.pfd ├── Get Component Data.pfd ├── Get DSL Checksums.pfd ├── Get Relay Data.pfd ├── check_powerfactory_model.py ├── compare_component_data_with_pscad.py ├── get_component_data_from_powerfactory.py ├── get_dsl_checksums_from_powerfactory.py └── get_relay_data_from_powerfactory.py /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @CVLenerginet @PRWenerginet 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/ 2 | .idea/ 3 | *.code-workspace 4 | emt* 5 | rms* 6 | results 7 | export 8 | 9 | # Byte-compiled / optimized / DLL files 10 | __pycache__/ 11 | *.py[cod] 12 | *$py.class 13 | 14 | # C extensions 15 | *.so 16 | 17 | # Distribution / packaging 18 | .Python 19 | build/ 20 | develop-eggs/ 21 | dist/ 22 | downloads/ 23 | eggs/ 24 | .eggs/ 25 | lib/ 26 | lib64/ 27 | parts/ 28 | sdist/ 29 | var/ 30 | wheels/ 31 | share/python-wheels/ 32 | *.egg-info/ 33 | .installed.cfg 34 | *.egg 35 | MANIFEST 36 | 37 | # PyInstaller 38 | # Usually these files are written by a python script from a template 39 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 40 | *.manifest 41 | *.spec 42 | 43 | # Installer logs 44 | pip-log.txt 45 | pip-delete-this-directory.txt 46 | 47 | # Unit test / coverage reports 48 | htmlcov/ 49 | .tox/ 50 | .nox/ 51 | .coverage 52 | .coverage.* 53 | .cache 54 | nosetests.xml 55 | coverage.xml 56 | *.cover 57 | *.py,cover 58 | .hypothesis/ 59 | .pytest_cache/ 60 | cover/ 61 | 62 | # Translations 63 | *.mo 64 | *.pot 65 | 66 | # Django stuff: 67 | *.log 68 | local_settings.py 69 | db.sqlite3 70 | db.sqlite3-journal 71 | 72 | # Flask stuff: 73 | instance/ 74 | .webassets-cache 75 | 76 | # Scrapy stuff: 77 | .scrapy 78 | 79 | # Sphinx documentation 80 | docs/_build/ 81 | 82 | # PyBuilder 83 | .pybuilder/ 84 | target/ 85 | 86 | # Jupyter Notebook 87 | .ipynb_checkpoints 88 | 89 | # IPython 90 | profile_default/ 91 | ipython_config.py 92 | 93 | # pyenv 94 | # For a library or package, you might want to ignore these files since the code is 95 | # intended to run in multiple environments; otherwise, check them in: 96 | # .python-version 97 | 98 | # pipenv 99 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 100 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 101 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 102 | # install all needed dependencies. 103 | #Pipfile.lock 104 | 105 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 106 | __pypackages__/ 107 | 108 | # Celery stuff 109 | celerybeat-schedule 110 | celerybeat.pid 111 | 112 | # SageMath parsed files 113 | *.sage.py 114 | 115 | # Environments 116 | .env 117 | .venv 118 | env/ 119 | venv*/ 120 | ENV/ 121 | env.bak/ 122 | venv.bak/ 123 | 124 | # Spyder project settings 125 | .spyderproject 126 | .spyproject 127 | 128 | # Rope project settings 129 | .ropeproject 130 | 131 | # mkdocs documentation 132 | /site 133 | 134 | # mypy 135 | .mypy_cache/ 136 | .dmypy.json 137 | dmypy.json 138 | 139 | # Pyre type checker 140 | .pyre/ 141 | 142 | # pytype static type analyzer 143 | .pytype/ 144 | 145 | # Cython debug symbols 146 | cython_debug/ 147 | 148 | _junk/ 149 | 150 | #powerfactory.pyi 151 | *channels.xlsx 152 | test.py 153 | mhi/ 154 | tests/ 155 | MTB_*/ 156 | E-Tran_V6/ 157 | Gantner* 158 | Kasso* 159 | Parametrers List* 160 | Resources/ 161 | *_x86/ -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | ## Information concerning the use of the Model Test Bench. 2 | 3 | Energinet provides the Model Test Bench (MTB) for the purpose of developing a prequalification test bench for production facility and simulation performance which the facility owner may use in its own simulation environment in order to pre-test compliance with the applicable technical requirements for simulation models. 4 | 5 | The MTB is provided under the following considerations: 6 | 1) Use of the MTB and its results are indicative and for informational purposes only. Energinet may only in its own simulation environment perform conclusive testing, performance and compliance of the simulation models developed and supplied by the facility owner. 7 | 8 | 2) The facility owner should always use the latest version of the MTB from Energinet in order to get the most correct results. 9 | 10 | 3) Energinet encourages the facility owner to report issues in MTB and propose amendments to Energinet. 11 | 12 | 4) Use of the MTB are at the facility owners and the users own risk. Energinet is not responsible for any damage to hardware or software, including simulation models or computers. 13 | 14 | 5) All intellectual property rights, including copyright to the MTB remains at Energinet in accordance with applicable Danish law. Energinet does however grant a worldwide, non-exclusive, non-payable right to use, modify and distribute the MTB as a whole or partly. Energinet may withdraw or modify the right to use, modify and distribute. 15 | -------------------------------------------------------------------------------- /MTB.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/MTB.pfd -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | # MTB (Model Test Bench) 3 | 4 | Connecting new electricity generation and demand facilities to Denmark's public transmission and distribution systems requires thorough grid compliance studies using both RMS/PDT and EMT plant-level models. The Danish TSO, Energinet, mandates that RMS/PDT models be created in [DIgSILENT PowerFactory](https://www.digsilent.de/en/powerfactory.html) and EMT models in [PSCAD](https://www.pscad.com/). Before any facility can begin operation, all electrically significant plants must have their RMS and EMT models reviewed and approved by Energinet to ensure both grid compliance and model quality. Conducting the necessary studies to demonstrate compliance and validate model quality through comparisons of RMS and EMT models can be both time-consuming and prone to error. 5 | 6 | The MTB (Model Test Bench) simplifies and automates this process by enabling seamless grid connection studies across PowerFactory and PSCAD environments. Energinet relies on the MTB for all grid connection studies and strongly recommends its use to all connecting parties. By using the MTB, developers can conduct studies under the exact same conditions as Energinet, ensuring they achieve the same results that Energinet will evaluate. 7 | 8 | The workflow is simple: 9 | 10 | 1. **Define the Required Studies** in the provided Excel sheet. The MTB is preconfigured for the studies required in most grid connection cases in Denmark but is also adaptable to all regions following the EU RfG. Modifying or extending the study case set is straightforward. 11 | 2. **Integrate the PSCAD MTB Component** into the plant's PSCAD model. 12 | 3. **Integrate the PowerFactory MTB Component** into the plant's PowerFactory model. 13 | 4. **Execute Simulations** using the MTB Python scripts. 14 | 5. **Visualize the Results** with the included plotter tool. 15 | 16 | For the latest release notes, please visit the [Releases page](https://github.com/Energinet-AIG/MTB/releases). Learn more about the regulations for grid connection of new facilities in Denmark: [Danish](https://energinet.dk/regler/el/nettilslutning) or [English](https://en.energinet.dk/electricity/rules-and-regulations/regulations-for-new-facilities). 17 | 18 | ![96](https://github.com/user-attachments/assets/6ce6746c-83b6-4d3f-a433-71c7ce5409de) 19 | *Example comparative study between RMS (red) and EMT (blue) models.* 20 | ## Getting Started 21 | 22 | To start using the MTB, refer to the Quickstart Guides available on the [MTB wiki Home page](https://github.com/Energinet-AIG/MTB/wiki) on GitHub. These guides provide instructions on using the Casesheet, PowerFactory, PSCAD, and the plotter tool. 23 | 24 | ## Requirements 25 | 26 | To install all necessary dependencies, run: 27 | 28 | ```bash 29 | pip install -r requirements.txt 30 | ``` 31 | 32 | ### Tested Environments 33 | 34 | - **PowerFactory**: Tested on version 2024 SP4 with Python versions >= 3.8.8. 35 | - **PSCAD**: Tested on version 5.0.2.0 with Python 3.7.2 (embedded Python). Compatibility is guaranteed only with Intel Fortran Compilers. 36 | 37 | ## Contributing 38 | 39 | We welcome contributions! To contribute, please file an issue via the MTB [Issues tab](https://github.com/Energinet-AIG/MTB/issues). You can report bugs, request features, or suggest improvements. Before submitting, please check for any known issues. 40 | 41 | ## Contact 42 | 43 | For inquiries, please contact the Energinet simulation model team: simuleringsmodeller@energinet.dk 44 | -------------------------------------------------------------------------------- /case_setup.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Contains the specific setup for the testbench. Connecting the waveforms to the PSCAD and PowerFactory interfaces. 3 | ''' 4 | from __future__ import annotations 5 | from typing import Union, Tuple, List, Optional 6 | import pandas as pd 7 | import sim_interface as si 8 | from math import isnan, sqrt 9 | from warnings import warn 10 | 11 | FAULT_TYPES = { 12 | '3p fault' : 7.0, 13 | '2p-g fault' : 5.0, 14 | '2p fault' : 3.0, 15 | '1p fault' : 1.0, 16 | '3p fault (ohm)' : 8.0, 17 | '2p-g fault (ohm)' : 6.0, 18 | '2p fault (ohm)' : 4.0, 19 | '1p fault (ohm)' : 2.0 20 | } 21 | 22 | QMODES = { 23 | 'q': 0, 24 | 'q(u)': 1, 25 | 'pf': 2, 26 | 'qmode3': 3, 27 | 'qmode4': 4, 28 | 'qmode5': 5, 29 | 'qmode6': 6, 30 | } 31 | 32 | PMODES = { 33 | 'no p(f)': 0, 34 | 'lfsm': 1, 35 | 'fsm': 2, 36 | 'lfsm+fsm': 3, 37 | 'pmode4': 4, 38 | 'pmode5': 5, 39 | 'pmode6': 6, 40 | 'pmode7': 7 41 | } 42 | 43 | class PlantSettings: 44 | def __init__(self, path : str) -> None: 45 | df : pd.DataFrame = pd.read_excel(path, sheet_name='Settings', header=None) # type: ignore 46 | 47 | df.set_index(0, inplace = True) # type: ignore 48 | inputs : pd.Series[Union[str, float]] = df.iloc[1:, 0] 49 | 50 | self.Casegroup = str(inputs['Casegroup']) 51 | self.Run_custom_cases = bool(inputs['Run custom cases']) 52 | self.Projectname = str(inputs['Projectname']).replace(' ', '_') 53 | self.Pn = float(inputs['Pn']) 54 | self.Uc = float(inputs['Uc']) 55 | self.Un = float(inputs['Un']) 56 | self.Area = str(inputs['Area']) 57 | self.SCR_min = float(inputs['SCR min']) 58 | self.SCR_tuning = float(inputs['SCR tuning']) 59 | self.SCR_max = float(inputs['SCR max']) 60 | self.V_droop = float(inputs['V droop']) 61 | self.XR_SCR_min = float(inputs['X/R SCR min']) 62 | self.XR_SCR_tuning = float(inputs['X/R SCR tuning']) 63 | self.XR_SCR_max = float(inputs['X/R SCR max']) 64 | self.R0 = float(inputs['R0']) 65 | self.X0 = float(inputs['X0']) 66 | self.Default_Q_mode = str(inputs['Default Q mode']) 67 | self.PSCAD_Timestep = float(inputs['PSCAD Timestep']) 68 | self.PSCAD_init_time = float(inputs['PSCAD Initialization time']) 69 | self.PF_flat_time = float(inputs['PF flat time']) 70 | self.PF_variable_step = bool(inputs['PF variable step']) 71 | self.PF_enforced_sync = bool(inputs['PF enforced sync.']) 72 | self.PF_force_asymmetrical_sim = bool(inputs['PF force asymmetrical sim.']) 73 | self.PF_enforce_P_limits_in_LDF = bool(inputs['PF enforce P limits in LDF']) 74 | self.PF_enforce_Q_limits_in_LDF = bool(inputs['PF enforce Q limits in LDF']) 75 | 76 | class Case: 77 | def __init__(self, case: 'pd.Series[Union[str, int, float, bool]]') -> None: 78 | self.rank: int = int(case['Rank']) 79 | self.RMS: bool = bool(case['RMS']) 80 | self.EMT: bool = bool(case['EMT']) 81 | self.Name: str = str(case['Name']) 82 | self.U0: float = float(case['U0']) 83 | self.P0: float = float(case['P0']) 84 | self.Pmode: str = str(case['Pmode']) 85 | self.Qmode: str = str(case['Qmode']) 86 | self.Qref0: float = float(case['Qref0']) 87 | self.SCR0: float = float(case['SCR0']) 88 | self.XR0: float = float(case['XR0']) 89 | self.Simulationtime: float = float(case['Simulationtime']) 90 | self.Events : List[Tuple[str, float, Union[float, str], Union[float, str]]] = [] 91 | 92 | index : pd.Index[str] = case.index # type: ignore 93 | i = 0 94 | while(True): 95 | typeLabel = f'type.{i}' if i > 0 else 'type' 96 | timeLabel = f'time.{i}' if i > 0 else 'time' 97 | x1Label = f'X1.{i}' if i > 0 else 'X1' 98 | x2Label = f'X2.{i}' if i > 0 else 'X2' 99 | 100 | if typeLabel in index and timeLabel in index and x1Label in index and x2Label in index: 101 | try: 102 | x1value = float(str(case[x1Label]).replace(' ','')) 103 | except ValueError: 104 | x1value = str(case[x1Label]) 105 | 106 | try: 107 | x2value = float(str(case[x2Label]).replace(' ','')) 108 | except ValueError: 109 | x2value = str(case[x2Label]) 110 | 111 | self.Events.append((str(case[typeLabel]), float(case[timeLabel]), x1value, x2value)) 112 | i += 1 113 | else: 114 | break 115 | 116 | def setup(casesheetPath : str, pscad : bool, pfEncapsulation : Optional[si.PFinterface]) -> Tuple[PlantSettings, List[si.Channel], List[Case], int, List[Case]]: 117 | ''' 118 | Sets up the simulation channels and cases from the given casesheet. Returns plant settings, channels, cases, max rank and emtCases. 119 | ''' 120 | def impedance_uk_pcu(scr : float, xr : float, pn : float, un : float, uc : float) -> Tuple[float, float]: 121 | scr_ = max(scr, 0.001) 122 | pcu = (uc*uc)/(un*un)*pn/sqrt(xr*xr + 1)/scr_ if scr >= 0.0 else 0.0 123 | uk = (uc*uc)/(un*un)/scr_ if scr >= 0.0 else 0.0 124 | return 100.0 * uk, 1000.0 * pcu 125 | 126 | def signal(name : str, pscad : bool = True, defaultConnection : bool = True, measFile : bool = False) -> si.Signal: 127 | newSignal = si.Signal(name, pscad, pfEncapsulation) 128 | 129 | if defaultConnection: 130 | newSignal.addPFsub_S(f'{name}.ElmDsl', 's:x') 131 | newSignal.addPFsub_R(f'{name}.ElmDsl', 'slope') 132 | newSignal.addPFsub_S0(f'{name}.ElmDsl', 'x0') 133 | newSignal.addPFsub_T(f'{name}.ElmDsl', 'mode') 134 | if measFile: 135 | newSignal.setElmFile(f'{name}_meas.ElmFile') 136 | 137 | channels.append(newSignal) 138 | return newSignal 139 | 140 | def constant(name : str, value : float, pscad : bool = True) -> si.Constant: 141 | newConstant = si.Constant(name, value, pscad, pfEncapsulation) 142 | channels.append(newConstant) 143 | return newConstant 144 | 145 | def pfObjRefer(name : str) -> si.PfObjRefer: 146 | newPfObjRefer = si.PfObjRefer(name, pfEncapsulation) 147 | channels.append(newPfObjRefer) 148 | return newPfObjRefer 149 | 150 | def string(name : str) -> si.String: 151 | newString = si.String(name, pfEncapsulation) 152 | channels.append(newString) 153 | return newString 154 | 155 | pf = pfEncapsulation is not None 156 | 157 | channels : List[si.Channel] = [] 158 | plantSettings = PlantSettings(casesheetPath) 159 | 160 | si.pf_time_offset = plantSettings.PF_flat_time 161 | si.pscad_time_offset = plantSettings.PSCAD_init_time 162 | 163 | # Voltage source control 164 | mtb_t_vmode = signal('mtb_t_vmode', defaultConnection = False) # only to be used in PSCAD 165 | mtb_s_vref_pu = signal('mtb_s_vref_pu', measFile = True) 166 | mtb_s_vref_pu.addPFsub_S0('vac.ElmVac', 'usetp', lambda _, x : abs(x)) 167 | mtb_s_vref_pu.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:5', lambda _, x : abs(x)) 168 | mtb_s_vref_pu.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:5', lambda _, x : abs(x)) 169 | mtb_s_vref_pu.addPFsub_T('initializer_script.ComDpl', 'IntExpr:4', lambda _, x : abs(x)) 170 | mtb_s_vref_pu.addPFsub_T('initializer_qdsl.ElmQdsl', 'initVals:4', lambda _, x : abs(x)) 171 | 172 | mtb_s_dvref_pu = signal('mtb_s_dvref_pu') 173 | mtb_s_phref_deg = signal('mtb_s_phref_deg', measFile = True) 174 | mtb_s_phref_deg.addPFsub_S0('vac.ElmVac', 'phisetp') 175 | mtb_s_fref_hz = signal('mtb_s_fref_hz', measFile = True) 176 | 177 | mtb_s_varef_pu = signal('mtb_s_varef_pu', defaultConnection = False) 178 | mtb_s_vbref_pu = signal('mtb_s_vbref_pu', defaultConnection = False) 179 | mtb_s_vcref_pu = signal('mtb_s_vcref_pu', defaultConnection = False) 180 | 181 | # Grid impedance 182 | mtb_s_scr = signal('mtb_s_scr') 183 | mtb_s_scr.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:11') 184 | mtb_s_scr.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:11') 185 | 186 | mtb_s_xr = signal('mtb_s_xr') 187 | mtb_s_xr.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:12') 188 | mtb_s_xr.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:12') 189 | 190 | ldf_t_uk = signal('ldf_t_uk', pscad = False, defaultConnection = False) 191 | ldf_t_uk.addPFsub_S0('z.ElmSind', 'uk') 192 | ldf_t_pcu_kw = signal('ldf_t_pcu_kw', pscad = False, defaultConnection = False) 193 | ldf_t_pcu_kw.addPFsub_S0('z.ElmSind', 'Pcu') 194 | 195 | # Zero sequence impedance 196 | mtb_t_r0_ohm = signal('mtb_t_r0_ohm', defaultConnection = False) 197 | mtb_t_r0_ohm.addPFsub_S0('vac.ElmVac', 'R0') 198 | mtb_t_r0_ohm.addPFsub_S0('fault_ctrl.ElmDsl', 'r0') 199 | 200 | mtb_t_x0_ohm = signal('mtb_t_x0_ohm', defaultConnection = False) 201 | mtb_t_x0_ohm.addPFsub_S0('vac.ElmVac', 'X0') 202 | mtb_t_x0_ohm.addPFsub_S0('fault_ctrl.ElmDsl', 'x0') 203 | 204 | # Standard plant references and outputs 205 | mtb_s_pref_pu = signal('mtb_s_pref_pu', measFile = True) 206 | mtb_s_pref_pu.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:6') 207 | mtb_s_pref_pu.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:6') 208 | mtb_s_pref_pu.addPFsub_S0('powerf_ctrl.ElmSecctrl', 'psetp', lambda _, x : x * plantSettings.Pn) 209 | 210 | mtb_s_qref = signal('mtb_s_qref', measFile = True) 211 | mtb_s_qref.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:9') 212 | mtb_s_qref.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:9') 213 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'usetp', lambda _, x: 1.0 if x <= 0.0 else x) 214 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'qsetp', lambda _, x : -x * plantSettings.Pn) 215 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'pfsetp', lambda _, x: min(abs(x), 1.0)) 216 | mtb_s_qref.addPFsub_S0('station_ctrl.ElmStactrl', 'pf_recap', lambda _, x: 0 if x > 0 else 1) 217 | 218 | mtb_s_qref_q_pu = signal('mtb_s_qref_q_pu', measFile = True) 219 | mtb_s_qref_qu_pu = signal('mtb_s_qref_qu_pu', measFile = True) 220 | mtb_s_qref_pf = signal('mtb_s_qref_pf', measFile = True) 221 | mtb_s_qref_3 = signal('mtb_s_qref_3', measFile = True) 222 | mtb_s_qref_4 = signal('mtb_s_qref_4', measFile = True) 223 | mtb_s_qref_5 = signal('mtb_s_qref_5', measFile = True) 224 | mtb_s_qref_6 = signal('mtb_s_qref_6', measFile = True) 225 | 226 | mtb_t_qmode = signal('mtb_t_qmode') 227 | mtb_t_qmode.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:8') 228 | mtb_t_qmode.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:8') 229 | 230 | def stactrl_mode_switch(self : si.Signal, qmode : float): 231 | if qmode == 1: 232 | return 0 233 | elif qmode == 2: 234 | return 2 235 | else: 236 | return 1 237 | 238 | mtb_t_qmode.addPFsub_S0('station_ctrl.ElmStactrl', 'i_ctrl', stactrl_mode_switch) 239 | 240 | mtb_t_pmode = signal('mtb_t_pmode') 241 | mtb_t_pmode.addPFsub_S0('initializer_script.ComDpl', 'IntExpr:7') 242 | mtb_t_pmode.addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:7') 243 | 244 | # Constants 245 | mtb_c_pn = constant('mtb_c_pn', plantSettings.Pn) 246 | mtb_c_pn.addPFsub('initializer_script.ComDpl', 'IntExpr:0') 247 | mtb_c_pn.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:0') 248 | mtb_c_pn.addPFsub('measurements.ElmDsl', 'pn') 249 | mtb_c_pn.addPFsub('rx_calc.ElmDsl', 'pn') 250 | mtb_c_pn.addPFsub('z.ElmSind', 'Sn') 251 | 252 | mtb_c_qn = constant('mtb_c_qn', 0.33 * plantSettings.Pn, pscad = False) 253 | mtb_c_qn.addPFsub('station_ctrl.ElmStactrl', 'Srated') 254 | 255 | mtb_c_vbase = constant('mtb_c_vbase', plantSettings.Un) 256 | mtb_c_vbase.addPFsub('initializer_script.ComDpl', 'IntExpr:1') 257 | mtb_c_vbase.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:1') 258 | mtb_c_vbase.addPFsub('measurements.ElmDsl', 'vbase') 259 | mtb_c_vbase.addPFsub('pcc.ElmTerm', 'uknom') 260 | mtb_c_vbase.addPFsub('ext.ElmTerm', 'uknom') 261 | mtb_c_vbase.addPFsub('fault_node.ElmTerm', 'uknom') 262 | mtb_c_vbase.addPFsub('z.ElmSind', 'ucn') 263 | mtb_c_vbase.addPFsub('fz.ElmSind', 'ucn') 264 | mtb_c_vbase.addPFsub('connector.ElmSind', 'ucn') 265 | mtb_c_vbase.addPFsub('vac.ElmVac', 'Unom') 266 | 267 | mtb_c_vc = constant('mtb_c_vc', plantSettings.Uc) 268 | mtb_c_vc.addPFsub('initializer_script.ComDpl', 'IntExpr:2') 269 | mtb_c_vc.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:2') 270 | mtb_c_vc.addPFsub('rx_calc.ElmDsl', 'vc') 271 | 272 | constant('mtb_c_inittime_s', plantSettings.PSCAD_init_time) 273 | 274 | mtb_c_flattime_s = constant('mtb_c_flattime_s', plantSettings.PF_flat_time, pscad = False) 275 | mtb_c_flattime_s.addPFsub('initializer_script.ComDpl', 'IntExpr:3') 276 | mtb_c_flattime_s.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:3') 277 | 278 | mtb_c_vdroop = constant('mtb_c_vdroop', plantSettings.V_droop, pscad = False) 279 | mtb_c_vdroop.addPFsub('initializer_script.ComDpl', 'IntExpr:10') 280 | mtb_c_vdroop.addPFsub('initializer_qdsl.ElmQdsl', 'initVals:10') 281 | mtb_c_vdroop.addPFsub('station_ctrl.ElmStactrl', 'ddroop') 282 | 283 | # Time and rank control 284 | mtb_t_simtimePscad_s = signal('mtb_t_simtimePscad_s', defaultConnection = False) 285 | mtb_t_simtimePf_s = signal('mtb_t_simtimePf_s', defaultConnection = False) 286 | mtb_t_simtimePf_s.addPFsub_S0('$studycase$\\ComSim', 'tstop') 287 | 288 | # From rank to PSCAD task ID 289 | mtb_s_task = signal('mtb_s_task', defaultConnection = False) 290 | 291 | # Fault 292 | flt_s_type = signal('flt_s_type') 293 | flt_s_rf_ohm = signal('flt_s_rf_ohm') 294 | flt_s_resxf = signal('flt_s_resxf') 295 | 296 | mtb_s : List[si.Signal] = [] 297 | # Custom signals 298 | mtb_s.append(signal('mtb_s_1', measFile = True)) 299 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:13') 300 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:13') 301 | mtb_s.append(signal('mtb_s_2', measFile = True)) 302 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:14') 303 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:14') 304 | mtb_s.append(signal('mtb_s_3', measFile = True)) 305 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:15') 306 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:15') 307 | mtb_s.append(signal('mtb_s_4', measFile = True)) 308 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:16') 309 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:16') 310 | mtb_s.append(signal('mtb_s_5', measFile = True)) 311 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:17') 312 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:17') 313 | mtb_s.append(signal('mtb_s_6', measFile = True)) 314 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:18') 315 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:18') 316 | mtb_s.append(signal('mtb_s_7', measFile = True)) 317 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:19') 318 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:19') 319 | mtb_s.append(signal('mtb_s_8', measFile = True)) 320 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:20') 321 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:20') 322 | mtb_s.append(signal('mtb_s_9', measFile = True)) 323 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:21') 324 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:21') 325 | mtb_s.append(signal('mtb_s_10', measFile = True)) 326 | mtb_s[-1].addPFsub_S0('initializer_script.ComDpl', 'IntExpr:22') 327 | mtb_s[-1].addPFsub_S0('initializer_qdsl.ElmQdsl', 'initVals:22') 328 | 329 | # Powerfactory references 330 | ldf_r_vcNode = pfObjRefer('mtb_r_vcNode') 331 | ldf_r_vcNode.addPFsub('vac.ElmVac', 'contbar') 332 | 333 | # Refences outserv time invariants 334 | ldf_t_refOOS = signal('ldf_t_refOOS', pscad = False, defaultConnection = False) 335 | ldf_t_refOOS.addPFsub_S0('mtb_s_pref_pu.ElmDsl', 'outserv') 336 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_q_pu.ElmDsl', 'outserv') 337 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_qu_pu.ElmDsl', 'outserv') 338 | ldf_t_refOOS.addPFsub_S0('mtb_s_qref_pf.ElmDsl', 'outserv') 339 | ldf_t_refOOS.addPFsub_S0('mtb_t_qmode.ElmDsl', 'outserv') 340 | ldf_t_refOOS.addPFsub_S0('mtb_t_pmode.ElmDsl', 'outserv') 341 | ldf_t_refOOS.addPFsub_S0('mtb_s_1.ElmDsl', 'outserv') 342 | ldf_t_refOOS.addPFsub_S0('mtb_s_2.ElmDsl', 'outserv') 343 | ldf_t_refOOS.addPFsub_S0('mtb_s_3.ElmDsl', 'outserv') 344 | ldf_t_refOOS.addPFsub_S0('mtb_s_4.ElmDsl', 'outserv') 345 | ldf_t_refOOS.addPFsub_S0('mtb_s_5.ElmDsl', 'outserv') 346 | ldf_t_refOOS.addPFsub_S0('mtb_s_6.ElmDsl', 'outserv') 347 | ldf_t_refOOS.addPFsub_S0('mtb_s_7.ElmDsl', 'outserv') 348 | ldf_t_refOOS.addPFsub_S0('mtb_s_8.ElmDsl', 'outserv') 349 | ldf_t_refOOS.addPFsub_S0('mtb_s_9.ElmDsl', 'outserv') 350 | ldf_t_refOOS.addPFsub_S0('mtb_s_10.ElmDsl', 'outserv') 351 | 352 | # Calculation settings constants and timeVariants 353 | ldf_c_iopt_lim = constant('ldf_c_iopt_lim', int(plantSettings.PF_enforce_Q_limits_in_LDF), pscad = False) 354 | ldf_c_iopt_lim.addPFsub('$studycase$\\ComLdf', 'iopt_lim') 355 | 356 | ldf_c_iopt_apdist = constant('ldf_c_iopt_apdist', 1, pscad = False) 357 | ldf_c_iopt_apdist.addPFsub('$studycase$\\ComLdf', 'iopt_apdist') 358 | 359 | ldf_c_iPST_at = constant('ldf_c_iPST_at', 1, pscad = False) 360 | ldf_c_iPST_at.addPFsub('$studycase$\\ComLdf', 'iPST_at') 361 | 362 | ldf_c_iopt_at = constant('ldf_c_iopt_at', 1, pscad = False) 363 | ldf_c_iopt_at.addPFsub('$studycase$\\ComLdf', 'iopt_at') 364 | 365 | ldf_c_iopt_asht = constant('ldf_c_iopt_asht', 1, pscad = False) 366 | ldf_c_iopt_asht.addPFsub('$studycase$\\ComLdf', 'iopt_asht') 367 | 368 | ldf_c_iopt_plim = constant('ldf_c_iopt_plim', int(plantSettings.PF_enforce_P_limits_in_LDF), pscad = False) 369 | ldf_c_iopt_plim.addPFsub('$studycase$\\ComLdf', 'iopt_plim') 370 | 371 | ldf_c_iopt_net = signal('ldf_c_iopt_net', pscad = False, defaultConnection = False) # ldf asymmetrical option boolean 372 | ldf_c_iopt_net.addPFsub_S0('$studycase$\\ComLdf', 'iopt_net') 373 | 374 | inc_c_iopt_net = string('inc_c_iopt_net') # inc asymmetrical option 375 | inc_c_iopt_net.addPFsub('$studycase$\\ComInc', 'iopt_net') 376 | 377 | inc_c_iopt_show = constant('inc_c_iopt_show', 1, pscad = False) 378 | inc_c_iopt_show.addPFsub('$studycase$\\ComInc', 'iopt_show') 379 | 380 | inc_c_dtgrd = constant('inc_c_dtgrd', 0.001, pscad = False) 381 | inc_c_dtgrd.addPFsub('$studycase$\\ComInc', 'dtgrd') 382 | 383 | inc_c_dtgrd_max = constant('inc_c_dtgrd_max', 0.01, pscad = False) 384 | inc_c_dtgrd_max.addPFsub('$studycase$\\ComInc', 'dtgrd_max') 385 | 386 | inc_c_tstart = constant('inc_c_tstart', 0, pscad = False) 387 | inc_c_tstart.addPFsub('$studycase$\\ComInc', 'tstart') 388 | 389 | inc_c_iopt_sync = constant('inc_c_iopt_sync', plantSettings.PF_enforced_sync, pscad = False) # enforced sync. option 390 | inc_c_iopt_sync.addPFsub('$studycase$\\ComInc', 'iopt_sync') 391 | 392 | inc_c_syncperiod = constant('inc_c_syncperiod', 0.001, pscad = False) 393 | inc_c_syncperiod.addPFsub('$studycase$\\ComInc', 'syncperiod') 394 | 395 | inc_c_iopt_adapt = constant('inc_c_iopt_adapt', plantSettings.PF_variable_step, pscad = False) # variable step option 396 | inc_c_iopt_adapt.addPFsub('$studycase$\\ComInc', 'iopt_adapt') 397 | 398 | inc_c_iopt_lt = constant('inc_c_iopt_lt', 0, pscad = False) 399 | inc_c_iopt_lt.addPFsub('$studycase$\\ComInc', 'iopt_lt') 400 | 401 | inc_c_errseq = constant('inc_c_errseq', 0.01, pscad = False) 402 | inc_c_errseq.addPFsub('$studycase$\\ComInc', 'errseq') 403 | 404 | inc_c_autocomp = constant('inc_c_autocomp', 0, pscad = False) 405 | inc_c_autocomp.addPFsub('$studycase$\\ComInc', 'automaticCompilation') 406 | 407 | df = pd.read_excel(casesheetPath, sheet_name=f'{plantSettings.Casegroup} cases', header=1) # type: ignore 408 | 409 | maxRank = 0 410 | cases : List[Case] = [] 411 | emtCases : List[Case] = [] 412 | 413 | for _, case in df.iterrows(): # type: ignore 414 | cases.append(Case(case)) # type: ignore 415 | maxRank = max(maxRank, cases[-1].rank) 416 | 417 | if plantSettings.Run_custom_cases and plantSettings.Casegroup != 'Custom': 418 | dfc = pd.read_excel(casesheetPath, sheet_name='Custom cases', header=1) # type: ignore 419 | for _, case in dfc.iterrows(): # type: ignore 420 | cases.append(Case(case)) # type: ignore 421 | maxRank = max(maxRank, cases[-1].rank) 422 | 423 | for case in cases: 424 | # Simulation time 425 | pf_lonRec = pscad_lonRec = 0.0 426 | 427 | # PF: Default symmetrical simulation 428 | ldf_c_iopt_net[case.rank] = 0 429 | inc_c_iopt_net[case.rank] = 'sym' 430 | 431 | # Voltage source control default setup 432 | mtb_t_vmode[case.rank] = 0 433 | mtb_s_vref_pu[case.rank] = -case.U0 434 | mtb_s_phref_deg[case.rank] = 0.0 435 | mtb_s_dvref_pu[case.rank] = 0.0 436 | mtb_s_fref_hz[case.rank] = 50.0 437 | 438 | mtb_s_varef_pu[case.rank] = 0.0 439 | mtb_s_vbref_pu[case.rank] = 0.0 440 | mtb_s_vcref_pu[case.rank] = 0.0 441 | 442 | mtb_s_scr[case.rank] = case.SCR0 443 | mtb_s_xr[case.rank] = case.XR0 444 | 445 | ldf_t_uk[case.rank], ldf_t_pcu_kw[case.rank] = impedance_uk_pcu(case.SCR0, case.XR0, plantSettings.Pn, plantSettings.Un, plantSettings.Uc) 446 | 447 | mtb_t_r0_ohm[case.rank] = plantSettings.R0 448 | mtb_t_x0_ohm[case.rank] = plantSettings.X0 449 | 450 | # Standard plant references and outputs default setup 451 | mtb_s_pref_pu[case.rank] = case.P0 452 | 453 | # Set Qmode 454 | if case.Qmode.lower() == 'default': 455 | case.Qmode = plantSettings.Default_Q_mode 456 | 457 | mtb_t_qmode[case.rank] = QMODES[case.Qmode.lower()] 458 | 459 | mtb_s_qref[case.rank] = case.Qref0 460 | mtb_s_qref_q_pu[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 0 else 0.0 461 | mtb_s_qref_qu_pu[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 1 else 0.0 462 | mtb_s_qref_pf[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 2 else 0.0 463 | mtb_s_qref_3[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 3 else 0.0 464 | mtb_s_qref_4[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 4 else 0.0 465 | mtb_s_qref_5[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 5 else 0.0 466 | mtb_s_qref_6[case.rank] = case.Qref0 if mtb_t_qmode[case.rank].s0 == 6 else 0.0 467 | 468 | mtb_t_pmode[case.rank] = PMODES[case.Pmode.lower()] 469 | 470 | # Fault signals 471 | flt_s_type[case.rank] = 0.0 472 | flt_s_rf_ohm[case.rank] = 0.0 473 | flt_s_resxf[case.rank] = 0.0 474 | 475 | # Default custom signal values 476 | mtb_s[0][case.rank] = 0.0 477 | mtb_s[1][case.rank] = 0.0 478 | mtb_s[2][case.rank] = 0.0 479 | mtb_s[3][case.rank] = 0.0 480 | mtb_s[4][case.rank] = 0.0 481 | mtb_s[5][case.rank] = 0.0 482 | mtb_s[6][case.rank] = 0.0 483 | mtb_s[7][case.rank] = 0.0 484 | mtb_s[8][case.rank] = 0.0 485 | mtb_s[9][case.rank] = 0.0 486 | 487 | # Default OOS references 488 | ldf_t_refOOS[case.rank] = 0 489 | 490 | # Parse events 491 | for event in case.Events: 492 | eventType = event[0] 493 | eventTime = event[1] 494 | eventX1 = event[2] 495 | eventX2 = event[3] 496 | 497 | if eventType == 'Pref': 498 | assert isinstance(eventX1, float) 499 | assert isinstance(eventX2, float) 500 | mtb_s_pref_pu[case.rank].add(eventTime, eventX1, eventX2) 501 | 502 | elif eventType == 'Qref': 503 | assert isinstance(eventX1, float) 504 | assert isinstance(eventX2, float) 505 | mtb_s_qref[case.rank].add(eventTime, eventX1, eventX2) 506 | 507 | if mtb_t_qmode[case.rank].s0 == 0: 508 | mtb_s_qref_q_pu[case.rank].add(eventTime, eventX1, eventX2) 509 | elif mtb_t_qmode[case.rank].s0 == 1: 510 | mtb_s_qref_qu_pu[case.rank].add(eventTime, eventX1, eventX2) 511 | elif mtb_t_qmode[case.rank].s0 == 2: 512 | mtb_s_qref_pf[case.rank].add(eventTime, eventX1, eventX2) 513 | elif mtb_t_qmode[case.rank].s0 == 3: 514 | mtb_s_qref_3[case.rank].add(eventTime, eventX1, eventX2) 515 | elif mtb_t_qmode[case.rank].s0 == 4: 516 | mtb_s_qref_4[case.rank].add(eventTime, eventX1, eventX2) 517 | elif mtb_t_qmode[case.rank].s0 == 5: 518 | mtb_s_qref_5[case.rank].add(eventTime, eventX1, eventX2) 519 | elif mtb_t_qmode[case.rank].s0 == 6: 520 | mtb_s_qref_6[case.rank].add(eventTime, eventX1, eventX2) 521 | else: 522 | raise ValueError('Invalid Q mode') 523 | 524 | elif eventType == 'Voltage': 525 | assert isinstance(eventX1, float) 526 | assert isinstance(eventX2, float) 527 | mtb_s_vref_pu[case.rank].add(eventTime, eventX1, eventX2) 528 | 529 | elif eventType == 'dVoltage': 530 | assert isinstance(eventX1, float) 531 | assert isinstance(eventX2, float) 532 | mtb_s_dvref_pu[case.rank].add(eventTime, eventX1, eventX2) 533 | 534 | elif eventType == 'Phase': 535 | assert isinstance(eventX1, float) 536 | assert isinstance(eventX2, float) 537 | mtb_s_phref_deg[case.rank].add(eventTime, eventX1, eventX2) 538 | 539 | elif eventType == 'Frequency': 540 | assert isinstance(eventX1, float) 541 | assert isinstance(eventX2, float) 542 | mtb_s_fref_hz[case.rank].add(eventTime, eventX1, eventX2) 543 | 544 | elif eventType == 'SCR': 545 | assert isinstance(eventX1, float) 546 | assert isinstance(eventX2, float) 547 | mtb_s_scr[case.rank].add(eventTime, eventX1, 0.0) 548 | mtb_s_xr[case.rank].add(eventTime, eventX2, 0.0) 549 | 550 | elif eventType.count('fault') > 0 and eventType != 'Clear fault': 551 | assert isinstance(eventX1, float) 552 | assert isinstance(eventX2, float) 553 | 554 | flt_s_type[case.rank].add(eventTime, FAULT_TYPES[eventType], 0.0) 555 | flt_s_type[case.rank].add(eventTime + eventX2, 0.0, 0.0) 556 | flt_s_resxf[case.rank].add(eventTime, eventX1, 0.0) 557 | if FAULT_TYPES[eventType] < 7: 558 | ldf_c_iopt_net[case.rank] = 1 559 | inc_c_iopt_net[case.rank] = 'rst' 560 | 561 | elif eventType == 'Clear fault': 562 | flt_s_type[case.rank].add(eventTime, 0.0, 0.0) 563 | 564 | elif eventType == 'Pref recording': 565 | assert isinstance(eventX1, str) 566 | assert isinstance(eventX2, float) 567 | wf = mtb_s_pref_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 568 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 569 | pf_lonRec = max(wf.pfLen, pf_lonRec) 570 | 571 | elif eventType == 'Qref recording': 572 | assert isinstance(eventX1, str) 573 | assert isinstance(eventX2, float) 574 | wf = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 575 | 576 | mtb_s_qref[case.rank] = wf 577 | mtb_s_qref_q_pu[case.rank] = 0 578 | mtb_s_qref_qu_pu[case.rank] = 0 579 | mtb_s_qref_pf[case.rank] = 0 580 | mtb_s_qref_3[case.rank] = 0 581 | mtb_s_qref_4[case.rank] = 0 582 | mtb_s_qref_5[case.rank] = 0 583 | mtb_s_qref_6[case.rank] = 0 584 | 585 | if mtb_t_qmode[case.rank].s0 == 0: 586 | mtb_s_qref_q_pu[case.rank] = wf 587 | elif mtb_t_qmode[case.rank].s0 == 1: 588 | mtb_s_qref_qu_pu[case.rank] = wf 589 | elif mtb_t_qmode[case.rank].s0 == 2: 590 | mtb_s_qref_pf[case.rank] = wf 591 | elif mtb_t_qmode[case.rank].s0 == 3: 592 | mtb_s_qref_3[case.rank] = wf 593 | elif mtb_t_qmode[case.rank].s0 == 4: 594 | mtb_s_qref_4[case.rank] = wf 595 | elif mtb_t_qmode[case.rank].s0 == 5: 596 | mtb_s_qref_5[case.rank] = wf 597 | elif mtb_t_qmode[case.rank].s0 == 6: 598 | mtb_s_qref_6[case.rank] = wf 599 | else: 600 | raise ValueError('Invalid Q mode') 601 | 602 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 603 | pf_lonRec = max(wf.pfLen, pf_lonRec) 604 | 605 | elif eventType == 'Voltage recording': 606 | assert isinstance(eventX1, str) 607 | assert isinstance(eventX2, float) 608 | if mtb_t_vmode[case.rank].s0 != 2: 609 | mtb_t_vmode[case.rank] = 1 610 | wf = mtb_s_vref_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 611 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 612 | pf_lonRec = max(wf.pfLen, pf_lonRec) 613 | 614 | elif eventType == 'Inst. Voltage recording': 615 | assert isinstance(eventX1, str) 616 | assert isinstance(eventX2, float) 617 | mtb_t_vmode[case.rank] = 2 618 | mtb_s_varef_pu[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=False, pscad=pscad) 619 | mtb_s_vbref_pu[case.rank] = si.Recorded(path=eventX1, column=2, scale=eventX2, pf=False, pscad=pscad) 620 | wf = mtb_s_vcref_pu[case.rank] = si.Recorded(path=eventX1, column=3, scale=eventX2, pf=False, pscad=pscad) 621 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 622 | 623 | elif eventType == 'Phase recording': 624 | assert isinstance(eventX1, str) 625 | assert isinstance(eventX2, float) 626 | wf = mtb_s_phref_deg[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 627 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 628 | pf_lonRec = max(wf.pfLen, pf_lonRec) 629 | 630 | elif eventType == 'Frequency recording': 631 | assert isinstance(eventX1, str) 632 | assert isinstance(eventX2, float) 633 | wf = mtb_s_fref_hz[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 634 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 635 | pf_lonRec = max(wf.pfLen, pf_lonRec) 636 | 637 | elif eventType.lower().startswith('signal'): 638 | eventNr = int(eventType.lower().replace('signal','').replace('recording','')) 639 | customSignal = mtb_s[eventNr - 1] 640 | assert isinstance(customSignal, si.Signal) 641 | 642 | if eventType.lower().endswith('recording'): 643 | assert isinstance(eventX1, str) 644 | assert isinstance(eventX2, float) 645 | wf = customSignal[case.rank] = si.Recorded(path=eventX1, column=1, scale=eventX2, pf=pf, pscad=pscad) 646 | pscad_lonRec = max(wf.pscadLen, pscad_lonRec) 647 | pf_lonRec = max(wf.pfLen, pf_lonRec) 648 | else: 649 | assert isinstance(eventX1, float) 650 | assert isinstance(eventX2, float) 651 | customSignal[case.rank].add(eventTime, eventX1, eventX2) 652 | 653 | elif eventType == 'PF disconnect all ref.': 654 | ldf_t_refOOS[case.rank] = 1 655 | 656 | elif eventType == 'PF force asymmetrical': 657 | ldf_c_iopt_net[case.rank] = 1 658 | inc_c_iopt_net[case.rank] = 'rst' 659 | 660 | if isnan(case.Simulationtime) or case.Simulationtime == 0: 661 | mtb_t_simtimePf_s[case.rank] = pf_lonRec 662 | mtb_t_simtimePscad_s[case.rank] = pscad_lonRec 663 | 664 | if pf_lonRec == 0 and case.RMS: 665 | warn(f'Rank: {case.rank}. Powerfactory simulationtime set to 0.0s.') 666 | if pscad_lonRec == 0 and case.EMT: 667 | warn(f'Rank: {case.rank}. PSCAD simulationtime set to 0.0s.') 668 | else: 669 | mtb_t_simtimePscad_s[case.rank] = case.Simulationtime + plantSettings.PSCAD_init_time 670 | mtb_t_simtimePf_s[case.rank] = case.Simulationtime + plantSettings.PF_flat_time 671 | 672 | if not case.EMT: 673 | mtb_t_simtimePscad_s[case.rank] = -1.0 674 | else: 675 | emtCases.append(case) 676 | 677 | if isinstance(mtb_s_vref_pu[case.rank], si.Recorded): 678 | ldf_r_vcNode[case.rank] = '' 679 | else: 680 | ldf_r_vcNode[case.rank] = '$nochange$' 681 | 682 | emtCases.sort(key = lambda x: x.Simulationtime) 683 | 684 | taskId = 1 685 | for emtCase in emtCases: 686 | mtb_s_task[taskId] = emtCase.rank 687 | taskId += 1 688 | mtb_s_task.__pfInterface__ = None 689 | return plantSettings, channels, cases, maxRank, emtCases -------------------------------------------------------------------------------- /config.ini: -------------------------------------------------------------------------------- 1 | [config] 2 | Casesheet path = testcases.xlsx 3 | ;Optional path to append to the python path 4 | Python path = 5 | ;Path to export result files (relative to execute.py) 6 | Export folder = export 7 | ;PSCAD volley size 8 | Volley = 16 9 | ;Powerfactory parallel task automation. AS per 2023 SP5 there is a bug in PF that causes QDSL blocks to be ignored in parallel simulations. 10 | Parallel = True 11 | ;Powerfactory temporary workaround: QDSL controller sometimes fails when not in same grid as calc. relevant statgens. 12 | ;The QDSL controller will be copied to the following grid. Disable by setting to empty string: 13 | QDSL copy grid = -------------------------------------------------------------------------------- /execute_pf.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Executes the Powerplant model testbench in Powerfactory. 3 | ''' 4 | from __future__ import annotations 5 | DEBUG = True 6 | import os 7 | #Ensure right working directory 8 | executePath = os.path.abspath(__file__) 9 | executeFolder = os.path.dirname(executePath) 10 | os.chdir(executeFolder) 11 | 12 | from configparser import ConfigParser 13 | 14 | class readConfig: 15 | def __init__(self) -> None: 16 | self.cp = ConfigParser(allow_no_value=True) 17 | self.cp.read('config.ini') 18 | self.parsedConf = self.cp['config'] 19 | self.sheetPath = str(self.parsedConf['Casesheet path']) 20 | self.pythonPath = str(self.parsedConf['Python path']) 21 | self.volley = int(self.parsedConf['Volley']) 22 | self.parallel = bool(self.parsedConf['Parallel']) 23 | self.exportPath = str(self.parsedConf['Export folder']) 24 | self.QDSLcopyGrid = str(self.parsedConf['QDSL copy grid']) 25 | 26 | config = readConfig() 27 | import sys 28 | sys.path.append(config.pythonPath) 29 | 30 | from typing import Optional, Tuple, List, Union 31 | if getattr(sys, 'gettrace', None) is not None: 32 | sys.path.append('C:\\Program Files\\DIgSILENT\\PowerFactory 2024 SP4\\Python\\3.8') 33 | import powerfactory as pf #type: ignore 34 | 35 | import re 36 | import time 37 | from datetime import datetime 38 | import case_setup as cs 39 | import sim_interface as si 40 | 41 | def script_GetExtObj(script : pf.ComPython, name : str) -> Optional[pf.DataObject]: 42 | ''' 43 | Get script external object. 44 | ''' 45 | retVal : List[Union[int, pf.DataObject, None]] = script.GetExternalObject(name) 46 | assert isinstance(retVal[1], (pf.DataObject, type(None))) 47 | return retVal[1] 48 | 49 | def script_GetStr(script : pf.ComPython, name : str) -> Optional[str]: 50 | ''' 51 | Get script string parameter. 52 | ''' 53 | retVal : List[Union[int, str]] = script.GetInputParameterString(name) 54 | if retVal[0] == 0: 55 | assert isinstance(retVal[1], str) 56 | return retVal[1] 57 | else: 58 | return None 59 | 60 | def script_GetDouble(script : pf.ComPython, name : str) -> Optional[float]: 61 | ''' 62 | Get script double parameter. 63 | ''' 64 | retVal : List[Union[int, float]] = script.GetInputParameterDouble(name) 65 | if retVal[0] == 0: 66 | assert isinstance(retVal[1], float) 67 | return retVal[1] 68 | else: 69 | return None 70 | 71 | def script_GetInt(script : pf.ComPython, name : str) -> Optional[int]: 72 | ''' 73 | Get script integer parameter. 74 | ''' 75 | retVal : List[Union[int, int]] = script.GetInputParameterInt(name) 76 | if retVal[0] == 0: 77 | assert isinstance(retVal[1], int) 78 | return retVal[1] 79 | else: 80 | return None 81 | 82 | def connectPF() -> Tuple[pf.Application, pf.IntPrj, pf.ComPython, int]: 83 | ''' 84 | Connects to the powerfactory application and returns the application, project and this script object. 85 | ''' 86 | app : Optional[pf.Application] = pf.GetApplicationExt() 87 | if not app: 88 | raise RuntimeError('No connection to powerfactory application') 89 | app.Show() 90 | app.ClearOutputWindow() 91 | app.PrintInfo(f'Powerfactory application connected externally. Executable: {sys.executable}') 92 | app.PrintInfo(f'Imported powerfactory module from {pf.__file__}') 93 | 94 | version : str = pf.__version__ 95 | pfVersion = 2000 + int(version.split('.')[0]) 96 | app.PrintInfo(f'Powerfactory version registred: {pfVersion}') 97 | 98 | project : Optional[pf.IntPrj] = app.GetActiveProject() #type: ignore 99 | 100 | if DEBUG: 101 | while project is None: 102 | time.sleep(1) 103 | project = app.GetActiveProject() #type: ignore 104 | 105 | assert project is not None 106 | 107 | networkData = app.GetProjectFolder('netdat') 108 | assert networkData is not None 109 | 110 | thisScript : pf.ComPython = networkData.SearchObject('MTB\\MTB\\execute.ComPython') #type: ignore 111 | assert thisScript is not None 112 | 113 | return app, project, thisScript, pfVersion 114 | 115 | def resetProjectUnits(project : pf.IntPrj) -> None: 116 | ''' 117 | Resets the project units to the default units. 118 | ''' 119 | SetPrj = project.SearchObject('Settings.SetFold') 120 | if SetPrj: 121 | SetPrj.Delete() 122 | 123 | project.Deactivate() 124 | project.Activate() 125 | 126 | def setupResFiles(app : pf.Application, script : pf.ComPython, root : pf.DataObject): 127 | ''' 128 | Setup the result files for the studycase. 129 | ''' 130 | elmRes : pf.ElmRes = app.GetFromStudyCase('ElmRes') #type: ignore 131 | assert elmRes is not None 132 | 133 | measurementBlock = root.SearchObject('measurements.ElmDsl') 134 | assert measurementBlock is not None 135 | 136 | elmRes.AddVariable(measurementBlock, 's:Ia_pu') 137 | elmRes.AddVariable(measurementBlock, 's:Ib_pu') 138 | elmRes.AddVariable(measurementBlock, 's:Ic_pu') 139 | elmRes.AddVariable(measurementBlock, 's:Vab_pu') 140 | elmRes.AddVariable(measurementBlock, 's:Vag_pu') 141 | elmRes.AddVariable(measurementBlock, 's:Vbc_pu') 142 | elmRes.AddVariable(measurementBlock, 's:Vbg_pu') 143 | elmRes.AddVariable(measurementBlock, 's:Vca_pu') 144 | elmRes.AddVariable(measurementBlock, 's:Vcg_pu') 145 | elmRes.AddVariable(measurementBlock, 's:f_hz') 146 | elmRes.AddVariable(measurementBlock, 's:neg_Id_pu') 147 | elmRes.AddVariable(measurementBlock, 's:neg_Imag_pu') 148 | elmRes.AddVariable(measurementBlock, 's:neg_Iq_pu') 149 | elmRes.AddVariable(measurementBlock, 's:neg_Vmag_pu') 150 | elmRes.AddVariable(measurementBlock, 's:pos_Id_pu') 151 | elmRes.AddVariable(measurementBlock, 's:pos_Imag_pu') 152 | elmRes.AddVariable(measurementBlock, 's:pos_Iq_pu') 153 | elmRes.AddVariable(measurementBlock, 's:pos_Vmag_pu') 154 | elmRes.AddVariable(measurementBlock, 's:ppoc_pu') 155 | elmRes.AddVariable(measurementBlock, 's:qpoc_pu') 156 | 157 | signals = [ 158 | 'mtb_s_pref_pu.ElmDsl', 159 | 'mtb_s_qref.ElmDsl', 160 | 'mtb_s_qref_q_pu.ElmDsl', 161 | 'mtb_s_qref_qu_pu.ElmDsl', 162 | 'mtb_s_qref_pf.ElmDsl', 163 | 'mtb_s_qref_3.ElmDsl', 164 | 'mtb_s_qref_4.ElmDsl', 165 | 'mtb_s_qref_5.ElmDsl', 166 | 'mtb_s_qref_6.ElmDsl', 167 | 'mtb_s_1.ElmDsl', 168 | 'mtb_s_2.ElmDsl', 169 | 'mtb_s_3.ElmDsl', 170 | 'mtb_s_4.ElmDsl', 171 | 'mtb_s_5.ElmDsl', 172 | 'mtb_s_6.ElmDsl', 173 | 'mtb_s_7.ElmDsl', 174 | 'mtb_s_8.ElmDsl', 175 | 'mtb_s_9.ElmDsl', 176 | 'mtb_s_10.ElmDsl' 177 | ] 178 | 179 | for signal in signals: 180 | signalObj = root.SearchObject(signal) 181 | assert signalObj is not None 182 | elmRes.AddVariable(signalObj, 's:yo') 183 | 184 | # Include measurement objects and set alias 185 | for i in range(1, 100): 186 | Meas_obj_n = script_GetExtObj(script, f'Meas_obj_{i}') 187 | if Meas_obj_n is not None: 188 | Meas_obj_n_signals = script_GetStr(script, f'Meas_obj_{i}_signals') 189 | assert Meas_obj_n_signals is not None 190 | Meas_obj_n_signals = Meas_obj_n_signals.split(';') 191 | 192 | for signal in Meas_obj_n_signals: 193 | if signal != '': 194 | elmRes.AddVariable(Meas_obj_n, signal) 195 | 196 | Meas_obj_n_alias = script_GetStr(script, f'Meas_obj_{i}_alias') 197 | assert Meas_obj_n_alias is not None 198 | Meas_obj_n.SetAttribute('for_name', Meas_obj_n_alias) 199 | 200 | def setupExport(app : pf.Application, filename : str): 201 | ''' 202 | Setup the export component for the studycase. 203 | ''' 204 | comRes : pf.ComRes = app.GetFromStudyCase('ComRes') #type: ignore 205 | elmRes : pf.ElmRes = app.GetFromStudyCase('ElmRes') #type: ignore 206 | assert comRes is not None 207 | assert elmRes is not None 208 | 209 | csvFileName = f'{filename}.csv' 210 | comRes.SetAttribute('pResult', elmRes) 211 | comRes.SetAttribute('iopt_exp', 6) 212 | comRes.SetAttribute('iopt_sep', 0) 213 | comRes.SetAttribute('ciopt_head', 1) 214 | comRes.SetAttribute('iopt_locn', 4) 215 | comRes.SetAttribute('dec_Sep', ',') 216 | comRes.SetAttribute('col_Sep', ';') 217 | comRes.SetAttribute('f_name', csvFileName) 218 | 219 | def setupPlots(app : pf.Application, root : pf.DataObject): 220 | ''' 221 | Setup the plots for the studycase. 222 | ''' 223 | measurementBlock = root.SearchObject('measurements.ElmDsl') 224 | assert measurementBlock is not None 225 | 226 | board : pf.SetDesktop = app.GetFromStudyCase('SetDesktop') #type: ignore 227 | assert board is not None 228 | 229 | plots : List[pf.GrpPage]= board.GetContents('*.GrpPage',1) #type: ignore 230 | 231 | for p in plots: 232 | p.RemovePage() 233 | 234 | # Create pages 235 | plotPage : pf.GrpPage = board.GetPage('Plot', 1, 'GrpPage') #type: ignore 236 | assert plotPage is not None 237 | 238 | # PQ plot 239 | pqPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('PQ', 1) #type: ignore 240 | assert pqPlot is not None 241 | pqPlotDS : pf.PltDataseries = pqPlot.GetDataSeries() #type: ignore 242 | assert pqPlotDS is not None 243 | pqPlotDS.AddCurve(measurementBlock, 's:ppoc_pu') 244 | pqPlotDS.AddCurve(measurementBlock, 's:qpoc_pu') 245 | pqPlot.DoAutoScale() 246 | 247 | # U plot 248 | uPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('U', 1) #type: ignore 249 | assert uPlot is not None 250 | uPlotDS : pf.PltDataseries = uPlot.GetDataSeries() #type: ignore 251 | assert uPlotDS is not None 252 | uPlotDS.AddCurve(measurementBlock, 's:pos_Vmag_pu') 253 | uPlotDS.AddCurve(measurementBlock, 's:neg_Vmag_pu') 254 | uPlot.DoAutoScale() 255 | 256 | # I plot 257 | iPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('I', 1) #type: ignore 258 | assert iPlot is not None 259 | iPlotDS : pf.PltDataseries = iPlot.GetDataSeries() #type: ignore 260 | assert iPlotDS is not None 261 | iPlotDS.AddCurve(measurementBlock, 's:pos_Id_pu') 262 | iPlotDS.AddCurve(measurementBlock, 's:pos_Iq_pu') 263 | iPlotDS.AddCurve(measurementBlock, 's:neg_Id_pu') 264 | iPlotDS.AddCurve(measurementBlock, 's:neg_Iq_pu') 265 | iPlot.DoAutoScale() 266 | 267 | # F plot 268 | fPlot : pf.PltLinebarplot = plotPage.GetOrInsertPlot('F', 1) #type: ignore 269 | assert fPlot is not None 270 | fPlotDS : pf.PltDataseries = fPlot.GetDataSeries() #type: ignore 271 | assert fPlotDS is not None 272 | fPlotDS.AddCurve(measurementBlock, 's:f_hz') 273 | fPlot.DoAutoScale() 274 | 275 | app.WriteChangesToDb() 276 | 277 | def addCustomSubscribers(thisScript : pf.ComPython, channels : List[si.Channel]) -> None: 278 | ''' 279 | Add custom subscribers to the channels. For example, references applied as parameter events directly to control blocks. 280 | ''' 281 | def getChnlByName(name : str) -> si.Channel: 282 | for ch in channels: 283 | if ch.name == name: 284 | return ch 285 | raise RuntimeError(f'Channel {name} not found.') 286 | 287 | custConfStr = script_GetStr(thisScript, 'sub_conf_str') 288 | assert isinstance(custConfStr, str) 289 | 290 | def convertToConfStr(param : str, signal : str) -> str: 291 | sub_obj = script_GetExtObj(thisScript, f'{param}_sub') 292 | sub_attrib = script_GetStr(thisScript, f'{param}_sub_attrib') 293 | assert isinstance(sub_attrib, str) 294 | if sub_obj is not None and sub_attrib != '': 295 | sub_scale = script_GetDouble(thisScript, f'{param}_sub_scale') 296 | assert isinstance(sub_scale, float) 297 | sub_signal = getChnlByName(f'{signal}') 298 | assert isinstance(sub_signal, si.Signal) 299 | return f'\\{sub_obj.GetFullName()}:{sub_attrib}={signal}:S~{sub_scale} * x' 300 | return '' 301 | 302 | pref_conf = convertToConfStr('Pref', 'mtb_s_pref_pu') 303 | qref1_conf = convertToConfStr('Qref_q', 'mtb_s_qref_q_pu') 304 | qref2_conf = convertToConfStr('Qref_qu', 'mtb_s_qref_qu_pu') 305 | qref3_conf = convertToConfStr('Qref_pf', 'mtb_s_qref_pf') 306 | custom1_conf = convertToConfStr('Custom1', 'mtb_s_1') 307 | custom2_conf = convertToConfStr('Custom2', 'mtb_s_2') 308 | custom3_conf = convertToConfStr('Custom3', 'mtb_s_3') 309 | 310 | configs = custConfStr.split(';') + [pref_conf, qref1_conf, qref2_conf, qref3_conf, custom1_conf, custom2_conf, custom3_conf] 311 | 312 | confFilterStr = r"^([^:*?=\",~|\n\r]+):((?:\w:)?\w+(?::\d+)?)=(\w+):(S|s|S0|s0|R|r|T|t|C|c)~(.*)" 313 | confFilter = re.compile(confFilterStr) 314 | 315 | for config in configs: 316 | confFilterMatch = confFilter.match(config) 317 | if confFilterMatch is not None: 318 | obj = confFilterMatch.group(1) 319 | attrib = confFilterMatch.group(2) 320 | sub = confFilterMatch.group(3) 321 | typ = confFilterMatch.group(4) 322 | lamb = confFilterMatch.group(5) 323 | 324 | chnl = getChnlByName(sub) 325 | if isinstance(chnl, si.Signal): 326 | if typ.lower() == 's' or typ.lower() == 'c': 327 | chnl.addPFsub_S(obj, attrib, lambda _,x,l=lamb : eval(l)) 328 | elif typ.lower() == 's0': 329 | chnl.addPFsub_S0(obj, attrib, lambda _,x,l=lamb : eval(l)) #Not exactly safe 330 | elif typ.lower() == 'r': 331 | chnl.addPFsub_R(obj, attrib, lambda _,x,l=lamb : eval(l)) 332 | elif typ.lower() == 't': 333 | chnl.addPFsub_T(obj, attrib, lambda _,x,l=lamb : eval(l)) 334 | elif isinstance(chnl, si.Constant) or isinstance(chnl, si.PfObjRefer) or isinstance(chnl, si.String): 335 | chnl.addPFsub(obj, attrib) 336 | 337 | def main(): 338 | # Connect to Powerfactory 339 | app, project, thisScript, pfVersion = connectPF() 340 | 341 | # Check if any studycase is active 342 | currentStudyCase : Optional[pf.IntCase] = app.GetActiveStudyCase() #type: ignore 343 | 344 | if currentStudyCase is None: 345 | raise RuntimeError('Please activate a studycase.') 346 | 347 | studyTime : int = currentStudyCase.GetAttribute('iStudyTime') 348 | 349 | # Get and check for active grids 350 | networkData = app.GetProjectFolder('netdat') 351 | assert networkData is not None 352 | grids : List[pf.ElmNet] = networkData.GetContents('.ElmNet', 1) #type: ignore 353 | activeGrids = list(filter(lambda x : x.IsCalcRelevant(), grids)) 354 | 355 | if len(activeGrids) == 0: 356 | raise RuntimeError('No active grids.') 357 | 358 | # Make project backup 359 | project.CreateVersion(f'PRE_MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}') 360 | 361 | resetProjectUnits(project) 362 | currentStudyCase.Consolidate() 363 | 364 | netFolder = app.GetProjectFolder('netmod') 365 | assert netFolder is not None 366 | varFolder = app.GetProjectFolder('scheme') 367 | 368 | # Create variation folder 369 | if varFolder is None: 370 | varFolder = netFolder.CreateObject('IntPrjfolder', 'Variations') 371 | varFolder.SetAttribute('iopt_typ', 'scheme') 372 | 373 | # Create studycase folder 374 | studyCaseFolder = app.GetProjectFolder('study') 375 | if studyCaseFolder is None: 376 | studyCaseFolder = project.CreateObject('IntPrjfolder', 'Study Cases') 377 | studyCaseFolder.SetAttribute('iopt_typ', 'study') 378 | 379 | # Create task automation 380 | taskAuto : pf.ComTasks = studyCaseFolder.CreateObject('ComTasks') #type: ignore 381 | taskAuto.SetAttribute('iEnableParal', int(config.parallel)) 382 | taskAuto.SetAttribute('parMethod', 0) 383 | (taskAuto.GetAttribute('parallelSetting')).SetAttribute('procTimeOut', 3600) 384 | 385 | # Find root object 386 | root = thisScript.GetParent() 387 | 388 | # Read and setup cases from sheet 389 | pfInterface = si.PFencapsulation(app, root) 390 | plantSettings, channels, cases, maxRank, ___ = cs.setup(casesheetPath = config.sheetPath, 391 | pscad = False, 392 | pfEncapsulation = pfInterface) 393 | 394 | # Add user channel subscribers 395 | addCustomSubscribers(thisScript, channels) 396 | 397 | #Create export folder if it does not exist 398 | if not os.path.exists(config.exportPath): 399 | os.makedirs(config.exportPath) 400 | 401 | #Creating a datetime stamped subfolder 402 | datetimeFolder = f'MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}' 403 | 404 | #Create the folder for the PowerFactory CSV results 405 | csvFolder = os.path.join(config.exportPath, datetimeFolder) 406 | os.mkdir(csvFolder) 407 | 408 | # Find initializer script object 409 | initScript : pf.ComDpl = root.SearchObject('initializer_script.ComDpl') #type: ignore 410 | assert initScript is not None 411 | 412 | # List of created studycases for later activation 413 | studycases : List[pf.IntCase] = [] 414 | 415 | currentStudyCase.Deactivate() 416 | 417 | # Filter cases if Only_setup > 0 418 | onlySetup = script_GetInt(thisScript, 'Only_setup') 419 | assert isinstance(onlySetup, int) 420 | 421 | if onlySetup > 0: 422 | cases = list(filter(lambda x : x.rank == onlySetup, cases)) 423 | 424 | app.EchoOff() 425 | for case in cases: 426 | if case.RMS: 427 | # Set-up studycase, variation and balance 428 | caseName = f'{str(case.rank).zfill(len(str(maxRank)))}_{case.Name}'.replace('.', '') 429 | exportName = os.path.join(os.path.abspath(csvFolder), f'{plantSettings.Projectname}_{case.rank}') 430 | newStudycase : pf.IntCase = studyCaseFolder.CreateObject('IntCase', caseName) #type: ignore 431 | assert newStudycase is not None 432 | studycases.append(newStudycase) 433 | newStudycase.Activate() 434 | newStudycase.SetStudyTime(studyTime) 435 | 436 | # Activate the relevant networks 437 | for g in activeGrids: 438 | g.Activate() 439 | 440 | newVar : pf.IntScheme = varFolder.CreateObject('IntScheme', caseName) #type: ignore 441 | assert newVar is not None 442 | newStage : pf.IntSstage = newVar.CreateObject('IntSstage', caseName) #type: ignore 443 | assert newStage is not None 444 | newStage.SetAttribute('e:tAcTime', studyTime) 445 | newVar.Activate() 446 | newStage.Activate() 447 | 448 | si.applyToPowerfactory(channels, case.rank) 449 | 450 | initScript.Execute() 451 | 452 | ### WORKAROUND FOR QDSL FAILING WHEN IN MTB-GRID ### 453 | #TODO: REMOVE WHEN FIXED 454 | if config.QDSLcopyGrid != '': 455 | qdslInitializer = root.SearchObject('initializer_qdsl.ElmQdsl') 456 | assert qdslInitializer is not None 457 | for g in activeGrids: 458 | gridName = g.GetFullName() 459 | assert isinstance(gridName, str) 460 | if gridName.lower().endswith(f'{config.QDSLcopyGrid.lower()}.elmnet'): 461 | g.AddCopy(qdslInitializer) #type: ignore 462 | 463 | qdslInitializer.SetAttribute('outserv', 1) 464 | ### END WORKAROUND ### 465 | 466 | inc = app.GetFromStudyCase('ComInc') 467 | assert inc is not None 468 | sim = app.GetFromStudyCase('ComSim') 469 | assert sim is not None 470 | comRes : pf.ComRes = app.GetFromStudyCase('ComRes') #type: ignore 471 | assert comRes is not None 472 | 473 | taskAuto.AppendStudyCase(newStudycase) 474 | taskAuto.AppendCommand(inc, -1) 475 | taskAuto.AppendCommand(sim, -1) 476 | taskAuto.AppendCommand(comRes, -1) 477 | setupResFiles(app, thisScript, root) 478 | app.WriteChangesToDb() 479 | setupExport(app, exportName) 480 | app.WriteChangesToDb() 481 | newStudycase.Deactivate() 482 | app.WriteChangesToDb() 483 | 484 | app.EchoOn() 485 | 486 | if onlySetup == 0: 487 | taskAuto.Execute() 488 | 489 | if pfVersion >= 2024: 490 | for studycase in studycases: 491 | studycase.Activate() 492 | setupPlots(app, root) 493 | app.WriteChangesToDb() 494 | studycase.Deactivate() 495 | app.WriteChangesToDb() 496 | else: 497 | app.PrintWarn('Plot setup not supported for PowerFactory versions older than 2024.') 498 | 499 | # Create post run backup 500 | postBackup = script_GetInt(thisScript, 'Post_run_backup') 501 | assert isinstance(postBackup, int) 502 | if postBackup > 0: 503 | project.CreateVersion(f'POST_MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}') 504 | 505 | if __name__ == "__main__": 506 | main() -------------------------------------------------------------------------------- /execute_pscad.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Executes the Powerplant model testbench in PSCAD. 3 | ''' 4 | from __future__ import annotations 5 | import os 6 | import sys 7 | 8 | try: 9 | LOG_FILE = open('execute_pscad.log', 'w') 10 | except: 11 | print('Failed to open log file. Logging to file disabled.') 12 | LOG_FILE = None #type: ignore 13 | 14 | def print(*args): #type: ignore 15 | ''' 16 | Overwrites the print function to also write to a log file. 17 | ''' 18 | outputString = ''.join(map(str, args)) + '\n' #type: ignore 19 | sys.stdout.write(outputString) 20 | if LOG_FILE: 21 | LOG_FILE.write(outputString) 22 | LOG_FILE.flush() 23 | 24 | if __name__ == '__main__': 25 | print(sys.version) 26 | #Ensure right working directory 27 | executePath = os.path.abspath(__file__) 28 | executeFolder = os.path.dirname(executePath) 29 | os.chdir(executeFolder) 30 | if not executeFolder in sys.path: 31 | sys.path.append(executeFolder) 32 | print(f'CWD: {executeFolder}') 33 | print('sys.path:') 34 | for path in sys.path: 35 | if path != '': 36 | print(f'\t{path}') 37 | 38 | from configparser import ConfigParser 39 | 40 | class readConfig: 41 | def __init__(self) -> None: 42 | self.cp = ConfigParser(allow_no_value=True) 43 | self.cp.read('config.ini') 44 | self.parsedConf = self.cp['config'] 45 | self.sheetPath = str(self.parsedConf['Casesheet path']) 46 | self.pythonPath = str(self.parsedConf['Python path']) 47 | self.volley = int(self.parsedConf['Volley']) 48 | self.exportPath = str(self.parsedConf['Export folder']) 49 | 50 | config = readConfig() 51 | sys.path.append(config.pythonPath) 52 | 53 | from datetime import datetime 54 | import shutil 55 | import psutil #type: ignore 56 | from typing import List, Optional 57 | import sim_interface as si 58 | import case_setup as cs 59 | from pscad_update_ums import updateUMs 60 | 61 | try: 62 | import mhi.pscad 63 | except ImportError: 64 | print("Could not import mhi.pscad. Make sure PSCAD Automation Library is installed and available in your Python environment.") 65 | sys.exit(1) 66 | 67 | def connectPSCAD() -> mhi.pscad.PSCAD: 68 | pid = os.getpid() 69 | ports = [con.laddr.port for con in psutil.net_connections() if con.status == psutil.CONN_LISTEN and con.pid == pid] #type: ignore 70 | 71 | if len(ports) == 0: #type: ignore 72 | exit('No PSCAD listening ports found') 73 | elif len(ports) > 1: #type: ignore 74 | print('WARNING: Multiple PSCAD listening ports found. Using the first one.') 75 | 76 | return mhi.pscad.connect(port = ports[0]) #type: ignore 77 | 78 | def outToCsv(srcPath : str, dstPath : str): 79 | """ 80 | Converts PSCAD .out file into .csv file 81 | """ 82 | with open(srcPath) as out, \ 83 | open(dstPath, 'w') as csv: 84 | csv.writelines(','.join(line.split()) +'\n' for line in out) 85 | 86 | def moveFiles(srcPath : str, dstPath : str, types : List[str], suffix : str = '') -> None: 87 | ''' 88 | Moves files of the specified types from srcPath to dstPath. 89 | ''' 90 | for file in os.listdir(srcPath): 91 | _, typ = os.path.splitext(file) 92 | if typ in types: 93 | shutil.move(os.path.join(srcPath, file), os.path.join(dstPath, file + suffix)) 94 | 95 | def taskIdToRank(psoutFolder : str, projectName : str, emtCases : List[cs.Case], rank: int): 96 | ''' 97 | Changes task ID to rank of the .psout files in psoutFolder. 98 | ''' 99 | for file in os.listdir(psoutFolder): 100 | _, fileName = os.path.split(file) 101 | root, typ = os.path.splitext(fileName) 102 | if rank is None: 103 | if typ == '.psout_taskid' and root.startswith(projectName + '_'): 104 | suffix = root[len(projectName) + 1:] 105 | parts = suffix.split('_') 106 | if len(parts) > 0 and parts[0].isnumeric(): 107 | taskId = int(parts[0]) 108 | if taskId - 1 < len(emtCases): 109 | parts[0] = str(emtCases[taskId - 1].rank) 110 | newName = projectName + '_' + '_'.join(parts) + typ.replace('_taskid', '') 111 | print(f'Renaming {fileName} to {newName}') 112 | os.rename(os.path.join(psoutFolder, fileName), os.path.join(psoutFolder, newName)) 113 | else: 114 | print(f'WARNING: {fileName} has a task ID that is out of bounds. Ignoring file.') 115 | else: 116 | print(f'WARNING: {fileName} has an invalid task ID. Ignoring file.') 117 | else: 118 | if typ == '.psout_taskid': 119 | newName = f'{projectName}_{rank}.psout' 120 | else: 121 | print(f'WARNING: {fileName} is of unknown type. Ignoring file.') 122 | continue 123 | print(f'Renaming {fileName} to {newName}') 124 | os.rename(os.path.join(psoutFolder, fileName), os.path.join(psoutFolder, newName)) 125 | 126 | def cleanUpPsoutFiles(buildPath : str, exportPath : str, projectName : str) -> str: 127 | ''' 128 | Cleans up the build folder by moving .psout files to an time-stamped results folder in the export path. 129 | Return path to .psout folder. 130 | ''' 131 | # Create the exportPath if requied 132 | if not os.path.exists(exportPath): 133 | os.mkdir(exportPath) 134 | else: 135 | for dir in os.listdir(exportPath): 136 | _dir = os.path.join(exportPath, dir) 137 | if os.path.isdir(_dir) and dir.startswith('MTB_'): 138 | if os.listdir(_dir) == []: 139 | shutil.rmtree(_dir) 140 | 141 | #Creating a datetime stamped results subfolder 142 | resultsFolder = f'MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}' 143 | 144 | #Move .psout files away from build folder into results subfolder in the export folder 145 | psoutFolder = os.path.join(exportPath, resultsFolder) 146 | os.mkdir(psoutFolder) 147 | moveFiles(buildPath, psoutFolder, ['.psout'], '_taskid') 148 | 149 | return psoutFolder 150 | 151 | def cleanBuildfolder(buildPath : str): 152 | ''' 153 | "Cleans" the build folder by trying to delete it. 154 | ''' 155 | try: 156 | shutil.rmtree(buildPath) 157 | except FileNotFoundError: 158 | pass 159 | 160 | def findMTB(pscad : mhi.pscad.PSCAD) -> mhi.pscad.UserCmp: 161 | ''' 162 | Finds the MTB block in the project. 163 | ''' 164 | projectLst = pscad.projects() 165 | MTBcand : Optional[mhi.pscad.UserCmp] = None 166 | for prjDic in projectLst: 167 | if prjDic['type'].lower() == 'case': 168 | project = pscad.project(prjDic['name']) 169 | MTBs : List[mhi.pscad.UserCmp]= project.find_all(Name_='$MTB_9124$') #type: ignore 170 | if len(MTBs) > 0: 171 | if MTBcand or len(MTBs) > 1: 172 | exit('Multiple MTB blocks found in workspace.') 173 | else: 174 | MTBcand = MTBs[0] 175 | 176 | if not MTBcand: 177 | exit('No MTB block found in workspace.') 178 | return MTBcand 179 | 180 | def addInterfaceFile(project : mhi.pscad.Project): 181 | ''' 182 | Adds the interface file to the project. 183 | ''' 184 | resList = project.resources() 185 | for res in resList: 186 | if res.path == r'.\interface.f' or res.name == 'interface.f': 187 | return 188 | 189 | print('Adding interface.f to project') 190 | project.create_resource(r'.\interface.f') 191 | 192 | def main(): 193 | print() 194 | print('execute_pscad.py started at:', datetime.now().strftime('%Y-%m-%d %H:%M:%S'), '\n') 195 | pscad = connectPSCAD() 196 | 197 | plantSettings, channels, _, _, emtCases = cs.setup(config.sheetPath, pscad = True, pfEncapsulation = None) 198 | 199 | #Print plant settings from casesheet 200 | print('Plant settings:') 201 | for setting in plantSettings.__dict__: 202 | print(f'{setting} : {plantSettings.__dict__[setting]}') 203 | print() 204 | 205 | #Prepare MTB based on execution mode 206 | MTB = findMTB(pscad) 207 | project = pscad.project(MTB.project_name) 208 | caseList = [] 209 | for case in emtCases: 210 | caseList.append(case.rank) 211 | 212 | if MTB.parameters()['par_mode'] == 'VOLLEY': 213 | #Output ranks in relation to task 214 | print('---------EXECUTING VOLLEY MODE---------') 215 | print('Rank / Task ID / Casename:') 216 | for case in emtCases: 217 | print(f'{case.rank} / {emtCases.index(case) + 1} / {case.Name}') 218 | singleRank = None 219 | elif MTB.parameters()['par_mode'] == 'MANUAL' and MTB.parameters()['par_manualrank'] in caseList: 220 | #Output rank in relation to task id 221 | singleRank = MTB.parameters()['par_manualrank'] 222 | singleName = emtCases[caseList.index(MTB.parameters()['par_manualrank'])].Name 223 | print('---------EXECUTING MANUAL MODE---------') 224 | print(f'Excecuting only Rank {singleRank}: {singleName}') 225 | else: 226 | raise ValueError('Invalid rank selected for par_manualrank in MTB block.') 227 | 228 | print() 229 | si.renderFortran('interface.f', channels) 230 | 231 | #Set executed flag 232 | MTB.parameters(executed = 1) #type: ignore 233 | 234 | #Update pgb names for all unit measurement components 235 | updateUMs(pscad) 236 | 237 | #Add interface file to project 238 | addInterfaceFile(project) 239 | 240 | buildFolder : str = project.temp_folder #type: ignore 241 | cleanBuildfolder(buildFolder) #type: ignore 242 | 243 | project.parameters(time_duration = 999, time_step = plantSettings.PSCAD_Timestep, sample_step = '1000') #type: ignore 244 | project.parameters(PlotType = '2', output_filename = f'{plantSettings.Projectname}.psout') #type: ignore 245 | project.parameters(SnapType='0', SnapTime='2', snapshot_filename='pannatest5us.snp') #type: ignore 246 | 247 | pscad.remove_all_simulation_sets() 248 | pmr = pscad.create_simulation_set('MTB') 249 | pmr.add_tasks(MTB.project_name) 250 | project_pmr = pmr.task(MTB.project_name) 251 | project_pmr.parameters(ammunition = len(emtCases) if MTB.parameters()['par_mode'] == 'VOLLEY' else 1 , volley = config.volley, affinity_type = '2') #type: ignore 252 | 253 | pscad.run_simulation_sets('MTB') #type: ignore ??? By sideeffect changes current working directory ??? 254 | os.chdir(executeFolder) 255 | 256 | psoutFolder = cleanUpPsoutFiles(buildFolder, config.exportPath, plantSettings.Projectname) 257 | print() 258 | taskIdToRank(psoutFolder, plantSettings.Projectname, emtCases, singleRank) 259 | 260 | print('execute_pscad.py finished at: ', datetime.now().strftime('%m-%d %H:%M:%S')) 261 | 262 | if __name__ == '__main__': 263 | main() 264 | 265 | if LOG_FILE: 266 | LOG_FILE.close() -------------------------------------------------------------------------------- /interface.f: -------------------------------------------------------------------------------- 1 | ! -------------------------------------------------------------------------------- /plotter/Case.py: -------------------------------------------------------------------------------- 1 | from typing import List, Tuple, Union 2 | 3 | 4 | class Case: 5 | def __init__(self, case: 'pd.Series[Union[str, int, float, bool]]') -> None: 6 | self.rank: int = int(case['Rank']) 7 | self.RMS: bool = bool(case['RMS']) 8 | self.EMT: bool = bool(case['EMT']) 9 | self.Name: str = str(case['Name']) 10 | self.U0: float = float(case['U0']) 11 | self.P0: float = float(case['P0']) 12 | self.Pmode: str = str(case['Pmode']) 13 | self.Qmode: str = str(case['Qmode']) 14 | self.Qref0: float = float(case['Qref0']) 15 | self.SCR0: float = float(case['SCR0']) 16 | self.XR0: float = float(case['XR0']) 17 | self.Simulationtime: float = float(case['Simulationtime']) 18 | self.Events : List[Tuple[str, float, Union[float, str], Union[float, str]]] = [] 19 | 20 | index : pd.Index[str] = case.index # type: ignore 21 | i = 0 22 | while(True): 23 | typeLabel = f'type.{i}' if i > 0 else 'type' 24 | timeLabel = f'time.{i}' if i > 0 else 'time' 25 | x1Label = f'X1.{i}' if i > 0 else 'X1' 26 | x2Label = f'X2.{i}' if i > 0 else 'X2' 27 | 28 | if typeLabel in index and timeLabel in index and x1Label in index and x2Label in index: 29 | try: 30 | x1value = float(str(case[x1Label]).replace(' ','')) 31 | except ValueError: 32 | x1value = str(case[x1Label]) 33 | 34 | try: 35 | x2value = float(str(case[x2Label]).replace(' ','')) 36 | except ValueError: 37 | x2value = str(case[x2Label]) 38 | 39 | self.Events.append((str(case[typeLabel]), float(case[timeLabel]), x1value, x2value)) 40 | i += 1 41 | else: 42 | break -------------------------------------------------------------------------------- /plotter/Cursor.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | 4 | class Cursor: 5 | def __init__(self, 6 | id: int, 7 | title: str, 8 | cursor_options: List[str], 9 | emt_signals: List[str], 10 | rms_signals: List[str], 11 | time_ranges: List[int]) -> None: 12 | self.id = id 13 | self.title = title 14 | self.cursor_options = cursor_options 15 | self.emt_signals = emt_signals 16 | self.rms_signals = rms_signals 17 | self.time_ranges = time_ranges -------------------------------------------------------------------------------- /plotter/Figure.py: -------------------------------------------------------------------------------- 1 | from down_sampling_method import DownSamplingMethod 2 | from typing import List 3 | 4 | 5 | class Figure: 6 | def __init__(self, 7 | id: int, 8 | title: str, 9 | units: str, 10 | emt_signal_1: str, 11 | emt_signal_2: str, 12 | emt_signal_3: str, 13 | rms_signal_1: str, 14 | rms_signal_2: str, 15 | rms_signal_3: str, 16 | gradient_threshold: float, 17 | down_sampling_method: DownSamplingMethod, 18 | include_in_case: List[int], 19 | exclude_in_case: List[int]) -> None: 20 | self.id = id 21 | self.title = title 22 | self.units = units 23 | self.emt_signal_1 = emt_signal_1 24 | self.emt_signal_2 = emt_signal_2 25 | self.emt_signal_3 = emt_signal_3 26 | self.rms_signal_1 = rms_signal_1 27 | self.rms_signal_2 = rms_signal_2 28 | self.rms_signal_3 = rms_signal_3 29 | self.gradient_threshold = float(gradient_threshold) 30 | self.down_sampling_method = down_sampling_method 31 | self.include_in_case: List[int] = include_in_case 32 | self.exclude_in_case: List[int] = exclude_in_case -------------------------------------------------------------------------------- /plotter/Result.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | 4 | class ResultType(Enum): 5 | RMS = 0 #PowerFactory standard output 6 | EMT_INF = 1 #PSCAD legacy .inf/.csv support 7 | EMT_PSOUT = 2 #PSCAD .psout 8 | EMT_CSV = 3 #PSCAD .psout -> .csv support 9 | EMT_ZIP = 4 #PSCAD .psout -> .zip, .gz, .bz2 and .xz support 10 | 11 | 12 | class Result: 13 | def __init__(self, typ : ResultType, rank : int, projectName : str, bulkname : str, fullpath : str, group : str) -> None: 14 | self.typ = typ 15 | self.rank = rank 16 | self.projectName = projectName 17 | self.bulkname = bulkname 18 | self.fullpath = fullpath 19 | self.group = group 20 | self.shorthand = f'{group}\\{projectName}' 21 | -------------------------------------------------------------------------------- /plotter/config.ini: -------------------------------------------------------------------------------- 1 | [config] 2 | resultsDir = results 3 | genHTML = True 4 | genImage = True 5 | imageFormat = png 6 | htmlColumns = 1 7 | imageColumns = 3 8 | htmlCursorColumns = 1 9 | imageCursorColumns = 1 10 | threads = 10 11 | pfFlatTime = 0.1 12 | pscadInitTime = 3.5 13 | optionalCasesheet = ..\testcases.xlsx 14 | 15 | [Simulation data paths] 16 | Path1LegendName = ..\export\MTB_26052025142441 17 | Path2LegendName = ..\export\MTB_28052025100949 -------------------------------------------------------------------------------- /plotter/cursor_image_logic.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | import pandas as pd 3 | from plotly.subplots import make_subplots # type: ignore 4 | import plotly.graph_objects as go # type: ignore 5 | from typing import List, Tuple 6 | import plot_cursor_functions 7 | from Result import ResultType 8 | from Cursor import Cursor 9 | from math import ceil 10 | from Result import Result 11 | from read_and_write_functions import loadEMT 12 | from process_psout import getSignals 13 | 14 | def addCursors(htmlPlots: List[go.Figure], 15 | resultList: List[Result], 16 | cursorDict: List[Cursor], 17 | pfFlatTIme: float, 18 | pscadInitTime: float, 19 | rank: int, 20 | nColumns: int, 21 | emtRankSignalnamesList: List): 22 | 23 | cursor_settings = [i for i in cursorDict if i.id == rank] 24 | if len(cursor_settings) == 0: 25 | return list() 26 | 27 | # Initialize subplot positions 28 | fi = -1 # Start index from -1 as it is incremented before use 29 | for cursor_setting in cursor_settings: 30 | # Loop through rank settings 31 | totalRawSigNames = [] 32 | time_ranges = getattr(cursor_setting, 'time_ranges') 33 | cursor_options = getattr(cursor_setting, 'cursor_options') 34 | # Increment plot index 35 | fi += 1 36 | 37 | # Select the correct plot 38 | plot = htmlPlots[fi] if nColumns == 1 else htmlPlots[0] 39 | 40 | x = [] 41 | y = [] 42 | for result in resultList: 43 | signalKey = result.typ.name.lower().split('_')[0] 44 | rawSigNames = getattr(cursor_setting, f'{signalKey}_signals') 45 | totalRawSigNames.extend(rawSigNames) 46 | data = None 47 | if result.typ == ResultType.RMS: 48 | data: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',', header=[0, 1]) # type: ignore 49 | elif result.typ == ResultType.EMT_INF: 50 | data: pd.DataFrame = loadEMT(result.fullpath) 51 | elif result.typ == ResultType.EMT_PSOUT: 52 | data: pd.DataFrame = getSignals(result.fullpath, emtRankSignalnamesList) 53 | elif result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP: 54 | data: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',') # type: ignore 55 | if len(rawSigNames) == 0: 56 | continue 57 | for rawSigName in rawSigNames: 58 | if result.typ == ResultType.RMS: 59 | # Remove hash and split signal name 60 | while rawSigName.startswith('#'): 61 | rawSigName = rawSigName[1:] 62 | splitSigName = rawSigName.split('\\') 63 | 64 | if len(splitSigName) == 2: 65 | sigColumn = ('##' + splitSigName[0], splitSigName[1]) 66 | else: 67 | sigColumn = rawSigName 68 | else: 69 | sigColumn = rawSigName 70 | 71 | # Determine the time column and offset based on the type 72 | timeColName = 'time' if result.typ == ResultType.EMT_INF or result.typ == ResultType.EMT_PSOUT or result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP else data.columns[0] 73 | timeoffset = pfFlatTIme if result.typ == ResultType.RMS else pscadInitTime 74 | 75 | if sigColumn in data.columns: 76 | # Get the signal data and time values 77 | x.extend(data[timeColName] - timeoffset) # type: ignore 78 | y.extend(data[sigColumn]) # type: ignore 79 | 80 | # Filter the data based on the time_ranges 81 | if len(y) != 0: 82 | x = pd.Series(x) 83 | y = pd.Series(y) 84 | index_number = fi if nColumns != 1 else 0 85 | plot_cursor_functions.add_text_subplot(plot, x, y, cursor_options, index_number, time_ranges, totalRawSigNames) 86 | 87 | return htmlPlots 88 | 89 | 90 | def setupPlotLayoutCursors(config, ranksCursor: List, htmlPlots: List[go.Figure], 91 | imagePlots: List[go.Figure]): 92 | lst: List[Tuple[int, List[go.Figure]]] = [] 93 | 94 | if config.genHTML: 95 | lst.append((config.htmlCursorColumns, htmlPlots)) 96 | if config.genImage: 97 | lst.append((config.imageCursorColumns, imagePlots)) 98 | 99 | for columnNr, plotList in lst: 100 | if columnNr == 1: 101 | for rankCursor in ranksCursor: 102 | # Prepare cursor data for the table 103 | table = create_cursor_table() 104 | 105 | # Create a figure to contain the table 106 | fig_table = go.Figure(data=[table]) 107 | fig_table.update_layout(title=rankCursor.title, height=140*max(len(rankCursor.cursor_options), 1)) 108 | plotList.append(fig_table) 109 | 110 | elif columnNr > 1: 111 | num_rows = ceil(len(ranksCursor) / columnNr) 112 | titles = [rankCursor.title for rankCursor in ranksCursor] # Gather titles for each table 113 | 114 | # Create subplots specifically for tables 115 | fig_subplots = make_subplots(rows=num_rows, cols=columnNr, 116 | subplot_titles=titles, 117 | specs=[[{'type': 'table'} for _ in range(columnNr)] for _ in 118 | range(num_rows)]) # Define all as table subplots 119 | height_to_use = 500 120 | for i, rankCursor in enumerate(ranksCursor): 121 | # Prepare cursor data for the table 122 | table = create_cursor_table() 123 | 124 | # Add table to the subplot layout 125 | fig_subplots.add_trace(table, row=i // columnNr + 1, col=i % columnNr + 1) 126 | 127 | # Update the layout of the subplot figure 128 | height_to_use = max(500*len(rankCursor.cursor_options), height_to_use) 129 | fig_subplots.update_layout(height=height_to_use) 130 | 131 | plotList.append(fig_subplots) 132 | 133 | 134 | def create_cursor_table(): 135 | cursor_data = [{'type': 'None', 'signals': 'None', 'time_values': 'None', 'value': 'None'}] 136 | # Prepare data for the table, including two additional placeholder columns 137 | table_data = [ 138 | [cursor['type'], cursor['signals'], cursor['time_values'], cursor['value']] for cursor in cursor_data 139 | ] 140 | # Create the table with additional columns in the header and cells 141 | table = go.Table( 142 | header=dict(values=["Cursor type", "Signals", "Cursor time points", "Values"], 143 | fill_color='paleturquoise', align='left'), 144 | cells=dict(values=list(zip(*table_data)), fill_color='lavender', align='left') 145 | ) 146 | return table 147 | -------------------------------------------------------------------------------- /plotter/cursor_type.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class CursorType(Enum): 4 | MIN_MAX = 1 5 | AVERAGE = 2 6 | 7 | @classmethod 8 | def from_string(cls, string : str): 9 | try: 10 | return cls[string.upper()] 11 | except KeyError: 12 | raise ValueError(f"{string} is not a valid {cls.__name__}") -------------------------------------------------------------------------------- /plotter/down_sampling_method.py: -------------------------------------------------------------------------------- 1 | from enum import Enum 2 | 3 | class DownSamplingMethod(Enum): 4 | GRADIENT = 1 5 | AMOUNT = 2 6 | NO_DOWN_SAMPLING = 3 7 | 8 | @classmethod 9 | def from_string(cls, string : str): 10 | try: 11 | return cls[string.upper()] 12 | except KeyError: 13 | raise ValueError(f"{string} is not a valid {cls.__name__}") -------------------------------------------------------------------------------- /plotter/figureSetup.csv: -------------------------------------------------------------------------------- 1 | figure;title;units;emt_signal_1;emt_signal_2;emt_signal_3;rms_signal_1;rms_signal_2;rms_signal_3;down_sampling_method;gradient_threshold;include_in_case;exclude_in_case 2 | 1;Vpp;pu;MTB\meas_Vab_pu;MTB\meas_Vbc_pu;MTB\meas_Vca_pu;meas\s:Vab_pu;meas\s:Vbc_pu;meas\s:Vca_pu;gradient;0.5;; 3 | 2;Vpg;pu;MTB\meas_Vag_pu;MTB\meas_Vbg_pu;MTB\meas_Vcg_pu;meas\s:Vag_pu;meas\s:Vbg_pu;meas\s:Vcg_pu;gradient;0.5;; 4 | 3;Vseq;pu;MTB\fft_pos_Vmag_pu;MTB\fft_neg_Vmag_pu;;meas\s:pos_Vmag_pu;meas\s:neg_Vmag_pu;;gradient;0.5;; 5 | 4;Itotal;pu;MTB\meas_Ia_pu;MTB\meas_Ib_pu;MTB\meas_Ic_pu;meas\s:Ia_pu;meas\s:Ib_pu;meas\s:Ic_pu;gradient;0.5;; 6 | 5;Iactive;pu;MTB\fft_pos_Id_pu;MTB\fft_neg_Id_pu;;meas\s:pos_Id_pu;meas\s:neg_Id_pu;;gradient;0.5;; 7 | 6;Ireactive;pu;MTB\fft_pos_Iq_pu;MTB\fft_neg_Iq_pu;;meas\s:pos_Iq_pu;meas\s:neg_Iq_pu;;gradient;0.5;; 8 | 7;Ppoc;pu;MTB\P_pu_PoC;MTB\mtb_s_pref_pu;;meas\s:ppoc_pu;;;gradient;0.5;; 9 | 8;Qpoc;pu;MTB\Q_pu_PoC;MTB\mtb_s_qref;;meas\s:qpoc_pu;;;gradient;0.5;; 10 | 9;F;Hz;MTB\pll_f_hz;;;meas\s:f_hz;;;gradient;0.5;; 11 | 10;Id_pll;pu;MTB\pll_pos_Id_pu;MTB\pll_neg_Id_pu;;;;;gradient;0.5;; 12 | 11;Iq_pll;pu;MTB\pll_pos_Iq_pu;MTB\pll_neg_Iq_pu;;;;;gradient;0.5;; 13 | 12;Terminal;pu;Unit_1\unit_fft_pos_Id_pu;Unit_1\unit_fft_pos_Iq_pu;Unit_1\unit_fft_pos_Vmag_pu;Unit_1\m:i1P:bus1 in p.u.;Unit_1\m:i1Q:bus1 in p.u.;Unit_1\m:u1:bus1 in p.u.;gradient;0.5;; 14 | 13;Instantaneous Voltage (pg);kV;MTB\meas_Vag_kV;MTB\meas_Vbg_kV;MTB\meas_Vcg_kV;;;;gradient;0.5;1,2,3,4,5,6,7,8,9,10,98; 15 | 14;Instantaneous Current (kA);kA;MTB\meas_Ia_kA;MTB\meas_Ib_kA;MTB\meas_Ic_kA;;;;gradient;0.5;1,2,3,4,5,6,7,8,9,10,98; 16 | -------------------------------------------------------------------------------- /plotter/plot_cursor_functions.py: -------------------------------------------------------------------------------- 1 | import plotly.graph_objects as go 2 | from typing import List 3 | from cursor_type import CursorType 4 | 5 | 6 | def min_max_value_text(x, y, time_ranges): 7 | if len(time_ranges) > 0: 8 | mask = (x >= time_ranges[0]) & (x < time_ranges[1]) if len(time_ranges) == 2 else (x >= time_ranges[0]) 9 | y = y[mask] 10 | x = x[mask] 11 | # Find the min and max of y 12 | min_y = y.min() 13 | max_y = y.max() 14 | 15 | # Find the corresponding x-values 16 | min_x = x[y.idxmin()] # x-value where y is minimum 17 | max_x = x[y.idxmax()] # x-value where y is maximum 18 | 19 | # Construct the text 20 | annotation_text = (f"Max: {max_y:.2f} at t = {max_x}
" 21 | f"Min: {min_y:.2f} at t = {min_x}
") 22 | return annotation_text 23 | 24 | 25 | def mean_value_text(x, y, time_ranges): 26 | if len(time_ranges) > 0: 27 | mask = (x >= time_ranges[0]) & (x < time_ranges[1]) if len(time_ranges) == 2 else (x >= time_ranges[0]) 28 | y = y[mask] 29 | x = x[mask] 30 | mean_y = sum(y) / len(y) 31 | annotation_text = f"Mean: {mean_y:.2f}
" 32 | return annotation_text 33 | 34 | 35 | def signals_text(rawSigNames): 36 | rawSigNames_text = "" 37 | for i, rawSigName in enumerate(rawSigNames): 38 | if i > 0: 39 | rawSigNames_text += "; " 40 | rawSigNames_text += f"{rawSigName}" 41 | return rawSigNames_text 42 | 43 | 44 | def time_ranges_text(time_ranges): 45 | time_ranges_text = "" 46 | for i in range(len(time_ranges)): 47 | if i > 0: 48 | time_ranges_text += ", " 49 | time_ranges_text += f"t{i}={time_ranges[i]}" 50 | return time_ranges_text 51 | 52 | 53 | # Function to append the text as a scatter trace to the provided figure 54 | def add_text_subplot(fig: go.Figure, x, y, cursor_types: List[CursorType], index_number, time_ranges, rawSigNames): 55 | table_data = fig.data[index_number].cells.values 56 | 57 | # Access the values for the specific cell in the table 58 | # The values are arranged in a way that we can access them based on rowPos and colPos 59 | cursor_type = table_data[0] 60 | signals = table_data[1] 61 | time_values = table_data[2] 62 | values = table_data[3] # Assuming the second entry contains the values 63 | 64 | # Append the annotation text to the corresponding value 65 | updated_values = values[:] 66 | updated_signals = signals[:] 67 | updated_time_values = time_values[:] 68 | updated_cursor_type = cursor_type[:] 69 | index = 0 70 | if CursorType.MIN_MAX in cursor_types: 71 | set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index, 72 | rawSigNames, time_ranges, "Min and Max values", min_max_value_text(x, y, time_ranges)) 73 | index += 1 74 | 75 | if CursorType.AVERAGE in cursor_types: 76 | set_or_append_value(updated_cursor_type, index, "Average values") 77 | set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index, 78 | rawSigNames, time_ranges, "Average values", mean_value_text(x, y, time_ranges)) 79 | index += 1 80 | 81 | # Update the table with the modified values 82 | fig.data[index_number].cells.values = [updated_cursor_type, updated_signals, updated_time_values, updated_values] 83 | 84 | return fig 85 | 86 | 87 | def set_or_append_cursor_data(updated_cursor_type, updated_signals, updated_time_values, updated_values, index, 88 | rawSigNames, time_ranges, cursor_type_text, cursor_value_text): 89 | set_or_append_value(updated_cursor_type, index, cursor_type_text) 90 | set_or_append_value(updated_signals, index, signals_text(rawSigNames)) 91 | set_or_append_value(updated_time_values, index, time_ranges_text(time_ranges)) 92 | set_or_append_value(updated_values, index, cursor_value_text) 93 | 94 | 95 | def set_or_append_value(list_to_update, index, value): 96 | """ 97 | Update a list by setting the value at a specific index if within bounds, 98 | or appending the value if the index is out of bounds. 99 | 100 | Args: 101 | list_to_update (list): The list to be updated. 102 | index (int): The index at which the value should be set or appended. 103 | value: The value to be set or appended. 104 | """ 105 | if index >= len(list_to_update): 106 | # Append if index is out of bounds 107 | list_to_update.append(value) 108 | else: 109 | # Set the value at the specified index 110 | list_to_update[index] = value 111 | -------------------------------------------------------------------------------- /plotter/plotter.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Minimal script to plot simulation results from PSCAD and PowerFactory. 3 | ''' 4 | from __future__ import annotations 5 | from os import listdir, makedirs 6 | from os.path import join, split, exists 7 | import re 8 | import pandas as pd 9 | from plotly.subplots import make_subplots # type: ignore 10 | import plotly.graph_objects as go # type: ignore 11 | from typing import List, Dict, Union, Tuple, Set 12 | import sampling_functions 13 | from down_sampling_method import DownSamplingMethod 14 | from threading import Thread, Lock 15 | import time 16 | import sys 17 | from math import ceil 18 | from collections import defaultdict 19 | from cursor_image_logic import addCursors, setupPlotLayoutCursors 20 | from read_configs import ReadConfig, readFigureSetup, readCursorSetup 21 | from Figure import Figure 22 | from Result import ResultType, Result 23 | from Case import Case 24 | from Cursor import Cursor 25 | from read_and_write_functions import loadEMT 26 | from process_psout import getAllSignalnames, getCaseSignalnames, getSignals 27 | 28 | try: 29 | LOG_FILE = open('plotter.log', 'w') 30 | except: 31 | print('Failed to open log file. Logging to file disabled.') 32 | LOG_FILE = None # type: ignore 33 | 34 | gLock = Lock() 35 | 36 | 37 | def print(*args): # type: ignore 38 | ''' 39 | Overwrites the print function to also write to a log file. 40 | ''' 41 | gLock.acquire() 42 | outputString = ''.join(map(str, args)) + '\n' # type: ignore 43 | sys.stdout.write(outputString) 44 | if LOG_FILE: 45 | try: 46 | LOG_FILE.write(outputString) 47 | LOG_FILE.flush() 48 | except: 49 | pass 50 | gLock.release() 51 | 52 | 53 | def idFile(filePath: str) -> Tuple[ 54 | Union[ResultType, None], Union[int, None], Union[str, None], Union[str, None], Union[str, None]]: 55 | ''' 56 | Identifies the type (EMT or RMS), root and case id of a given file. If the file is not recognized, a none tuple is returned. 57 | ''' 58 | path, fileName = split(filePath) 59 | match = re.match(r'^(\w+?)_([0-9]+).(inf|csv|psout|zip|gz|bz2|xz)$', fileName.lower()) 60 | if match: 61 | rank = int(match.group(2)) 62 | projectName = match.group(1) 63 | bulkName = join(path, match.group(1)) 64 | fullpath = filePath 65 | if match.group(3) == 'psout': 66 | fileType = ResultType.EMT_PSOUT 67 | return (fileType, rank, projectName, bulkName, fullpath) 68 | elif match.group(3) == 'zip' or match.group(3) == 'gz' or match.group(3) == 'bz2' or match.group(3) == 'xz': 69 | fileType = ResultType.EMT_ZIP 70 | return (fileType, rank, projectName, bulkName, fullpath) 71 | else: 72 | with open(filePath, 'r') as file: 73 | firstLine = file.readline() 74 | if match.group(3) == 'inf' and firstLine.startswith('PGB(1)'): 75 | fileType = ResultType.EMT_INF 76 | return (fileType, rank, projectName, bulkName, fullpath) 77 | elif match.group(3) == 'csv' and firstLine.startswith('time;'): 78 | fileType = ResultType.EMT_CSV 79 | return (fileType, rank, projectName, bulkName, fullpath) 80 | elif match.group(3) == 'csv': 81 | secondLine = file.readline() 82 | if secondLine.startswith(r'"b:tnow in s"'): 83 | fileType = ResultType.RMS 84 | return (fileType, rank, projectName, bulkName, fullpath) 85 | 86 | return (None, None, None, None, None) 87 | 88 | 89 | def mapResultFiles(config: ReadConfig) -> Dict[int, List[Result]]: 90 | ''' 91 | Goes through all files in the given directories and maps them to a dictionary of cases. 92 | ''' 93 | files: List[Tuple[str, str]] = list() 94 | for dir_ in config.simDataDirs: 95 | for file_ in listdir(dir_[1]): 96 | files.append((dir_[0], join(dir_[1], file_))) 97 | 98 | results: Dict[int, List[Result]] = dict() 99 | 100 | for file in files: 101 | group = file[0] 102 | fullpath = file[1] 103 | typ, rank, projectName, bulkName, fullpath = idFile(fullpath) 104 | 105 | if typ is None: 106 | continue 107 | assert rank is not None 108 | assert projectName is not None 109 | assert bulkName is not None 110 | assert fullpath is not None 111 | 112 | newResult = Result(typ, rank, projectName, bulkName, fullpath, group) 113 | 114 | if rank in results.keys(): 115 | results[rank].append(newResult) 116 | else: 117 | results[rank] = [newResult] 118 | 119 | return results 120 | 121 | def colorMap(results: Dict[int, List[Result]]) -> Dict[str, List[str]]: 122 | ''' 123 | Select colors for the given projects. Return a dictionary with the project name as key and a list of colors as value. 124 | ''' 125 | colors = ['#e6194B', '#3cb44b', '#ffe119', '#4363d8', '#f58231', '#911eb4', '#42d4f4', '#f032e6', '#bfef45', 126 | '#fabed4', '#469990', '#dcbeff', '#9A6324', '#fffac8', '#800000', '#aaffc3', '#808000', '#ffd8b1', 127 | '#000075', '#a9a9a9', '#000000'] 128 | 129 | projects: Set[str] = set() 130 | 131 | for rank in results.keys(): 132 | for result in results[rank]: 133 | projects.add(result.shorthand) 134 | 135 | cMap: Dict[str, List[str]] = dict() 136 | 137 | if len(list(projects)) > 2: 138 | i = 0 139 | for p in list(projects): 140 | cMap[p] = [colors[i % len(colors)]] * 3 141 | i += 1 142 | return cMap 143 | else: 144 | i = 0 145 | for p in list(projects): 146 | cMap[p] = colors[i:i + 3] 147 | i += 3 148 | return cMap 149 | 150 | 151 | def addResults(plots: List[go.Figure], 152 | typ: ResultType, 153 | data: pd.DataFrame, 154 | figures: List[Figure], 155 | resultName: str, 156 | file: str, # Only for error messages 157 | colors: Dict[str, List[str]], 158 | nColumns: int, 159 | pfFlatTIme: float, 160 | pscadInitTime: float) -> None: 161 | ''' 162 | Add result to plot. 163 | ''' 164 | 165 | assert nColumns > 0 166 | 167 | if nColumns > 1: 168 | plotlyFigure = plots[0] 169 | else: 170 | assert len(plots) == len(figures) 171 | 172 | rowPos = 1 173 | colPos = 1 174 | fi = -1 175 | for figure in figures: 176 | fi += 1 177 | 178 | if nColumns == 1: 179 | plotlyFigure = plots[fi] 180 | else: 181 | rowPos = (fi // nColumns) + 1 182 | colPos = (fi % nColumns) + 1 183 | 184 | downsampling_method = figure.down_sampling_method 185 | traces = 0 186 | for sig in range(1, 4): 187 | signalKey = typ.name.lower().split('_')[0] 188 | rawSigName: str = getattr(figure, f'{signalKey}_signal_{sig}') 189 | 190 | if typ == ResultType.RMS: 191 | while rawSigName.startswith('#'): 192 | rawSigName = rawSigName[1:] 193 | splitSigName = rawSigName.split('\\') 194 | 195 | if len(splitSigName) == 2: 196 | sigColumn = ('##' + splitSigName[0], splitSigName[1]) 197 | else: 198 | sigColumn = rawSigName 199 | elif typ == ResultType.EMT_INF or typ == ResultType.EMT_CSV or typ == ResultType.EMT_ZIP: 200 | # uses only the signal name - last part of the hierarchical signal name 201 | rawSigName = rawSigName.split('\\')[-1] 202 | sigColumn = rawSigName 203 | elif typ == ResultType.EMT_PSOUT: 204 | # uses the full hierarchical signal name 205 | sigColumn = rawSigName 206 | else: 207 | print(f'File type: {typ} unknown') 208 | 209 | 210 | displayName = f'{resultName}:{rawSigName.split(" ")[0]}' 211 | 212 | timeColName = 'time' if typ == ResultType.EMT_INF or typ == ResultType.EMT_PSOUT or typ == ResultType.EMT_CSV or typ == ResultType.EMT_ZIP else data.columns[0] 213 | timeoffset = pfFlatTIme if typ == ResultType.RMS else pscadInitTime 214 | 215 | if sigColumn in data.columns: 216 | x_value = data[timeColName] - timeoffset # type: ignore 217 | y_value = data[sigColumn] # type: ignore 218 | if downsampling_method == DownSamplingMethod.GRADIENT: 219 | x_value, y_value = sampling_functions.downsample_based_on_gradient(x_value, y_value, 220 | figure.gradient_threshold) # type: ignore 221 | elif downsampling_method == DownSamplingMethod.AMOUNT: 222 | x_value, y_value = sampling_functions.down_sample(x_value, y_value) # type: ignore 223 | 224 | add_scatterplot_for_result(colPos, colors, displayName, nColumns, plotlyFigure, resultName, rowPos, 225 | traces, x_value, y_value) 226 | 227 | # plot_cursor_functions.add_annotations(x_value, y_value, plotlyFigure) 228 | traces += 1 229 | elif sigColumn != '': 230 | print(f'Signal "{rawSigName}" not recognized in resultfile: {file}') 231 | add_scatterplot_for_result(colPos, colors, f'{displayName} (Unknown)', nColumns, plotlyFigure, resultName, rowPos, 232 | traces, None, None) 233 | traces += 1 234 | 235 | update_y_and_x_axis(colPos, figure, nColumns, plotlyFigure, rowPos) 236 | 237 | 238 | def update_y_and_x_axis(colPos, figure, nColumns, plotlyFigure, rowPos): 239 | if nColumns == 1: 240 | yaxisTitle = f'[{figure.units}]' 241 | else: 242 | yaxisTitle = f'{figure.title}[{figure.units}]' 243 | if nColumns == 1: 244 | plotlyFigure.update_xaxes( # type: ignore 245 | title_text='Time[s]' 246 | ) 247 | plotlyFigure.update_yaxes( # type: ignore 248 | title_text=yaxisTitle 249 | ) 250 | else: 251 | plotlyFigure.update_xaxes( # type: ignore 252 | title_text='Time[s]', 253 | row=rowPos, col=colPos 254 | ) 255 | plotlyFigure.update_yaxes( # type: ignore 256 | title_text=yaxisTitle, 257 | row=rowPos, col=colPos 258 | ) 259 | 260 | 261 | def add_scatterplot_for_result(colPos, colors, displayName, nColumns, plotlyFigure, resultName, rowPos, traces, x_value, 262 | y_value): 263 | if nColumns == 1: 264 | plotlyFigure.add_trace( # type: ignore 265 | go.Scatter( 266 | x=x_value, 267 | y=y_value, 268 | #line_color=colors[resultName][traces], 269 | name=displayName, 270 | legendgroup=displayName, 271 | showlegend=True 272 | ) 273 | ) 274 | else: 275 | plotlyFigure.add_trace( # type: ignore 276 | go.Scatter( 277 | x=x_value, 278 | y=y_value, 279 | line_color=colors[resultName][traces], 280 | name=displayName, 281 | legendgroup=resultName, 282 | showlegend=True 283 | ), 284 | row=rowPos, col=colPos 285 | ) 286 | 287 | 288 | def drawPlot(rank: int, 289 | resultDict: Dict[int, List[Result]], 290 | figureDict: Dict[int, List[Figure]], 291 | caseDict: Dict[int, str], 292 | colorMap: Dict[str, List[str]], 293 | cursorDict: List[Cursor], 294 | config: ReadConfig, 295 | emtRankSignalnamesList: List): 296 | ''' 297 | Draws plots for html and static image export. 298 | ''' 299 | 300 | print(f'Drawing plot for rank {rank}.') 301 | 302 | resultList = resultDict.get(rank, []) 303 | rankList = list(resultDict.keys()) 304 | rankList.sort() 305 | figureList = figureDict[rank] 306 | ranksCursor = [i for i in cursorDict if i.id == rank] 307 | 308 | if resultList == [] or figureList == []: 309 | return 310 | 311 | figurePath = join(config.resultsDir, str(rank)) 312 | 313 | htmlPlots: List[go.Figure] = list() 314 | imagePlots: List[go.Figure] = list() 315 | htmlPlotsCursors: List[go.Figure] = list() 316 | imagePlotsCursors: List[go.Figure] = list() 317 | 318 | columnNr = setupPlotLayout(caseDict, config, figureList, htmlPlots, imagePlots, rank) 319 | if len(ranksCursor) > 0: 320 | setupPlotLayoutCursors(config, ranksCursor, htmlPlotsCursors, imagePlotsCursors) 321 | for result in resultList: 322 | print(result.typ) 323 | if result.typ == ResultType.RMS: 324 | resultData: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',', header=[0, 1]) # type: ignore 325 | elif result.typ == ResultType.EMT_INF: 326 | resultData: pd.DataFrame = loadEMT(result.fullpath) 327 | elif result.typ == ResultType.EMT_PSOUT: 328 | resultData: pd.DataFrame = getSignals(result.fullpath, emtRankSignalnamesList) 329 | elif result.typ == ResultType.EMT_CSV or result.typ == ResultType.EMT_ZIP: 330 | resultData: pd.DataFrame = pd.read_csv(result.fullpath, sep=';', decimal=',') # type: ignore 331 | else: 332 | continue 333 | if config.genHTML: 334 | addResults(htmlPlots, result.typ, resultData, figureList, result.shorthand, result.fullpath, colorMap, 335 | config.htmlColumns, config.pfFlatTIme, config.pscadInitTime) 336 | if config.genImage: 337 | addResults(imagePlots, result.typ, resultData, figureList, result.shorthand, result.fullpath, colorMap, 338 | config.imageColumns, config.pfFlatTIme, config.pscadInitTime) 339 | 340 | if config.genHTML: 341 | addCursors(htmlPlotsCursors, resultList, cursorDict, config.pfFlatTIme, config.pscadInitTime, 342 | rank, config.htmlCursorColumns, emtRankSignalnamesList) 343 | create_html(htmlPlots, htmlPlotsCursors, figurePath, caseDict[rank] if caseDict is not None else "", rank, config, rankList) 344 | print(f'Exported plot for rank {rank} to {figurePath}.html') 345 | 346 | if config.genImage: 347 | # Cursor plots are not currently supported for image export and commented out 348 | # addCursors(imagePlotsCursors, resultList, cursorDict, config.pfFlatTIme, config.pscadInitTime, 349 | # rank, config.imageCursorColumns) 350 | create_image_plots(columnNr, config, figureList, figurePath, imagePlots) 351 | # create_cursor_plots(config.htmlCursorColumns, config, figurePath, imagePlotsCursors, ranksCursor) 352 | print(f'Exported plot for rank {rank} to {figurePath}.{config.imageFormat}') 353 | 354 | print(f'Plot for rank {rank} done.') 355 | 356 | 357 | def create_image_plots(columnNr, config, figureList, figurePath, imagePlots): 358 | if columnNr == 1: 359 | # Combine all figures into a single plot, same as for nColumns > 1 but no grid needed 360 | combined_plot = make_subplots(rows=len(imagePlots), cols=1, 361 | subplot_titles=[fig.layout.title.text for fig in imagePlots]) 362 | 363 | for i, plot in enumerate(imagePlots): 364 | for trace in plot['data']: # Add each trace to the combined plot 365 | combined_plot.add_trace(trace, row=i + 1, col=1) 366 | 367 | # Copy over the x and y axis titles from the original plot 368 | combined_plot.update_xaxes(title_text=plot.layout.xaxis.title.text, row=i + 1, col=1) 369 | combined_plot.update_yaxes(title_text=plot.layout.yaxis.title.text, row=i + 1, col=1) 370 | 371 | # Explicitly set the width and height in the layout 372 | combined_plot.update_layout( 373 | height=500 * len(imagePlots), # Height adjusted based on number of plots 374 | width=2000, # Set the desired width here, adjust as needed 375 | showlegend=True, 376 | ) 377 | 378 | # Save the combined plot as a single image 379 | combined_plot.write_image(f'{figurePath}.{config.imageFormat}', height=500 * len(imagePlots), width=2000) 380 | 381 | else: 382 | # Combine all figures into a grid when nColumns > 1 383 | imagePlots[0].update_layout( 384 | height=500 * ceil(len(figureList) / columnNr), 385 | width=500 * config.imageColumns, # Adjust width based on column number 386 | showlegend=True, 387 | ) 388 | imagePlots[0].write_image(f'{figurePath}.{config.imageFormat}', height=500 * ceil(len(figureList) / columnNr), 389 | width=500 * config.imageColumns) # type: ignore 390 | 391 | 392 | def create_cursor_plots(columnNr, config, figurePath, imagePlotsCursors, ranksCursor): 393 | # Handle the cursor plots (which are tables) 394 | if len(ranksCursor) > 0: 395 | cursor_path = figurePath + "_cursor" 396 | if columnNr == 1: 397 | # Create a combined plot for tables using the 'table' spec type 398 | combined_cursor_plot = make_subplots(rows=len(imagePlotsCursors), cols=1, 399 | specs=[[{"type": "table"}]] * len(imagePlotsCursors), 400 | # 'table' type for each subplot 401 | subplot_titles=[fig.layout.title.text for fig in imagePlotsCursors]) 402 | for i, cursor_plot in enumerate(imagePlotsCursors): 403 | for trace in cursor_plot['data']: # Add each trace (table) to the combined cursor plot 404 | combined_cursor_plot.add_trace(trace, row=i + 1, col=1) 405 | 406 | # Explicitly set width and height in the layout for table plots 407 | combined_cursor_plot.update_layout( 408 | height=500 * len(imagePlotsCursors), 409 | width=600, # Set the desired width for tables 410 | showlegend=False, 411 | ) 412 | 413 | # Save the combined table plot as a single image 414 | combined_cursor_plot.write_image(f'{cursor_path}.{config.imageFormat}', height=500 * len(imagePlotsCursors), 415 | width=600) 416 | else: 417 | imagePlotsCursors[0].update_layout( 418 | height=500 * ceil(len(ranksCursor) / columnNr), 419 | width=500 * config.imageColumns, # Adjust width for multiple columns 420 | showlegend=False, 421 | ) 422 | imagePlotsCursors[0].write_image(f'{cursor_path}.{config.imageFormat}', 423 | height=500 * ceil(len(ranksCursor) / columnNr), 424 | width=500 * config.imageColumns) 425 | 426 | 427 | def setupPlotLayout(caseDict, config, figureList, htmlPlots, imagePlots, rank): 428 | lst: List[Tuple[int, List[go.Figure]]] = [] 429 | if config.genHTML: 430 | lst.append((config.htmlColumns, htmlPlots)) 431 | if config.genImage: 432 | lst.append((config.imageColumns, imagePlots)) 433 | 434 | for columnNr, plotList in lst: 435 | if columnNr == 1: 436 | for fig in figureList: 437 | # Create a direct Figure instead of subplots when there's only 1 column 438 | plotList.append(go.Figure()) # Normal figure, no subplots 439 | plotList[-1].update_layout( 440 | title=fig.title, # Add the figure title directly 441 | height=500, # Set height for the plot 442 | legend=dict( 443 | orientation="h", 444 | yanchor="top", 445 | y=1.22, 446 | xanchor="left", 447 | x=0.12, 448 | ) 449 | ) 450 | elif columnNr > 1: 451 | plotList.append(make_subplots(rows=ceil(len(figureList) / columnNr), cols=columnNr)) 452 | plotList[-1].update_layout(height=500 * ceil(len(figureList) / columnNr)) # type: ignore 453 | if plotList == imagePlots and caseDict is not None: 454 | plotList[-1].update_layout(title_text=caseDict[rank]) # type: ignore 455 | return columnNr 456 | 457 | 458 | def create_css(resultsDir): 459 | 460 | css_path = join(resultsDir, "mtb.css") 461 | 462 | css_content = r'''body { 463 | font-family: Arial, Helvetica, sans-serif; 464 | } 465 | 466 | .navbar { 467 | overflow: hidden; 468 | background-color: #028B76; 469 | font-family: Arial, Helvetica, sans-serif; 470 | } 471 | 472 | .navbar { 473 | overflow: hidden; 474 | background-color: #028B76; 475 | font-family: Arial, Helvetica, sans-serif; 476 | } 477 | 478 | .navbar a { 479 | float: left; 480 | font-size: 16px; 481 | color: white; 482 | text-align: center; 483 | padding: 14px 16px; 484 | text-decoration: none; 485 | } 486 | 487 | .dropdown { 488 | float: left; 489 | overflow: hidden; 490 | } 491 | 492 | .dropdown .dropbtn { 493 | font-size: 16px; 494 | border: none; 495 | outline: none; 496 | color: white; 497 | padding: 14px 16px; 498 | background-color: inherit; 499 | font-family: inherit; 500 | margin: 0; 501 | } 502 | 503 | .navbar a:hover, .dropdown:hover .dropbtn { 504 | background-color: #ddd; 505 | color: black; 506 | } 507 | 508 | .dropdown-content { 509 | display: none; 510 | position: absolute; 511 | background-color: #f9f9f9; 512 | min-width: 160px; 513 | box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2); 514 | z-index: 1; 515 | } 516 | 517 | .dropdown-content a { 518 | float: none; 519 | color: black; 520 | padding: 12px 16px; 521 | text-decoration: none; 522 | display: block; 523 | text-align: left; 524 | } 525 | 526 | .dropdown-content a:hover { 527 | background-color: #ddd; 528 | } 529 | 530 | .dropdown:hover .dropdown-content { 531 | display: block;''' 532 | 533 | with open(f'{css_path}', 'w') as file: 534 | file.write(css_content) 535 | 536 | 537 | def create_html(plots: List[go.Figure], cursor_plots: List[go.Figure], path: str, title: str, rank: int, 538 | config: ReadConfig, rankList) -> None: 539 | 540 | source_list = '
' 541 | source_list += '

Source data:

' 542 | for group in config.simDataDirs: 543 | source_list += f'

{group[0]} = {group[1]}

' 544 | 545 | source_list += '
' 546 | 547 | html_content = create_html_plots(config.htmlColumns, plots, title, rank) 548 | html_content_cursors = create_html_plots(config.htmlCursorColumns, cursor_plots, "Relevant signal metrics", rank) if len( 549 | cursor_plots) > 0 else "" 550 | 551 | # Create Dropdown Content for the Navbar 552 | idx = 0 553 | dropdown_content = '' 554 | while idx < len(rankList): 555 | dropdown_content += f'Rank {rankList[idx]}\n' 556 | idx += 5 557 | 558 | # Determine the Previous and Next Rank html page for the Navbar 559 | idx = rankList.index(rank) 560 | rankPrev = rankList[idx-1] 561 | rankNext = rankList[idx+1 if idx+1 < len(rankList) else 0] 562 | 563 | full_html_content = f''' 564 | 565 | 566 | 567 | 569 | 570 | 582 | 601 | {html_content} 602 | {html_content_cursors} 603 | {source_list} 604 |

Generated with Energinets Model Testbench

605 | 606 | ''' 607 | 608 | with open(f'{path}.html', 'w') as file: 609 | file.write(full_html_content) 610 | 611 | 612 | def create_html_plots(columns, plots, title, rank): 613 | if columns == 1: 614 | figur_links = '
' 615 | figur_links += '

Figures:

' 616 | for p in plots: 617 | plot_title: str = p['layout']['title']['text'] # type: ignore 618 | figur_links += f'{plot_title} ' 619 | 620 | figur_links += '
' 621 | else: 622 | figur_links = '' 623 | html_content = f'

Rank {rank}: {title}

' 624 | html_content += figur_links 625 | for p in plots: 626 | plot_title: str = p['layout']['title']['text'] # type: ignore 627 | html_content += f'
' + p.to_html(full_html=False, 628 | include_plotlyjs='cdn') + '
' # type: ignore 629 | return html_content 630 | 631 | 632 | def readCasesheet(casesheetPath: str) -> Dict[int, str]: 633 | ''' 634 | Reads optional casesheets and provides dict mapping rank to case title. 635 | ''' 636 | if not casesheetPath: 637 | return None 638 | try: 639 | pd.read_excel(casesheetPath, sheet_name='RfG cases', header=1) # type: ignore 640 | except FileNotFoundError: 641 | print(f'Casesheet not found at {casesheetPath}.') 642 | return dict() 643 | 644 | cases: List[Case] = list() 645 | for sheet in ['RfG', 'DCC', 'Unit', 'Custom']: 646 | dfc = pd.read_excel(casesheetPath, sheet_name=f'{sheet} cases', header=1) # type: ignore 647 | for _, case in dfc.iterrows(): # type: ignore 648 | cases.append(Case(case)) # type: ignore 649 | 650 | caseDict: Dict[int, str] = defaultdict(lambda: 'Unknown case') 651 | for case in cases: 652 | caseDict[case.rank] = case.Name 653 | return caseDict 654 | 655 | 656 | def main() -> None: 657 | config = ReadConfig() 658 | 659 | print('Starting plotter main thread') 660 | 661 | # Output config 662 | print('Configuration:') 663 | for setting in config.__dict__: 664 | print(f'\t{setting}: {config.__dict__[setting]}') 665 | 666 | print() 667 | 668 | resultDict = mapResultFiles(config) 669 | figureDict = readFigureSetup('figureSetup.csv') 670 | cursorDict = readCursorSetup('cursorSetup.csv') 671 | caseDict = readCasesheet(config.optionalCasesheet) 672 | emtAllSignalnamesDF = getAllSignalnames('figureSetup.csv') # Required to process .psout files 673 | colorSchemeMap = colorMap(resultDict) 674 | 675 | if not exists(config.resultsDir): 676 | makedirs(config.resultsDir) 677 | 678 | create_css(config.resultsDir) 679 | 680 | threads: List[Thread] = list() 681 | 682 | for rank in resultDict.keys(): 683 | emtRankSignalnamesList = getCaseSignalnames(emtAllSignalnamesDF, rank) # Required to process .psout files 684 | if config.threads > 1: 685 | threads.append(Thread(target=drawPlot, 686 | args=(rank, resultDict, figureDict, caseDict, colorSchemeMap, cursorDict, config, emtRankSignalnamesList))) 687 | else: 688 | drawPlot(rank, resultDict, figureDict, caseDict, colorSchemeMap, cursorDict, config, emtRankSignalnamesList) 689 | 690 | NoT = len(threads) 691 | if NoT > 0: 692 | sched = threads.copy() 693 | inProg: List[Thread] = [] 694 | 695 | while len(sched) > 0: 696 | for t in inProg: 697 | if not t.is_alive(): 698 | print(f'Thread {t.native_id} finished') 699 | inProg.remove(t) 700 | 701 | while len(inProg) < config.threads and len(sched) > 0: 702 | nextThread = sched.pop() 703 | nextThread.start() 704 | print(f'Started thread {nextThread.native_id}') 705 | inProg.append(nextThread) 706 | 707 | time.sleep(0.5) 708 | 709 | print('Finished plotter main thread') 710 | 711 | 712 | if __name__ == "__main__": 713 | main() 714 | 715 | if LOG_FILE: 716 | LOG_FILE.close() 717 | -------------------------------------------------------------------------------- /plotter/process_psout.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | A set of functions to process Manitoba Hydro International (MHI) PSOUT files. 4 | This library provides functions to read and process PSOUT files. 5 | It uses the signal names defined in a figure setup CSV file to extract relevant signals from the PSOUT files. 6 | It is designed to be used in conjunction with the Manitoba Hydro International (MHI) PSOUT File Reader Library. 7 | ''' 8 | 9 | import sys 10 | import numpy as np 11 | import pandas as pd 12 | try: 13 | import mhi.psout 14 | except ImportError: 15 | print("Could not import mhi.psout. Make sure Manitoba Hydro International (MHI) PSOUT File Reader Library is installed and available in your Python environment.") 16 | sys.exit(1) 17 | 18 | def getAllSignalnames(figureSetupPath): 19 | ''' 20 | Get all the EMT signal names required from figureSetup.csv for the figures in the HTML and PNG plotter output 21 | ''' 22 | 23 | figureSetupDF = pd.read_csv(figureSetupPath, sep=';') 24 | 25 | signamecase=[] 26 | for index, row in figureSetupDF.iterrows(): 27 | if pd.isnull(row['include_in_case']): 28 | if not pd.isnull(row['emt_signal_1']): signamecase.append([row['emt_signal_1'], np.nan]) 29 | if not pd.isnull(row['emt_signal_2']): signamecase.append([row['emt_signal_2'], np.nan]) 30 | if not pd.isnull(row['emt_signal_3']): signamecase.append([row['emt_signal_3'], np.nan]) 31 | else: 32 | if not pd.isnull(row['emt_signal_1']): signamecase.append([row['emt_signal_1'], row['include_in_case']]) 33 | if not pd.isnull(row['emt_signal_2']): signamecase.append([row['emt_signal_2'], row['include_in_case']]) 34 | if not pd.isnull(row['emt_signal_3']): signamecase.append([row['emt_signal_3'], row['include_in_case']]) 35 | 36 | return pd.DataFrame(signamecase, columns=['Signalname', 'Case']) 37 | 38 | 39 | def getCaseSignalnames(emtSignalnamesDF, case): 40 | ''' 41 | Get a list of required signalnames for the specific case 42 | ''' 43 | signalnames = list() 44 | for index, row in emtSignalnamesDF.iterrows(): 45 | if pd.isnull(row['Case']): 46 | signalnames.append(row['Signalname']) 47 | else: 48 | cases = [int(val) for val in row['Case'].split(',')] # The Cases are stored as comma separated string, and needs to be converted to a list 49 | if case in cases: 50 | signalnames.append(row['Signalname']) 51 | return signalnames 52 | 53 | 54 | def _getSignal(psoutFile, signalname): 55 | ''' 56 | Get time and trace/signal from the opened psout file 57 | This function should not be called directly 58 | ''' 59 | data_path = 'Root\\Main\\'+ signalname +'\\0' # figureSetup.csv uses '\\' to be consistend with PowerFactory (and MHI's Enerplot) 60 | data_path = data_path.replace('\\', '/') # But mhi.psout want to use '/' 61 | data = psoutFile.call(data_path) 62 | run = psoutFile.run(0) 63 | signal = list() 64 | for call in data.calls(): 65 | trace = run.trace(call) 66 | time = trace.domain.data 67 | signal.append(trace.data) 68 | 69 | return time, signal 70 | 71 | 72 | def getSignals(psoutFilePath, signalnames): 73 | ''' 74 | Get all signals from the .psout file who's name appear in signalnames list 75 | ''' 76 | signalnames_not_found = list() 77 | with mhi.psout.File(psoutFilePath) as psoutFile: 78 | t, _ = _getSignal(psoutFile, signalnames[0]) # Get time values to get the length of all the signals in the .psout file 79 | t = np.array(t) # Convert to a numpy array 80 | t = t.reshape(1,-1) # Reshape the t from (N,) to (1,N) 81 | signals = np.array(t) # Use as first row for the signals array 82 | for signalname in signalnames: 83 | try: 84 | _, signal = _getSignal(psoutFile, signalname) # Try to get each signal in the signalnames list 85 | signal = np.array(signal) # Convert to numpy array 86 | signals = np.append(signals, signal, axis=0) # And append to the signals array 87 | except: 88 | signalnames_not_found.append(signalname) # Make a list of all the signal names that could not be found 89 | 90 | # Remove the signal names that were not found from the signalnames list 91 | if len(signalnames_not_found) > 0: 92 | print(f"Warning: The following signal names were not found in the PSOUT file: {', '.join(signalnames_not_found)}") 93 | for signalname_not_found in signalnames_not_found: 94 | signalnames.remove(signalname_not_found) 95 | 96 | signalnames = ['time']+signalnames # Add the lable to be used for the time column 97 | 98 | return pd.DataFrame(np.transpose(signals), columns=signalnames) 99 | -------------------------------------------------------------------------------- /plotter/psout_to_csv.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ''' 3 | This is a script to convert Manitoba Hydro International (MHI) PSOUT files to CSV format. 4 | It reads the PSOUT files from a specified folder, extracts the required signals based on a figure setup CSV file, and writes the data to CSV files. 5 | It supports optional compression of the output files, i.e. .zip, .gz, .bz2, or .xz formats provided by Pandas' "to_csv" function. 6 | It is designed to be used in conjunction with the process_psout set of function and uses Manitoba Hydro International (MHI) PSOUT File Reader Library. 7 | It is designed to be run from the command line with various options for input and output paths, compression type, and verbosity. 8 | ''' 9 | __version__ = 1.0 10 | 11 | import os, glob, time 12 | from datetime import datetime 13 | from process_psout import getAllSignalnames, getCaseSignalnames, getSignals 14 | import argparse 15 | 16 | #-----------------------------------------------------------------------------# 17 | parser = argparse.ArgumentParser(prog = 'psout_to_csv', 18 | description = 'Convert .psout to .csv with optional compression if required') 19 | parser.add_argument('-p', '--psoutFolder', 20 | action = 'store', 21 | dest = 'psoutFolder', 22 | nargs = '?', 23 | metavar = 'PSOUTFOLDER', 24 | default = '..\\export\\MTB_16042025101945', 25 | help = 'the folder where the .psout files are located') 26 | parser.add_argument('-o', '--outputRootFolder', 27 | action = 'store', 28 | dest = 'outputRootFolder', 29 | nargs = '?', 30 | metavar = 'OUTPUTROOTFOLDER', 31 | default = '..\\export', 32 | help = 'the folder where the .psout files are located') 33 | parser.add_argument('-f', '--figureSetupPath', 34 | action = 'store', 35 | dest = 'figureSetupPath', 36 | nargs = '?', 37 | metavar = 'FIGURESETUPPATH', 38 | default = 'figureSetup.csv', 39 | help = 'the folder where the .psout files are located') 40 | parser.add_argument('-c', '--compressionType', 41 | action = 'store', 42 | dest = 'compressionType', 43 | type = str, 44 | nargs = '?', 45 | metavar = 'COMPRESSIONTYPE', 46 | default = '.csv', 47 | help = 'the output compression type e.g. .zip, .bx2, .gz or .xz') 48 | parser.add_argument('-q', '--quiet', 49 | action = 'store_true', 50 | dest = 'QUIET', 51 | default = False, 52 | help = 'run quietly') 53 | parser.add_argument('-v','--version', 54 | action='version', 55 | version='Version: %(prog)s '+str(__version__)) 56 | 57 | args = parser.parse_args() 58 | #-----------------------------------------------------------------------------# 59 | 60 | 61 | def convertPsouts(psoutFolder, csvFolder, outFileType, dfSignalnames): 62 | psoutFilesPath = glob.glob(os.path.join(psoutFolder,'*.psout')) 63 | os.mkdir(csvFolder) 64 | 65 | for psoutFilePath in psoutFilesPath: 66 | psoutFileNameExt = os.path.basename(psoutFilePath) 67 | psoutFileName = os.path.splitext(psoutFileNameExt)[0] 68 | projectname, case = psoutFileName.split('_') 69 | case = int(case) 70 | if not args.QUIET: print(f'Processing {psoutFileNameExt}') 71 | signalnames = getCaseSignalnames(dfSignalnames, case) 72 | dfSignals = getSignals(psoutFilePath, signalnames) 73 | dfSignals.set_index('time', inplace=True) 74 | if not args.QUIET: print(f'Writing {projectname}_{case:02}{outFileType}\n') 75 | dfSignals.to_csv(os.path.join(csvFolder, f'{projectname}_{case:02}{outFileType}'), sep=';', header=True, compression='infer', decimal=',') #Note: For a Danish computer, decimal=',' else numbers are read in incorrelty in Excel 76 | 77 | def main(): 78 | # For .psout post processing 79 | start = time.time() 80 | signalnamesDF = getAllSignalnames(args.figureSetupPath) 81 | outputFolder = os.path.join(args.outputRootFolder, f'MTB_{datetime.now().strftime(r"%d%m%Y%H%M%S")}') 82 | convertPsouts(args.psoutFolder, outputFolder, args.compressionType, signalnamesDF) 83 | end = time.time() 84 | elapsed = end - start 85 | print(f'Done! ({elapsed:.2f} s)') 86 | 87 | if __name__ == '__main__': 88 | main() -------------------------------------------------------------------------------- /plotter/read_and_write_functions.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from os.path import join, split, splitext 3 | from os import listdir 4 | from typing import Dict 5 | import re 6 | 7 | 8 | def loadEMT(infFile: str) -> pd.DataFrame: 9 | ''' 10 | Load EMT results from a collection of csv files defined by the given inf file. Returns a dataframe with index 'time'. 11 | ''' 12 | folder, filename = split(infFile) 13 | filename, fileext = splitext(filename) 14 | 15 | assert fileext.lower() == '.inf' 16 | 17 | adjFiles = listdir(folder) 18 | csvMap: Dict[int, str] = dict() 19 | pat = re.compile(r'^' + filename.lower() + r'(?:_([0-9]+))?.csv$') 20 | 21 | for file in adjFiles: 22 | rem = re.match(pat, file.lower()) 23 | if rem: 24 | if rem.group(1) is None: 25 | id = -1 26 | else: 27 | id = int(rem.group(1)) 28 | csvMap[id] = join(folder, file) 29 | csvMaps = list(csvMap.keys()) 30 | csvMaps.sort() 31 | 32 | df = pd.DataFrame() 33 | firstFile = True 34 | loadedColumns = 0 35 | for map in csvMaps: 36 | dfMap = pd.read_csv(csvMap[map], skiprows=1, header=None) # type: ignore 37 | if not firstFile: 38 | dfMap = dfMap.iloc[:, 1:] 39 | else: 40 | firstFile = False 41 | dfMap.columns = list(range(loadedColumns, loadedColumns + len(dfMap.columns))) 42 | loadedColumns = loadedColumns + len(dfMap.columns) 43 | df = pd.concat([df, dfMap], axis=1) # type: ignore 44 | 45 | columns = emtColumns(infFile) 46 | columns[0] = 'time' 47 | df = df[columns.keys()] 48 | df.rename(columns, inplace=True, axis=1) 49 | print(f"Loaded {infFile}, length = {df['time'].iloc[-1]}s") # type: ignore 50 | return df 51 | 52 | 53 | def emtColumns(infFilePath: str) -> Dict[int, str]: 54 | ''' 55 | Reads EMT result columns from the given inf file and returns a dictionary with the column number as key and the column name as value. 56 | ''' 57 | columns: Dict[int, str] = dict() 58 | with open(infFilePath, 'r') as file: 59 | for line in file: 60 | rem = re.match( 61 | r'^PGB\(([0-9]+)\) +Output +Desc="(\w+)" +Group="(\w+)" +Max=([0-9\-\.]+) +Min=([0-9\-\.]+) +Units="(\w*)" *$', 62 | line) 63 | if rem: 64 | columns[int(rem.group(1))] = rem.group(2) 65 | return columns -------------------------------------------------------------------------------- /plotter/read_configs.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List, Tuple 4 | import csv 5 | from Figure import Figure 6 | from Cursor import Cursor 7 | from collections import defaultdict 8 | from configparser import ConfigParser 9 | from down_sampling_method import DownSamplingMethod 10 | from cursor_type import CursorType 11 | 12 | 13 | class ReadConfig: 14 | def __init__(self) -> None: 15 | cp = ConfigParser() 16 | cp.read('config.ini') 17 | parsedConf = cp['config'] 18 | self.resultsDir = parsedConf['resultsDir'] 19 | self.genHTML = parsedConf.getboolean('genHTML') 20 | self.genImage = parsedConf.getboolean('genImage') 21 | self.htmlColumns = parsedConf.getint('htmlColumns') 22 | assert self.htmlColumns > 0 or not self.genHTML 23 | self.imageColumns = parsedConf.getint('imageColumns') 24 | assert self.imageColumns > 0 or not self.genImage 25 | self.htmlCursorColumns = parsedConf.getint('htmlCursorColumns') 26 | assert self.htmlCursorColumns > 0 or not self.genHTML 27 | self.imageCursorColumns = parsedConf.getint('imageCursorColumns') 28 | assert self.imageCursorColumns > 0 or not self.genImage 29 | self.imageFormat = parsedConf['imageFormat'] 30 | self.threads = parsedConf.getint('threads') 31 | assert self.threads > 0 32 | self.pfFlatTIme = parsedConf.getfloat('pfFlatTime') 33 | assert self.pfFlatTIme >= 0.1 34 | self.pscadInitTime = parsedConf.getfloat('pscadInitTime') 35 | assert self.pscadInitTime >= 1.0 36 | self.optionalCasesheet = parsedConf['optionalCasesheet'] 37 | self.simDataDirs : List[Tuple[str, str]] = list() 38 | simPaths = cp.items('Simulation data paths') 39 | for name, path in simPaths: 40 | self.simDataDirs.append((name, path)) 41 | 42 | 43 | def readFigureSetup(filePath: str) -> Dict[int, List[Figure]]: 44 | ''' 45 | Read figure setup file. 46 | ''' 47 | setup: List[Dict[str, str | List[int]]] = list() 48 | with open(filePath, newline='') as setupFile: 49 | setupReader = csv.DictReader(setupFile, delimiter=';') 50 | for row in setupReader: 51 | row['exclude_in_case'] = list( 52 | set([int(item.strip()) for item in row.get('exclude_in_case', '').split(',') if item.strip() != ''])) 53 | row['include_in_case'] = list( 54 | set([int(item.strip()) for item in row.get('include_in_case', '').split(',') if item.strip() != ''])) 55 | setup.append(row) 56 | 57 | figureList: List[Figure] = list() 58 | for figureStr in setup: 59 | figureList.append( 60 | Figure(int(figureStr['figure']), # type: ignore 61 | figureStr['title'], # type: ignore 62 | figureStr['units'], # type: ignore 63 | figureStr['emt_signal_1'], # type: ignore 64 | figureStr['emt_signal_2'], # type: ignore 65 | figureStr['emt_signal_3'], # type: ignore 66 | figureStr['rms_signal_1'], # type: ignore 67 | figureStr['rms_signal_2'], # type: ignore 68 | figureStr['rms_signal_3'], # type: ignore 69 | figureStr['gradient_threshold'], # type: ignore 70 | DownSamplingMethod.from_string(figureStr['down_sampling_method']), # type: ignore 71 | figureStr['include_in_case'], # type: ignore 72 | figureStr['exclude_in_case'])) # type: ignore 73 | 74 | defaultSetup = [fig for fig in figureList if fig.include_in_case == []] 75 | figDict: Dict[int, List[Figure]] = defaultdict(lambda: defaultSetup) 76 | 77 | for fig in figureList: 78 | if fig.include_in_case != []: 79 | for inc in fig.include_in_case: 80 | if not inc in figDict.keys(): 81 | figDict[inc] = defaultSetup.copy() 82 | figDict[inc].append(fig) 83 | else: 84 | for exc in fig.exclude_in_case: 85 | if not exc in figDict.keys(): 86 | figDict[exc] = defaultSetup.copy() 87 | figDict[exc].remove(fig) 88 | return figDict 89 | 90 | 91 | def readCursorSetup(filePath: str) -> List[Cursor]: 92 | ''' 93 | Read figure setup file. 94 | ''' 95 | setup: List[Dict[str, str | List]] = list() 96 | with open(filePath, newline='') as setupFile: 97 | setupReader = csv.DictReader(setupFile, delimiter=';') 98 | for row in setupReader: 99 | row['cursor_options'] = list( 100 | set([CursorType.from_string(str(item.strip())) for item in row.get('cursor_options', '').split(',') if item.strip() != ''])) 101 | row['emt_signals'] = list( 102 | set([str(item.strip()) for item in row.get('emt_signals', '').split(',') if item.strip() != ''])) 103 | row['rms_signals'] = list( 104 | set([str(item.strip()) for item in row.get('rms_signals', '').split(',') if item.strip() != ''])) 105 | row['time_ranges'] = list( 106 | set([float(item.strip()) for item in row.get('time_ranges', '').split(',') if item.strip() != ''])) 107 | setup.append(row) 108 | 109 | rankList: List[Cursor] = list() 110 | for rankStr in setup: 111 | rankList.append( 112 | Cursor(int(rankStr['rank']), # type: ignore 113 | str(rankStr['title']), 114 | rankStr['cursor_options'], # type: ignore 115 | rankStr['emt_signals'], 116 | rankStr['rms_signals'], 117 | rankStr['time_ranges'])) # type: ignore 118 | return rankList -------------------------------------------------------------------------------- /plotter/sampling_functions.py: -------------------------------------------------------------------------------- 1 | from tsdownsample import MinMaxLTTBDownsampler 2 | from typing import List, Tuple 3 | import numpy as np 4 | import pandas as pd 5 | 6 | 7 | def calculate_gradient(time, values): 8 | dt = np.gradient(time) 9 | dy = np.gradient(values) 10 | gradient = dy / dt 11 | return gradient 12 | 13 | 14 | def downsample_based_on_gradient(time, values, gradient_threshold): 15 | values = pd.to_numeric(values, errors='coerce') 16 | gradient = calculate_gradient(time, values) 17 | low_gradient_indices = np.where(np.abs(gradient) < gradient_threshold)[0] 18 | 19 | # Select points where the gradient is low (downsampled points) 20 | downsampled_indices = low_gradient_indices[::20] # Change 10 to control the downsampling rate 21 | 22 | # Combine low gradient downsampled points with high gradient points 23 | high_gradient_indices = np.where(np.abs(gradient) >= gradient_threshold)[0] 24 | combined_indices = np.unique(np.concatenate([downsampled_indices, high_gradient_indices])) 25 | 26 | downsampled_time = time[combined_indices] 27 | downsampled_values = values[combined_indices] 28 | 29 | return downsampled_time, downsampled_values 30 | 31 | 32 | def down_sample(data_x_axis: List[int], data_y_axis: List[int]) -> Tuple[List[int], List[int]]: 33 | if len(data_x_axis) < 100: 34 | return data_x_axis, data_y_axis 35 | downsample = MinMaxLTTBDownsampler().downsample(data_x_axis, data_y_axis, n_out=100) 36 | return data_x_axis[downsample], data_y_axis[downsample] 37 | 38 | 39 | #def get_down_sampling_method(fSetup: Dict[str, str]): 40 | # if 'down_sampling_method' in fSetup: 41 | # return DownSamplingMethod.from_string(str(fSetup['down_sampling_method'])) 42 | # else: 43 | # return DownSamplingMethod.NO_DOWN_SAMPLING -------------------------------------------------------------------------------- /powerfactory.pyi: -------------------------------------------------------------------------------- 1 | #type: ignore 2 | from typing import Any, Optional, overload 3 | from typing import List, Union # Added by Energinet 4 | from enum import Enum 5 | 6 | class DataObject: 7 | class AttributeType(Enum): 8 | INVALID = -1 9 | INTEGER = 0 10 | INTEGER_VEC = 1 11 | DOUBLE = 2 12 | DOUBLE_VEC = 3 13 | DOUBLE_MAT = 4 14 | OBJECT = 5 15 | OBJECT_VEC = 6 16 | STRING = 7 17 | STRING_VEC = 8 18 | INTEGER64 = 9 19 | INTEGER64_VEC = 10 20 | 21 | @overload 22 | def AddCopy(self, __objectToCopy: 'DataObject', *__partOfName: int|str) -> 'DataObject': ... 23 | @overload 24 | def AddCopy(self, __objectsToCopy: list) -> 'DataObject': ... 25 | 26 | def CopyData(self, __source: 'DataObject') -> None: ... 27 | 28 | def CreateObject(self, __className: str, *__objectNamePart: int|str) -> 'DataObject': ... 29 | 30 | def Delete(self) -> int: ... 31 | 32 | def Energize(self, __resetRA: Optional[int]) -> list: ... 33 | 34 | def GetAttribute(self, __name: str) -> Any: ... 35 | 36 | def GetAttributeDescription(self, __name: str, __short: int = 0) -> str: ... 37 | 38 | def GetAttributeLength(self, __name: str) -> int: ... 39 | 40 | def GetAttributeShape(self, __name: str) -> list: ... 41 | 42 | def GetAttributeType(self, __name: str) -> AttributeType: ... 43 | 44 | def GetAttributeUnit(self, __name: str) -> str: ... 45 | 46 | def GetAttributes(self) -> list: ... 47 | 48 | def GetChildren(self, __hiddenMode: int, __filter: Optional[str], __subfolders: Optional[int]) -> list: ... 49 | 50 | def GetClassName(self) -> str: ... 51 | 52 | def GetCombinedProjectSource(self) -> 'DataObject': ... 53 | 54 | def GetConnectedElements(self, __rBrk: Optional[int], __rDis: Optional[int], __rOut: Optional[int]) -> list: ... 55 | 56 | def GetConnectionCount(self, __includeNeutral: int = 0) -> int: ... 57 | 58 | def GetContents(self, __Name: Optional[str], __recursive: Optional[int]) -> List[DataObject]: ... #Edited by Energinet 59 | 60 | def GetControlledNode(self, __bus: int, __check: Optional[int]) -> list: ... 61 | 62 | def GetCubicle(self, __side: int) -> 'DataObject': ... 63 | 64 | def GetFullName(self, __type: Optional[int] = None) -> str: ... 65 | 66 | def GetImpedance(self, __refVoltage: float, __i3Trf: Optional[int]) -> list: ... 67 | 68 | def GetInom(self, __busIndex: int = 0, __inclChar: int = 0) -> float: ... 69 | 70 | def GetNode(self, __busIndex: int, __considerSwitches: int = 0) -> 'DataObject': ... 71 | 72 | def GetOperator(self) -> 'DataObject': ... 73 | 74 | def GetOwner(self) -> 'DataObject': ... 75 | 76 | def GetParent(self) -> 'DataObject': ... 77 | 78 | def GetReferences(self, __filter: str = '*', __includeSubsets: int = 0, __includeHiddenObjects: int = 0) -> list: ... 79 | 80 | def GetRegion(self) -> int: ... 81 | 82 | def GetSupplyingSubstations(self) -> list: ... 83 | 84 | def GetSupplyingTransformers(self) -> list: ... 85 | 86 | def GetSupplyingTrfstations(self) -> list: ... 87 | 88 | def GetSystemGrounding(self) -> int: ... 89 | 90 | def GetUnom(self, __busIndex: int = 0) -> float: ... 91 | 92 | def GetUserAttribute(self, __attName: str) -> list: ... 93 | 94 | def GetZeroImpedance(self, __refVoltage: float, __i3Trf: Optional[int]) -> list: ... 95 | 96 | def HasAttribute(self, __name: str) -> int: ... 97 | 98 | def HasReferences(self) -> int: ... 99 | 100 | def HasResults(self, __ibus: Optional[int]) -> int: ... 101 | 102 | def IsCalcRelevant(self) -> int: ... 103 | 104 | def IsDeleted(self) -> int: ... 105 | 106 | def IsEarthed(self) -> int: ... 107 | 108 | def IsEnergized(self) -> int: ... 109 | 110 | def IsHidden(self) -> int: ... 111 | 112 | def IsInFeeder(self, __Feeder: 'DataObject', __OptNested: int = 0) -> int: ... 113 | 114 | def IsNetworkDataFolder(self) -> int: ... 115 | 116 | def IsNode(self) -> int: ... 117 | 118 | def IsObjectActive(self, __time: int) -> int: ... 119 | 120 | def IsObjectModifiedByVariation(self, __considerADD: int, __considerDEL: int, __considerDELTA: int) -> int: ... 121 | 122 | def IsOutOfService(self) -> int: ... 123 | 124 | def IsReducible(self) -> int: ... 125 | 126 | def IsShortCircuited(self) -> int: ... 127 | 128 | def Isolate(self, __resetRA: Optional[int], __isolateCBs: Optional[int]) -> list: ... 129 | 130 | def MarkInGraphics(self, __searchAllDiagramsAndSelect: int = 0) -> None: ... 131 | 132 | @overload 133 | def Move(self, __objectToMove: 'DataObject') -> int: ... 134 | @overload 135 | def Move(self, __objectsToMove: list) -> int: ... 136 | 137 | @overload 138 | def PasteCopy(self, __objectToCopy: 'DataObject', __resetMissingReferences: int = 0) -> list: ... 139 | @overload 140 | def PasteCopy(self, __objectsToCopy: list) -> int: ... 141 | 142 | def PurgeUnusedObjects(self) -> None: ... 143 | 144 | def ReportUnusedObjects(self) -> None: ... 145 | 146 | def SearchObject(self, __name: str) -> Optional['DataObject']: ... #Optional[Union['DataObject', 'ComPython', 'ElmRes', 'IntPrj']]: ... #Edited by Energinet 147 | 148 | def SetAttribute(self, __name: str, __value: Any) -> None: ... 149 | 150 | def SetAttributeLength(self, __name: str, __length: int) -> int: ... 151 | 152 | def SetAttributeShape(self, __name: str, __shape: list) -> int: ... 153 | 154 | def SetAttributes(self, __values: list) -> None: ... 155 | 156 | def ShowEditDialog(self) -> int: ... 157 | 158 | def ShowModalSelectTree(self, __title: Optional[str], __filter: Optional[str]) -> 'DataObject': ... 159 | 160 | def SwitchOff(self, __resetRA: Optional[int], __simulateOnly: Optional[int]) -> list: ... 161 | 162 | def SwitchOn(self, __resetRA: Optional[int], __simulateOnly: Optional[int]) -> list: ... 163 | 164 | def WriteChangesToDb(self) -> None: ... 165 | 166 | 167 | class OutputWindow: 168 | class MessageType(Enum): 169 | Plain = 0 170 | Error = 1 171 | Warn = 2 172 | Info = 4 173 | 174 | def Clear(self) -> None: ... 175 | 176 | def Flush(self) -> None: ... 177 | 178 | @overload 179 | def GetContent(self) -> list: ... 180 | @overload 181 | def GetContent(self, __filter: MessageType) -> list: ... 182 | 183 | @overload 184 | def Print(self, __message: str) -> None: ... 185 | @overload 186 | def Print(self, __type: MessageType, __message: str) -> None: ... 187 | 188 | def Save(self, __filePath: str) -> None: ... 189 | 190 | def SetState(self, __newState: int) -> None: ... 191 | 192 | 193 | class Application: 194 | def ActivateProject(self, __name: str) -> int: ... 195 | 196 | def ActivateVariations(self, __variations: list) -> None: ... 197 | 198 | def ClearOutputWindow(self) -> None: ... 199 | 200 | def ClearRecycleBin(self) -> None: ... 201 | 202 | def CloseTableReports(self) -> None: ... 203 | 204 | def CommitTransaction(self) -> None: ... 205 | 206 | def ConvertGeometryStringToMDL(self, __orgString: str, __intGrfOrLayer: DataObject) -> str: ... 207 | 208 | def CreateFaultCase(self, __elms: set, __mode: int, __createEvt: int = 0, __folder: DataObject = None) -> int: ... 209 | 210 | @overload 211 | def CreateProject(self, __projectName: str, __gridName: str, __parent: DataObject = None) -> DataObject: ... 212 | @overload 213 | def CreateProject(self, __projectName: str, __parent: DataObject = None) -> DataObject: ... 214 | 215 | def DeactivateVariations(self, __variations: list) -> None: ... 216 | 217 | def DecodeColour(self, __encodedColour: int) -> list: ... 218 | 219 | def DefineTransferAttributes(self, __classname: str, __attributes: str) -> None: ... 220 | 221 | @overload 222 | def DeleteUntouchedObjects(self, __grid: DataObject) -> int: ... 223 | @overload 224 | def DeleteUntouchedObjects(self, __grids: list) -> int: ... 225 | 226 | def EchoOff(self) -> None: ... 227 | 228 | def EchoOn(self) -> None: ... 229 | 230 | def EncodeColour(self, __red: int, __green: int, __blue: int, __alpha: int = 255) -> int: ... 231 | 232 | def ExecuteCmd(self, __command: str) -> None: ... 233 | 234 | def FlushOutputWindow(self) -> None: ... 235 | 236 | def GetActiveCalculationStr(self) -> str: ... 237 | 238 | def GetActiveNetworkVariations(self) -> list: ... 239 | 240 | def GetActiveProject(self) -> Optional[DataObject]: ... #Edited by Energinet 241 | 242 | def GetActiveScenario(self) -> DataObject: ... 243 | 244 | def GetActiveScenarioScheduler(self) -> DataObject: ... 245 | 246 | def GetActiveStages(self, __variedFolder: DataObject = None) -> list: ... 247 | 248 | def GetActiveStudyCase(self) -> Optional[DataObject]: ... #Optional added by Energinet 249 | 250 | def GetAttributeDescription(self, __classname: str, __name: str, __short: int = 0) -> str: ... 251 | 252 | def GetAttributeUnit(self, __classname: str, __name: str) -> str: ... 253 | 254 | def GetBorderCubicles(self, __element: DataObject) -> list: ... 255 | 256 | def GetBrowserSelection(self) -> list: ... 257 | 258 | def GetCalcRelevantObjects(self, __nameFilter: str = "*.*", __includeOutOfService: int = 1, __topoElementsOnly: int = 0, __bAcSchemes: int = 0) -> list: ... 259 | 260 | def GetClassDescription(self, __name: str) -> str: ... 261 | 262 | def GetClassId(self, __className: str) -> int: ... 263 | 264 | def GetCurrentDiagram(self) -> DataObject: ... 265 | 266 | def GetCurrentScript(self) -> DataObject: ... 267 | 268 | def GetCurrentSelection(self) -> list: ... 269 | 270 | def GetCurrentUser(self) -> DataObject: ... 271 | 272 | def GetCurrentZoomScale(self) -> int: ... 273 | 274 | def GetDataFolder(self, __classname: str, __iCreate: int = 0) -> DataObject: ... 275 | 276 | def GetDesktop(self) -> DataObject: ... 277 | 278 | def GetDiagramSelection(self) -> list: ... 279 | 280 | def GetFlowOrientation(self) -> int: ... 281 | 282 | def GetFromStudyCase(self, __className: str) -> Optional['DataObject'] : ... #Optional[Union[DataObject,'ElmRes', 'SetDesktop']] : ... 283 | 284 | def GetGlobalLibrary(self, __ClassName: str = "") -> DataObject: ... 285 | 286 | def GetInstallationDirectory(self) -> str: ... 287 | 288 | def GetInterfaceVersion(self) -> int: ... 289 | 290 | def GetLanguage(self) -> str: ... 291 | 292 | def GetLocalLibrary(self, __ClassName: str = "") -> DataObject: ... 293 | 294 | def GetMem(self, __calculateDelta: int = 0, __inMegaByte: int = 0) -> int: ... 295 | 296 | def GetOutputWindow(self) -> OutputWindow: ... 297 | 298 | def GetProjectFolder(self, __type: str, __create: int = 0) -> Optional[DataObject]: ... #Optional added by Energinet 299 | 300 | def GetRandomNumber(self, __x1: Optional[float], __x2: Optional[float]) -> float: ... 301 | 302 | def GetRandomNumberEx(self, __distribution: int, __p1: Optional[float], __p2: Optional[float]) -> float: ... 303 | 304 | def GetRecordingStage(self) -> DataObject: ... 305 | 306 | def GetSettings(self, __key: str) -> str: ... 307 | 308 | def GetSummaryGrid(self) -> DataObject: ... 309 | 310 | def GetTableReports(self) -> list: ... 311 | 312 | def GetTemporaryDirectory(self) -> str: ... 313 | 314 | @overload 315 | def GetTouchedObjects(self, __varOrStage: object) -> list: ... 316 | @overload 317 | def GetTouchedObjects(self, __varsAndStages: list) -> list: ... 318 | 319 | @overload 320 | def GetTouchingExpansionStages(self, __rootObject: object) -> list: ... 321 | @overload 322 | def GetTouchingExpansionStages(self, __rootObjects: list) -> list: ... 323 | 324 | @overload 325 | def GetTouchingStageObjects(self, __rootObject: object) -> list: ... 326 | @overload 327 | def GetTouchingStageObjects(self, __rootObjects: list) -> list: ... 328 | 329 | @overload 330 | def GetTouchingVariations(self, __rootObject: object) -> list: ... 331 | @overload 332 | def GetTouchingVariations(self, __rootObjects: list) -> list: ... 333 | 334 | def GetUserManager(self) -> DataObject: ... 335 | 336 | def GetUserSettings(self, __user: object = None) -> object: ... 337 | 338 | def GetWorkspaceDirectory(self) -> str: ... 339 | 340 | def Hide(self) -> None: ... 341 | 342 | def ImportDz(self, __target: DataObject, __dzFilePath: str) -> list: ... 343 | 344 | def ImportSnapshot(self, __dzsFilePath: str) -> list: ... 345 | 346 | def InvertMatrix(self, __realPart: DataObject, __imaginaryPart: Optional[DataObject]) -> int: ... 347 | 348 | def IsAttributeModeInternal(self) -> int: ... 349 | 350 | def IsAutomaticCalculationResetEnabled(self) -> int: ... 351 | 352 | def IsFinalEchoOnEnabled(self) -> int: ... 353 | 354 | def IsLdfValid(self) -> int: ... 355 | 356 | def IsNAN(self, __value: float) -> int: ... 357 | 358 | def IsRmsValid(self) -> int: ... 359 | 360 | def IsScenarioAttribute(self, __classname: str, __attributename: str) -> int: ... 361 | 362 | def IsShcValid(self) -> int: ... 363 | 364 | def IsSimValid(self) -> int: ... 365 | 366 | def IsWriteCacheEnabled(self) -> int: ... 367 | 368 | def LicenceHasModule(self, __module: str) -> int: ... 369 | 370 | def LoadProfile(self, __profileName: str) -> int: ... 371 | 372 | def MarkInGraphics(self, __objects: list, __searchOpenedDiagramsOnly: int = 0) -> None: ... 373 | 374 | def OutputFlexibleData(self, __objects: list, __flexibleDataPage: str = '') -> None: ... 375 | 376 | @overload 377 | def PostCommand(self, __commandString: str) -> None: ... 378 | @overload 379 | def PostCommand(self, __command: DataObject) -> None: ... 380 | 381 | @overload 382 | def PrepForUntouchedDelete(self, __grid: DataObject) -> None: ... 383 | @overload 384 | def PrepForUntouchedDelete(self, __grids: list) -> None: ... 385 | 386 | def PrintError(self, __message: str) -> None: ... 387 | 388 | def PrintInfo(self, __message: str) -> None: ... 389 | 390 | def PrintPlain(self, __message: str) -> None: ... 391 | 392 | def PrintWarn(self, __message: str) -> None: ... 393 | 394 | def Rebuild(self, __iMode: int = 1) -> None: ... 395 | 396 | def ReleaseData_(self) -> None: ... 397 | 398 | def ReleaseMemory_(self) -> None: ... 399 | 400 | def ReloadProfile(self) -> None: ... 401 | 402 | def ResGetData(self, __resultObject: DataObject, __iX: int, __col: Optional[int]) -> list: ... 403 | 404 | def ResGetDescription(self, __resultObject: DataObject, __col: int, __ishort: Optional[int]) -> str: ... 405 | 406 | @overload 407 | def ResGetFirstValidObject(self, __resultFile: DataObject, __row: int, __classNames: Optional[str], __variableName: Optional[str], __limit: Optional[float], __limitOperator: Optional[int], __limit2: Optional[float], __limitOperator2: Optional[int]) -> int: ... 408 | @overload 409 | def ResGetFirstValidObject(self, __resultFile: Optional[DataObject], __row: Optional[int], __objects: Optional[list]) -> int: ... 410 | 411 | def ResGetFirstValidObjectVariable(self, __resultFile: DataObject, __variableNames: Optional[str]) -> int: ... 412 | 413 | def ResGetFirstValidVariable(self, __resultFile: DataObject, __row: int, __variableNames: Optional[str]) -> int: ... 414 | 415 | @overload 416 | def ResGetIndex(self, __resultFile: DataObject, __obj: DataObject, __varName: Optional[str]) -> int: ... 417 | @overload 418 | def ResGetIndex(self, __resultFile: DataObject, __obj: DataObject, __colIndex: Optional[int]) -> int: ... 419 | @overload 420 | def ResGetIndex(self, __resultFile: DataObject, __varName: Optional[str], __colIndex: Optional[int]) -> int: ... 421 | 422 | def ResGetMax(self, __resultFile: DataObject, __col: int) -> list: ... 423 | 424 | def ResGetMin(self, __resultFile: DataObject, __col: int) -> list: ... 425 | 426 | @overload 427 | def ResGetNextValidObject(self, __resultFile: DataObject, __classNames: Optional[str], __variableName: Optional[str], __limit: Optional[float], __limitOperator: Optional[int], __limit2: Optional[float], __limitOperator2: Optional[int]) -> int: ... 428 | @overload 429 | def ResGetNextValidObject(self, __resultFile: DataObject, __objects: list) -> int: ... 430 | 431 | def ResGetNextValidObjectVariable(self, __resultFile: DataObject, __variableNames: Optional[str]) -> int: ... 432 | 433 | def ResGetNextValidVariable(self, __resultFile: DataObject, __variableNames: Optional[str]) -> int: ... 434 | 435 | def ResGetObj(self, __resultObject: DataObject, __col: int) -> DataObject: ... 436 | 437 | def ResGetUnit(self, __resultObject: DataObject, __col: int) -> str: ... 438 | 439 | def ResGetValueCount(self, __resultObject: DataObject, __col: Optional[int]) -> int: ... 440 | 441 | def ResGetVariable(self, __resultObject: DataObject, __col: int) -> str: ... 442 | 443 | def ResGetVariableCount(self, __resultObject: DataObject) -> int: ... 444 | 445 | def ResLoadData(self, __resultObject: DataObject) -> None: ... 446 | 447 | def ResReleaseData(self, __resultObject: DataObject) -> None: ... 448 | 449 | def ResSortToVariable(self, __resultObject: DataObject, __col: int) -> int: ... 450 | 451 | def ResetCalculation(self) -> None: ... 452 | 453 | def RndExp(self, __rate: float, __rngNum: Optional[int]) -> float: ... 454 | 455 | def RndGetMethod(self, __rngNum: Optional[int]) -> str: ... 456 | 457 | def RndGetSeed(self, __rngNum: Optional[int]) -> int: ... 458 | 459 | def RndNormal(self, __mean: float, __stddev: float, __rngNum: Optional[int]) -> float: ... 460 | 461 | def RndSetup(self, __seedAutomatic: int, __seed: Optional[int], __rngType: Optional[int], __rngNum: Optional[int]) -> None: ... 462 | 463 | def RndUnifInt(self, __min: int, __max: int, __rngNum: Optional[int]) -> int: ... 464 | 465 | def RndUnifReal(self, __min: float, __max: float, __rngNum: Optional[int]) -> float: ... 466 | 467 | def RndWeibull(self, __shape: float, __scale: float, __rngNum: Optional[int]) -> float: ... 468 | 469 | def SaveAsScenario(self, __pName: str, __iSetActive: int) -> DataObject: ... 470 | 471 | def SearchObjectByForeignKey(self, __foreignKey: str) -> DataObject: ... 472 | 473 | def SearchObjectsByCimId(self, __cimId: str) -> list: ... 474 | 475 | def SelectToolbox(self, __toolbar: int, __groupName: str, __toolboxName: str) -> int: ... 476 | 477 | def SetAttributeModeInternal(self, __internalMode: int) -> None: ... 478 | 479 | def SetAutomaticCalculationResetEnabled(self, __enabled: int) -> None: ... 480 | 481 | def SetFinalEchoOnEnabled(self, __enabled: int) -> None: ... 482 | 483 | def SetGraphicUpdate(self, __enabled: int) -> None: ... 484 | 485 | def SetGuiUpdateEnabled(self, __enabled: int) -> int: ... 486 | 487 | def SetInterfaceVersion(self, __version: int) -> int: ... 488 | 489 | def SetOutputWindowState(self, __newState: int) -> None: ... 490 | 491 | def SetProgressBarUpdatesEnabled(self, __enabled: int) -> int: ... 492 | 493 | def SetRandomSeed(self, __seed: int) -> None: ... 494 | 495 | def SetShowAllUsers(self, __enabled: int) -> int: ... 496 | 497 | def SetUserBreakEnabled(self, __enabled: int) -> None: ... 498 | 499 | def SetWriteCacheEnabled(self, __enabled: int) -> None: ... 500 | 501 | def Show(self) -> None: ... 502 | 503 | def ShowModalBrowser(self, __objects: list, __detailMode: int = 0, __title: str = '', __page: str = '') -> None: ... 504 | 505 | def ShowModalSelectBrowser(self, __objects: list, __title: Optional[str], __classFilter: Optional[str], __page: str = '') -> list: ... 506 | 507 | def ShowModelessBrowser(self, __objects: list, __detailMode: int = 0, __title: str = '', __page: str = '') -> None: ... 508 | 509 | def SplitLine(self, __Line: DataObject, __percent: float = 50, __createSwitchSide0: int = 0, __createSwitchSide1: int = 0, __graphicSplit: int = 0) -> DataObject: ... 510 | 511 | def StatFileGetXrange(self) -> list: ... 512 | 513 | def StatFileResetXrange(self) -> None: ... 514 | 515 | def StatFileSetXrange(self, __min: float, __max: float) -> None: ... 516 | 517 | def UpdateTableReports(self) -> None: ... 518 | 519 | def WriteChangesToDb(self) -> None: ... 520 | 521 | 522 | def GetApplication(__username: str = None, __password: str = None, __commandLineArguments: str = None) -> Optional[Application]: ... #Optional added by Energinet 523 | 524 | def GetApplicationExt(__username: str = None, __password: str = None, __commandLineArguments: str = None) -> Optional[Application]: ... #Optional added by Energinet 525 | 526 | # Added by Energinet 527 | class ComPython(DataObject): 528 | 529 | def GetExternalObject(self, __name: str) -> List[Union[int, Optional[DataObject]]]: ... 530 | 531 | def GetInputParameterDouble(self, __name: str) -> List[Union[int, float]]: ... 532 | 533 | def GetInputParameterInt(self, __name: str) -> List[Union[int, int]]: ... 534 | 535 | def GetInputParameterString(self, __name: str) -> List[Union[int, str]]: ... 536 | 537 | class ComDpl(DataObject): 538 | 539 | def Execute(self) -> int: ... 540 | 541 | class ComTasks(DataObject): 542 | 543 | def AppendCommand(self, __command: DataObject, __row : Optional[int] = -1) -> int: ... 544 | 545 | def AppendStudyCase(self, __studyCase: DataObject) -> int: ... 546 | 547 | def Execute(self) -> None: ... 548 | 549 | class ComRes(DataObject): 550 | 551 | def Execute(self) -> None: ... 552 | 553 | class ElmRes(DataObject): 554 | 555 | def AddVariable(self, __element : DataObject, __varname: str, ) -> int: ... 556 | 557 | class ElmNet(DataObject): 558 | 559 | def Activate(self) -> int: ... 560 | 561 | class IntPrj(DataObject): 562 | 563 | def Activate(self) -> int: ... 564 | 565 | def Deactivate(self) -> int: ... 566 | 567 | def CreateVersion(self, __name: str) -> DataObject: ... 568 | 569 | class IntCase(DataObject): 570 | 571 | def Activate(self) -> int: ... 572 | 573 | def Deactivate(self) -> int: ... 574 | 575 | def Consolidate(self) -> int: ... 576 | 577 | def SetStudyTime(self, __datetime : int) -> None: ... 578 | 579 | class IntScheme(DataObject): 580 | def Activate(self) -> int: ... 581 | 582 | class IntSstage(DataObject): 583 | def Activate(self) -> int: ... 584 | 585 | class SetDesktop(DataObject): 586 | 587 | def GetPage(self, __name: str, __create: int = 0, cls : str = '') -> DataObject: ... 588 | 589 | class GrpPage(DataObject): 590 | 591 | def RemovePage(self) -> None: ... 592 | 593 | def GetOrInsertPlot(self, __name : str, __type : int, __create : int = 1) -> DataObject: ... 594 | 595 | class PltLinebarplot(DataObject): 596 | def GetDataSeries(self) -> DataObject : ... 597 | 598 | def DoAutoScale(self, __axis: Optional[int] = None) -> None: ... 599 | 600 | class PltDataseries(DataObject) 601 | def AddCurve(self, __element : DataObject, __varname : str, __datasource : Optional[DataObject] = None) -> None: ... 602 | 603 | __version__ : str = '' -------------------------------------------------------------------------------- /pscad_update_ums.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Update unit measurement pgb names 3 | ''' 4 | from __future__ import annotations 5 | import os 6 | import sys 7 | 8 | if __name__ == '__main__': 9 | print(sys.version) 10 | #Ensure right working directory 11 | executePath = os.path.abspath(__file__) 12 | executeFolder = os.path.dirname(executePath) 13 | os.chdir(executeFolder) 14 | sys.path.append(executeFolder) 15 | print(executeFolder) 16 | 17 | from typing import List 18 | 19 | if __name__ == '__main__': 20 | from execute_pscad import connectPSCAD 21 | 22 | import mhi.pscad 23 | 24 | def updateUMs(pscad : mhi.pscad.PSCAD, verbose : bool = False) -> None: 25 | projectLst = pscad.projects() 26 | for prjDic in projectLst: 27 | if prjDic['type'].lower() == 'case': 28 | project = pscad.project(prjDic['name']) 29 | print(f'Updating unit measurements in project: {project}') 30 | ums : List[mhi.pscad.UserCmp]= project.find_all(Name_='$ALIAS_UM_9124$') #type: ignore 31 | for um in ums: 32 | print(f'\t{um}') 33 | canvas : mhi.pscad.Canvas = um.canvas() 34 | umParams = um.parameters() #type: ignore 35 | alias = umParams['alias'] #type: ignore 36 | pgbs = canvas.find_all('master:pgb') #type: ignore 37 | for pgb in pgbs: 38 | if verbose: 39 | print(f'\t\t{pgb}') 40 | pgbParams = pgb.parameters() #type: ignore 41 | pgb.parameters(Name = alias + '_' + pgbParams['Group']) #type: ignore 42 | 43 | def main(): 44 | pscad = connectPSCAD() #type: ignore 45 | updateUMs(pscad) 46 | print() 47 | 48 | if __name__ == '__main__': 49 | main() 50 | 51 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | openpyxl 3 | jinja2 4 | plotly 5 | kaleido 6 | tsdownsample==0.1.3 7 | psutil 8 | mhi.psout 9 | -------------------------------------------------------------------------------- /setup_examples/MTB_Setup_Example.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/setup_examples/MTB_Setup_Example.pfd -------------------------------------------------------------------------------- /setup_examples/MTB_Setup_Example.pswx: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | -------------------------------------------------------------------------------- /testcases.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/testcases.xlsx -------------------------------------------------------------------------------- /utility_scripts/Check PowerFactory Model.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/utility_scripts/Check PowerFactory Model.pfd -------------------------------------------------------------------------------- /utility_scripts/Get Component Data.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/utility_scripts/Get Component Data.pfd -------------------------------------------------------------------------------- /utility_scripts/Get DSL Checksums.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/utility_scripts/Get DSL Checksums.pfd -------------------------------------------------------------------------------- /utility_scripts/Get Relay Data.pfd: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Energinet-SimTools/MTB/695c911cb7fee587599cfcc50b8626cea0784694/utility_scripts/Get Relay Data.pfd -------------------------------------------------------------------------------- /utility_scripts/check_powerfactory_model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Feb 21 10:33:29 2025 4 | 5 | @author: PRW 6 | """ 7 | 8 | import powerfactory 9 | 10 | ldfCalcMethod = { 11 | 'balanced' : 0, #AC Load Flow, balanced, positive sequence 12 | 'unbalanced' : 1, #AC Load Flow, unbalanced, 3-phase (ABC) 13 | 'dc' : 2 #DC Load Flow linear 14 | } 15 | 16 | incNetRepres = { 17 | 'balanced' : 'sym', #Balanced, positive sequence 18 | 'unbalanced' : 'rst', #Unbalanced, 3-phase (ABC) 19 | } 20 | 21 | def CompileDynamicModelTypes(modelType, forceRebuild, outputLevel): 22 | ''' 23 | Try to complile all Dynamic Model Types specified 24 | ''' 25 | app.PrintInfo('Compile automatically all relevant dynamic model types.') 26 | app.EchoOn() 27 | inc = app.GetFromStudyCase('ComInc') 28 | compileResult = inc.CompileDynamicModelTypes(modelType, forceRebuild, outputLevel) 29 | 30 | if compileResult == 0: 31 | app.PrintInfo('Success!\n') 32 | elif compileResult == 1: 33 | app.PrintWarn('Success, but some DSL Model Types will run interpreted.\n') 34 | elif compileResult == 2: 35 | app.PrintError('Error!\n') 36 | else: 37 | app.PrintError('Something went wrong...\n') 38 | 39 | def CheckForZeroDerivatives(errseq): 40 | ''' 41 | Check for zero derivatives 42 | ''' 43 | app.PrintInfo('Check for state variable derivatives less than the tolerance for the initial conditions:') 44 | app.EchoOn() 45 | inc = app.GetFromStudyCase('ComInc') 46 | inc.iopt_sim = 'rms' #Simulation method 47 | inc.iopt_net = 'sym' #Network representation: Balanced = 'sym', Unbalanced = 'rst' 48 | inc.iopt_show = 1 #Verify initial conditions 49 | inc.iopt_adapt = 1 #Automatic step size adaption 50 | inc.dtgrd = 0.001 #Electromechanical stepsize 51 | if inc.iopt_adapt: 52 | inc.dtgrd_max = 0.01 #Maximum stepsize 53 | inc.errseq = errseq #Tolerance value for the initial conditions 54 | 55 | incResult = inc.ZeroDerivative() 56 | 57 | if incResult == 0: 58 | app.PrintWarn('At least one state variable has a derivative larger than the tolerance, or the required command options have not been set!\n') 59 | elif incResult == 1: 60 | app.PrintInfo('All state variable derivatives are less than the tolerance.\n') 61 | else: 62 | app.PrintError('Something went wrong!\n') 63 | 64 | def LoadFlow(iopt_net = 0, ): 65 | ''' 66 | Perform a Load Flow 67 | ''' 68 | if iopt_net == 0: 69 | app.PrintInfo('Performing a balanced, positive sequence loadflow...') 70 | elif iopt_net == 1: 71 | app.PrintInfo('Performing an unbalanced, 3-phase (ABC) loadflow...') 72 | else: 73 | app.PrintError('Unknown value for "iopt_net"!') 74 | exit(1) 75 | 76 | app.EchoOff() #To limit the output displayed 77 | ldf = app.GetFromStudyCase("ComLdf") 78 | ldf.iopt_net = iopt_net 79 | ldfResults=ldf.Execute() 80 | if ldfResults == 0: 81 | app.PrintInfo('Success!\n') 82 | else: 83 | app.PrintError('Non convergence of loadflow analysis.\n') 84 | 85 | def InitConditions(iopt_net = 'sym', iopt_adapt = 1, dtgrd = 0.001, dtgrd_max = 0.01, errseq = 0.01): 86 | ''' 87 | Calculated Initial Conditions 88 | ''' 89 | if iopt_net == 'sym': 90 | app.PrintInfo('Calculating initial conditions for a balanced, positive sequence network...') 91 | elif iopt_net == 'rst': 92 | app.PrintInfo('Calculating initial conditions for an unbalanced, 3-phase (ABC) network...') 93 | else: 94 | app.PrintError('Unknown value for "iopt_net"!') 95 | exit(1) 96 | 97 | app.EchoOff() #To limit the output displayed 98 | inc = app.GetFromStudyCase('ComInc') 99 | #Fixed 100 | inc.iopt_sim = 'rms' #Simulation method 101 | inc.iopt_show = 1 #Verify initial conditions 102 | #Adjustable 103 | inc.iopt_net = iopt_net #Network representation: Balanced = 'sym', Unbalanced = 'rst' 104 | inc.iopt_adapt = iopt_adapt #Automatic step size adaption 105 | inc.dtgrd = dtgrd #Electromechanical stepsize 106 | if inc.iopt_adapt: 107 | inc.dtgrd_max = dtgrd_max #Maximum stepsize 108 | inc.errseq = errseq #Tolerance value for the initial conditions 109 | 110 | incResult = inc.Execute() 111 | if incResult == 0: 112 | app.PrintInfo('Success!\n') 113 | else: 114 | app.PrintError("Initial conditions could not be calculated.\n") 115 | 116 | def StartSimulation(): 117 | ''' 118 | Performing an RMS Simulation 119 | ''' 120 | app.PrintInfo('Starting RMS Simulation...') 121 | app.EchoOff() #To limit the output displayed 122 | sim = app.GetFromStudyCase("ComSim") 123 | simResult = sim.Execute() 124 | if simResult == 0: 125 | app.PrintInfo('RMS Simulation successfully completed.\n') 126 | else: 127 | app.PrintError("Something went wrong during the RMS simulation!\n") 128 | 129 | ##### 130 | #Main 131 | ##### 132 | app = powerfactory.GetApplication() 133 | app.ClearOutputWindow() 134 | 135 | script = app.GetCurrentScript() 136 | excelOutputPath = script.GetAttribute('ExcelOutputPath') 137 | 138 | ####################################################### 139 | #Compile automatically all relevant dynamic model types 140 | ####################################################### 141 | modelType = script.GetAttribute('ModelTypes') 142 | forceRebuild = script.GetAttribute('ForceRebuild') 143 | outputLevel = script.GetAttribute('DisplayCompilerMessages') 144 | 145 | CompileDynamicModelTypes(modelType, forceRebuild, outputLevel) 146 | 147 | ######################################################################################## 148 | #Check for state variable derivatives less than the tolerance for the initial conditions 149 | ######################################################################################## 150 | initCondTolerance = script.GetAttribute('MaximumError') #Tolerance value for the initial conditions 151 | 152 | CheckForZeroDerivatives(initCondTolerance) 153 | 154 | ############################################ 155 | #Calculate a balanced loadflow, and flat run 156 | ############################################ 157 | app.PrintInfo('Check for a balanced loadflow, and flat run:\n') 158 | 159 | LoadFlow(ldfCalcMethod['balanced']) 160 | 161 | #Calculate initial conditions 162 | InitConditions(incNetRepres['balanced']) 163 | 164 | #Start RMS Simulation 165 | StartSimulation() 166 | 167 | ############################################### 168 | #Calculate an unbalanced loadflow, and flat run 169 | ############################################### 170 | app.PrintInfo('Check for an unbalanced loadflow, and flat run:\n') 171 | 172 | LoadFlow(ldfCalcMethod['unbalanced']) 173 | 174 | #Calculate initial conditions 175 | InitConditions(incNetRepres['unbalanced']) 176 | 177 | #Start RMS Simulation 178 | StartSimulation() 179 | 180 | ############################# 181 | #Check for fixed timestep run 182 | ############################# 183 | app.PrintInfo('Check for fixed timestep flat run:\n') 184 | 185 | LoadFlow(ldfCalcMethod['balanced']) 186 | 187 | #Calculate initial conditions 188 | InitConditions(incNetRepres['balanced'], False, 0.001) 189 | 190 | #Start RMS Simulation 191 | StartSimulation() 192 | 193 | ################################ 194 | #Check for variable timestep run 195 | ################################ 196 | app.PrintInfo('Check for variable timestep flat run:\n') 197 | 198 | LoadFlow(ldfCalcMethod['balanced']) 199 | 200 | #Calculate initial conditions 201 | InitConditions(incNetRepres['balanced'], True, 0.001, 0.01) 202 | 203 | #Start RMS Simulation 204 | StartSimulation() 205 | -------------------------------------------------------------------------------- /utility_scripts/compare_component_data_with_pscad.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | from __future__ import annotations 3 | import mhi.pscad 4 | import pandas as pd 5 | import openpyxl 6 | import re 7 | from math import pi 8 | 9 | #PSCAD Project Name 10 | pscad_project_name = 'Vinkel_PV' 11 | 12 | #Read PowerFactory component data from the following Excel file 13 | excel_path = r'E:\Users\PRW\AIG\2024_Vinkel_DSO\Vinkel EMT Model 2.6\MTB\Vinkel_component_data.xlsx' 14 | 15 | #SET_PARMS = True implies that all the parameters from PowerFactory will be taken as default and over write the PSCAD parameters 16 | SET_PARAMS = False 17 | 18 | #Extract floating point value and unit type from the returned parameter value 19 | def parse_PSCAD_value(PSCAD_value): 20 | #print(PSCAD_value, type(PSCAD_value)) 21 | if type(PSCAD_value) == float or type(PSCAD_value) == int: 22 | value = PSCAD_value 23 | unit = '' 24 | else: #if type(PSCAD_value) == str or type(PSCAD_value) == 'mhi.pscad.unit.Value': 25 | regex = r'([+-]?\d*\.?\d+(?:[eE][+-]?\d+)?)\s*\[(.*)\]' 26 | output = re.search(regex, str(PSCAD_value)) #force str for 'mhi.pscad.unit.Value' 27 | try: 28 | value = float(output.group(1)) 29 | unit = output.group(2) 30 | except: 31 | value = float(PSCAD_value) 32 | unit = '' 33 | return (value, unit) 34 | 35 | #Test if there are any 3-Winding Transformers in the Excel Workbook (work in progress) 36 | ELMTR3_EXISTS = True if 'PowerFactory ElmTr3 Data' in openpyxl.load_workbook(excel_path, read_only=True).sheetnames else False 37 | 38 | #Read in all existing Worksheets 39 | powerfactory_cable_data_df = pd.read_excel(excel_path, 'PowerFactory Cable Data') 40 | powerfactory_trfr2_data_df = pd.read_excel(excel_path, 'PowerFactory ElmTr2 Data') 41 | if ELMTR3_EXISTS: powerfactory_trfr3_data_df = pd.read_excel(excel_path, 'PowerFactory ElmTr3 Data') 42 | 43 | #Delete the 'index' which is equal to the first Unnamed column 44 | powerfactory_cable_data_df.drop('Unnamed: 0', axis=1, inplace=True) 45 | powerfactory_trfr2_data_df.drop('Unnamed: 0', axis=1, inplace=True) 46 | if ELMTR3_EXISTS: powerfactory_trfr3_data_df.drop('Unnamed: 0', axis=1, inplace=True) 47 | 48 | #Get the cable and transformer names 49 | cable_names = powerfactory_cable_data_df['Cable name'].tolist() 50 | trfr2_names = powerfactory_trfr2_data_df['Transformer name'].tolist() 51 | if ELMTR3_EXISTS: trfr3_names = powerfactory_trfr3_data_df['Transformer name'].tolist() 52 | 53 | with mhi.pscad.application() as pscad: 54 | # Get a list cases. 55 | #cases = pscad.cases() 56 | #project = pscad.project(cases[0].name) 57 | project = pscad.project(pscad_project_name) 58 | project.focus() 59 | main = project.canvas('Main') 60 | 61 | #Get all the cable component 62 | cable_components = [] 63 | for cable_name in cable_names: 64 | cable_components.append(project.find(cable_name)) 65 | 66 | #Read all the parameters for each cable component 67 | cables_list = [] 68 | for cable_component in cable_components: 69 | if cable_component is None: continue 70 | params_dict = cable_component.parameters() 71 | #print(params_dict) #DEBUG 72 | if SET_PARAMS: 73 | #Set the PSCAD parameters equal to the PowerFactory parameters 74 | i = cable_names.index(params_dict['Name']) 75 | params_dict['PU'] = 'R_XL_XC_OHM_' 76 | params_dict['len'] = powerfactory_cable_data_df.iloc[i]['length [km]'] 77 | params_dict['Rp'] = powerfactory_cable_data_df.iloc[i]['Eq. R\' [Ohm/km]'] 78 | params_dict['Xp'] = powerfactory_cable_data_df.iloc[i]['Eq. X\' [Ohm/km]'] 79 | params_dict['Bp'] = powerfactory_cable_data_df.iloc[i]['Eq. Shunt X\' [MOhm*km]'] 80 | params_dict['Rz'] = powerfactory_cable_data_df.iloc[i]['Eq. R0\' [Ohm/km]'] 81 | params_dict['Xz'] = powerfactory_cable_data_df.iloc[i]['Eq. X0\' [Ohm/km]'] 82 | params_dict['Bz'] = powerfactory_cable_data_df.iloc[i]['Eq. Shunt X0\' [MOhm*km]'] 83 | params_dict.pop('Rp2') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 84 | params_dict.pop('Lp') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 85 | params_dict.pop('Cp') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 86 | params_dict.pop('Rz2') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 87 | params_dict.pop('Lz') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 88 | params_dict.pop('Cz') #Only used if 'PU': 'R_L_C_OHM_H_UF_' 89 | cable_component.parameters(parameters = params_dict) 90 | params_dict = cable_component.parameters() #Read parameter values again 91 | 92 | #Extract PSCAD parameters 93 | length = parse_PSCAD_value(params_dict['len'])[0]/1000 if parse_PSCAD_value(params_dict['len'])[1] == 'm' else parse_PSCAD_value(params_dict['len'])[0] 94 | if params_dict['PU'] == 'R_XL_XC_OHM_': 95 | Rp = parse_PSCAD_value(params_dict['Rp'])[0]*1000 if parse_PSCAD_value(params_dict['Rp'])[1] == 'ohm/m' else parse_PSCAD_value(params_dict['Rp'])[0] #else 'ohm/km' 96 | Xp = parse_PSCAD_value(params_dict['Xp'])[0]*1000 if parse_PSCAD_value(params_dict['Xp'])[1] == 'ohm/m' else parse_PSCAD_value(params_dict['Xp'])[0] #else 'ohm/km' 97 | Bp = parse_PSCAD_value(params_dict['Bp'])[0]*1000 if parse_PSCAD_value(params_dict['Bp'])[1] == 'Mohm*m' else parse_PSCAD_value(params_dict['Bp'])[0] #else 'Mohm*km' 98 | Rz = parse_PSCAD_value(params_dict['Rz'])[0]*1000 if parse_PSCAD_value(params_dict['Rz'])[1] == 'ohm/m' else parse_PSCAD_value(params_dict['Rz'])[0] #else 'ohm/km' 99 | Xz = parse_PSCAD_value(params_dict['Xz'])[0]*1000 if parse_PSCAD_value(params_dict['Xz'])[1] == 'ohm/m' else parse_PSCAD_value(params_dict['Xz'])[0] #else 'ohm/km' 100 | Bz = parse_PSCAD_value(params_dict['Bz'])[0]*1000 if parse_PSCAD_value(params_dict['Bz'])[1] == 'Mohm*m' else parse_PSCAD_value(params_dict['Bz'])[0] #else 'Mohm*km' 101 | elif params_dict['PU'] == 'R_L_C_OHM_H_UF_': 102 | f = parse_PSCAD_value(params_dict['F'])[0] 103 | if parse_PSCAD_value(params_dict['Rp2'])[1] == 'ohm/m' or parse_PSCAD_value(params_dict['Rp2'])[1] == '': #'ohm/m' default 104 | Rp = parse_PSCAD_value(params_dict['Rp2'])[0]*1000 105 | elif parse_PSCAD_value(params_dict['Rp2'])[1] == 'ohm/km': 106 | Rp = parse_PSCAD_value(params_dict['Rp2'])[0] 107 | else: 108 | print(f"Unknown unit for positive sequence series resistance: {parse_PSCAD_value(params_dict['Rp2'])[1]}") 109 | exit(0) 110 | if parse_PSCAD_value(params_dict['Lp'])[1] == 'mH/m' or parse_PSCAD_value(params_dict['Lp'])[1] == 'H/km': 111 | Xp = 2*pi*f*parse_PSCAD_value(params_dict['Lp'])[0] 112 | elif parse_PSCAD_value(params_dict['Lp'])[1] == 'mH/km': 113 | Xp = 2*pi*f*parse_PSCAD_value(params_dict['Lp'])[0]/1000 114 | elif parse_PSCAD_value(params_dict['Lp'])[1] == '': #'H/m' default 115 | Xp = 2*pi*f*parse_PSCAD_value(params_dict['Lp'])[0]*1000 116 | else: 117 | print(f"Unknown unit for positive sequence series inductance: {parse_PSCAD_value(params_dict['Lp'])[1]}") 118 | exit(0) 119 | if parse_PSCAD_value(params_dict['Cp'])[1] == 'uF/m' or parse_PSCAD_value(params_dict['Cp'])[1] == '': #'uF/m' default 120 | Bp = 1/(2*pi*f*parse_PSCAD_value(params_dict['Cp']*1000)[0]) 121 | elif parse_PSCAD_value(params_dict['Cp'])[1] == 'uF/km': 122 | Bp = 1/(2*pi*f*parse_PSCAD_value(params_dict['Cp'])[0]) 123 | else: 124 | print(f"Unknown unit for positive sequence shunt capacitance: {parse_PSCAD_value(params_dict['Cp'])[1]}") 125 | exit(0) 126 | if params_dict['Estim'] == 'ENTER': 127 | if parse_PSCAD_value(params_dict['Rz2'])[1] == 'ohm/m' or parse_PSCAD_value(params_dict['Rz2'])[1] == '': #'ohm/m' default 128 | Rz = parse_PSCAD_value(params_dict['Rz2'])[0]*1000 129 | elif parse_PSCAD_value(params_dict['Rz2'])[1] == 'ohm/km': 130 | Rz = parse_PSCAD_value(params_dict['Rz2'])[0] 131 | else: 132 | print(f"Unknown unit for zero sequence series resistance: {parse_PSCAD_value(params_dict['Rz2'])[1]}") 133 | exit(0) 134 | if parse_PSCAD_value(params_dict['Lz'])[1] == 'mH/m': 135 | Xz = 2*pi*f*parse_PSCAD_value(params_dict['Lz'])[0] 136 | elif parse_PSCAD_value(params_dict['Lz'])[1] == 'mH/km': 137 | Xz = 2*pi*f*parse_PSCAD_value(params_dict['Lz'])[0]/1000 138 | elif parse_PSCAD_value(params_dict['Lz'])[1] == '': #'H/m' default 139 | Xz = 2*pi*f*parse_PSCAD_value(params_dict['Lz'])[0]*1000 140 | else: 141 | print(f"Unknown unit for zero sequence series inductance: {parse_PSCAD_value(params_dict['Lz'])[1]}") 142 | exit(0) 143 | if parse_PSCAD_value(params_dict['Cz'])[1] == 'uF/m' or parse_PSCAD_value(params_dict['Cz'])[1] == '': #'uF/m' default 144 | Bz = 1/(2*pi*f*parse_PSCAD_value(params_dict['Cz']*1000)[0]) 145 | elif parse_PSCAD_value(params_dict['Cz'])[1] == 'uF/km': 146 | Bz = 1/(2*pi*f*parse_PSCAD_value(params_dict['Cz'])[0]) 147 | else: 148 | print(f"Unknown unit for zero sequence shunt capacitance: {parse_PSCAD_value(params_dict['Cz'])[1]}") 149 | exit(0) 150 | else: 151 | Rz = Xz = Bz = 0 152 | else: 153 | print(f"Impedance and admittance data: {params_dict['PU']}, not yet supported") 154 | 155 | cable_row = [params_dict['Name'], length, Rp, Xp, Bp, Rz, Xz, Bz] 156 | 157 | cables_list.append(cable_row) 158 | 159 | #Write all the cable data to a DataFrame 160 | pscad_cables_data_df = pd.DataFrame(cables_list, columns = ['Cable name', 161 | 'length [km]', 162 | 'Eq. R\' [Ohm/km]', 163 | 'Eq. X\' [Ohm/km]', 164 | 'Eq. Shunt X\' [MOhm*km]', 165 | 'Eq. R0\' [Ohm/km]', 166 | 'Eq. X0\' [Ohm/km]', 167 | 'Eq. Shunt X0\' [MOhm*km]']) 168 | #Get all the 2 winding transformer components 169 | trfr2_components = [] 170 | for trfr2_name in trfr2_names: 171 | trfr2_components.append(project.find(trfr2_name)) 172 | 173 | #Read all the parameters for each transformer component 174 | trfrs2_list = [] 175 | for trfr2_component in trfr2_components: 176 | if trfr2_component is None: continue 177 | params_dict = trfr2_component.parameters() 178 | 179 | #Check the transformer component model definition 180 | component_def = trfr2_component.defn_name[1] 181 | 182 | #Duality based 3 phase 2 winding transformer 183 | if component_def == 'db_xfmr_3p2w': 184 | if SET_PARAMS: 185 | #Set the PSCAD parameters equal to the PowerFactory parameters 186 | i = trfr2_names.index(params_dict['Name']) 187 | params_dict['Tmva'] = powerfactory_trfr2_data_df.iloc[i]['Sn [MVA]'] 188 | params_dict['f_'] = 50.0 189 | params_dict['V1LL'] = powerfactory_trfr2_data_df.iloc[i]['Un_HV [kV]'] 190 | params_dict['V2LL'] = powerfactory_trfr2_data_df.iloc[i]['Un_LV [kV]'] 191 | params_dict['TCuL_'] = powerfactory_trfr2_data_df.iloc[i]['P_Cu [pu]'] 192 | params_dict['CoreEddyLoss_'] = powerfactory_trfr2_data_df.iloc[i]['P_NL [pu]'] 193 | params_dict['Xl_'] = powerfactory_trfr2_data_df.iloc[i]['X1_leak [pu]'] 194 | params_dict['Iexc_'] = powerfactory_trfr2_data_df.iloc[i]['I1_mag [%]'] 195 | params_dict.pop('kVPerTurn_') #Remove this non-writable parameter 196 | trfr2_component.parameters(parameters = params_dict) 197 | params_dict = trfr2_component.parameters() #Read parameter values again 198 | 199 | #Extract PSCAD parameters 200 | W1 = 'Y' if params_dict['W1'] == 'Y' else 'D' 201 | W2 = 'y' if params_dict['W2'] == 'Y' else 'd' 202 | vec_group = W1+W2 203 | if parse_PSCAD_value(params_dict['CoreEddyLoss_'])[1] == 'pu' or parse_PSCAD_value(params_dict['CoreEddyLoss_'])[1] == '': #'pu' default 204 | CoreEddyLoss = parse_PSCAD_value(params_dict['CoreEddyLoss_'])[0] 205 | elif parse_PSCAD_value(params_dict['CoreEddyLoss_'])[1] == '%': 206 | CoreEddyLoss = parse_PSCAD_value(params_dict['CoreEddyLoss_'])[0]/100 207 | else: 208 | print(f"Unknown unit for Eddy Current Core Losses: {parse_PSCAD_value(params_dict['CoreEddyLoss_'])[1]}") 209 | exit(0) 210 | trfr_row = [params_dict['Name'], 211 | params_dict[vec_group], 212 | parse_PSCAD_value(params_dict['Tmva'])[0], 213 | parse_PSCAD_value(params_dict['V1LL'])[0], 214 | parse_PSCAD_value(params_dict['V2LL'])[0], 215 | parse_PSCAD_value(params_dict['TCuL_'])[0], 216 | CoreEddyLoss , 217 | parse_PSCAD_value(params_dict['Xl_'])[0], 218 | parse_PSCAD_value(params_dict['Iexc_'])[0], 219 | 'YES'] 220 | 221 | #3 Phase 2 Winding Transformer 222 | elif component_def == 'xfmr-3p2w': 223 | if SET_PARAMS: 224 | #Set the PSCAD parameters equal to the PowerFactory parameters 225 | i = trfr2_names.index(params_dict['Name']) 226 | params_dict['Tmva'] = powerfactory_trfr2_data_df.iloc[i]['Sn [MVA]'] 227 | params_dict['f'] = 50.0 228 | params_dict['V1'] = powerfactory_trfr2_data_df.iloc[i]['Un_HV [kV]'] 229 | params_dict['V2'] = powerfactory_trfr2_data_df.iloc[i]['Un_LV [kV]'] 230 | params_dict['CuL'] = powerfactory_trfr2_data_df.iloc[i]['P_Cu [pu]'] 231 | params_dict['NLL'] = powerfactory_trfr2_data_df.iloc[i]['P_NL [pu]'] 232 | params_dict['Xl'] = powerfactory_trfr2_data_df.iloc[i]['X1_leak [pu]'] 233 | params_dict['Im1'] = powerfactory_trfr2_data_df.iloc[i]['I1_mag [%]'] 234 | params_dict['Enab'] = 'YES' #Enable Saturation 235 | trfr2_component.parameters(parameters = params_dict) 236 | params_dict = trfr2_component.parameters() #Read parameter values again 237 | 238 | #Extract PSCAD parameters 239 | W1 = 'Y' if params_dict['YD1'] == 'Y' else 'D' 240 | W2 = 'y' if params_dict['YD2'] == 'Y' else 'd' 241 | D_lead_lag = params_dict['Lead'] 242 | if D_lead_lag == 'LAGS': 243 | if W1 == 'D' and W2 == 'y': 244 | hour = '11' 245 | elif W1 == 'Y' and W2 == 'd': 246 | hour = '1' 247 | else: 248 | hour = '0' 249 | else: 250 | if W1 == 'D' and W2 == 'y': 251 | hour = '1' 252 | elif W1 == 'Y' and W2 == 'd': 253 | hour = '11' 254 | else: 255 | hour = '0' 256 | if parse_PSCAD_value(params_dict['NLL'])[1] == 'pu' or parse_PSCAD_value(params_dict['NLL'])[1] == '': #'pu' default 257 | NLL = parse_PSCAD_value(params_dict['CoreEddyLoss_'])[0] 258 | elif parse_PSCAD_value(params_dict['CoreEddyLoss_'])[1] == '%': 259 | NLL = parse_PSCAD_value(params_dict['NLL_'])[0]/100 260 | else: 261 | print(f"Unknown unit for No Load Losses (NLL): {parse_PSCAD_value(params_dict['NLL'])[1]}") 262 | exit(0) 263 | trfr_row = [params_dict['Name'], 264 | W1+W2+hour, 265 | parse_PSCAD_value(params_dict['Tmva'])[0], 266 | parse_PSCAD_value(params_dict['V1'])[0], 267 | parse_PSCAD_value(params_dict['V2'])[0], 268 | parse_PSCAD_value(params_dict['CuL'])[0], 269 | NLL, 270 | parse_PSCAD_value(params_dict['Xl'])[0], 271 | parse_PSCAD_value(params_dict['Im1'])[0], 272 | params_dict['Enab']] 273 | 274 | else: 275 | print(f'2 Winding transformer model {component_def} not defined!') 276 | exit(0) 277 | 278 | trfrs2_list.append(trfr_row) 279 | 280 | pscad_trfr2_data_df = pd.DataFrame(trfrs2_list, columns = ['Transformer name', 281 | 'Vector Grouping', 282 | 'Sn [MVA]', 283 | 'Un_HV [kV]', 284 | 'Un_LV [kV]', 285 | 'P_Cu [pu]', 286 | 'P_NL [pu]', 287 | 'X1_leak [pu]', 288 | 'I1_mag [%]', 289 | 'Saturation Enabled']) 290 | 291 | if ELMTR3_EXISTS: 292 | #Get all the 3 winding transformer components 293 | trfr3_components = [] 294 | for trfr3_name in trfr3_names: 295 | trfr3_components.append(project.find(trfr3_name)) 296 | 297 | 298 | #Read all the parameters for each transformer component 299 | trfrs3_list = [] 300 | for trfr3_component in trfr3_components: 301 | if trfr3_component is None: continue 302 | params_dict = trfr3_component.parameters() 303 | 304 | #Check the transformer component model definition 305 | component_def = trfr3_component.defn_name[1] 306 | 307 | #Duality based 3 phase 2 winding transformer 308 | #3 Phase 3 Winding Transformer 309 | if component_def == 'xfmr-3p3w2': 310 | if SET_PARAMS: 311 | #Set the PSCAD parameters equal to the PowerFactory parameters 312 | i = trfr3_names.index(params_dict['Name']) 313 | params_dict['Tmva'] = powerfactory_trfr3_data_df.iloc[i]['Sn_HV [MVA]'] 314 | params_dict['f'] = 50.0 315 | params_dict['V1'] = powerfactory_trfr3_data_df.iloc[i]['Un_HV [kV]'] 316 | params_dict['V2'] = powerfactory_trfr3_data_df.iloc[i]['Un_MV [kV]'] 317 | params_dict['V3'] = powerfactory_trfr3_data_df.iloc[i]['Un_LV [kV]'] 318 | params_dict['CuL12'] = powerfactory_trfr3_data_df.iloc[i]['P_Cu (HV-MV) [pu]'] 319 | params_dict['CuL23'] = powerfactory_trfr3_data_df.iloc[i]['P_Cu (MV-LV) [pu]'] 320 | params_dict['CuL13'] = powerfactory_trfr3_data_df.iloc[i]['P_Cu (LV-HV) [pu]'] 321 | params_dict['NLL'] = powerfactory_trfr3_data_df.iloc[i]['P_NL [pu]'] 322 | params_dict['Xl12'] = powerfactory_trfr3_data_df.iloc[i]['X1_leak (HV-MV) [pu]'] 323 | params_dict['Xl23'] = powerfactory_trfr3_data_df.iloc[i]['X1_leak (MV-LV) [pu]'] 324 | params_dict['Xl13'] = powerfactory_trfr3_data_df.iloc[i]['X1_leak (LV-HV) [pu]'] 325 | params_dict['Im1'] = powerfactory_trfr3_data_df.iloc[i]['I1_mag [%]'] 326 | params_dict['Enab'] = 'YES' #Enable Saturation 327 | trfr3_component.parameters(parameters = params_dict) 328 | params_dict = trfr3_component.parameters() #Read parameter values again 329 | 330 | #Extract PSCAD parameters 331 | W1 = 'Y' if params_dict['YD1'] == 'Y' else 'D' 332 | W2 = 'y' if params_dict['YD2'] == 'Y' else 'd' 333 | W3 = 'y' if params_dict['YD3'] == 'Y' else 'd' 334 | D_lead_lag = params_dict['Lead'] 335 | if D_lead_lag == 'LAGS': 336 | if W1 == 'D' and W2 == 'y' and W3 == 'y': 337 | hour = '11' 338 | elif W1 == 'Y' and W2 == 'd' and W3 == 'd': 339 | hour = '1' 340 | else: 341 | hour = '0' 342 | else: 343 | if W1 == 'D' and W2 == 'y' and W3 == 'y': 344 | hour = '1' 345 | elif W1 == 'Y' and W2 == 'd' and W3 == 'd': 346 | hour = '11' 347 | else: 348 | hour = '0' 349 | 350 | trfr_row = [params_dict['Name'], 351 | W1+W2+hour+W3+hour, 352 | params_dict['Tmva'], 353 | params_dict['V1'], 354 | params_dict['V2'], 355 | params_dict['V3'], 356 | params_dict['CuL12'], 357 | params_dict['CuL23'], 358 | params_dict['CuL13'], 359 | params_dict['NLL'], 360 | params_dict['Xl12'], 361 | params_dict['Xl23'], 362 | params_dict['Xl13'], 363 | params_dict['Im1'], 364 | params_dict['Enab']] 365 | else: 366 | print('3 Winding transformer model {component_def} not defined!') 367 | exit(0) 368 | 369 | trfrs3_list.append(trfr_row) 370 | 371 | pscad_trfr3_data_df = pd.DataFrame(trfrs3_list, columns = ['Transformer name', 372 | 'Vector Grouping', 373 | 'Sn_HV [MVA]', 374 | 'Un_HV [kV]', 375 | 'Un_MV [kV]', 376 | 'Un_LV [kV]', 377 | 'P_Cu (HV-MV) [pu]', 378 | 'P_Cu (MV-LV) [pu]', 379 | 'P_Cu (LV-HV) [pu]', 380 | 'P_NL [pu]', 381 | 'X1_leak (HV-MV) [pu]', 382 | 'X1_leak (MV-LV) [pu]', 383 | 'X1_leak (LV-HV) [pu]', 384 | 'I1_mag [%]', 385 | 'Saturation Enabled']) 386 | 387 | #Combine the PowerFactory and PSCAD data into one DataFrame 388 | cable_data_df = pd.concat([powerfactory_cable_data_df, pscad_cables_data_df], ignore_index = True) 389 | trfr2_data_df = pd.concat([powerfactory_trfr2_data_df, pscad_trfr2_data_df], ignore_index = True) 390 | if ELMTR3_EXISTS: trfr3_data_df = pd.concat([powerfactory_trfr3_data_df, pscad_trfr3_data_df], ignore_index = True) 391 | 392 | #Compare the PowerFactory Cable data with the PSCAD Cable data 393 | cables = [] 394 | for cable_name in cable_names: 395 | cable = cable_data_df.index[cable_data_df['Cable name'] == cable_name].tolist() 396 | cables.append(cable) 397 | 398 | perc_diff = [] 399 | for i, cable in enumerate(cables): 400 | if len(cable)>1: #check to see if there are two cables to compare 401 | name = cable_names[i] 402 | dLen = abs((cable_data_df.iloc[cable[0]]['length [km]']-cable_data_df.iloc[cable[1]]['length [km]']))/cable_data_df.iloc[cable[0]]['length [km]'] 403 | dRp = abs((cable_data_df.iloc[cable[0]]['Eq. R\' [Ohm/km]']-cable_data_df.iloc[cable[1]]['Eq. R\' [Ohm/km]']))/cable_data_df.iloc[cable[0]]['Eq. R\' [Ohm/km]'] 404 | dXp = abs((cable_data_df.iloc[cable[0]]['Eq. X\' [Ohm/km]']-cable_data_df.iloc[cable[1]]['Eq. X\' [Ohm/km]']))/cable_data_df.iloc[cable[0]]['Eq. X\' [Ohm/km]'] 405 | dBp = abs((cable_data_df.iloc[cable[0]]['Eq. Shunt X\' [MOhm*km]']-cable_data_df.iloc[cable[1]]['Eq. Shunt X\' [MOhm*km]']))/cable_data_df.iloc[cable[0]]['Eq. Shunt X\' [MOhm*km]'] 406 | dRz = abs((cable_data_df.iloc[cable[0]]['Eq. R0\' [Ohm/km]']-cable_data_df.iloc[cable[1]]['Eq. R0\' [Ohm/km]']))/cable_data_df.iloc[cable[0]]['Eq. R0\' [Ohm/km]'] 407 | dXz = abs((cable_data_df.iloc[cable[0]]['Eq. X0\' [Ohm/km]']-cable_data_df.iloc[cable[1]]['Eq. X0\' [Ohm/km]']))/cable_data_df.iloc[cable[0]]['Eq. X0\' [Ohm/km]'] 408 | dBz = abs((cable_data_df.iloc[cable[0]]['Eq. Shunt X0\' [MOhm*km]']-cable_data_df.iloc[cable[1]]['Eq. Shunt X0\' [MOhm*km]']))/cable_data_df.iloc[cable[0]]['Eq. Shunt X0\' [MOhm*km]'] 409 | perc_diff.append([name, dLen, dRp, dXp, dBp, dRz, dXz, dBz]) 410 | 411 | cable_perc_diff_df = pd.DataFrame(perc_diff, columns = ['Cable name', 412 | 'length [km]', 413 | 'Eq. R\' [Ohm/km]', 414 | 'Eq. X\' [Ohm/km]', 415 | 'Eq. Shunt X\' [MOhm*km]', 416 | 'Eq. R0\' [Ohm/km]', 417 | 'Eq. X0\' [Ohm/km]', 418 | 'Eq. Shunt X0\' [MOhm*km]']) 419 | 420 | cable_comparative_data_df = pd.concat([powerfactory_cable_data_df, pscad_cables_data_df, cable_perc_diff_df], keys=['PF', 'PSCAD', '% Diff']) 421 | 422 | #Compare the PowerFactory ElmTr2 data with the PSCAD Trfr2 data 423 | trfr2s = [] 424 | for trfr2_name in trfr2_names: 425 | trfr2 = trfr2_data_df.index[trfr2_data_df['Transformer name'] == trfr2_name].tolist() 426 | trfr2s.append(trfr2) 427 | 428 | perc_diff = [] 429 | for i, trfr2 in enumerate(trfr2s): 430 | if len(trfr2)>1: #check to see if there are two transformers to compare 431 | name = trfr2_names[i] 432 | dSn = abs((trfr2_data_df.iloc[trfr2[0]]['Sn [MVA]']-trfr2_data_df.iloc[trfr2[1]]['Sn [MVA]']))/trfr2_data_df.iloc[trfr2[0]]['Sn [MVA]'] 433 | dUn_HV = abs((trfr2_data_df.iloc[trfr2[0]]['Un_HV [kV]']-trfr2_data_df.iloc[trfr2[1]]['Un_HV [kV]']))/trfr2_data_df.iloc[trfr2[0]]['Un_HV [kV]'] 434 | dUn_LV = abs((trfr2_data_df.iloc[trfr2[0]]['Un_LV [kV]']-trfr2_data_df.iloc[trfr2[1]]['Un_LV [kV]']))/trfr2_data_df.iloc[trfr2[0]]['Un_LV [kV]'] 435 | dP_Cu = abs((trfr2_data_df.iloc[trfr2[0]]['P_Cu [pu]']-trfr2_data_df.iloc[trfr2[1]]['P_Cu [pu]']))/trfr2_data_df.iloc[trfr2[0]]['P_Cu [pu]'] 436 | dP_NL = abs((trfr2_data_df.iloc[trfr2[0]]['P_NL [pu]']-trfr2_data_df.iloc[trfr2[1]]['P_NL [pu]']))/trfr2_data_df.iloc[trfr2[0]]['P_NL [pu]'] 437 | dX1_leak = abs((trfr2_data_df.iloc[trfr2[0]]['X1_leak [pu]']-trfr2_data_df.iloc[trfr2[1]]['X1_leak [pu]']))/trfr2_data_df.iloc[trfr2[0]]['X1_leak [pu]'] 438 | dI1_mag = abs((trfr2_data_df.iloc[trfr2[0]]['I1_mag [%]']-trfr2_data_df.iloc[trfr2[1]]['I1_mag [%]']))/trfr2_data_df.iloc[trfr2[0]]['I1_mag [%]'] 439 | perc_diff.append([name, dSn, dUn_HV, dUn_LV, dP_Cu, dP_NL, dX1_leak, dI1_mag]) 440 | 441 | trfr2_perc_diff_df = pd.DataFrame(perc_diff, columns = ['Transformer name', 442 | 'Sn [MVA]', 443 | 'Un_HV [kV]', 444 | 'Un_LV [kV]', 445 | 'P_Cu [pu]', 446 | 'P_NL [pu]', 447 | 'X1_leak [pu]', 448 | 'I1_mag [%]']) 449 | 450 | trfr2_comparative_data_df = pd.concat([powerfactory_trfr2_data_df, pscad_trfr2_data_df, trfr2_perc_diff_df], keys=['PF', 'PSCAD', '% Diff']) 451 | 452 | if ELMTR3_EXISTS: 453 | #Compare the PowerFactory ElmTr2 data with the PSCAD trfr3 data 454 | trfr3s = [] 455 | for trfr3_name in trfr3_names: 456 | trfr3 = trfr3_data_df.index[trfr3_data_df['Transformer name'] == trfr3_name].tolist() 457 | trfr3s.append(trfr3) 458 | 459 | perc_diff = [] 460 | for i, trfr3 in enumerate(trfr3s): 461 | if len(trfr3)>1: #check to see if there are two transformers to compare 462 | name = trfr3_names[i] 463 | dSn_HV = abs((trfr3_data_df.iloc[trfr3[0]]['Sn_HV [MVA]']-trfr3_data_df.iloc[trfr3[1]]['Sn_HV [MVA]']))/trfr3_data_df.iloc[trfr3[0]]['Sn_HV [MVA]'] 464 | dUn_HV = abs((trfr3_data_df.iloc[trfr3[0]]['Un_HV [kV]']-trfr3_data_df.iloc[trfr3[1]]['Un_HV [kV]']))/trfr3_data_df.iloc[trfr3[0]]['Un_HV [kV]'] 465 | dUn_MV = abs((trfr3_data_df.iloc[trfr3[0]]['Un_MV [kV]']-trfr3_data_df.iloc[trfr3[1]]['Un_MV [kV]']))/trfr3_data_df.iloc[trfr3[0]]['Un_MV [kV]'] 466 | dUn_LV = abs((trfr3_data_df.iloc[trfr3[0]]['Un_LV [kV]']-trfr3_data_df.iloc[trfr3[1]]['Un_LV [kV]']))/trfr3_data_df.iloc[trfr3[0]]['Un_LV [kV]'] 467 | dP_Cu_HV_MV = abs((trfr3_data_df.iloc[trfr3[0]]['P_Cu (HV-MV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['P_Cu (HV-MV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['P_Cu (HV-MV) [pu]'] 468 | dP_Cu_MV_LV = abs((trfr3_data_df.iloc[trfr3[0]]['P_Cu (MV-LV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['P_Cu (MV-LV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['P_Cu (MV-LV) [pu]'] 469 | dP_Cu_LV_HV = abs((trfr3_data_df.iloc[trfr3[0]]['P_Cu (LV-HV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['P_Cu (LV-HV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['P_Cu (LV-HV) [pu]'] 470 | dP_NL = abs((trfr3_data_df.iloc[trfr3[0]]['P_NL [pu]']-trfr3_data_df.iloc[trfr3[1]]['P_NL [pu]']))/trfr3_data_df.iloc[trfr3[0]]['P_NL [pu]'] 471 | dX1_leak_HV_MV = abs((trfr3_data_df.iloc[trfr3[0]]['X1_leak (HV-MV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['X1_leak (HV-MV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['X1_leak (HV-MV) [pu]'] 472 | dX1_leak_MV_LV = abs((trfr3_data_df.iloc[trfr3[0]]['X1_leak (MV-LV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['X1_leak (MV-LV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['X1_leak (MV-LV) [pu]'] 473 | dX1_leak_LV_HV = abs((trfr3_data_df.iloc[trfr3[0]]['X1_leak (LV-HV) [pu]']-trfr3_data_df.iloc[trfr3[1]]['X1_leak (LV-HV) [pu]']))/trfr3_data_df.iloc[trfr3[0]]['X1_leak (LV-HV) [pu]'] 474 | dI1_mag = abs((trfr3_data_df.iloc[trfr3[0]]['I1_mag [%]']-trfr3_data_df.iloc[trfr3[1]]['I1_mag [%]']))/trfr3_data_df.iloc[trfr3[0]]['I1_mag [%]'] 475 | perc_diff.append([name, dSn_HV, dUn_HV, dUn_MV, dUn_LV, dP_Cu_HV_MV, dP_Cu_MV_LV, dP_Cu_LV_HV, dP_NL, dX1_leak_HV_MV, dX1_leak_MV_LV, dX1_leak_LV_HV, dI1_mag]) 476 | 477 | trfr3_perc_diff_df = pd.DataFrame(perc_diff, columns = ['Transformer name', 478 | 'Sn_HV [MVA]', 479 | 'Un_HV [kV]', 480 | 'Un_MV [kV]', 481 | 'Un_LV [kV]', 482 | 'P_Cu (HV-MV) [pu]', 483 | 'P_Cu (MV-LV) [pu]', 484 | 'P_Cu (LV-HV) [pu]', 485 | 'P_NL [pu]', 486 | 'X1_leak (HV-MV) [pu]', 487 | 'X1_leak (MV-LV) [pu]', 488 | 'X1_leak (LV-HV) [pu]', 489 | 'I1_mag [%]']) 490 | 491 | trfr3_comparative_data_df = pd.concat([powerfactory_trfr3_data_df, pscad_trfr3_data_df, trfr3_perc_diff_df], keys=['PF', 'PSCAD', '% Diff']) 492 | 493 | 494 | #Add these two DataFrames a new Sheets to the PowerFactory Poject Data Excel file 495 | with pd.ExcelWriter(excel_path, mode = "a", if_sheet_exists = 'replace', engine = "openpyxl") as project_data_writer: 496 | cable_comparative_data_df.to_excel(project_data_writer, sheet_name = 'Comparative Cable Data') 497 | trfr2_comparative_data_df.to_excel(project_data_writer, sheet_name = 'Comparative ElmTr2 Data') 498 | if ELMTR3_EXISTS: trfr3_comparative_data_df.to_excel(project_data_writer, sheet_name = 'Comparative ElmTr3 Data') 499 | 500 | print(f'Output written to \'{excel_path}\'') 501 | 502 | 503 | 504 | 505 | 506 | 507 | 508 | 509 | 510 | 511 | 512 | 513 | 514 | 515 | 516 | 517 | 518 | 519 | 520 | 521 | 522 | 523 | 524 | 525 | 526 | 527 | 528 | 529 | 530 | 531 | 532 | 533 | 534 | 535 | 536 | 537 | -------------------------------------------------------------------------------- /utility_scripts/get_component_data_from_powerfactory.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Mon Dec 9 12:24:34 2024 4 | 5 | @author: PRWenerginet 6 | """ 7 | 8 | import powerfactory 9 | import pandas as pd 10 | import re 11 | from numpy import sqrt 12 | 13 | def get_typtrx(typ_id): 14 | regex = r'(.*)(\.TypTr.\.*)' 15 | match = re.search(regex, re.split(r'\\', typ_id)[-1]) 16 | return match.group(1) 17 | 18 | app = powerfactory.GetApplication() 19 | script = app.GetCurrentScript() 20 | excel_output_path = script.GetAttribute('excel_output_path') 21 | 22 | app.ClearOutputWindow() 23 | app.PrintInfo('Get all cables, transformers and transformer types in the project, and write it to an Excel Spreadsheet') 24 | app.PrintInfo(f'Input Parameter: {excel_output_path}') 25 | 26 | ############ 27 | #ElmLne Data 28 | ############ 29 | oElmLnes = app.GetCalcRelevantObjects('*.ElmLne') 30 | 31 | dfCables = pd.DataFrame([[oElmLne.loc_name, 32 | oElmLne.nlnum, 33 | oElmLne.dline, 34 | oElmLne.GetType().rline, 35 | oElmLne.GetType().lline, 36 | oElmLne.GetType().xline, 37 | oElmLne.GetType().rline0, 38 | oElmLne.GetType().lline0, 39 | oElmLne.GetType().xline0, 40 | oElmLne.GetType().cline, 41 | oElmLne.GetType().bline, 42 | oElmLne.GetType().cline0, 43 | oElmLne.GetType().bline0] for oElmLne in oElmLnes if oElmLne.GetType().IsCable()], 44 | columns=['Cable name', 45 | 'par lines', 46 | 'length [km]', 47 | 'R\'[Ohm/km]', 48 | 'L\'[mH/km]', 49 | 'X\'[Ohm/km]', 50 | 'R0\'[Ohm/km]', 51 | 'L0\'[mH/km]', 52 | 'X0\'[Ohm/km]', 53 | 'C\'[uF/km]', 54 | 'B\'[uS/km]', 55 | 'C0\'[uF/km]', 56 | 'B0\'[uS/km]']) 57 | 58 | #app.PrintInfo(dfCables) 59 | 60 | #Calculate PSCAD equivalent values 61 | dfCables['Eq. R\' [Ohm/km]'] = dfCables['R\'[Ohm/km]']/dfCables['par lines'] 62 | dfCables['Eq. X\' [Ohm/km]'] = dfCables['X\'[Ohm/km]']/dfCables['par lines'] 63 | dfCables['Eq. Shunt X\' [MOhm*km]'] = dfCables['par lines']/dfCables['B\'[uS/km]'] 64 | dfCables['Eq. R0\' [Ohm/km]'] = dfCables['R0\'[Ohm/km]']/dfCables['par lines'] 65 | dfCables['Eq. X0\' [Ohm/km]'] = dfCables['X0\'[Ohm/km]']/dfCables['par lines'] 66 | dfCables['Eq. Shunt X0\' [MOhm*km]'] = dfCables['par lines']/dfCables['B0\'[uS/km]'] 67 | 68 | ############ 69 | #ElmTr2 Data 70 | ############ 71 | oElmTr2s = app.GetCalcRelevantObjects('*.ElmTr2') 72 | 73 | dfElmTr2s = pd.DataFrame([[oElmTr2.loc_name, 74 | get_typtrx(str(oElmTr2.typ_id)), 75 | oElmTr2.ntnum, 76 | oElmTr2.typ_id.vecgrp, 77 | oElmTr2.typ_id.strn, 78 | oElmTr2.typ_id.utrn_h, 79 | oElmTr2.typ_id.utrn_l, 80 | oElmTr2.typ_id.uktr, 81 | oElmTr2.typ_id.pcutr, 82 | oElmTr2.typ_id.uk0tr, 83 | oElmTr2.typ_id.curmg, 84 | oElmTr2.typ_id.pfe] for oElmTr2 in oElmTr2s], 85 | columns=['Transformer name', 86 | 'Transformer type', 87 | 'Number of par. trfrs.', 88 | 'Vector Grouping', 89 | 'Sn [MVA]', 90 | 'Un_HV [kV]', 91 | 'Un_LV [kV]', 92 | 'uk [%]', 93 | 'P_Cu [kW]', 94 | 'uk0 [%]', 95 | 'I_NL [%]', 96 | 'P_NL [kW]']) 97 | 98 | #Calculate PSCAD equivalent values 99 | dfElmTr2s['P_Cu [pu]'] = dfElmTr2s['P_Cu [kW]']/(dfElmTr2s['Sn [MVA]']*1000) 100 | dfElmTr2s['P_NL [pu]'] = dfElmTr2s['P_NL [kW]']/(dfElmTr2s['Sn [MVA]']*1000) 101 | dfElmTr2s['X1_leak [pu]'] = dfElmTr2s['uk [%]']/100 102 | dfElmTr2s['I1_mag [%]'] = sqrt(dfElmTr2s['I_NL [%]']**2-(dfElmTr2s['P_NL [pu]']/100)**2) #Ignoring P_Cu at no load 103 | 104 | ############ 105 | #ElmTr3 Data 106 | ############ 107 | oElmTr3s = app.GetCalcRelevantObjects('*.ElmTr3') 108 | 109 | ELMTR3_EXIST = True if len(oElmTr3s)>0 else False 110 | 111 | if ELMTR3_EXIST: 112 | dfElmTr3s = pd.DataFrame([[oElmTr3.loc_name, 113 | get_typtrx(str(oElmTr3.typ_id)), 114 | oElmTr3.nt3nm, 115 | f'{oElmTr3.typ_id.tr3cn_h}{oElmTr3.typ_id.nt3ag_h:.0f}{oElmTr3.typ_id.tr3cn_m}{oElmTr3.typ_id.nt3ag_m:.0f}{oElmTr3.typ_id.tr3cn_l}{oElmTr3.typ_id.nt3ag_l:.0f}', 116 | oElmTr3.typ_id.strn3_h, 117 | oElmTr3.typ_id.strn3_m, 118 | oElmTr3.typ_id.strn3_l, 119 | oElmTr3.typ_id.utrn3_h, 120 | oElmTr3.typ_id.utrn3_m, 121 | oElmTr3.typ_id.utrn3_l, 122 | oElmTr3.typ_id.uktr3_h, 123 | oElmTr3.typ_id.uktr3_m, 124 | oElmTr3.typ_id.uktr3_l, 125 | oElmTr3.typ_id.pcut3_h, 126 | oElmTr3.typ_id.pcut3_m, 127 | oElmTr3.typ_id.pcut3_l, 128 | oElmTr3.typ_id.uk0hm, 129 | oElmTr3.typ_id.uk0ml, 130 | oElmTr3.typ_id.uk0hl, 131 | oElmTr3.typ_id.curm3, 132 | oElmTr3.typ_id.pfe] for oElmTr3 in oElmTr3s], 133 | columns=['Transformer name', 134 | 'Transformer type', 135 | 'Number of par. trfrs.', 136 | 'Vector Grouping', 137 | 'Sn_HV [MVA]', 138 | 'Sn_MV [MVA]', 139 | 'Sn_LV [MVA]', 140 | 'Un_HV [kV]', 141 | 'Un_MV [kV]', 142 | 'Un_LV [kV]', 143 | 'uk (HV-MV) [%]', 144 | 'uk (MV-LV) [%]', 145 | 'uk (LV-HV) [%]', 146 | 'P_Cu (HV-MV) [kW]', 147 | 'P_Cu (MV-LV) [kW]', 148 | 'P_Cu (LV-HV) [kW]', 149 | 'uk0 (HV-MV) [%]', 150 | 'uk0 (MV-LV) [%]', 151 | 'uk0 (LV-HV) [%]', 152 | 'I_NL [%]', 153 | 'P_NL [kW]']) 154 | 155 | #Calculate PSCAD equivalent values 156 | dfElmTr3s['P_Cu (HV-MV) [pu]'] = dfElmTr3s['P_Cu (HV-MV) [kW]']/(dfElmTr3s['Sn_HV [MVA]']*1000) 157 | dfElmTr3s['P_Cu (MV-LV) [pu]'] = dfElmTr3s['P_Cu (MV-LV) [kW]']/(dfElmTr3s['Sn_HV [MVA]']*1000) 158 | dfElmTr3s['P_Cu (LV-HV) [pu]'] = dfElmTr3s['P_Cu (LV-HV) [kW]']/(dfElmTr3s['Sn_HV [MVA]']*1000) 159 | dfElmTr3s['P_NL [pu]'] = dfElmTr3s['P_NL [kW]']/(dfElmTr3s['Sn_HV [MVA]']*1000) 160 | dfElmTr3s['X1_leak (HV-MV) [pu]'] = dfElmTr3s['uk (HV-MV) [%]']/100 161 | dfElmTr3s['X1_leak (MV-LV) [pu]'] = dfElmTr3s['uk (MV-LV) [%]']/100 162 | dfElmTr3s['X1_leak (LV-HV) [pu]'] = dfElmTr3s['uk (LV-HV) [%]']/100 163 | dfElmTr3s['I1_mag [%]'] = sqrt(dfElmTr3s['I_NL [%]']**2-(dfElmTr3s['P_NL [pu]']/100)**2) #Ignoring P_Cu at no load 164 | 165 | #app.PrintInfo(dfCables) 166 | #app.PrintInfo(dfElmTr2s) 167 | #app.PrintInfo(dfElmTr3s) 168 | 169 | #Write Each DataFrame to a separate Excel Sheet 170 | with pd.ExcelWriter(excel_output_path, mode = "w", engine = "openpyxl") as project_data_writer: 171 | dfCables.to_excel(project_data_writer, sheet_name = 'PowerFactory Cable Data') 172 | dfElmTr2s.to_excel(project_data_writer, sheet_name = 'PowerFactory ElmTr2 Data') 173 | if ELMTR3_EXIST: 174 | dfElmTr3s.to_excel(project_data_writer, sheet_name = 'PowerFactory ElmTr3 Data') 175 | 176 | app.PrintInfo(f'Output written to \'{excel_output_path}\'') 177 | -------------------------------------------------------------------------------- /utility_scripts/get_dsl_checksums_from_powerfactory.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Jan 31 12:44:25 2025 4 | 5 | @author: PRW 6 | """ 7 | 8 | import powerfactory 9 | import pandas as pd 10 | 11 | app = powerfactory.GetApplication() 12 | script = app.GetCurrentScript() 13 | excel_output_path = script.GetAttribute('excel_output_path') 14 | 15 | app.ClearOutputWindow() 16 | app.PrintInfo('Get all encrypted DSL model types in the project, and write their checksums to an Excel Spreadsheet') 17 | #app.PrintInfo(f'Input Parameter: {excel_output_path}') 18 | app.PrintPlain('') 19 | 20 | ############ 21 | #BlkDef Data 22 | ############ 23 | oBlkDefs = app.GetCalcRelevantObjects('*.BlkDef') 24 | 25 | #Display DSL object and Checksum 26 | for oBlkDef in oBlkDefs: 27 | for line in oBlkDef.sAddEquat: 28 | if line == '001! Encrypted model; Editing not possible.': 29 | app.PrintInfo(f'{oBlkDef}, Checksum = {oBlkDef.cCheckSum}') 30 | 31 | #Make a DataFrame with the DSL model type name and Checksum 32 | dfChecksums = pd.DataFrame([[oBlkDef.loc_name, 33 | str(oBlkDef.cCheckSum).strip('\'[]')] for oBlkDef in oBlkDefs for line in oBlkDef.sAddEquat if line == '001! Encrypted model; Editing not possible.'], 34 | columns=['DSL model type name', 35 | 'Checksum']) 36 | app.PrintPlain('') 37 | app.PrintPlain(dfChecksums) 38 | 39 | #Write DataFrame to Excel 40 | with pd.ExcelWriter(excel_output_path, mode = "w", engine = "openpyxl") as project_data_writer: 41 | dfChecksums.to_excel(project_data_writer, sheet_name = 'DSL Encryption Data') 42 | 43 | app.PrintPlain('') 44 | app.PrintInfo(f'Output written to \'{excel_output_path}\'') -------------------------------------------------------------------------------- /utility_scripts/get_relay_data_from_powerfactory.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Dec 13 12:41:52 2024 4 | 5 | @author: PRW 6 | """ 7 | 8 | import powerfactory 9 | import pandas as pd 10 | import re 11 | from types import NoneType 12 | 13 | app = powerfactory.GetApplication() 14 | app.ClearOutputWindow() 15 | 16 | script = app.GetCurrentScript() 17 | excel_output_path = script.GetAttribute('excel_output_path') 18 | 19 | def get_type(typ_id): 20 | regex = r'(.*)(\.Typ.*)(\<.*)' 21 | match = re.search(regex, re.split(r'\\', typ_id)[-1]) 22 | return match.group(2) 23 | 24 | application_dict = {0 : 'Main Protection', 25 | 1 : 'Backup Protection'} 26 | 27 | outserv_dict = {0 : 'No', 28 | 1 : 'Yes'} 29 | 30 | #General 31 | idir_dict = {0 : 'None', 32 | 1 : 'Forward', 33 | 2 : 'Reverse'} 34 | 35 | #TypToc 36 | toc_atype_dict = {'3ph' : 'Phase Current (3ph)', 37 | '1ph' : 'Phase Current (1ph)', 38 | '3I0' : 'Earth Current (3*I0)', 39 | 'S3I0' : 'Sensitive Earth Current (3*I0)', 40 | 'I0' : 'Zero Sequence Current (I0)', 41 | 'I2' : 'Negative Sequence Current (I2)', 42 | '3I2' : '3*Negative Sequence Current (3*I2)', 43 | 'phA' : 'Phase A Current', 44 | 'phB' : 'Phase B Current', 45 | 'phC' : 'Phase C Current', 46 | 'th' : 'Thermal image (3ph)', 47 | 'th1p' : 'Thermal image (1ph)', 48 | 'd3m' : '3ph (other)', 49 | 'd1m' : '1ph (other)'} 50 | 51 | #TypChar 52 | char_atype_dict = {'d3m' : '3ph (other)', 53 | 'd1m' : '1ph (other)', 54 | 'P' : 'Active power (P)', 55 | 'Q' : 'Reactive power (Q)', 56 | 'S' : 'Apparent power (S)', 57 | 'V' : 'Voltage', 58 | 'f' : 'Frequency (f)', 59 | 'dfdt' : 'RoCoF (df/dt)', 60 | 'Vf' : 'Volt-per-Hertz (V/Hz)'} 61 | 62 | #TypUlim 63 | ulim_ifunc_dict = {0 : 'Undervoltage', 64 | 1 : 'Overvoltage', 65 | 2 : 'Phase Shift' } 66 | 67 | #TypFrq 68 | freq_ifunc_dict = {0 : 'Instantaneous', 69 | 1 : 'Gradient', 70 | 2 : 'Gradient Digital' } 71 | 72 | slot_name_ignore_list = ['Voltage Transformer', 'Measurement', 'fMeasurement', 'Output Logic', 'Clock', 'Phase Measurement Device PLL-Type', 'Sample and Hold', 'Moving Average Filter', 'Filter'] 73 | slot_type_ignore_list = ['.TypVt', '.TypCt', '.TypMeasure', '.TypFmeas', '.TypLogic', '.TypLogdip'] 74 | 75 | oElmRelays = app.GetCalcRelevantObjects('*.ElmRelay') 76 | oRelChar = app.GetCalcRelevantObjects('*.RelChar') 77 | app.PrintInfo(oElmRelays) 78 | app.PrintInfo(oRelChar) 79 | relays = [] 80 | for oElmRelay in oElmRelays: 81 | relay =[oElmRelay.loc_name, 82 | application_dict[oElmRelay.application], 83 | outserv_dict[oElmRelay.outserv], 84 | ] 85 | slots = oElmRelay.pdiselm 86 | for slot in slots: 87 | try: 88 | if type(slot.typ_id) is NoneType: 89 | if slot.loc_name not in slot_name_ignore_list: 90 | relay.append(slot.loc_name) 91 | else: 92 | slot_type = get_type(str(slot.typ_id)) 93 | if slot_type not in slot_type_ignore_list and slot.loc_name not in slot_name_ignore_list: 94 | relay.append(slot.loc_name) 95 | except: 96 | if slot.loc_name not in slot_name_ignore_list: 97 | relay.append(slot.loc_name) 98 | 99 | relays.append(relay) 100 | 101 | dfRelays = pd.DataFrame(relays) 102 | dfRelays.rename(columns={0 : 'Relay', 103 | 1 : 'Application', 104 | 2 : 'Out of Service'}, inplace=True) 105 | 106 | with pd.ExcelWriter(excel_output_path, mode = "w", engine = "openpyxl") as project_data_writer: 107 | dfRelays.to_excel(project_data_writer, sheet_name = 'Relay Data') 108 | 109 | for oElmRelay in oElmRelays: 110 | relay_info = [] 111 | for slot in oElmRelay.pdiselm: 112 | try: 113 | if type(slot.typ_id) is not NoneType: 114 | slot_type = get_type(str(slot.typ_id)) 115 | if slot_type not in slot_type_ignore_list: 116 | if slot_type == '.TypChar': 117 | iec_symb = slot.typ_id.sfiec 118 | ansi_symb = slot.typ_id.sfansi 119 | relay_type = char_atype_dict[slot.typ_id.atype] 120 | direct = idir_dict[slot.idir] 121 | charact = slot.pcharac.loc_name 122 | thresh = slot.Ipset 123 | time_set = slot.Tpset 124 | elif slot_type == '.TypToc': 125 | iec_symb = slot.typ_id.sfiec 126 | ansi_symb = slot.typ_id.sfansi 127 | relay_type = toc_atype_dict[slot.typ_id.atype] 128 | direct = idir_dict[slot.idir] 129 | charact = slot.pcharac.loc_name 130 | thresh = slot.Ipset 131 | time_set = slot.Tpset 132 | elif slot_type == '.TypIoc': 133 | iec_symb = slot.typ_id.sfiec 134 | ansi_symb = slot.typ_id.sfansi 135 | relay_type = toc_atype_dict[slot.typ_id.atype] 136 | direct = idir_dict[slot.idir] 137 | charact = 'Definite Time' 138 | thresh = slot.Ipset 139 | time_set = slot.Tset 140 | elif slot_type == '.TypUlim': # Over voltage protection 141 | iec_symb = slot.typ_id.sfiec 142 | ansi_symb = slot.typ_id.sfansi 143 | relay_type = ulim_ifunc_dict[slot.typ_id.ifunc] 144 | direct = 'None' 145 | charact = 'Definite Time' 146 | thresh = slot.Usetr # [sec.V per phase] or Uset [p.u.] or cUpset [pri.V per phase] 147 | time_set = slot.Tdel 148 | elif slot_type == '.TypFrq': 149 | relay_type = freq_ifunc_dict[slot.typ_id.itype] 150 | direct = 'None' 151 | charact = 'Definite Time' 152 | if relay_type == 'Instantaneous': 153 | iec_symb = 'f' 154 | ansi_symb = '81' 155 | thresh = slot.Fset 156 | if thresh >= 0: 157 | iec_symb = iec_symb + '>' 158 | else: 159 | iec_symb = iec_symb + '<' 160 | time_set = slot.Tdel 161 | else: 162 | iec_symb = 'df/dt' 163 | ansi_symb = '81R' 164 | thresh = slot.dFset 165 | if thresh >= 0: 166 | iec_symb = iec_symb + '>' 167 | else: 168 | iec_symb = iec_symb + '<' 169 | time_set = slot.Tdel 170 | else: 171 | app.PrintInfo('Slot Type not defined') 172 | exit(0) 173 | 174 | slot_info = [slot.loc_name, 175 | iec_symb, 176 | ansi_symb, 177 | outserv_dict[slot.outserv], 178 | relay_type, 179 | direct, 180 | charact, 181 | thresh, 182 | time_set] 183 | relay_info.append(slot_info) 184 | 185 | app.PrintInfo(f'{slot}, Out of Service = {outserv_dict[slot.outserv]}, Relay Type = {relay_type}, Direction = {direct}, Characterist = {charact}, Threshold = {thresh:.2f}, Time Setting = {time_set:.2f}''') 186 | except: 187 | continue 188 | dfRelay = pd.DataFrame(relay_info, columns = ['Name', 189 | 'IEC Symbol', 190 | 'ANSI Number', 191 | 'Out of Service', 192 | 'Relay Type', 193 | 'Tripping Direction', 194 | 'Characteristic', 195 | 'Threshold Value', 196 | 'Time Value']) 197 | 198 | #dfRelay.round({'Threshold Value':2, 'Time Value' :3}) 199 | 200 | with pd.ExcelWriter(excel_output_path, mode = "a", if_sheet_exists = 'replace', engine = "openpyxl") as project_data_writer: 201 | dfRelay.to_excel(project_data_writer, sheet_name = oElmRelay.loc_name) 202 | 203 | app.PrintInfo(f'Output written to \'{excel_output_path}\'') 204 | --------------------------------------------------------------------------------