├── powersimdata ├── design │ ├── __init__.py │ ├── tests │ │ └── __init__.py │ ├── compare │ │ ├── __init__.py │ │ ├── helpers.py │ │ ├── generation.py │ │ └── transmission.py │ ├── generation │ │ ├── __init__.py │ │ └── tests │ │ │ ├── __init__.py │ │ │ └── test_curtailment.py │ ├── transmission │ │ ├── __init__.py │ │ ├── tests │ │ │ ├── __init__.py │ │ │ ├── test_substations.py │ │ │ └── test_statelines.py │ │ ├── zones.py │ │ ├── substations.py │ │ └── statelines.py │ └── investment │ │ ├── data │ │ ├── rs │ │ │ ├── rs.cpg │ │ │ ├── rs.shp │ │ │ └── rs.shx │ │ ├── NEEM │ │ │ ├── NEEMregions.cpg │ │ │ ├── NEEMregions.dbf │ │ │ ├── NEEMregions.shp │ │ │ ├── NEEMregions.shx │ │ │ ├── NEEMregions.prj │ │ │ └── NEEMregions.qpj │ │ ├── transformer_cost.csv │ │ ├── LineRegMult.csv │ │ └── README.md │ │ ├── tests │ │ └── __init__.py │ │ ├── __init__.py │ │ ├── inflation.py │ │ └── const.py ├── input │ ├── __init__.py │ ├── const │ │ └── __init__.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_input_data.py │ │ ├── test_profile_input.py │ │ ├── test_expansion_candidates.py │ │ ├── test_transform_demand.py │ │ ├── test_configure.py │ │ └── test_grid.py │ ├── converter │ │ ├── __init__.py │ │ ├── tests │ │ │ ├── __init__.py │ │ │ ├── test_pypsa_to_profiles.py │ │ │ └── test_pypsa_to_grid.py │ │ ├── csv_to_grid.py │ │ ├── helpers.py │ │ └── pypsa_to_profiles.py │ ├── exporter │ │ ├── __init__.py │ │ ├── tests │ │ │ └── test_export_to_pypsa.py │ │ └── export_to_reise.py │ ├── changes │ │ ├── tests │ │ │ ├── __init__.py │ │ │ └── test_add_electrification.py │ │ ├── __init__.py │ │ ├── helpers.py │ │ ├── demand_flex.py │ │ └── storage.py │ ├── electrified_demand_input.py │ ├── input_base.py │ ├── abstract_grid.py │ └── input_data.py ├── network │ ├── __init__.py │ ├── hifld │ │ ├── __init__.py │ │ └── model.py │ ├── constants │ │ ├── __init__.py │ │ ├── carrier │ │ │ ├── __init__.py │ │ │ ├── storage.py │ │ │ ├── plants.py │ │ │ ├── color.py │ │ │ ├── label.py │ │ │ └── 
efficiency.py │ │ ├── region │ │ │ ├── __init__.py │ │ │ ├── tests │ │ │ │ ├── __init__.py │ │ │ │ └── test_zones.py │ │ │ ├── interconnect.py │ │ │ ├── loadzone.py │ │ │ ├── division.py │ │ │ ├── mapping.py │ │ │ └── zones.py │ │ └── model.py │ ├── europe_tub │ │ └── __init__.py │ ├── usa_tamu │ │ ├── __init__.py │ │ ├── model.py │ │ └── data │ │ │ ├── dcline.csv │ │ │ └── zone.csv │ ├── csv_reader.py │ ├── tests │ │ └── test_model.py │ └── helpers.py ├── output │ └── __init__.py ├── scenario │ ├── __init__.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_check.py │ │ ├── test_create.py │ │ └── test_scenario.py │ ├── check.py │ ├── delete.py │ ├── state.py │ └── move.py ├── tests │ ├── __init__.py │ ├── mock_builder.py │ ├── mock_change_table.py │ ├── mock_context.py │ ├── mock_scenario_info.py │ ├── mock_scenario.py │ └── mock_grid.py ├── utility │ ├── __init__.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_distance.py │ │ └── test_helpers.py │ ├── templates │ │ ├── __init__.py │ │ ├── ExecuteList.csv │ │ └── ScenarioList.csv │ ├── server_setup.py │ ├── distance.py │ └── config.py ├── data_access │ ├── __init__.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_launcher.py │ │ ├── sql │ │ │ ├── test_sql_store.py │ │ │ ├── test_execute_table.py │ │ │ └── test_scenario_table.py │ │ ├── test_data_access.py │ │ ├── test_execute_csv.py │ │ └── test_scenario_csv.py │ ├── stack.yml │ ├── sql │ │ └── schema.sql │ ├── context.py │ ├── README.md │ ├── ssh_fs.py │ ├── scenario_table.py │ ├── execute_table.py │ ├── execute_list.py │ ├── csv_store.py │ ├── fs_helper.py │ └── scenario_list.py └── __init__.py ├── .coveragerc ├── setup.py ├── .git-blame-ignore-revs ├── optional-requirements.txt ├── .dockerignore ├── docker-compose.yml ├── requirements.txt ├── Dockerfile ├── Pipfile ├── docs ├── index.rst └── grid.rst ├── .github └── workflows │ ├── coverage.yml │ ├── external.yml │ ├── test.yml │ ├── lint.yml │ └── release.yml ├── tox.ini ├── LICENSE ├── setup.cfg └── .gitignore 
/powersimdata/design/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/output/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/scenario/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/utility/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/data_access/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/const/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /powersimdata/input/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/hifld/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/utility/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | -------------------------------------------------------------------------------- /powersimdata/data_access/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/compare/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/generation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/converter/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/exporter/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/constants/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/europe_tub/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/usa_tamu/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/scenario/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/utility/templates/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/generation/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/changes/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/input/converter/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/tests/__init__.py: -------------------------------------------------------------------------------- 
1 | -------------------------------------------------------------------------------- /powersimdata/network/constants/carrier/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/network/constants/region/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/rs/rs.cpg: -------------------------------------------------------------------------------- 1 | ISO-8859-1 -------------------------------------------------------------------------------- /powersimdata/network/constants/region/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | setup() 4 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.cpg: -------------------------------------------------------------------------------- 1 | UTF-8 -------------------------------------------------------------------------------- /powersimdata/utility/templates/ExecuteList.csv: -------------------------------------------------------------------------------- 1 | id,status 2 | -------------------------------------------------------------------------------- /powersimdata/design/investment/tests/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["test_investment_costs"] 2 | -------------------------------------------------------------------------------- /.git-blame-ignore-revs: 
-------------------------------------------------------------------------------- 1 | # migrate code style to black 2 | 840af5a40f25c92fe33cba69ed4fda3f1be6709c 3 | -------------------------------------------------------------------------------- /powersimdata/design/investment/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = ["const", "create_mapping_files", "inflation", "investment_costs"] 2 | -------------------------------------------------------------------------------- /powersimdata/__init__.py: -------------------------------------------------------------------------------- 1 | from powersimdata.input.grid import Grid # noqa: F401 2 | from powersimdata.scenario.scenario import Scenario # noqa: F401 3 | -------------------------------------------------------------------------------- /optional-requirements.txt: -------------------------------------------------------------------------------- 1 | gdal~=3.1.3 2 | geopandas~=0.8.1 3 | fiona~=1.8.14 4 | matplotlib==3.2.1 5 | rtree~=0.9.4 6 | shapely==1.7.1 7 | psycopg2~=2.8.5 8 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/rs/rs.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Breakthrough-Energy/PowerSimData/HEAD/powersimdata/design/investment/data/rs/rs.shp -------------------------------------------------------------------------------- /powersimdata/design/investment/data/rs/rs.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Breakthrough-Energy/PowerSimData/HEAD/powersimdata/design/investment/data/rs/rs.shx -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.dbf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Breakthrough-Energy/PowerSimData/HEAD/powersimdata/design/investment/data/NEEM/NEEMregions.dbf -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.shp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Breakthrough-Energy/PowerSimData/HEAD/powersimdata/design/investment/data/NEEM/NEEMregions.shp -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.shx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Breakthrough-Energy/PowerSimData/HEAD/powersimdata/design/investment/data/NEEM/NEEMregions.shx -------------------------------------------------------------------------------- /powersimdata/utility/templates/ScenarioList.csv: -------------------------------------------------------------------------------- 1 | id,plan,name,state,grid_model,grid_model_version,interconnect,base_demand,base_hydro,base_solar,base_wind,change_table,start_date,end_date,interval,engine,runtime,infeasibilities 2 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | .github 3 | build 4 | .pytest_cache 5 | .tox 6 | *.egg-info 7 | **/__pycache__ 8 | .ipynb_checkpoints 9 | **/.ropeproject 10 | .env 11 | .venv 12 | .dockerignore 13 | config.ini 14 | powersimdata/network/europe_tub/data* 15 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | powersimdata: 3 | image: powersimdata:latest 4 | build: . 
5 | volumes: 6 | - ~/ScenarioData:/root/ScenarioData 7 | - ~/.ssh:/root/.ssh 8 | environment: 9 | - BE_SERVER_USER 10 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | networkx~=2.5 2 | numpy~=1.20 3 | pandas~=1.2 4 | paramiko~=2.12 5 | scipy~=1.5 6 | tqdm==4.29.1 7 | requests~=2.25 8 | fs==2.4.14 9 | fs.sshfs 10 | fs-azureblob>=0.2.1 11 | black 12 | pytest 13 | coverage 14 | pytest-cov 15 | pypsa 16 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/transformer_cost.csv: -------------------------------------------------------------------------------- 1 | kV,69,115,138,161,230,345,500 2 | 69,4840,3940,4360,4590,5090,6250,8060 3 | 115,,5360,4360,4590,5090,5940,7290 4 | 138,,,5940,4840,5090,5940,7290 5 | 161,,,,6580,5360,6250,7670 6 | 230,,,,,7290,6250,7670 7 | 345,,,,,,8880,8060 8 | 500,,,,,,,11900 9 | -------------------------------------------------------------------------------- /powersimdata/input/changes/__init__.py: -------------------------------------------------------------------------------- 1 | # flake8: noqa 2 | from .bus import add_bus, remove_bus 3 | from .demand_flex import add_demand_flexibility 4 | from .electrification import add_electrification 5 | from .helpers import ordinal 6 | from .plant import add_plant, remove_plant, scale_plant_pmin 7 | from .storage import add_storage_capacity 8 | -------------------------------------------------------------------------------- /powersimdata/input/changes/helpers.py: -------------------------------------------------------------------------------- 1 | def ordinal(n): 2 | """Translate a 0-based index into a 1-based ordinal, e.g. 0 -> 1st, 1 -> 2nd, etc. 3 | 4 | :param int n: the index to be translated. 5 | :return: (*str*) -- Ordinal. 
6 | """ 7 | ord_dict = {1: "st", 2: "nd", 3: "rd"} 8 | return str(n + 1) + ord_dict.get((n + 1) if (n + 1) < 20 else (n + 1) % 10, "th") 9 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.8.3 2 | 3 | RUN apt-get update 4 | RUN ln -s /mnt/bes/pcm $HOME/ScenarioData 5 | 6 | COPY powersimdata/utility/templates /mnt/bes/pcm/ 7 | 8 | WORKDIR /PowerSimData 9 | COPY Pipfile . 10 | COPY Pipfile.lock . 11 | RUN pip install -U pip pipenv ipython; \ 12 | pipenv sync --dev --system; 13 | 14 | COPY . . 15 | RUN pip install . 16 | 17 | CMD ["ipython"] 18 | -------------------------------------------------------------------------------- /powersimdata/network/constants/model.py: -------------------------------------------------------------------------------- 1 | model2region = {"usa_tamu": "USA", "hifld": "USA", "europe_tub": "Europe"} 2 | model2interconnect = { 3 | "usa_tamu": ["Eastern", "Texas", "Western"], 4 | "hifld": ["Eastern", "ERCOT", "Western"], 5 | "europe_tub": [ 6 | "ContinentalEurope", 7 | "Nordic", 8 | "GreatBritain", 9 | "Ireland", 10 | "Baltic", 11 | ], 12 | } 13 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | name = "pypi" 3 | url = "https://pypi.org/simple" 4 | verify_ssl = true 5 | 6 | [dev-packages] 7 | black = "*" 8 | pytest = "*" 9 | coverage = "*" 10 | pytest-cov = "*" 11 | 12 | [packages] 13 | networkx = "~=2.5" 14 | numpy = "~=1.20" 15 | pandas = "~=1.2" 16 | paramiko = "~=2.12" 17 | scipy = "~=1.5" 18 | tqdm = "==4.29.1" 19 | requests = "~=2.25" 20 | fs = "==2.4.14" 21 | "fs.sshfs" = "*" 22 | fs-azureblob = ">=0.2.1" 23 | pypsa = "*" 24 | -------------------------------------------------------------------------------- /powersimdata/data_access/stack.yml: 
-------------------------------------------------------------------------------- 1 | # Use postgres/example user/password credentials 2 | version: '3.1' 3 | 4 | services: 5 | 6 | db: 7 | image: postgres 8 | restart: always 9 | environment: 10 | POSTGRES_PASSWORD: example 11 | ports: 12 | - 5432:5432 13 | volumes: 14 | - ./sql:/docker-entrypoint-initdb.d 15 | 16 | adminer: 17 | image: adminer 18 | restart: always 19 | ports: 20 | - 8080:8080 21 | 22 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.prj: -------------------------------------------------------------------------------- 1 | PROJCS["WGS_1984_Web_Mercator_Auxiliary_Sphere",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Mercator_Auxiliary_Sphere"],PARAMETER["False_Easting",0.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",0.0],PARAMETER["Standard_Parallel_1",0.0],PARAMETER["Auxiliary_Sphere_Type",0.0],UNIT["Meter",1.0]] -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | PowerSimData 2 | ============ 3 | This tutorial is designed to help users to use our software to carry power flow study in 4 | the U.S. electrical grid. PowerSimData is an open source package written in Python that 5 | is available on `GitHub `_. 6 | 7 | .. include:: 8 | grid.rst 9 | 10 | .. include:: 11 | scenario.rst 12 | 13 | .. include:: 14 | capacity_planning.rst 15 | 16 | .. 
include:: 17 | scenario_design.rst 18 | -------------------------------------------------------------------------------- /powersimdata/tests/mock_builder.py: -------------------------------------------------------------------------------- 1 | from powersimdata.scenario.create import _Builder 2 | from powersimdata.tests.mock_change_table import MockChangeTable 3 | 4 | 5 | class MockBuilder: 6 | def __init__(self, ct=None): 7 | """Constructor. 8 | 9 | :param dict ct: change table dict to be sent to MockChangeTable. 10 | """ 11 | if ct is None: 12 | ct = {} 13 | self.change_table = MockChangeTable(ct) 14 | 15 | @property 16 | def __class__(self): 17 | """If anyone asks, I'm a _Builder object!""" 18 | return _Builder 19 | -------------------------------------------------------------------------------- /powersimdata/network/constants/carrier/storage.py: -------------------------------------------------------------------------------- 1 | from powersimdata.network.helpers import check_model 2 | 3 | storage = { 4 | "duration": 4, 5 | "min_stor": 0.05, 6 | "max_stor": 0.95, 7 | "InEff": 0.9, 8 | "OutEff": 0.9, 9 | "energy_value": 20, 10 | "LossFactor": 0, 11 | "terminal_min": 0, 12 | "terminal_max": 1, 13 | } 14 | 15 | 16 | def get_storage(model): 17 | """Return storage constants. 18 | 19 | :param str model: grid model 20 | :return: (*dict*) -- storage information. 21 | """ 22 | check_model(model) 23 | 24 | return storage 25 | -------------------------------------------------------------------------------- /powersimdata/tests/mock_change_table.py: -------------------------------------------------------------------------------- 1 | from powersimdata.input.change_table import ChangeTable 2 | 3 | 4 | class MockChangeTable: 5 | def __init__(self, grid, ct=None): 6 | """Constructor. 7 | 8 | :param powersimdata.input.grid.Grid grid: instance of Grid object. 9 | :param dict ct: change table dict to be sent to ct attribute. 
10 | """ 11 | self.grid = grid 12 | if ct is None: 13 | ct = {} 14 | self.ct = ct 15 | 16 | @property 17 | def __class__(self): 18 | """If anyone asks, I'm a ChangeTable object!""" 19 | return ChangeTable 20 | -------------------------------------------------------------------------------- /powersimdata/data_access/tests/test_launcher.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from powersimdata.data_access.launcher import _check_solver, _check_threads 4 | 5 | 6 | def test_check_solver(): 7 | _check_solver(None) 8 | _check_solver("gurobi") 9 | _check_solver("GLPK") 10 | with pytest.raises(TypeError): 11 | _check_solver(123) 12 | with pytest.raises(ValueError): 13 | _check_solver("not-a-real-solver") 14 | 15 | 16 | def test_check_threads(): 17 | _check_threads(None) 18 | _check_threads(1) 19 | _check_threads(8) 20 | with pytest.raises(TypeError): 21 | _check_threads("4") 22 | with pytest.raises(ValueError): 23 | _check_threads(0) 24 | -------------------------------------------------------------------------------- /powersimdata/input/tests/test_input_data.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from powersimdata.input.input_data import InputData 4 | 5 | _input_data = InputData() 6 | 7 | 8 | def test_get_file_components(): 9 | s_info = {"id": "123"} 10 | ct_file = _input_data._get_file_path(s_info, "ct") 11 | grid_file = _input_data._get_file_path(s_info, "grid") 12 | assert "data/input/123_ct.pkl" == ct_file 13 | assert "data/input/123_grid.pkl" == grid_file 14 | 15 | 16 | def test_check_field(): 17 | _check_field = _input_data._check_field 18 | _check_field("grid") 19 | _check_field("ct") 20 | with pytest.raises(ValueError): 21 | _check_field("foo") 22 | _check_field("solar") 23 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/NEEM/NEEMregions.qpj: 
-------------------------------------------------------------------------------- 1 | PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],EXTENSION["PROJ4","+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"],AUTHORITY["EPSG","3857"]] 2 | -------------------------------------------------------------------------------- /.github/workflows/coverage.yml: -------------------------------------------------------------------------------- 1 | name: Code coverage 2 | 3 | on: 4 | push: 5 | branches: 6 | - 'develop' 7 | 8 | jobs: 9 | coverage: 10 | runs-on: ubuntu-latest 11 | 12 | name: Generate coverage report 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: 3.9 20 | 21 | - run: python -m pip install --upgrade pip tox 22 | - run: tox -e pytest-local -- --cov=powersimdata --cov-report=xml 23 | 24 | - name: Upload coverage to Codecov 25 | uses: codecov/codecov-action@v1 26 | with: 27 | name: codecov-powersimdata 28 | fail_ci_if_error: true 29 | -------------------------------------------------------------------------------- /powersimdata/scenario/check.py: -------------------------------------------------------------------------------- 1 | from powersimdata.scenario.analyze import Analyze 2 | from powersimdata.scenario.scenario import Scenario 3 | 4 | 5 | def _check_scenario_is_in_analyze_state(scenario): 6 | """Ensure that scenario is a Scenario object in the analyze 
state. 7 | 8 | :param powersimdata.scenario.scenario.Scenario scenario: scenario instance. 9 | :raises TypeError: if scenario is not a Scenario instance. 10 | :raises ValueError: if Scenario object is not in analyze state. 11 | """ 12 | if not isinstance(scenario, Scenario): 13 | raise TypeError(f"scenario must be a {Scenario} object") 14 | if not isinstance(scenario.state, Analyze): 15 | raise ValueError("scenario must in analyze state") 16 | -------------------------------------------------------------------------------- /.github/workflows/external.yml: -------------------------------------------------------------------------------- 1 | name: Trigger external workflows 2 | 3 | on: 4 | push: 5 | branches: 6 | - develop 7 | 8 | jobs: 9 | run-workflows: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Build and publish docs website 13 | uses: Breakthrough-Energy/actions/workflow-trigger@main 14 | with: 15 | repo: docs 16 | branch: master 17 | workflow_id: 2386877 18 | token: ${{ secrets.CI_TOKEN_CLONE_REPO }} 19 | 20 | - name: Build and publish docker image 21 | uses: Breakthrough-Energy/actions/workflow-trigger@main 22 | with: 23 | repo: plug 24 | workflow_id: 12413223 25 | token: ${{ secrets.CI_TOKEN_CLONE_REPO }} 26 | -------------------------------------------------------------------------------- /powersimdata/data_access/sql/schema.sql: -------------------------------------------------------------------------------- 1 | CREATE DATABASE psd; 2 | \connect psd; 3 | CREATE TABLE IF NOT EXISTS execute_list( 4 | id int primary key not null, 5 | status text not null 6 | ); 7 | CREATE TABLE IF NOT EXISTS scenario_list( 8 | id int primary key not null, 9 | plan text not null, 10 | name text not null, 11 | state text not null, 12 | grid_model text not null, 13 | interconnect text not null, 14 | base_demand text not null, 15 | base_hydro text not null, 16 | base_solar text not null, 17 | base_wind text not null, 18 | change_table boolean not null, 19 | start_date 
timestamptz not null, 20 | end_date timestamptz not null, 21 | interval text not null, 22 | engine text not null, 23 | runtime text, 24 | infeasibilities text 25 | ); 26 | -------------------------------------------------------------------------------- /powersimdata/tests/mock_context.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from powersimdata.data_access.data_access import TempDataAccess 4 | from powersimdata.data_access.fs_helper import get_blob_fs 5 | from powersimdata.utility import templates 6 | 7 | 8 | class MockContext: 9 | def __init__(self): 10 | self.data_access = self._setup() 11 | 12 | def get_data_access(self, ignored=None): 13 | return self.data_access 14 | 15 | def _setup(self): 16 | tda = TempDataAccess() 17 | tda.fs.add_fs("profile_fs", get_blob_fs("profiles"), priority=2) 18 | for path in ("ExecuteList.csv", "ScenarioList.csv"): 19 | orig = os.path.join(templates.__path__[0], path) 20 | with open(orig, "rb") as f: 21 | tda.fs.upload(path, f) 22 | return tda 23 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Pytest 2 | 3 | on: 4 | push: 5 | pull_request: 6 | branches: 7 | - develop 8 | 9 | jobs: 10 | test: 11 | if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name != github.event.pull_request.base.repo.full_name 12 | runs-on: ubuntu-latest 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | python-version: [3.9, '3.10', '3.11'] 17 | 18 | name: Python ${{ matrix.python-version }} 19 | steps: 20 | - uses: actions/checkout@v3 21 | 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v4 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | 27 | - run: python -m pip install --upgrade pip tox 28 | - run: tox -e pytest-local -- --cov=powersimdata 29 | 
-------------------------------------------------------------------------------- /powersimdata/scenario/tests/test_check.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from powersimdata.scenario.check import _check_scenario_is_in_analyze_state 4 | from powersimdata.tests.mock_scenario import MockScenario 5 | 6 | 7 | @pytest.fixture 8 | def mock_scenario(): 9 | return MockScenario() 10 | 11 | 12 | def test_check_scenario_is_in_analyze_state_argument_type(): 13 | arg = (1, "foo") 14 | for a in arg: 15 | with pytest.raises(TypeError): 16 | _check_scenario_is_in_analyze_state(a) 17 | 18 | 19 | def test_check_scenario_is_in_analyze_state_argument_value(): 20 | input = MockScenario() 21 | input.state = "Create" 22 | with pytest.raises(ValueError): 23 | _check_scenario_is_in_analyze_state(input) 24 | 25 | 26 | def test_check_scenario_is_in_analyze(mock_scenario): 27 | _check_scenario_is_in_analyze_state(mock_scenario) 28 | -------------------------------------------------------------------------------- /powersimdata/network/constants/carrier/plants.py: -------------------------------------------------------------------------------- 1 | from powersimdata.network.constants.carrier.color import get_color 2 | from powersimdata.network.constants.carrier.efficiency import get_efficiency 3 | from powersimdata.network.constants.carrier.emission import get_emission 4 | from powersimdata.network.constants.carrier.label import get_label 5 | from powersimdata.network.constants.carrier.resource import get_resource 6 | from powersimdata.network.helpers import check_model 7 | 8 | 9 | def get_plants(model): 10 | """Return plant constants. 11 | 12 | :param str model: grid model 13 | :return: (*dict*) -- plants information. 
def get_plants(model):
    """Return plant constants.

    :param str model: grid model
    :return: (*dict*) -- plants information.
    """
    check_model(model)

    # Merge every per-carrier constant table into one mapping; later getters
    # overwrite earlier ones on key collision, matching dict-unpacking order.
    plants = {}
    for getter in (get_color, get_efficiency, get_emission, get_label, get_resource):
        plants.update(getter(model))
    return plants
startsWith(github.ref, 'refs/tags') 26 | uses: pypa/gh-action-pypi-publish@release/v1 27 | with: 28 | password: ${{ secrets.PYPI_API_TOKEN }} 29 | -------------------------------------------------------------------------------- /powersimdata/network/constants/region/tests/test_zones.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pytest 3 | 4 | from powersimdata.network.constants.region.zones import check_zone 5 | 6 | 7 | def test_check_zone_argument_type(): 8 | with pytest.raises(TypeError, match="zone must be a pandas.DataFrame"): 9 | check_zone("usa_tamu", 0) 10 | 11 | 12 | def test_check_zone_index(): 13 | zone = pd.DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]}).rename_axis(index="id") 14 | with pytest.raises(ValueError) as excinfo: 15 | check_zone("usa_tamu", zone) 16 | assert str(excinfo.value) == "index must be named zone_id" 17 | 18 | 19 | def test_check_zone_column(): 20 | zone = pd.DataFrame( 21 | {"country": [1, 2, 3], "interconnect": [4, 5, 6], "time_zone": [7, 8, 9]} 22 | ).rename_axis(index="zone_id") 23 | with pytest.raises(ValueError) as excinfo: 24 | check_zone("usa_tamu", zone) 25 | assert str(excinfo.value) == "zone must have: abv | state | zone_name as columns" 26 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = pytest-local, format, flake8 3 | skipsdist = true 4 | 5 | [testenv] 6 | deps = 7 | pytest: pipenv 8 | {format,checkformatting}: black 9 | {format,checkformatting}: isort 10 | flake8: flake8 11 | flake8: pep8-naming 12 | commands = 13 | pytest: pipenv sync --dev 14 | local: pytest -m 'not integration' {posargs} 15 | integration: pytest {posargs} 16 | format: black . 17 | format: isort . 18 | checkformatting: black . --check --diff 19 | checkformatting: isort --check --diff . 
20 | flake8: flake8 powersimdata/ 21 | 22 | [flake8] 23 | ignore = E501,W503,E741,E203,W605 24 | 25 | [isort] 26 | profile = black 27 | 28 | [pytest] 29 | addopts = --ignore-glob=**/sql/* 30 | markers = 31 | integration: marks tests that require external dependencies (deselect with '-m "not integration"') 32 | db: marks tests that connect to a local database 33 | ssh: marks tests that connect to the server over ssh 34 | wip: marks tests undergoing development 35 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/zones.py: -------------------------------------------------------------------------------- 1 | def calculate_interzone_capacity(grid): 2 | """For each zone in a grid, calculate the aggreagte zone transmission 3 | capacity (in a transport model, ignoring power flow). 4 | 5 | :param powersimdata.input.grid.Grid grid: a grid instance. 6 | :return: (*pandas.Series*) -- index is zone IDs, values are total transmission 7 | capacity (MW). 
def calculate_interzone_capacity(grid):
    """For each zone in a grid, calculate the aggregate zone transmission
    capacity (in a transport model, ignoring power flow).

    :param powersimdata.input.grid.Grid grid: a grid instance.
    :return: (*pandas.Series*) -- index is zone IDs, values are total transmission
        capacity (MW).
    """
    # Tag each branch with the zone of both endpoints so inter-zone branches
    # can be identified
    branch = grid.branch.assign(
        from_zone_id=grid.branch.from_bus_id.map(grid.bus["zone_id"]),
        to_zone_id=grid.branch.to_bus_id.map(grid.bus["zone_id"]),
    )
    # Only branches crossing a zone boundary contribute transfer capacity
    filtered_branch = branch.query("from_zone_id != to_zone_id")
    # Select 'rateA' before summing: aggregating the whole frame sums (or, in
    # recent pandas, raises on) unrelated columns such as the bus IDs
    from_cap = filtered_branch.groupby("from_zone_id")["rateA"].sum()
    to_cap = filtered_branch.groupby("to_zone_id")["rateA"].sum()
    # Element-wise sum; a zone appearing on only one side counts 0 on the other
    return from_cap.combine(to_cap, lambda x, y: x + y, fill_value=0)
def calculate_substation_capacity(grid):
    """For each substation in a grid, calculate the total substation transmission
    capacity (in a transport model, ignoring power flow).

    :param powersimdata.input.grid.Grid grid: a grid instance.
    :return: (*pandas.Series*) -- index is substation IDs, values are total
        transmission capacity (MW).
    """
    # Tag each branch with the substation of both endpoints
    branch = grid.branch.assign(
        from_sub_id=grid.branch.from_bus_id.map(grid.bus2sub.sub_id),
        to_sub_id=grid.branch.to_bus_id.map(grid.bus2sub.sub_id),
    )
    # Intra-substation branches carry no transfer capacity; drop them
    filtered_branch = branch.query("from_sub_id != to_sub_id")
    # Select 'rateA' before summing: aggregating the whole frame sums (or, in
    # recent pandas, raises on) unrelated columns such as the bus IDs
    from_cap = filtered_branch.groupby("from_sub_id")["rateA"].sum()
    to_cap = filtered_branch.groupby("to_sub_id")["rateA"].sum()
    # Element-wise sum; a sub appearing on only one side counts 0 on the other
    return from_cap.combine(to_cap, lambda x, y: x + y, fill_value=0)
def calculate_inflation(start_year, end_year=None):
    """Calculate the overall inflation between two years.

    :param int start_year: Year to start calculating inflation from.
    :param int/None end_year: Year to calculate inflation to. Calculates using the
        rates from [start_year, end_year), since we calculate _to_ end_year, not
        _through_ end_year. If None, inflates to as recent as possible.
    :return: (*float*) -- Inflation factor.
    :raises ValueError: if either endpoint falls outside the known rate data.
    """
    if start_year not in inflation_rate_pct:
        raise ValueError(f"No inflation data for year {start_year}")
    if end_year is None:
        # Inflate through the most recent year with a known rate
        end_year = max(inflation_rate_pct) + 1
    if (end_year - 1) not in inflation_rate_pct:
        raise ValueError(f"No inflation data for year {(end_year - 1)}")
    # Compound the yearly percentage rates over [start_year, end_year)
    factor = 1
    for year in range(start_year, end_year):
        factor *= 1 + inflation_rate_pct[year] / 100
    return factor
class HIFLD(FromCSV):
    """HIFLD network.

    :param str/iterable interconnect: interconnect name(s).
    """

    def __init__(self, interconnect):
        """Constructor."""
        super().__init__()

        # Model identifier used to select constants and validate interconnects
        self.grid_model = "hifld"
        # Normalize the user-supplied interconnect(s) to canonical form
        self.interconnect = check_and_format_interconnect(
            interconnect, model=self.grid_model
        )
        self.model_immutables = ModelImmutables(
            self.grid_model, interconnect=interconnect
        )
        # The CSV data files live alongside this module
        self._set_data_loc(os.path.dirname(__file__))

    def build(self):
        """Build network: load CSV tables and attach storage constants."""
        self._build(self.interconnect, self.grid_model)
        self.storage.update(get_storage(self.grid_model))
class TAMU(FromCSV):
    """TAMU network.

    :param str/iterable interconnect: interconnect name(s).
    """

    def __init__(self, interconnect):
        """Constructor."""
        super().__init__()

        # Model identifier used to select constants and validate interconnects
        self.grid_model = "usa_tamu"
        # Normalize the user-supplied interconnect(s) to canonical form
        self.interconnect = check_and_format_interconnect(
            interconnect, model=self.grid_model
        )
        self.model_immutables = ModelImmutables(
            self.grid_model, interconnect=interconnect
        )
        # The CSV data files live alongside this module
        self._set_data_loc(os.path.dirname(__file__))

    def build(self):
        """Build network: load CSV tables and attach storage constants."""
        self._build(self.interconnect, self.grid_model)
        self.storage.update(get_storage(self.grid_model))
28 | """ 29 | path = os.path.join(data_loc, filename) 30 | if os.path.isfile(path): 31 | print("Reading %s" % filename) 32 | return pd.read_csv(path, index_col=0, float_precision="high") 33 | else: 34 | raise FileNotFoundError(f"{path} cannot be found") 35 | -------------------------------------------------------------------------------- /powersimdata/design/compare/helpers.py: -------------------------------------------------------------------------------- 1 | def _reindex_as_necessary(df1, df2, check_columns): 2 | """Check for indices with mismatched entries in specified columns. If any entries 3 | don't match, reindex based on these columns such that there are no shared indices 4 | with mismatched entries in these columns. 5 | 6 | :param pandas.DataFrame df1: data frame containing ``check_columns``. 7 | :param pandas.DataFrame df2: data frame containing ``check_columns``. 8 | :param iterable check_columns: column 9 | :return: (*tuple*) -- data frames, reindexed as necessary. 10 | """ 11 | # Coerce to list for safety, since pandas interprets lists and tuples differently 12 | check_columns_list = list(check_columns) 13 | shared_indices = set(df1.index) & set(df2.index) 14 | check1 = df1.loc[shared_indices, check_columns_list] 15 | check2 = df2.loc[shared_indices, check_columns_list] 16 | if not check1.equals(check2): 17 | df1 = df1.set_index(keys=check_columns_list, drop=False, append=True) 18 | df2 = df2.set_index(keys=check_columns_list, drop=False, append=True) 19 | return df1, df2 20 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/tests/test_substations.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | from powersimdata.design.transmission.substations import calculate_substation_capacity 4 | from powersimdata.tests.mock_grid import MockGrid 5 | 6 | 7 | def test_calculate_substation_capacity(): 8 | mock_sub = {"sub_id": [1, 2, 
def test_calculate_substation_capacity():
    """Inter-substation rateA sums are attributed to both end substations."""
    grid_attrs = {
        "sub": {"sub_id": [1, 2, 3, 4]},
        "bus2sub": {"bus_id": [10, 20, 21, 30, 40], "sub_id": [1, 2, 2, 3, 4]},
        "branch": {
            "branch_id": [200, 400, 420, 600, 601, 1200],
            "from_bus_id": [10, 40, 20, 20, 30, 30],
            "to_bus_id": [20, 10, 21, 30, 20, 40],
            "rateA": [1, 2, 4, 8, 16, 32],
        },
    }
    expected_return = pd.Series({1: 3, 2: 25, 3: 56, 4: 34})
    actual = calculate_substation_capacity(MockGrid(grid_attrs=grid_attrs))
    assert actual.equals(expected_return)
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /powersimdata/design/investment/data/LineRegMult.csv: -------------------------------------------------------------------------------- 1 | kV,MW,ENT,FRCC,MAPP US,MISO IN,MISO MI,MISO MO-IL,MISO W,MISO WUMS,NE,NEISO,NonRTO Midwest,NYISO A-F,NYISO GHI,NYISO J-K,PJM E,PJM ROM,PJM ROR,SOCO,SPP N,SPP S,TVA,VACAR,NP15,SP15,ERCOT,RMPA,AZ-NM-SNV Coal,NWPP Coal 2 | 229,300,1.4,1.05,0.7,1.05,1.05,1.05,1.05,1.05,0.9,2.25,1.85,1.35,1.35,13.65,,,,,0.9,0.9,0.95,0.9,2.25,2.25,1,1,1,1 3 | 230,600,1.6,1.75,1.05,0.65,0.65,0.65,0.65,0.65,1.05,,,1.95,1.95,,,,,1.2,1.05,1.05,1.25,0.9,2.25,2.25,1,1,1,1 4 | 230,900,1.25,1.6,0.85,0.7,0.7,0.7,0.7,0.7,0.7,2.85,,,,,,,,0.95,0.7,0.7,1.05,0.75,2.25,2.25,1,1,1,1 5 | 230,1200,1.4,1.4,1.05,1.1,1.1,1.1,1.1,1.1,1,,,,,,,,,0.95,1,1,1,0.75,2.25,2.25,1,1,1,1 6 | 345,500,,,,,,,,,,0.65,,,,1.35,,,,,,,,,,,,,, 7 | 345,900,1.8,,0.65,0.65,0.6,0.45,0.6,0.65,0.75,2.15,1.35,2.05,3,,,,,,0.75,0.75,,,2.25,2.25,1,1,1,1 8 | 345,1800,1.7,,0.75,0.7,0.65,0.5,0.65,0.7,0.6,1.4,1.1,,,,,,1.45,,0.6,0.6,,,2.25,2.25,1,1,1,1 9 | 345,3600,,,,0.75,0.85,0.75,0.8,0.95,0.85,1.6,1.75,,,,,,,,0.85,0.85,,,2.25,2.25,1,1,1,1 10 | 500,2600,1.4,0.95,0.55,0.6,,0.6,0.6,0.7,0.7,,1.2,,,,4,2.3,1.25,0.9,0.7,0.7,0.75,0.45,2.25,2.25,1,1,1,1 11 | 765,4000,0.85,,,0.6,0.7,0.55,0.55,0.65,0.6,,,,,,2.75,2.55,0.85,,0.6,0.6,,,2.25,2.25,1,1,1,1 -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = powersimdata 3 | version = 0.5.5 4 | author = Breakthrough Energy 5 | author_email = 
sciences@breakthroughenergy.org 6 | description = Power Simulation Data 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | url = https://github.com/Breakthrough-Energy/PowerSimData 10 | project_urls = 11 | Bug Tracker = https://github.com/Breakthrough-Energy/PowerSimData/issues 12 | classifiers = 13 | License :: OSI Approved :: MIT License 14 | Operating System :: OS Independent 15 | Programming Language :: Python :: 3 16 | Programming Language :: Python :: 3.8 17 | Programming Language :: Python :: 3.9 18 | Programming Language :: Python :: 3.10 19 | 20 | [options] 21 | zip_safe = False 22 | packages = find: 23 | python_requires = >=3.8 24 | install_requires = 25 | networkx 26 | numpy 27 | pandas 28 | paramiko 29 | scipy 30 | tqdm 31 | requests 32 | fs==2.4.14 33 | fs.sshfs 34 | fs-azureblob>=0.2.1 35 | pypsa 36 | 37 | [options.package_data] 38 | powersimdata = 39 | network/*/data/*.csv 40 | design/investment/data/*.csv 41 | design/investment/data/*/* 42 | utility/templates/*.csv 43 | -------------------------------------------------------------------------------- /powersimdata/design/compare/generation.py: -------------------------------------------------------------------------------- 1 | from powersimdata.design.compare.helpers import _reindex_as_necessary 2 | from powersimdata.input.check import _check_data_frame 3 | 4 | 5 | def calculate_plant_difference(plant1, plant2): 6 | """Calculate the capacity differences between two plant data frames. If capacity in 7 | ``plant2`` is larger than capacity in ``plant1``, the return will be positive. 8 | 9 | :param pandas.DataFrame plant1: first plant data frame. 10 | :param pandas.DataFrame plant2: second plant data frame. 11 | :return: (*pandas.DataFrame*) -- merged data frames with a new 'diff' column. 
def calculate_plant_difference(plant1, plant2):
    """Calculate the capacity differences between two plant data frames. If capacity in
    ``plant2`` is larger than capacity in ``plant1``, the return will be positive.

    :param pandas.DataFrame plant1: first plant data frame.
    :param pandas.DataFrame plant2: second plant data frame.
    :return: (*pandas.DataFrame*) -- merged data frames with a new 'diff' column.
    """
    _check_data_frame(plant1, "plant1")
    _check_data_frame(plant2, "plant2")
    # Reindex so that we don't get NaN when calculating upgrades for new generators
    plant1, plant2 = _reindex_as_necessary(plant1, plant2, ["bus_id", "type"])
    plant_merge = plant1.merge(
        plant2, how="outer", right_index=True, left_index=True, suffixes=(None, "_2")
    )
    # Missing Pmax on either side means "no plant there": treat as 0 MW
    plant_merge["diff"] = plant_merge.Pmax_2.fillna(0) - plant_merge.Pmax.fillna(0)
    # Ensure that lats & lons get filled in as necessary from plant2 entries.
    # Assign the filled column back instead of fillna(inplace=True) on a column
    # selection, which is unreliable under pandas copy-on-write.
    for coord in ("lat", "lon"):
        plant_merge[coord] = plant_merge[coord].fillna(plant_merge[f"{coord}_2"])

    return plant_merge
37 | assert not n.generators_t.p.empty 38 | assert len(n.buses) == len(grid.bus) 39 | assert_columns_preserved(n) 40 | 41 | n = export_to_pypsa(grid, add_substations=True) 42 | assert len(n.buses) == len(grid.sub) + len(grid.bus) 43 | -------------------------------------------------------------------------------- /powersimdata/data_access/tests/sql/test_sql_store.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from powersimdata.data_access.sql_store import SqlStore 4 | 5 | 6 | class DummySqlStore(SqlStore): 7 | table = "my_table" 8 | columns = ["id", "foo", "bar"] 9 | 10 | 11 | @pytest.fixture 12 | def store(): 13 | with DummySqlStore() as store: 14 | yield store 15 | 16 | 17 | @pytest.mark.integration 18 | @pytest.mark.db 19 | def test_select_where(store): 20 | query = store.select_where("id") 21 | sql_str = query.as_string(store.conn) 22 | expected = 'SELECT "id","foo","bar" FROM "my_table" WHERE "id" = %s' 23 | assert expected == sql_str 24 | 25 | 26 | @pytest.mark.integration 27 | @pytest.mark.db 28 | def test_select_all(store): 29 | query = store.select_all() 30 | sql_str = query.as_string(store.conn) 31 | expected = 'SELECT "id","foo","bar" FROM "my_table"' 32 | assert expected == sql_str 33 | 34 | 35 | @pytest.mark.integration 36 | @pytest.mark.db 37 | def test_insert(store): 38 | query = store.insert() 39 | sql_str = query.as_string(store.conn) 40 | expected = 'INSERT INTO "my_table" ("id","foo","bar") VALUES (%s,%s,%s)' 41 | assert expected == sql_str 42 | 43 | 44 | @pytest.mark.integration 45 | @pytest.mark.db 46 | def test_delete(store): 47 | query = store.delete(key="id") 48 | sql_str = query.as_string(store.conn) 49 | expected = 'DELETE FROM "my_table" WHERE "id" = %s' 50 | assert expected == sql_str 51 | -------------------------------------------------------------------------------- /powersimdata/input/exporter/export_to_reise.py: 
def export_grid(grid, file_path):
    """Save a grid object locally.

    :param powersimdata.input.grid.Grid grid: a Grid object
    :param str file_path: path to save the result, including the filename
    """
    print(f"Writing grid object to {file_path} on local machine")
    # pickle writes bytes, so the file must be opened in binary mode; text
    # append mode ("a") raises TypeError and would also pile pickles onto a
    # stale file. "wb" writes one fresh pickle per call.
    with open(file_path, "wb") as f:
        pickle.dump(grid, f)
class MockScenarioInfo(ScenarioInfo):
    """Test double for :class:`ScenarioInfo`: every query method returns a
    fixed stub value instead of computing from real scenario data.
    """

    def __init__(self, scenario=None):
        # Sentinel returned by all numeric query methods below
        self._DEFAULT_FLOAT = 42
        scenario = MockScenario() if scenario is None else scenario
        super().__init__(scenario)

    def area_to_loadzone(self, area, area_type=None):
        # The mock has no load zones
        return set()

    def get_available_resource(self, area, area_type=None):
        # The mock has no resources
        return []

    def get_demand(self, area, start_time, end_time, area_type=None):
        return self._DEFAULT_FLOAT

    def get_capacity(self, gentype, area, area_type=None):
        return self._DEFAULT_FLOAT

    def get_generation(self, gentype, area, start_time, end_time, area_type=None):
        return self._DEFAULT_FLOAT

    def get_profile_resource(self, gentype, area, start_time, end_time, area_type=None):
        return self._DEFAULT_FLOAT

    def get_curtailment(self, gentype, area, start_time, end_time, area_type=None):
        return self._DEFAULT_FLOAT

    def get_capacity_factor(self, gentype, area, start_time, end_time, area_type=None):
        return self._DEFAULT_FLOAT

    def get_no_congest_capacity_factor(
        self, gentype, area, start_time, end_time, area_type=None
    ):
        return self._DEFAULT_FLOAT
class Context:
    """Factory for data access instances"""

    @staticmethod
    def get_data_access(make_fs=None):
        """Return a data access instance appropriate for the current
        environment.

        :param callable make_fs: a function that returns a filesystem instance, or
            None to use a default
        :return: (:class:`powersimdata.data_access.data_access.DataAccess`) -- a data access
            instance
        """
        # Anything other than server deployment uses local data access
        if server_setup.DEPLOYMENT_MODE != DeploymentMode.Server:
            return LocalDataAccess()
        if make_fs is None:
            # Default: let SSHDataAccess build its own filesystem
            return SSHDataAccess(None)
        return SSHDataAccess(make_fs())

    @staticmethod
    def get_launcher(scenario):
        """Return instance for interaction with simulation engine

        :param powersimdata.scenario.scenario.Scenario scenario: a scenario object
        :return: (:class:`powersimdata.data_access.launcher.Launcher`) -- a launcher instance
        """
        mode = server_setup.DEPLOYMENT_MODE
        if mode == DeploymentMode.Server:
            return SSHLauncher(scenario)
        if mode == DeploymentMode.Container:
            return HttpLauncher(scenario)
        # Fallback: run the engine natively on this machine
        return NativeLauncher(scenario)
on server. 14 | 15 | :param bool confirm: prompt before each batch 16 | """ 17 | scenario_id = self._scenario_info["id"] 18 | _join = self._data_access.join 19 | 20 | input_dir = _join(*server_setup.INPUT_DIR) 21 | output_dir = _join(*server_setup.OUTPUT_DIR) 22 | 23 | proceed = self._data_access.remove( 24 | input_dir, f"{scenario_id}_*", confirm=confirm 25 | ) 26 | if proceed: 27 | proceed = self._data_access.remove( 28 | output_dir, f"{scenario_id}_*", confirm=confirm 29 | ) 30 | if proceed: 31 | pattern = f"scenario_{scenario_id}/*" 32 | proceed = self._data_access.remove( 33 | server_setup.EXECUTE_DIR, pattern, confirm=confirm 34 | ) 35 | 36 | if not proceed: 37 | print("Cancelling deletion.") 38 | return 39 | 40 | print("--> Deleting entries in scenario and execute list") 41 | self._scenario_list_manager.delete_entry(scenario_id) 42 | self._execute_list_manager.delete_entry(scenario_id) 43 | 44 | # Delete attributes 45 | self._clean() 46 | 47 | def _clean(self): 48 | """Clean after deletion.""" 49 | self._scenario_info = None 50 | -------------------------------------------------------------------------------- /powersimdata/network/usa_tamu/data/dcline.csv: -------------------------------------------------------------------------------- 1 | dcline_id,from_bus_id,to_bus_id,status,Pf,Pt,Qf,Qt,Vf,Vt,Pmin,Pmax,QminF,QmaxF,QminT,QmaxT,loss0,loss1,muPmin,muPmax,muQminF,muQmaxF,muQminT,muQmaxT,from_interconnect,to_interconnect 2 | 0,69999,2077254,1,200.0,190.0,0,0,1,1,-200,200,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 3 | 1,67947,2070132,1,200.0,190.0,0,0,1,1,-200,200,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 4 | 2,67315,2070170,1,100.0,95.0,0,0,1,1,-100,100,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 5 | 3,66353,2060653,1,200.0,190.0,0,0,1,1,-200,200,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 6 | 4,65584,2060761,1,210.0,200.0,0,0,1,1,-210,210,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 7 | 5,61858,2053305,1,200.0,190.0,0,0,1,1,-200,200,0,0,0,0,0,0,0,0,0,0,0,0,Eastern,Western 8 | 
def get_profile_version(_fs, grid_model, kind, end_use, tech):
    """Return available versions of an electrified demand profile.

    :param _fs: filesystem instance rooted at the profile data (assumed to
        expose ``makedirs``/``listdir`` a la ``fs.base.FS`` -- see callers).
    :param str grid_model: grid model name.
    :param str kind: kind of electrification (e.g. *'building'*).
    :param str end_use: electrification use case.
    :param str tech: technology used for the given use case.
    :return: (*list*) -- version strings stripped from matching file names;
        empty if no file matches.
    """
    # recreate=True makes the lookup safe when the directory does not exist yet
    _fs = _fs.makedirs(f"raw/{grid_model}/{kind}", recreate=True)
    base_name = f"{end_use}_{tech}_"
    matching = [f for f in _fs.listdir(".") if base_name in f]

    # "res_cooking_heat_pump_v1.csv" -> "v1"
    return [f.replace(base_name, "").replace(".csv", "") for f in matching]
def test_get_file_path():
    """The raw profile path is raw/<grid_model>/<field>_<version>.csv."""
    scenario_info = {"base_wind": "v8", "grid_model": "europe"}
    result = ProfileInput()._get_file_path(scenario_info, "wind")
    assert result == "raw/europe/wind_v8.csv"
def get_blob_credential():
    """Return the blob storage credential, preferring the read-write token.

    :return: (*str*) -- the read-write token from the environment when set,
        otherwise the built-in read-only SAS token.
    """
    if BLOB_TOKEN_RW is None:
        return BLOB_TOKEN_RO
    return BLOB_TOKEN_RW
35 | 36 | :return: (*str*) -- user name to be used to log into server. 37 | """ 38 | server_user = os.getenv("BE_SERVER_USER") 39 | if server_user is not None: 40 | return server_user 41 | 42 | dir_path = os.path.dirname(os.path.realpath(__file__)) 43 | try: 44 | with open(os.path.join(dir_path, ".server_user")) as f: 45 | server_user = f.read().strip() 46 | except FileNotFoundError: 47 | server_user = os.getlogin() 48 | 49 | return server_user 50 | -------------------------------------------------------------------------------- /powersimdata/data_access/README.md: -------------------------------------------------------------------------------- 1 | ## About 2 | The `powersimdata.data_access` package contains implementations of storage to 3 | be used by the simulation framework generally. By providing a consistent api 4 | for any given set of data, we can decouple the storage medium from application 5 | logic. 6 | 7 | Currently, there are csv and sql implementations for the scenario list and 8 | execute list. 9 | 10 | ## Usage 11 | To try this out, use the `stack.yml` to run a local instance of postgres, plus an admin ui. 12 | The integration tests for the db layer are run against this instance, and you can also connect to it with `psql`, 13 | the standard cli tool for interacting with postgres. 14 | 15 | Start the container using the following command, taken from the postgres 16 | [docs](https://github.com/docker-library/docs/blob/master/postgres/README.md). 17 | ``` 18 | docker-compose -f stack.yml up 19 | ``` 20 | 21 | 22 | ## Schema creation 23 | When the container starts, it will run the `.sql` files in the mounted volume 24 | to initialize the database. 
class InterconnectMapping:
    """Interconnect mapping for grid models

    :param str model: grid model.
    :param pandas.DataFrame zone: information on zones of a grid model.
    """

    def __init__(self, model, zone):
        geo = get_geography(model)
        region = model2region[model]

        # All non-empty combinations of interconnect names present in the
        # zone table. The repr/literal_eval round trip rewrites the model's
        # sub-region label into the region label inside the formatted
        # combination -- presumably so combined names use the region-level
        # naming; TODO confirm against get_geography/format semantics.
        self.interconnect = {
            ast.literal_eval(repr(format(c)).replace(geo["sub"][region], region))
            for c in powerset(zone["interconnect"].unique(), 1)
        }
        # interconnect (or combination) --> time zone, per geography tables.
        self.interconnect2timezone = {
            i: geo["interconnect2timezone"][i] for i in self.interconnect
        }
        # interconnect name --> canonical interconnect, restricted to the
        # interconnects actually present in this model.
        self.name2interconnect = {
            i: geo["name2interconnect"][i] for i in self.interconnect
        }
        self.name2component = {i: geo["name2component"][i] for i in self.interconnect}
        # interconnect --> set of load zone names (from the zone table).
        self.interconnect2loadzone = (
            zone.groupby("interconnect")["zone_name"].apply(set).to_dict()
        )
        # interconnect --> set of zone ids (zone_id is the index of ``zone``).
        self.interconnect2id = (
            zone.reset_index().groupby("interconnect")["zone_id"].apply(set).to_dict()
        )
        # interconnect --> set of state/country abbreviations.
        self.interconnect2abv = zone.groupby("interconnect")["abv"].apply(set).to_dict()
44 | """ 45 | return InterconnectMapping(model, zone).__dict__ 46 | -------------------------------------------------------------------------------- /powersimdata/scenario/tests/test_create.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pandas.testing import assert_series_equal 3 | 4 | from powersimdata.scenario.scenario import Scenario 5 | 6 | 7 | @pytest.mark.integration 8 | def test_get_demand_and_get_bus_demand(): 9 | scenario = Scenario("") 10 | scenario.set_grid(interconnect="Texas") 11 | # Before we set the profile version, we should get errors trying to access 12 | with pytest.raises(Exception): 13 | scenario.get_bus_demand() 14 | with pytest.raises(Exception): 15 | scenario.get_demand() 16 | # After we set the profile version, we should get the right len (default full year) 17 | scenario.set_base_profile("demand", "vJan2021") 18 | assert len(scenario.get_bus_demand()) == 8784 19 | scenario.set_time("2016-01-01 00:00", "2016-01-03 23:00", "24H") 20 | demand = scenario.get_demand() 21 | bus_demand = scenario.get_bus_demand() 22 | assert len(demand) == 72 23 | assert len(bus_demand) == 72 24 | assert_series_equal(demand.sum(axis=1), bus_demand.sum(axis=1)) 25 | unscaled_total_demand = demand.sum().sum() 26 | scenario.change_table.scale_demand(zone_id={301: 1.5}) 27 | new_demand = scenario.get_demand() 28 | new_bus_demand = scenario.get_bus_demand() 29 | assert_series_equal(new_demand.sum(axis=1), new_bus_demand.sum(axis=1)) 30 | assert new_demand.sum().sum() > unscaled_total_demand 31 | 32 | 33 | @pytest.mark.integration 34 | def test_get_solar(): 35 | scenario = Scenario("") 36 | scenario.set_grid(interconnect="Texas") 37 | with pytest.raises(Exception): 38 | scenario.get_solar() 39 | scenario.set_base_profile("solar", "vJan2021") 40 | assert len(scenario.get_solar()) == 8784 41 | scenario.set_time("2016-01-01 00:00", "2016-01-03 23:00", "24H") 42 | assert len(scenario.get_solar()) == 72 43 | 
def make_temp(fs, path):
    """Create *path* on *fs*, creating parent directories, and write the
    shared test payload into it."""
    parent = fs2.path.dirname(path)
    fs.makedirs(parent, recreate=True)
    with fs.open(path, "wb") as handle:
        handle.write(CONTENT)
class LoadzoneMapping:
    """Loadzone mapping for grid models

    :param pandas.DataFrame zone: information on zones of a grid model,
        indexed by zone id.
    """

    def __init__(self, zone):
        # Set of all load zone names.
        self.loadzone = set(zone["zone_name"])
        # zone id --> time zone / load zone name.
        self.id2timezone = zone["time_zone"].to_dict()
        self.id2loadzone = zone["zone_name"].to_dict()
        # time zone --> set of zone ids (zone_id is the index of ``zone``).
        self.timezone2id = (
            zone.reset_index().groupby("time_zone")["zone_id"].apply(set).to_dict()
        )
        # load zone name --> zone id. Names are unique per zone, so a plain
        # zip over the index suffices; the previous
        # groupby("zone_name")["zone_id"].apply(int) relied on calling int()
        # on a single-element Series, which is deprecated in recent pandas.
        self.loadzone2id = dict(zip(zone["zone_name"], zone.index.tolist()))
        # load zone name --> abbreviation / interconnect.
        self.loadzone2abv = dict(zip(zone["zone_name"], zone["abv"]))
        self.loadzone2interconnect = dict(zip(zone["zone_name"], zone["interconnect"]))
48 | """ 49 | _lookup = { 50 | "usa_tamu": USALoadzoneMapping, 51 | "hifld": USALoadzoneMapping, 52 | "europe_tub": EULoadzoneMapping, 53 | } 54 | return _lookup[model](zone).__dict__ 55 | -------------------------------------------------------------------------------- /powersimdata/utility/tests/test_distance.py: -------------------------------------------------------------------------------- 1 | from math import sqrt 2 | 3 | from numpy.testing import assert_almost_equal, assert_array_almost_equal 4 | 5 | from powersimdata.utility.distance import angular_distance, find_closest_neighbor, ll2uv 6 | 7 | 8 | def test_ll2uv(): 9 | assert_array_almost_equal(ll2uv(0, 0), [1.0, 0.0, 0.0]) 10 | assert_array_almost_equal(ll2uv(0, 90), [0.0, 0.0, 1.0]) # north pole 11 | assert_array_almost_equal(ll2uv(45, 90), [0.0, 0.0, 1.0]) # north pole 12 | assert_array_almost_equal(ll2uv(90, 90), [0.0, 0.0, 1.0]) # north pole 13 | assert_array_almost_equal(ll2uv(-45, -90), [0.0, 0.0, -1.0]) # south pole 14 | assert_array_almost_equal(ll2uv(-90, -90), [0.0, 0.0, -1.0]) # south pole 15 | assert_array_almost_equal(ll2uv(-120, -90), [0.0, 0, -1.0]) # south pole 16 | assert_array_almost_equal(ll2uv(45, 45), [1 / 2, 1 / 2, sqrt(2) / 2]) 17 | assert_array_almost_equal(ll2uv(60, 60), [1 / 4, sqrt(3) / 4, sqrt(3) / 2]) 18 | 19 | 20 | def test_angular_distance(): 21 | # pole to pole 22 | assert_almost_equal(angular_distance([0.0, 0, 1.0], [0.0, 0, -1.0]), 180) 23 | # equator to north pole 24 | assert_almost_equal(angular_distance([1.0, 0, 0.0], [0.0, 0, 1.0]), 90) 25 | # equator to south pole 26 | assert_almost_equal( 27 | angular_distance([sqrt(2) / 2, sqrt(2) / 2, 0.0], [0.0, 0, -1.0]), 90 28 | ) 29 | # 45 deg longitude to 60 deg longitude 30 | assert_almost_equal( 31 | angular_distance([sqrt(2) / 2, sqrt(2) / 2, 0.0], [1 / 2, sqrt(3) / 2, 0.0]), 15 32 | ) 33 | 34 | 35 | def test_find_closest_neighbor(): 36 | point = (45, 45) 37 | neighbors = [ 38 | [0, 45], 39 | [10, 50], 40 | [40, 40], 41 | 
def test_check_bus_id():
    """Valid ids pass; ids from another grid or negative ids raise."""
    valid = pd.Series([3001001])
    check_bus_id(valid, grid)

    # Same id checked against a different interconnect's grid.
    with pytest.raises(ValueError):
        check_bus_id(valid, Grid("Western"))

    # Negative ids do not exist in any grid.
    with pytest.raises(ValueError):
        check_bus_id(pd.Series([-1]), grid)
"from_bus", 56 | "to_bus_id": "to_bus", 57 | } 58 | ) 59 | ec.set_branch(branch) 60 | 61 | plant = grid.plant.head().loc[:, ["bus_id", "type"]] 62 | plant["marginal_cost"] = 4 63 | ec.set_plant(plant) 64 | 65 | storage = plant.loc[:, ["bus_id"]].copy() 66 | ec.set_storage(storage) 67 | -------------------------------------------------------------------------------- /powersimdata/data_access/ssh_fs.py: -------------------------------------------------------------------------------- 1 | from fs.subfs import SubFS 2 | from tqdm import tqdm 3 | 4 | 5 | def progress_bar(*args, **kwargs): 6 | """Creates progress bar 7 | 8 | :param \\*args: variable length argument list passed to the tqdm constructor. 9 | :param \\*\\*kwargs: arbitrary keyword arguments passed to the tqdm constructor. 10 | """ 11 | bar = tqdm(*args, **kwargs) 12 | last = [0] 13 | 14 | def show(a, b): 15 | bar.total = int(b) 16 | bar.update(int(a - last[0])) 17 | last[0] = a 18 | 19 | return show, bar 20 | 21 | 22 | class WrapSSHFS(SubFS): 23 | """Wrapper around another filesystem which is rooted at the given path and adds 24 | progress bar for download 25 | 26 | :param fs.base.FS parent_fs: the filesystem instance to wrap 27 | :param str path: the path which will be the root of the wrapped filesystem 28 | """ 29 | 30 | def __init__(self, parent_fs, path): 31 | self.client = parent_fs._client 32 | super().__init__(parent_fs, path) 33 | 34 | def download(self, path, file, chunk_size=None, **options): 35 | """Wrapper around pyfilesystem download with progress bar""" 36 | 37 | cbk, bar = progress_bar(ascii=True, unit="b", unit_scale=True) 38 | super().download(path, file, chunk_size=chunk_size, callback=cbk, **options) 39 | bar.close() 40 | 41 | def exec_command(self, command): 42 | """Wrapper around paramiko exec_command 43 | 44 | :param str command: the command to execute 45 | :return: (*tuple*) -- standard streams 46 | """ 47 | return self.client.exec_command(command) 48 | 49 | def checksum(self, 
def test_no_collision():
    """Structurally different nestings must produce distinct cache keys."""
    keys = [
        cache_key([["foo"], ["bar"]]),
        cache_key([[["foo"], ["bar"]]]),
        cache_key([["foo"], "bar"]),
    ]
    assert len(set(keys)) == len(keys)
def test_area_to_loadzone_argument_value():
    """Invalid area / area_type combinations raise ValueError."""
    with pytest.raises(ValueError):
        area_to_loadzone("usa_tamu", "all", area_type="province")

    invalid_combos = [
        ("California", "loadzone"),
        ("WA", "interconnect"),
        ("Utah", "state_abbr"),
    ]
    for area, area_type in invalid_combos:
        with pytest.raises(ValueError, match="Invalid area / area_type combination"):
            area_to_loadzone("usa_tamu", area, area_type=area_type)
36 | "Far West", 37 | "North Central", 38 | "West", 39 | "North", 40 | "Texas Panhandle", 41 | "South", 42 | "East", 43 | "Coast", 44 | "El Paso", 45 | } 46 | 47 | assert area_to_loadzone("usa_tamu", "Texas", area_type="interconnect") == { 48 | "South Central", 49 | "Far West", 50 | "North Central", 51 | "West", 52 | "North", 53 | "South", 54 | "East", 55 | "Coast", 56 | } 57 | assert area_to_loadzone("usa_tamu", "MT") == {"Montana Eastern", "Montana Western"} 58 | -------------------------------------------------------------------------------- /powersimdata/data_access/scenario_table.py: -------------------------------------------------------------------------------- 1 | from powersimdata.data_access.sql_store import SqlStore, to_data_frame 2 | 3 | 4 | class ScenarioTable(SqlStore): 5 | """Storage abstraction for scenario list using sql database.""" 6 | 7 | table = "scenario_list" 8 | columns = [ 9 | "id", 10 | "plan", 11 | "name", 12 | "state", 13 | "grid_model", 14 | "interconnect", 15 | "base_demand", 16 | "base_hydro", 17 | "base_solar", 18 | "base_wind", 19 | "change_table", 20 | "start_date", 21 | "end_date", 22 | "interval", 23 | "engine", 24 | "runtime", 25 | "infeasibilities", 26 | ] 27 | 28 | def get_scenario_by_id(self, scenario_id): 29 | """Get entry from scenario list by id 30 | 31 | :param str scenario_id: scenario id 32 | :return: (*pandas.DataFrame*) -- results as a data frame. 33 | """ 34 | query = self.select_where("id") 35 | self.cur.execute(query, (scenario_id,)) 36 | result = self.cur.fetchmany() 37 | return to_data_frame(result) 38 | 39 | def get_scenario_table(self, limit=None): 40 | """Returns scenario table from database 41 | 42 | :return: (*pandas.DataFrame*) -- scenario list as a data frame. 
43 | """ 44 | query = self.select_all() 45 | self.cur.execute(query) 46 | if limit is None: 47 | result = self.cur.fetchall() 48 | else: 49 | result = self.cur.fetchmany(limit) 50 | return to_data_frame(result) 51 | 52 | def add_entry(self, scenario_info): 53 | """Adds scenario to the scenario list. 54 | 55 | :param collections.OrderedDict scenario_info: entry to add to scenario list. 56 | """ 57 | sql = self.insert(subset=scenario_info.keys()) 58 | self.cur.execute(sql, tuple(scenario_info.values())) 59 | 60 | def delete_entry(self, scenario_id): 61 | """Deletes entry in scenario list. 62 | 63 | :param int/str scenario_id: the id of the scenario 64 | """ 65 | sql = self.delete("id") 66 | self.cur.execute(sql, (scenario_id,)) 67 | -------------------------------------------------------------------------------- /powersimdata/network/constants/region/division.py: -------------------------------------------------------------------------------- 1 | class DivisionMapping: 2 | """State/Country mapping for grid models. 3 | 4 | :param pandas.DataFrame zone: information on zones of a grid model. 5 | """ 6 | 7 | def __init__(self, zone): 8 | self.abv = set(zone["abv"]) 9 | self.abv2loadzone = zone.groupby("abv")["zone_name"].apply(set).to_dict() 10 | self.abv2id = zone.reset_index().groupby("abv")["zone_id"].apply(set).to_dict() 11 | self.id2abv = zone["abv"].to_dict() 12 | self.abv2interconnect = dict(zip(zone["abv"], zone["interconnect"])) 13 | 14 | 15 | class USADivisionMapping(DivisionMapping): 16 | """State mapping for USA grid models 17 | 18 | :param pandas.DataFrame zone: information on zones of a grid model. 
19 | """ 20 | 21 | def __init__(self, zone): 22 | super().__init__(zone) 23 | self.state = set(zone["state"]) 24 | self.state_abbr = set(zone["abv"]) 25 | self.state2loadzone = zone.groupby("state")["zone_name"].apply(set).to_dict() 26 | self.state2abv = dict(zip(zone["state"], zone["abv"])) 27 | self.abv2state = dict(zip(zone["abv"], zone["state"])) 28 | 29 | 30 | class EUDivisionMapping(DivisionMapping): 31 | """Country mapping for EU grid models 32 | 33 | :param pandas.DataFrame zone: information on zones of a grid model. 34 | """ 35 | 36 | def __init__(self, zone): 37 | super().__init__(zone) 38 | self.country = set(zone["country"]) 39 | self.country_abbr = set(zone["country"]) 40 | self.country2loadzone = ( 41 | zone.groupby("country")["zone_name"].apply(set).to_dict() 42 | ) 43 | self.country2abv = dict(zip(zone["country"], zone["abv"])) 44 | self.abv2country = dict(zip(zone["abv"], zone["country"])) 45 | 46 | 47 | def get_division_mapping(model, zone): 48 | """Return division mappings for a grid model. 49 | 50 | :param str model: grid model. 51 | :param pandas.DataFrame zone: information on zones of a grid model. 52 | """ 53 | _lookup = { 54 | "usa_tamu": USADivisionMapping, 55 | "hifld": USADivisionMapping, 56 | "europe_tub": EUDivisionMapping, 57 | } 58 | return _lookup[model](zone).__dict__ 59 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/statelines.py: -------------------------------------------------------------------------------- 1 | def classify_interstate_intrastate(scenario): 2 | """Classifies branches in a change_table as interstate or intrastate. 3 | 4 | :param powersimdata.scenario.scenario.Scenario scenario: scenario instance. 5 | :return: (*dict*) -- keys are *'interstate'*, *'intrastate'*. Values are 6 | list of branch ids. 
7 | """ 8 | 9 | ct = scenario.state.get_ct() 10 | grid = scenario.state.get_grid() 11 | upgraded_branches = _classify_interstate_intrastate(ct, grid) 12 | return upgraded_branches 13 | 14 | 15 | def _classify_interstate_intrastate(ct, grid): 16 | """Classifies branches in a change_table as interstate or intrastate. 17 | This function is separate from classify_interstate_intrastate() for testing 18 | purposes. 19 | 20 | :param dict ct: change_table dictionary. 21 | :param powersimdata.input.grid.Grid grid: Grid instance. 22 | :return: (*dict*) -- keys are *'interstate'*, *'intrastate'*. Values are 23 | list of branch ids. 24 | """ 25 | 26 | branch = grid.branch 27 | id2abv = grid.model_immutables.zones["id2abv"] 28 | upgraded_branches = {"interstate": [], "intrastate": []} 29 | 30 | if "branch" not in ct or "branch_id" not in ct["branch"]: 31 | return upgraded_branches 32 | 33 | all_upgraded_branches = ct["branch"]["branch_id"].keys() 34 | for b in all_upgraded_branches: 35 | # Alternatively: bus.loc[branch.loc[b, 'from_bus_id'], 'from_zone_id'] 36 | try: 37 | from_zone = branch.loc[b, "from_zone_id"] 38 | to_zone = branch.loc[b, "to_zone_id"] 39 | except KeyError: 40 | raise ValueError(f"ct entry not found in branch: {b}") 41 | try: 42 | from_state = id2abv[from_zone] 43 | except KeyError: 44 | raise ValueError(f"zone not found in id2abv: {from_zone}") 45 | try: 46 | to_state = id2abv[to_zone] 47 | except KeyError: 48 | raise ValueError(f"zone not found in id2abv: {to_zone}") 49 | if from_state == to_state: 50 | upgraded_branches["intrastate"].append(b) 51 | else: 52 | upgraded_branches["interstate"].append(b) 53 | 54 | return upgraded_branches 55 | -------------------------------------------------------------------------------- /powersimdata/tests/mock_scenario.py: -------------------------------------------------------------------------------- 1 | from powersimdata.scenario.scenario import Scenario 2 | from powersimdata.tests.mock_analyze import MockAnalyze 3 | 4 
| 5 | class MockScenario: 6 | """ 7 | :param dict grid_attrs: fields to be added to grid. 8 | :param \\*\\*kwargs: collected keyword arguments to be passed to 9 | MockAnalyze init. 10 | """ 11 | 12 | _setattr_allowlist = {"state", "info"} 13 | 14 | def __init__(self, grid_attrs=None, **kwargs): 15 | """Constructor.""" 16 | self.state = MockAnalyze(grid_attrs, **kwargs) 17 | self.info = { 18 | "id": "111", 19 | "plan": None, 20 | "name": None, 21 | "state": None, 22 | "grid_model": "usa_tamu", 23 | "interconnect": None, 24 | "base_demand": None, 25 | "base_hydro": None, 26 | "base_solar": None, 27 | "base_wind": None, 28 | "change_table": None, 29 | "start_date": None, 30 | "end_date": None, 31 | "interval": None, 32 | "engine": None, 33 | "runtime": None, 34 | "infeasibilities": None, 35 | } 36 | 37 | @property 38 | def __class__(self): 39 | """If anyone asks, I'm a Scenario object!""" 40 | return Scenario 41 | 42 | def __dir__(self): 43 | return sorted(super().__dir__() + list(self.state.exported_methods)) 44 | 45 | def __getattr__(self, name): 46 | if name in self.state.exported_methods: 47 | return getattr(self.state, name) 48 | elif hasattr(self.state, "__getattr__"): 49 | return self.state.__getattr__(name) 50 | else: 51 | raise AttributeError( 52 | f"Scenario object in {self.state.name} state " 53 | f"has no attribute {name}" 54 | ) 55 | 56 | def __setattr__(self, name, value): 57 | if name in self._setattr_allowlist: 58 | super().__setattr__(name, value) 59 | elif name in self.state.exported_methods: 60 | raise AttributeError( 61 | f"{name} is exported from Scenario.state, edit it there if necessary" 62 | ) 63 | super().__setattr__(name, value) 64 | -------------------------------------------------------------------------------- /powersimdata/data_access/execute_table.py: -------------------------------------------------------------------------------- 1 | from powersimdata.data_access.sql_store import SqlStore, to_data_frame 2 | 3 | 4 | class 
class ExecuteTable(SqlStore):
    """Storage abstraction for execute list using sql database."""

    table = "execute_list"
    columns = ["id", "status"]

    def get_status(self, scenario_id):
        """Get status of scenario by scenario_id

        :param str scenario_id: the scenario id
        :return: (*pandas.DataFrame*) -- results as a data frame.
        """
        sql = self.select_where("id")
        self.cur.execute(sql, (scenario_id,))
        rows = self.cur.fetchmany()
        return to_data_frame(rows)

    def get_execute_table(self, limit=None):
        """Return the execute table as a data frame

        :param int limit: maximum number of rows to return, or None for all.
        :return: (*pandas.DataFrame*) -- execute list as a data frame.
        """
        self.cur.execute(self.select_all())
        rows = self.cur.fetchall() if limit is None else self.cur.fetchmany(limit)
        return to_data_frame(rows)

    def add_entry(self, scenario_info):
        """Add entry to execute list

        :param collections.OrderedDict scenario_info: entry to add
        """
        # New scenarios always start out in the "created" state.
        self.cur.execute(self.insert(), (scenario_info["id"], "created"))

    def set_status(self, scenario_id, status):
        """Updates status of scenario in execute list

        :param int scenario_id: the scenario id
        :param str status: execution status.
        """
        self.cur.execute(
            "UPDATE execute_list SET status = %s WHERE id = %s",
            (status, scenario_id),
        )

    def delete_entry(self, scenario_id):
        """Deletes entry from execute list.

        :param int/str scenario_id: the id of the scenario
        """
        self.cur.execute(self.delete("id"), (scenario_id,))
class ExecuteListManager(CsvStore):
    """Storage abstraction for execute list using a csv file."""

    _FILE_NAME = "ExecuteList.csv"

    def get_execute_table(self):
        """Returns execute table from server if possible, otherwise read local
        copy. Updates the local copy upon successful server connection.

        :return: (*pandas.DataFrame*) -- execute list as a data frame.
        """
        return self.get_table()

    def get_status(self, scenario_id):
        """Return the status for the scenario

        :param str/int scenario_id: the scenario id
        :raises Exception: if scenario not found in execute list.
        :return: (*str*) -- scenario status
        """
        records = self.get_execute_table()
        try:
            return records.loc[int(scenario_id), "status"]
        except KeyError:
            raise Exception(f"Scenario not found in execute list, id = {scenario_id}")

    def add_entry(self, scenario_info):
        """Add entry to execute list

        :param collections.OrderedDict scenario_info: entry to add
        """
        # A freshly added scenario always starts in the "created" state.
        return self.set_status(int(scenario_info["id"]), "created")

    @verify_hash
    def set_status(self, scenario_id, status):
        """Set the scenario status

        :param int/str scenario_id: the scenario id
        :param str status: the new status
        :return: (*pandas.DataFrame*) -- the updated data frame
        """
        records = self.get_execute_table()
        records.loc[int(scenario_id), "status"] = status
        print(f"--> Setting status={status} in execute list")
        return records

    @verify_hash
    def delete_entry(self, scenario_id):
        """Deletes entry from execute list.

        :param int/str scenario_id: the id of the scenario
        :return: (*pandas.DataFrame*) -- the updated data frame
        """
        records = self.get_execute_table()
        records = records.drop(int(scenario_id))
        print("--> Deleting entry in %s" % self._FILE_NAME)
        return records
    def _read(self, f, path):
        """Read content from file object into data frame

        :param io.IOBase f: an open file object
        :param str path: the file path
        :return: (*object*) -- implementation dependent
        :raises NotImplementedError: always; subclasses must override this.
        """
        raise NotImplementedError
49 | :param str field_name: defined by subclass 50 | :return: (*object*) -- implementation dependent 51 | """ 52 | self._check_field(field_name) 53 | print("--> Loading %s" % field_name) 54 | 55 | filepath = self._get_file_path(scenario_info, field_name) 56 | return self._get_data_internal(filepath) 57 | 58 | def _get_data_internal(self, filepath): 59 | key = cache_key(filepath) 60 | cached = _cache.get(key) 61 | if cached is not None: 62 | return cached 63 | with self.data_access.get(filepath) as (f, path): 64 | data = self._read(f, path) 65 | _cache.put(key, data) 66 | return data 67 | -------------------------------------------------------------------------------- /powersimdata/scenario/tests/test_scenario.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from powersimdata.data_access.context import Context 4 | from powersimdata.scenario.delete import Delete 5 | from powersimdata.scenario.scenario import Scenario 6 | from powersimdata.tests.mock_context import MockContext 7 | 8 | 9 | @pytest.mark.integration 10 | @pytest.mark.ssh 11 | def test_bad_scenario_name(): 12 | # This test will fail if we do add a scenario with this name 13 | with pytest.raises(ValueError): 14 | Scenario("this_scenario_does_not_exist") 15 | 16 | 17 | def test_scenario_workflow(monkeypatch): 18 | mock_context = MockContext() 19 | monkeypatch.setattr(Context, "get_data_access", mock_context.get_data_access) 20 | 21 | s = Scenario() 22 | print(s.state.name) 23 | 24 | s.set_grid(interconnect="Texas") 25 | 26 | s.set_name("test", "dummy") 27 | s.set_time("2016-01-01 00:00:00", "2016-01-01 03:00:00", "1H") 28 | 29 | s.set_base_profile("demand", "vJan2021") 30 | s.set_base_profile("hydro", "vJan2021") 31 | s.set_base_profile("solar", "vJan2021") 32 | s.set_base_profile("wind", "vJan2021") 33 | s.change_table.ct = { 34 | "wind": { 35 | "zone_id": { 36 | 301: 1.1293320940114195, 37 | 302: 2.2996731828360466, 38 | 303: 
def _get_test_row():
    """Build an execute-list entry with a fresh, unique scenario id."""
    global row_id
    # Advance the module-level counter so every call yields a distinct id.
    row_id = row_id + 1
    return OrderedDict(id=row_id)
class AbstractGrid:
    """Grid Builder."""

    def __init__(self):
        """Constructor"""

        def _empty_frame(columns, index_name):
            # Empty, correctly index-labeled data frame for one grid table.
            return pd.DataFrame(columns=columns).rename_axis(
                grid_const.indices[index_name]
            )

        self.data_loc = None
        self.interconnect = None
        self.zone2id = {}
        self.id2zone = {}
        self.sub = _empty_frame(grid_const.col_name_sub, "sub")
        self.plant = _empty_frame(grid_const.col_name_plant, "plant")
        # Generation cost tables before and after any scenario changes.
        self.gencost = {
            when: _empty_frame(grid_const.col_name_gencost, "plant")
            for when in ("before", "after")
        }
        self.dcline = _empty_frame(grid_const.col_name_dcline, "dcline")
        self.bus2sub = _empty_frame(grid_const.col_name_bus2sub, "bus2sub")
        self.bus = _empty_frame(grid_const.col_name_bus, "bus")
        self.branch = _empty_frame(grid_const.col_name_branch, "branch")
        self.storage = storage_template()
        self.grid_model = ""
        self.model_immutables = None
class EUColor:
    """Color for each resource in EU grid model."""

    def __init__(self):
        """Build color palettes for EU carriers: ``type2color`` maps a carrier
        to its hex color; ``curtailable2color`` and ``curtailable2hatchcolor``
        cover the curtailable carriers.
        """
        self.type2color = {
            "onwind": "#235ebc",
            "offwind-ac": "#6895dd",
            "offwind-dc": "#74c6f2",
            "inflow": "#08ad97",
            "ror": "#4adbc8",
            "solar": "#f9d002",
            "biomass": "#0c6013",
            "geothermal": "#ba91b1",
            "OCGT": "#d35050",
            "CCGT": "#b20101",
            "nuclear": "#ff9000",
            "coal": "#707070",
            "lignite": "#9e5a01",
            "oil": "#262626",
            "H2": "#ea048a",
            "battery": "#b8ea04",
        }
        self.curtailable2color = {
            "solar": "#f9d002",
            "onwind": "#235ebc",
            "offwind-ac": "#6895dd",
            # Bug fix: was "74c6f2" (missing leading "#"), an invalid color
            # specification; now consistent with type2color["offwind-dc"].
            "offwind-dc": "#74c6f2",
        }
        self.curtailable2hatchcolor = {
            "solar": "#808080",
            "onwind": "#808080",
            "offwind-ac": "#808080",
            "offwind-dc": "#808080",
        }
class USALabel:
    """Label for each resource in USA grid models."""

    def __init__(self):
        """Build forward (type -> label) and reverse (label -> type) mappings
        for generator types and curtailable generator types.
        """
        self.type2label = {
            "nuclear": "Nuclear",
            "geothermal": "Geothermal",
            "coal": "Coal",
            "dfo": "Diesel Fuel Oil",
            "hydro": "Hydro",
            "ng": "Natural Gas",
            "solar": "Solar",
            "wind": "Onshore Wind",
            "wind_offshore": "Offshore Wind",
            "biomass": "Biomass",
            "other": "Other",
            "storage": "Storage",
        }
        self.curtailable2label = {
            "solar": "Solar Curtailment",
            "wind": "Onshore Wind Curtailment",
            "wind_offshore": "Offshore Wind Curtailment",
        }
        # Reverse lookups: display label -> resource key.
        self.label2type = dict(
            zip(self.type2label.values(), self.type2label.keys())
        )
        self.label2curtailable = dict(
            zip(self.curtailable2label.values(), self.curtailable2label.keys())
        )
def verify_hash(func):
    """Utility function which verifies the sha1sum of the file before writing
    it on the server. Operates on methods that return an updated scenario or
    execute list.
    """

    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        # Capture the checksum before the update so commit() can detect
        # concurrent modification of the file.
        original_checksum = self.data_access.checksum(self._FILE_NAME)
        updated_table = func(self, *args, **kwargs)
        self.commit(updated_table, original_checksum)
        return updated_table

    return wrapper
class CsvStore:
    """Base class for common functionality used to manage scenario and execute
    list stored as csv files on the server

    :param powersimdata.data_access.data_access.DataAccess: data access object
    """

    def __init__(self, data_access):
        """Constructor"""
        self.data_access = data_access

    def get_table(self):
        """Attempt to download the file from server and blob storage, falling back to
        local copy if one exists, and return the combined result.

        :return: (*pandas.DataFrame*) -- the specified table as a data frame.
        """
        filename = self._FILE_NAME
        orig = self._get_table(filename)
        blob = self._get_table(filename + ".2")
        df = pd.concat([orig, blob])
        # Server copy takes precedence: keep first occurrence of each id.
        return df[~df.index.duplicated()]

    def _get_table(self, filename):
        """Best-effort read of one csv file.

        :param str filename: name of the file to download and parse.
        :return: (*pandas.DataFrame*) -- the parsed table, or an empty frame
            if the file cannot be retrieved or parsed.
        """
        # Bug fix: the bare except here also swallowed SystemExit and
        # KeyboardInterrupt; except Exception keeps the deliberate
        # best-effort behavior while letting those propagate.
        try:
            self.data_access.copy_from(filename)
        except Exception:
            pass
        try:
            with self.data_access.get(filename) as (f, _):
                return _parse_csv(f)
        except Exception:
            return pd.DataFrame()

    def commit(self, table, checksum):
        """Save to local directory and upload if needed

        :param pandas.DataFrame table: the data frame to save
        :param str checksum: the checksum prior to download
        """
        with self.data_access.push(self._FILE_NAME, checksum) as f:
            table.to_csv(f)
class Mapping:
    """Geographical/time mapping for USA grid models

    :param str model: grid model.
    :param list interconnect: interconnect(s)
    :param pandas.DataFrame info: information on zones of a grid model.
    """

    def __init__(self, model, interconnect, info):
        # If the full region is requested, expand it to its interconnects.
        if model2region[model] in interconnect:
            interconnect = model2interconnect[model]  # noqa
        zone = info.query("interconnect == @interconnect")
        self.zones = dict()
        for mapping in (
            get_loadzone_mapping(model, zone),
            get_division_mapping(model, zone),
            get_interconnect_mapping(model, zone),
        ):
            self.zones.update(mapping)
def add_demand_flexibility(obj, info):
    """Adds demand flexibility to the system.

    :param powersimdata.input.change_table.ChangeTable obj: change table
    :param dict info: Each key refers to a different component required to
        parameterize the demand flexibility model. Each value associated with the
        keys corresponds to the profile version of the profile in question.
        Required keys: "demand_flexibility_up", "demand_flexibility_dn".
        Optional keys: "demand_flexibility_duration", "demand_flexibility_cost_up",
        "demand_flexibility_cost_dn".
    :raises TypeError: if info is not a dict
    :raises ValueError: if duration is not a positive int, or if no profile is found
    """

    # Check inputs
    if not isinstance(info, dict):
        raise TypeError(
            "Argument enclosing new demand flexibility info must be a dictionary."
        )
    # Work on a copy so the caller's dict is never mutated.
    info = copy.deepcopy(info)
    required = {"demand_flexibility_up", "demand_flexibility_dn"}
    optional = {
        "demand_flexibility_duration",
        "demand_flexibility_cost_up",
        "demand_flexibility_cost_dn",
    }
    obj._check_entry_keys(info, 0, "demand_flexibility", required, None, optional)

    # Add a key for demand flexibility in the change table, if necessary
    if "demand_flexibility" not in obj.ct:
        obj.ct["demand_flexibility"] = {}

    # Access the specified demand flexibility profiles that are required
    # (iterate over the required keys plus whichever optional keys were given)
    for k in required | (optional & info.keys()):
        if k == "demand_flexibility_duration":
            # Check that demand flexibility duration is an integer and positive
            if not isinstance(info[k], int):
                raise ValueError(f"The value of {k} is not integer-valued.")
            if info[k] <= 0:
                raise ValueError(f"The value of {k} is not positive.")
            obj.ct["demand_flexibility"][k] = info[k]
        else:
            # Determine the available profile versions
            possible = obj._profile_input.get_profile_version(obj.grid.grid_model, k)

            # Add the profile to the change table
            if len(possible) == 0:
                # NOTE(review): this rollback removes the entire
                # "demand_flexibility" key, including any entries that existed
                # before this call -- confirm that is intended.
                del obj.ct["demand_flexibility"]
                raise ValueError(f"No {k} profile available.")
            elif info[k] in possible:
                obj.ct["demand_flexibility"][k] = info[k]
            else:
                # NOTE(review): same full-key rollback as above.
                del obj.ct["demand_flexibility"]
                raise ValueError(f"Available {k} profiles: {', '.join(possible)}")
    @property
    def __class__(self):
        """If anyone asks, I'm a Grid object!"""
        # Overriding __class__ makes isinstance(mock_grid, Grid) return True,
        # so code under test accepts this mock wherever a real Grid is used.
        return Grid
@pytest.mark.skipif(find_spec("pypsa") is None, reason="Package PyPSA not available.")
def test_get_pypsa_gen_profile_argument_type(network):
    """Each malformed argument combination raises a TypeError."""
    cases = [
        ("network must be a Network object", "network", {"wind": "onwind"}),
        ("profile2carrier must be a dict", network, "onwind"),
        ("values of profile2carrier must be an iterable", network, {"solar": "PV"}),
    ]
    for err_msg, net, profile2carrier in cases:
        _assert_error(err_msg, TypeError, get_pypsa_gen_profile, net, profile2carrier)
@pytest.mark.skipif(find_spec("pypsa") is None, reason="Package PyPSA not available.")
def test_get_pypsa_demand_profile_argument_type():
    """Ensure a non-Network argument is rejected with a TypeError.

    Guarded with ``skipif`` for consistency with the other tests in this
    module, which all skip when the optional PyPSA dependency is absent;
    without the guard this test errors instead of skipping in that case.
    """
    _assert_error(
        "network must be a Network object",
        TypeError,
        get_pypsa_demand_profile,
        "network",
    )
def get_scenario_fs():
    """Create filesystem combining the server (if connected) with blob storage,
    prioritizing the server if connected.

    :return: (*fs.base.FS*) -- filesystem instance
    """
    scenario_data = get_blob_fs("scenariodata")
    mfs = MultiFS()
    try:
        ssh_fs = get_ssh_fs(server_setup.DATA_ROOT_DIR)
        mfs.add_fs("ssh_fs", ssh_fs, write=True, priority=2)
    # catch Exception (not a bare except, which would also swallow
    # KeyboardInterrupt/SystemExit) so any connection failure falls back to blob
    except Exception:  # noqa
        print("Could not connect to ssh server")
    mfs.add_fs("scenario_fs", scenario_data, priority=1)
    remotes = ",".join([f[0] for f in mfs.iterate_fs()])
    print(f"Initialized remote filesystem with {remotes}")
    return mfs
4 | 5 | :param powersimdata.scenario.scenario.Scenario scenario: scenario instance 6 | :raise TypeError: if not instantiated through a derived class 7 | """ 8 | 9 | name = "state" 10 | allowed = [] 11 | exported_methods = {"print_scenario_info"} 12 | 13 | def __init__(self, scenario): 14 | """Constructor.""" 15 | if type(self) == State: 16 | raise TypeError("Only subclasses of 'State' can be instantiated directly") 17 | 18 | self._scenario = scenario 19 | self._scenario_info = scenario.info 20 | self._scenario_status = scenario.status 21 | self._data_access = scenario.data_access 22 | self._scenario_list_manager = scenario._scenario_list_manager 23 | self._execute_list_manager = scenario._execute_list_manager 24 | 25 | def refresh(self, scenario): 26 | """Called during state changes to ensure instance is properly initialized 27 | 28 | :param powersimdata.scenario.scenario.Scenario scenario: scenario instance 29 | """ 30 | pass 31 | 32 | def _update_scenario_info(self): 33 | """Override this method if applicable""" 34 | pass 35 | 36 | def print_scenario_info(self): 37 | """Prints scenario information.""" 38 | print("--------------------") 39 | print("SCENARIO INFORMATION") 40 | print("--------------------") 41 | try: 42 | self._update_scenario_info() 43 | for key, val in self._scenario_info.items(): 44 | print("%s: %s" % (key, val)) 45 | except AttributeError: 46 | print("Scenario has been deleted") 47 | 48 | def switch(self, state): 49 | """Switches state. 50 | 51 | :param class state: One of :class:`.Analyze`, :class:`.Create`, 52 | :class:`.Execute`, :class:`.Delete`, :class:`.Move`. 
53 | """ 54 | if state.name in self.allowed: 55 | print("State switching: %s --> %s" % (self, state.name)) 56 | self._leave() 57 | self.__class__ = state 58 | self._enter(state) 59 | self.refresh(self._scenario) 60 | else: 61 | raise Exception( 62 | "State switching: %s --> %s not permitted" % (self, state.name) 63 | ) 64 | 65 | def __str__(self): 66 | """ 67 | 68 | :return: (*str*) -- state name. 69 | """ 70 | return self.name 71 | 72 | def _leave(self): 73 | """Cleans when leaving state.""" 74 | pass 75 | 76 | def _enter(self, state): 77 | """Initializes when entering state.""" 78 | self.exported_methods = state.exported_methods 79 | -------------------------------------------------------------------------------- /docs/grid.rst: -------------------------------------------------------------------------------- 1 | 2 | Grid Object 3 | ----------- 4 | A ``Grid`` object contains data representing an electric power system. An object has various attributes that are listed below: 5 | 6 | - ``data_loc`` (``str``) gives the path to the data used to create a ``Grid`` object 7 | - ``grid_model`` (``str``) gives the name of the power system 8 | - ``version`` (``str``) gives the version of the grid model 9 | - ``model_immutables`` (``object``) contains static data specific to the power system 10 | - ``zone2id`` and ``id2zone`` (``dict``) map load zone name (id) to load zone id 11 | (name) 12 | - ``interconnect`` (``str``) indicates the geographical region covered 13 | - ``bus`` (``pandas.DataFrame``) encloses the characteristics of the buses 14 | - ``sub`` (``pandas.DataFrame``) encloses the characteristics of the substations 15 | - ``bus2sub`` (``pandas.DataFrame``) maps buses to substations 16 | - ``plant`` (``pandas.DataFrame``) encloses the characteristics of the plants 17 | - ``branch`` (``pandas.DataFrame``) encloses the characteristics of the AC lines, 18 | transformers and transformer windings 19 | - ``gencost`` (``dict``) encloses the generation cost curves 20 | - 
``dcline`` (``pandas.DataFrame``) encloses the characteristics of the HVDC lines 21 | - ``storage`` (``dict``) encloses information related to storage units 22 | 23 | Two grid models representing the `U.S. `_ and 24 | the `European `_ power system at the transmission 25 | network level are available at this time. In addition to the full continental U.S. 26 | or Europe, a ``Grid`` object can represent one of the interconnection or a 27 | combination of interconnections. 28 | 29 | A ``Grid`` object can be created as follows for the U.S. grid model: 30 | 31 | - U.S. grid 32 | 33 | .. code-block:: python 34 | 35 | from powersimdata import Grid 36 | usa = Grid("USA") 37 | 38 | - Western interconnection 39 | 40 | .. code-block:: python 41 | 42 | from powersimdata import Grid 43 | western = Grid("Western") 44 | 45 | - combination of two interconnections 46 | 47 | .. code-block:: python 48 | 49 | from powersimdata import Grid 50 | eastern_western = Grid(["Eastern", "Western"]) 51 | texas_western = Grid(["Texas", "Western"]) 52 | 53 | While the for the European grid model, it can be achieved as follows: 54 | 55 | - European grid with 128 load zones 56 | 57 | .. code-block:: python 58 | 59 | from powersimdata import Grid 60 | europe = Grid("Europe", source="europe_tub", reduction=128) 61 | 62 | - Nordic interconnection 63 | 64 | .. code-block:: python 65 | 66 | from powersimdata import Grid 67 | europe = Grid("Nordic", source="europe_tub", reduction=128) 68 | 69 | Any ``Grid`` object can be transformed, i.e., generators/lines can be scaled or added. 70 | This is achieved in the scenario framework. 
class Move(Ready):
    """Moves scenario.

    :param powersimdata.scenario.scenario.Scenario scenario: scenario instance.
    """

    name = "move"
    allowed = []
    exported_methods = {"move_scenario"} | Ready.exported_methods

    def move_scenario(self, confirm=True):
        """Move scenario.

        :param bool confirm: prompt before deleting each batch of files
        :raises ValueError: if current deployment mode not supported
        """
        # moving only makes sense when scenario data lives on the server
        if server_setup.DEPLOYMENT_MODE != DeploymentMode.Server:
            raise ValueError("move state only supported for scenario data on server.")

        scenario_id = self._scenario_info["id"]
        BackUpDisk(self._data_access, scenario_id).backup_scenario(confirm=confirm)

        self._execute_list_manager.set_status(scenario_id, "moved")
42 | :param str scenario_id: scenario id 43 | """ 44 | 45 | def __init__(self, data_access, scenario_id): 46 | """Constructor.""" 47 | self._data_access = data_access 48 | self.scenario_id = scenario_id 49 | self._join = data_access.join 50 | 51 | def backup_scenario(self, confirm=True): 52 | """Copy scenario data to backup disk and remove original 53 | 54 | :param bool confirm: prompt before deleting each batch of files 55 | """ 56 | src_fs = dst_fs = get_ssh_fs() 57 | items = [ 58 | (self._join(*server_setup.INPUT_DIR), f"{self.scenario_id}_*"), 59 | (self._join(*server_setup.OUTPUT_DIR), f"{self.scenario_id}_*"), 60 | (self._data_access.tmp_folder(self.scenario_id), "**"), 61 | ] 62 | for folder, pattern in items: 63 | print(f"--> Moving files matching {pattern} from {folder}") 64 | src_path = self._join(server_setup.DATA_ROOT_DIR, folder) 65 | dst_path = self._join(server_setup.BACKUP_DATA_ROOT_DIR, folder) 66 | walker = Walker(filter=[pattern]) 67 | try: 68 | copy_dir(src_fs, src_path, dst_fs, dst_path, walker=walker) 69 | except FSError as e: 70 | print(f"Operation failed: {e}") 71 | 72 | self._data_access.remove(self._join(folder, pattern), confirm=confirm) 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # This is specific to this package 2 | powersimdata/utility/.server_user 3 | config.ini 4 | powersimdata/network/europe_tub/data* 5 | 6 | # The remainder of this file taken from github/gitignore 7 | # https://github.com/github/gitignore/blob/master/Python.gitignore 8 | 9 | 10 | # Byte-compiled / optimized / DLL files 11 | __pycache__/ 12 | *.py[cod] 13 | *$py.class 14 | 15 | # C extensions 16 | *.so 17 | 18 | # Distribution / packaging 19 | .Python 20 | build/ 21 | develop-eggs/ 22 | dist/ 23 | downloads/ 24 | eggs/ 25 | .eggs/ 26 | lib/ 27 | lib64/ 28 | parts/ 29 | sdist/ 30 | var/ 31 | wheels/ 32 | pip-wheel-metadata/ 33 | 
share/python-wheels/ 34 | *.egg-info/ 35 | .installed.cfg 36 | *.egg 37 | MANIFEST 38 | 39 | # PyInstaller 40 | # Usually these files are written by a python script from a template 41 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 42 | *.manifest 43 | *.spec 44 | 45 | # Installer logs 46 | pip-log.txt 47 | pip-delete-this-directory.txt 48 | 49 | # Unit test / coverage reports 50 | htmlcov/ 51 | .tox/ 52 | .nox/ 53 | .coverage 54 | .coverage.* 55 | .cache 56 | nosetests.xml 57 | coverage.xml 58 | *.cover 59 | *.py,cover 60 | .hypothesis/ 61 | .pytest_cache/ 62 | 63 | # Translations 64 | *.mo 65 | *.pot 66 | 67 | # Django stuff: 68 | *.log 69 | local_settings.py 70 | db.sqlite3 71 | db.sqlite3-journal 72 | 73 | # Flask stuff: 74 | instance/ 75 | .webassets-cache 76 | 77 | # Scrapy stuff: 78 | .scrapy 79 | 80 | # Sphinx documentation 81 | docs/_build/ 82 | 83 | # PyBuilder 84 | target/ 85 | 86 | # Jupyter Notebook 87 | .ipynb_checkpoints 88 | 89 | # IPython 90 | profile_default/ 91 | ipython_config.py 92 | 93 | # pyenv 94 | .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 
    def _build(self, interconnect, grid_model):
        """Build network.

        :param list interconnect: interconnect name(s).
        :param str grid_model: the grid model.
        """
        reader = CSVReader(self.data_loc)
        self.bus = reader.bus
        self.plant = reader.plant
        self.branch = reader.branch
        self.dcline = reader.dcline
        # cost curves start out identical before/after any later transformation
        self.gencost["after"] = self.gencost["before"] = reader.gencost
        self.sub = reader.sub
        self.bus2sub = reader.bus2sub
        self.id2zone = reader.zone["zone_name"].to_dict()
        self.zone2id = {v: k for k, v in self.id2zone.items()}

        self._add_information()

        # trim to the requested interconnect(s) unless the whole region is wanted
        if model2region[grid_model] not in interconnect:
            self._drop_interconnect(interconnect)
def from_pypsa(model, info):
    """Returns geographical and timezone information of a grid model from a PyPSA
    Network object.

    :param str model: grid model.
    :param pd.DataFrame info: a data frame with loadzone id as index and loadzone name
        (*'zone_name'*) and division abbreviation (*'abv'*) as columns.
    :return: (*pandas.DataFrame*) -- a data frame with loadzone name (*'zone_name'*),
        division name (e.g. *'country'* name for EU grid models), interconnect name
        (*'interconnect'*), time zone of loadzone (*'time_zone'*), division
        abbreviation (*'abv'*) as columns and loadzone id (*'zone_id'*) as indices.
    """
    geo = get_geography(model)
    # map each zone's division abbreviation to division name, interconnect and
    # time zone using the model's geography tables
    info[geo["division"]] = info["abv"].map(geo[f"abv2{geo['division']}"])
    info["interconnect"] = info["abv"].map(geo["abv2interconnect"])
    info["time_zone"] = info["abv"].map(geo["abv2timezone"])

    # rename_axis returns a new object by default; the result must be assigned,
    # otherwise the index keeps its old name and check_zone rejects the frame
    info = info.rename_axis(index="zone_id")

    return info
51 | :raises TypeError: if ``zone`` is not a pandas.DataFrame 52 | :raises ValueError: 53 | if index name is not *'zone_id'* 54 | if *'zone_name'*, *'state'*/*'country'* (model dependent), *'interconnect'*, 55 | *'time_zone'* and *'abv'* are not in columns. 56 | """ 57 | if not isinstance(zone, pd.DataFrame): 58 | raise TypeError("zone must be a pandas.DataFrame") 59 | if zone.index.name != "zone_id": 60 | raise ValueError("index must be named zone_id") 61 | 62 | geo = get_geography(model) 63 | missing = list( 64 | {"zone_name", geo["division"], "interconnect", "time_zone", "abv"} 65 | - set(zone.columns) 66 | ) 67 | 68 | if len(missing) != 0: 69 | raise ValueError(f"zone must have: {' | '.join(sorted(missing))} as columns") 70 | -------------------------------------------------------------------------------- /powersimdata/input/tests/test_configure.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from pandas.testing import assert_frame_equal 3 | 4 | from powersimdata.input.configure import linearize_gencost 5 | from powersimdata.tests.mock_grid import MockGrid 6 | 7 | mock_gc = { 8 | "plant_id": range(3), 9 | "type": [2, 2, 2], 10 | "startup": [0, 0, 0], 11 | "shutdown": [0, 0, 0], 12 | "n": [3, 3, 3], 13 | "c2": [1, 2, 3], 14 | "c1": [4, 5, 6], 15 | "c0": [7, 8, 9], 16 | "interconnect": ["Western"] * 3, 17 | } 18 | mock_plant_gc = {"plant_id": range(3), "Pmin": [20, 40, 60], "Pmax": [50, 100, 150]} 19 | grid_attrs_gc = { 20 | "plant": mock_plant_gc, 21 | "gencost_before": mock_gc, 22 | } 23 | mock_grid_gc = MockGrid(grid_attrs_gc) 24 | 25 | expected_one_segment = pd.DataFrame( 26 | { 27 | "plant_id": range(3), 28 | "type": [1, 1, 1], 29 | "startup": [0, 0, 0], 30 | "shutdown": [0, 0, 0], 31 | "n": [2, 2, 2], 32 | "c2": [1, 2, 3], 33 | "c1": [4, 5, 6], 34 | "c0": [7, 8, 9], 35 | "p1": [20, 40, 60], 36 | "f1": [ 37 | (1 * 20**2 + 4 * 20 + 7), 38 | (2 * 40**2 + 5 * 40 + 8), 39 | (3 * 60**2 + 6 * 60 + 
def _linearize_gencost(grid, num_segments=1):
    """Linearize the pre-linearization cost curves of ``grid``.

    :param powersimdata.tests.mock_grid.MockGrid grid: grid instance.
    :param int num_segments: number of linear segments.
    :return: (*pandas.DataFrame*) -- linearized generation cost data frame.
    """
    return linearize_gencost(grid.gencost["before"], grid.plant, num_segments)
def clone_template():
    """Copy the ExecuteList template into the local directory under a test name.

    :return: (*str*) -- path of the copied test file.
    """
    source = os.path.join(templates.__path__[0], "ExecuteList.csv")
    destination = os.path.join(server_setup.LOCAL_DIR, "ExecuteList.csv.test")
    shutil.copy(source, destination)
    return destination
def test_get_status(manager):
    """A freshly added entry reports status 'created' for int and str ids."""
    manager.add_entry(mock_row())
    for scenario_id in (1, "1"):
        assert manager.get_status(scenario_id) == "created"
19 | * [NEEM] for shapefile: mapping buses to NEEM regions 20 | 21 | ### Source file locations/details 22 | 23 | **EIPC**: https://www.dropbox.com/s/qell57fb3e7d5g4/02%2BPhase%2BII.pdf?dl=0 24 | 25 | * This is where ReEDS sourced their transmission cost estimates. But, we have more kV detail than them, so we can get more specific (costs by kV). 26 | * (unused) substation cost data 27 | 28 | EIPC/ReEDS multiplier data manually formatted in xlsx: https://www.dropbox.com/scl/fi/z9bfmfkvmxupvtzhe589q/TransCosts_real.xlsx?dl=0&rlkey=0ds2q2rx384y5kx98rmfcrk8q 29 | 30 | Note: I removed underground lines as options unless it was one of the only options (like NY). This should probably be changed. 31 | Note: I manually added the Western Interconnect and ERCOT NEEM regions. ReEDS documentation says they used line regional multipliers of 1 and California is 2.25 that of the rest of WECC. Their map does not match this documentation, however, so if possible, this should be cross-checked. PowerGenome also seems to believe this documentation (of 2.25 multiplier), as seen here: https://github.com/gschivley/pg_misc/blob/master/create_clusters/site_interconnection_costs.py#L32-L155. Also, it looks like Greg pulled these values from the ReEDS multiplier map, but there are some inconsistencies with Eastern regions (also with ReEDS documentation). 
32 | Note: there are also substation costs in this excel sheet 33 | 34 | NREL’s 2020 **ATB**: https://atb.nrel.gov 35 | ATB data: (Mac) https://www.dropbox.com/scl/fi/nj542inqw2e0k1lw1ofry/2020-ATB-Data-Mac.xlsm?dl=0&rlkey=zwesaydrm1vi0488qg2q9n7t8 36 | (Other) https://www.dropbox.com/scl/fi/x5np0b25qy1bg8mnlwnh6/2020-ATB-Data-1.xlsm?dl=0&rlkey=tq1e5cd3q7tsq4u81al5vb2s9 37 | 38 | **ReEDS**: ReEDS 2.0 Version 2019 (request license): https://github.com/NREL/ReEDS_OpenAccess 39 | 40 | **NEEM regions shapefile**: 41 | 42 | * Original shapefile: https://www.dropbox.com/sh/6adq9plptczz6hb/AABseOxIbMsbLDTy-LQD9PK-a?dl=0 43 | * Simplified shapefile used in PowerSimData “investment_cost” branch: powersimdata/design/investment/data/NEEM 44 | * Note, this was simplified (to make point mapping faster) in QGIS using “Distance (Douglas-Peucker) method with 1 km tolerance. 45 | -------------------------------------------------------------------------------- /powersimdata/data_access/tests/sql/test_scenario_table.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict 2 | 3 | import pytest 4 | 5 | from powersimdata.data_access.scenario_table import ScenarioTable 6 | from powersimdata.data_access.sql_store import SqlError 7 | 8 | # uncomment to enable logging queries to stdout 9 | # os.environ["DEBUG_MODE"] = "1" 10 | 11 | row_id = 9000 12 | 13 | 14 | def _get_test_row( 15 | name="foo", interconnect="Western", base_demand="v3", base_hydro="v2" 16 | ): 17 | global row_id 18 | row_id += 1 19 | return OrderedDict( 20 | [ 21 | ("id", str(row_id)), 22 | ("plan", ""), 23 | ("name", name), 24 | ("state", "create"), 25 | ("grid_model", ""), 26 | ("interconnect", interconnect), 27 | ("base_demand", base_demand), 28 | ("base_hydro", base_hydro), 29 | ("base_solar", ""), 30 | ("base_wind", ""), 31 | ("change_table", False), 32 | ("start_date", "2016-01-01 00:00:00"), 33 | ("end_date", "2016-12-31 23:00:00"), 34 | 
class NoEffectSqlStore(ScenarioTable):
    """Scenario table whose writes are rolled back on context exit, so tests
    leave the database untouched."""

    def __exit__(self, exc_type, exc_value, traceback):
        # undo anything written during the test before closing the connection
        self.conn.rollback()
        super().__exit__(exc_type, exc_value, traceback)
def calculate_branch_difference(branch1, branch2):
    """Calculate the capacity differences between two branch data frames. If capacity
    in ``branch2`` is larger than capacity in ``branch1``, the difference is positive.

    :param pandas.DataFrame branch1: first branch data frame.
    :param pandas.DataFrame branch2: second branch data frame.
    :return: (*pandas.DataFrame*) -- outer-merged branch data with a new 'diff'
        column holding the capacity difference between the two branch data frames.
    :raises ValueError: if either data frame is missing a 'rateA' column.
    """
    _check_data_frame(branch1, "branch1")
    _check_data_frame(branch2, "branch2")
    # Bug fix: the original condition was `not (A) and (B)`, which (by operator
    # precedence) only raised when branch1 lacked 'rateA' while branch2 had it.
    # The error message makes the intent clear: BOTH inputs must have 'rateA'.
    if not ("rateA" in branch1.columns and "rateA" in branch2.columns):
        raise ValueError("branch1 and branch2 both must have 'rateA' columns")
    branch1, branch2 = _reindex_as_necessary(
        branch1, branch2, ["from_bus_id", "to_bus_id"]
    )
    branch_merge = branch1.merge(
        branch2, how="outer", right_index=True, left_index=True, suffixes=(None, "_2")
    )
    # Missing capacities count as 0 so one-sided branches still get a diff.
    branch_merge["diff"] = branch_merge.rateA_2.fillna(0) - branch_merge.rateA.fillna(0)
    # Ensure that lats & lons get filled in as necessary from branch2 entries.
    for col in ["from_lat", "from_lon", "to_lat", "to_lon"]:
        # Plain assignment instead of `inplace=True` on a column view; the
        # latter is fragile under pandas copy-on-write semantics.
        branch_merge[col] = branch_merge[col].fillna(branch_merge[f"{col}_2"])

    return branch_merge
def haversine(point1, point2):
    """Given two lat/long pairs, return distance in miles.

    :param tuple point1: first point, (lat, long) in degrees.
    :param tuple point2: second point, (lat, long) in degrees.
    :return: (*float*) -- distance in miles.
    """
    earth_radius_miles = 3958.7613

    # Work entirely in radians from here on.
    lat1, lng1 = (radians(coord) for coord in point1)
    lat2, lng2 = (radians(coord) for coord in point2)

    # Haversine of the central angle between the two points.
    half_dlat = (lat2 - lat1) * 0.5
    half_dlng = (lng2 - lng1) * 0.5
    hav = sin(half_dlat) ** 2 + cos(lat1) * cos(lat2) * sin(half_dlng) ** 2

    return 2 * earth_radius_miles * asin(sqrt(hav))
def find_closest_neighbor(point, neighbors):
    """Locates the closest neighbor.

    :param tuple point: (lon, lat) in degrees.
    :param list neighbors: each element of the list are the (lon, lat)
        of potential neighbor.
    :return: (*int*) -- id of the closest neighbor
    """
    reference = ll2uv(point[0], point[1])

    closest_id = None
    smallest_angle = float("inf")
    for idx, site in enumerate(neighbors):
        separation = angular_distance(reference, ll2uv(site[0], site[1]))
        # Strict '<' keeps the earliest neighbor on ties; an empty neighbor
        # list leaves the result as None.
        if separation < smallest_angle:
            smallest_angle = separation
            closest_id = idx

    return closest_id
result = AreaScaling(info) 35 | assert info == result.value() 36 | 37 | 38 | def test_check_scale_factors(): 39 | with pytest.raises(ValueError): 40 | info = {"tech1": "foo"} 41 | _check_scale_factors(info) 42 | 43 | info = {"standard_heat_pump_v1": 0.7, "advanced_heat_pump_v2": -3} 44 | with pytest.raises(ValueError): 45 | _check_scale_factors(info) 46 | 47 | with pytest.raises(ValueError): 48 | info = {"standard_heat_pump_v1": 0.7, "advanced_heat_pump_v2": 0.8} 49 | _check_scale_factors(info) 50 | 51 | 52 | def test_add_electrification(): 53 | obj = ChangeTable(Grid("Texas")) 54 | kind = "building" 55 | 56 | sf = {"standard_heat_pump_v1": 0.7, "advanced_heat_pump_v2": 0.3} 57 | info = {"res_heating": sf} 58 | add_electrification(obj, kind, {"grid": info}) 59 | 60 | with pytest.raises(ValueError): 61 | add_electrification(obj, "foo", {"grid": info}) 62 | 63 | 64 | def test_add_electrification_by_zone(): 65 | obj = ChangeTable(Grid("Eastern")) 66 | kind = "building" 67 | 68 | info = { 69 | "New York City": {"res_cooking": {"advanced_heat_pump_v2": 0.7}}, 70 | "Western North Carolina": { 71 | "com_hot_water": { 72 | "standard_heat_pump_v1": 0.5, 73 | "advanced_heat_pump_v2": 0.5, 74 | } 75 | }, 76 | } 77 | add_electrification(obj, kind, {"zone": info}) 78 | 79 | sf = {"standard_heat_pump_v1": 0.2, "advanced_heat_pump_v2": 0.8} 80 | info = {"Maine": {"res_cooking": sf}} 81 | add_electrification(obj, kind, {"zone": info}) 82 | 83 | result = obj.ct[kind] 84 | assert "Maine" in result["zone"] 85 | assert "New York City" in result["zone"] 86 | 87 | 88 | def test_add_electrification_combined(): 89 | obj = ChangeTable(Grid("Eastern")) 90 | kind = "building" 91 | sf = {"standard_heat_pump_v1": 0.2, "advanced_heat_pump_v2": 0.8} 92 | zone = {"Maine": {"res_cooking": sf}} 93 | grid = {"res_heating": sf} 94 | 95 | info = {"grid": grid, "zone": zone} 96 | add_electrification(obj, kind, info) 97 | 98 | assert info == obj.ct[kind] 99 | 
def check_and_format_interconnect(interconnect, model="hifld"):
    """Checks interconnect in a grid model.

    :param str/iterable interconnect: interconnect name(s).
    :param str model: the grid model.
    :return: (*list*) -- interconnect(s)
    :raises TypeError: if ``interconnect`` is not a str.
    :raises ValueError:
        if ``interconnect`` is not in the model.
        if combination of interconnects is incorrect.
    """
    if isinstance(interconnect, str):
        interconnect = [interconnect]
    try:
        deduplicated = sorted(set(interconnect))
    except TypeError:
        raise TypeError("interconnect must be either str or an iterable of str")

    # Drop spaces from each name so it matches the model constants.
    names = [n.replace(" ", "") for n in deduplicated]

    region = model2region[model]
    possible = model2interconnect[model]
    if set(names) - ({region} | set(possible)):
        raise ValueError(
            f"Invalid interconnect(s). Choose from {' | '.join(set(possible) | {region})}"
        )
    if region in names and len(names) > 1:
        raise ValueError(f"{region} cannot be paired")
    # Asking for every interconnect is the same as asking for the whole region.
    if not set(possible) - set(names):
        names = [region]

    return names
def powerset(l, r):
    """Return every combination of the elements of ``l`` of size ``r`` or larger.

    :param sequence l: collection of elements.
    :param int r: smallest combination size to include.
    :return: (*list*) -- tuples of elements, ordered by increasing size.
    """
    subsets = []
    for size in range(r, len(l) + 1):
        subsets.extend(combinations(l, size))
    return subsets
reeds_wind_csv_path = os.path.join(data_dir, "mapping", "gis_rs.csv") 56 | reeds_wind_shapefile_path = os.path.join(data_dir, "rs", "rs.shp") 57 | reeds_wind_to_ba_path = os.path.join(data_dir, "mapping", "region_map.csv") 58 | regional_multiplier_path = os.path.join(data_dir, "reg_cap_cost_mult_default.csv") 59 | transformer_cost_path = os.path.join(data_dir, "transformer_cost.csv") 60 | 61 | gen_inv_cost_translation = { 62 | "OffShoreWind": "wind_offshore", 63 | "LandbasedWind": "wind", 64 | "UtilityPV": "solar", 65 | "Battery": "storage", 66 | "NaturalGas": "ng", 67 | "Hydropower": "hydro", 68 | "Nuclear": "nuclear", 69 | "Geothermal": "geothermal", 70 | "Coal": "coal", 71 | } 72 | 73 | gen_inv_cost_techdetails_to_keep = { 74 | "HydroFlash", # Single tech for geothermal 75 | "NPD1", # Single tech for hydro 76 | "newAvgCF", # Single tech for coal 77 | "CCAvgCF", # Single tech for ng 78 | "OTRG1", # Single tech for wind_offshore 79 | "LTRG1", # Single tech for wind 80 | "4Hr Battery Storage", # Single tech for storage 81 | "Seattle", # Single tech for solar 82 | "*", # Single tech for nuclear 83 | } 84 | 85 | regional_multiplier_gen_translation = { 86 | "wind-ofs_1": "wind_offshore", 87 | "wind-ons_1": "wind", 88 | "upv_1": "solar", 89 | "battery": "storage", 90 | "Gas-CC": "ng", 91 | "Nuclear": "nuclear", 92 | "Hydro": "hydro", 93 | "coal-new": "coal", 94 | } 95 | 96 | regional_multiplier_wind_region_types = {"wind", "wind_offshore", "csp"} 97 | regional_multiplier_ba_region_types = { 98 | "solar", 99 | "storage", 100 | "nuclear", 101 | "coal", 102 | "ng", 103 | "hydro", 104 | "geothermal", 105 | } 106 | -------------------------------------------------------------------------------- /powersimdata/network/constants/carrier/efficiency.py: -------------------------------------------------------------------------------- 1 | class USAEfficiency: 2 | """Efficiency (MWh electric to MWh thermal) for thermal generators in USA grid 3 | models come from: 4 | 5 | * The 
Technology Data for Generation of Energy and District Heating `page 6 | `_ from the Danish Energy Agency for ng and 7 | diesel fuel oil. 8 | 9 | * ng: efficiency value from section 52 (OCGT – Natural Gas p.383) 10 | * oil: efficiency value from section 50 (Diesel engine farm p.366) 11 | 12 | * `Lazard’s Levelized Cost of Energy Analysis - Version 13.0 - Updated June 13 | 2022 `_ for coal and lignite: 14 | 15 | * coal and lignite: efficiency value calculated from Heat Rate value in Lazard 16 | report (p.19) 17 | """ 18 | 19 | def __init__(self): 20 | self.efficiency = { 21 | "coal": 0.33, 22 | "dfo": 0.35, 23 | "ng": 0.39, # referring to OCGT values from Danish Energy Agency 24 | } 25 | 26 | 27 | class EUEfficiency: 28 | """Efficiency (MWh electric to MWh thermal) for thermal generators in EU grid 29 | model come from: 30 | 31 | * the Technology Data for Generation of Energy and District Heating `page 32 | `_ from the Danish Energy Agency for biomass, gas 33 | turbines and oil. More specifically, the 2015’s electrical efficiency data (net, 34 | annual average) can be found in the `Technology Data Catalogue for Electricity 35 | and district heating production - Updated June 2022 `_. A `Data sheet for 36 | Electricity and district heat production - Updated June 2022 `_ is also 37 | available. 38 | 39 | * biomass: efficiency average of section 09a (Wood Chips extract. plant p.151) 40 | and section 09a (Wood Pellets extract. plant p.163) 41 | * OCGT: efficiency value from section 52 (OCGT – Natural Gas p.383) 42 | * CCGT: efficiency value from section 05 (Gas turb. CC, steam extract. 
def get_efficiency(model):
    """Return efficiency (MWh electric to MWh thermal) of thermal generator types
    for a grid model.

    :param str model: grid model.
    :return: (*dict*) -- keys are attribute names (here 'efficiency'); the
        'efficiency' value maps generator types to their efficiency.
    :raises KeyError: if ``model`` is not a known grid model.
    """
    # Both USA models share the same efficiency assumptions.
    _lookup = {
        "usa_tamu": USAEfficiency,
        "hifld": USAEfficiency,
        "europe_tub": EUEfficiency,
    }
    return _lookup[model]().__dict__
def distribute_demand_from_zones_to_buses(zone_demand, bus):
    """Decomposes zone demand to bus demand based on bus 'Pd' column.

    :param pandas.DataFrame zone_demand: demand by zone. Index is timestamp, columns
        are zone IDs, values are zone demand (MW).
    :param pandas.DataFrame bus: table of bus data, containing at least 'zone_id' and
        'Pd' columns.
    :return: (*pandas.DataFrame*) -- data frame of demand. Index is timestamp, columns
        are bus IDs, values are bus demand (MW).
    :raises ValueError: if the columns of ``zone_demand`` don't match the set of zone
        IDs within the 'zone_id' column of ``bus``.
    """
    if set(zone_demand.columns) != set(bus["zone_id"].unique()):
        raise ValueError("zones don't match between zone_demand and bus dataframes")

    # Each bus takes a share of its zone's demand proportional to its 'Pd'.
    zone_total_pd = bus.groupby("zone_id")["Pd"].transform("sum")
    shares = pd.DataFrame(
        {"zone_share": bus["Pd"] / zone_total_pd, "zone_id": bus["zone_id"]}
    )
    # Rows: buses, columns: zones; entries are each bus's share of zone demand.
    # NOTE: relies on the bus index level being named 'bus_id' (as upstream).
    zone_bus_shares = shares.pivot_table(
        index="bus_id",
        columns="zone_id",
        values="zone_share",
        dropna=False,
        fill_value=0,
    )
    return zone_demand.dot(zone_bus_shares.T)
21 | """ 22 | 23 | SERVER_ADDRESS = None 24 | SERVER_SSH_PORT = None 25 | BACKUP_DATA_ROOT_DIR = None 26 | MODEL_DIR = None 27 | ENGINE_DIR = None 28 | DATA_ROOT_DIR = "/mnt/bes/pcm" 29 | EXECUTE_DIR = "tmp" 30 | INPUT_DIR = ("data", "input") 31 | OUTPUT_DIR = ("data", "output") 32 | LOCAL_DIR = os.path.join(Path.home(), "ScenarioData", "") 33 | 34 | 35 | @dataclass(frozen=True) 36 | class ServerConfig(Config): 37 | """Values specific to internal client/server usage""" 38 | 39 | SERVER_ADDRESS = os.getenv("BE_SERVER_ADDRESS", "becompute01.gatesventures.com") 40 | SERVER_SSH_PORT = os.getenv("BE_SERVER_SSH_PORT", 22) 41 | MODEL_DIR = "/home/bes/pcm" 42 | 43 | 44 | @dataclass(frozen=True) 45 | class ContainerConfig(Config): 46 | """Values specific to containerized environment""" 47 | 48 | SERVER_ADDRESS = os.getenv("BE_SERVER_ADDRESS", "reisejl") 49 | 50 | 51 | @dataclass(frozen=True) 52 | class LocalConfig(Config): 53 | """Values specific to native installation""" 54 | 55 | DATA_ROOT_DIR = Config.LOCAL_DIR 56 | ENGINE_DIR = os.getenv("ENGINE_DIR") 57 | 58 | def initialize(self): 59 | """Create data directory with blank templates""" 60 | confirmed = input( 61 | f"Provision directory {self.LOCAL_DIR}? 
def get_deployment_mode():
    """Get the deployment mode used to determine various configuration values

    :return: (*str*) -- the deployment mode
    :raises ValueError: if the DEPLOYMENT_MODE environment variable is set to an
        unrecognized value.
    """
    mode = os.getenv("DEPLOYMENT_MODE")
    if mode is None:
        return DeploymentMode.Server
    if mode == "1" or mode.lower() == "container":
        return DeploymentMode.Container
    if mode == "2" or mode.lower() == "local":
        return DeploymentMode.Local
    # Bug fix: an unrecognized value previously fell through and returned None,
    # which later surfaced as an opaque KeyError in DeploymentMode.CONFIG_MAP.
    # Fail fast with a clear message instead.
    raise ValueError(f"Unsupported DEPLOYMENT_MODE: {mode!r}")
12 | Required keys: "bus_id", "capacity". 13 | "capacity" denotes the symmetric input and output power limits (MW). 14 | Optional keys: "duration", "min_stor", "max_stor", "energy_value", "InEff", 15 | "OutEff", "LossFactor", "terminal_min", "terminal_max". 16 | "duration" denotes the energy to power ratio (hours). 17 | "min_stor" denotes the minimum energy limit (unitless), e.g. 0.05 = 5%. 18 | "max_stor" denotes the maximum energy limit (unitless), e.g. 0.95 = 95%. 19 | "energy_value" denotes the value of stored energy at interval end ($/MWh). 20 | "InEff" denotes the input efficiency (unitless), e.g. 0.95 = 95%. 21 | "OutEff" denotes the output efficiency (unitless), e.g. 0.95 = 95%. 22 | "LossFactor" denotes the per-hour relative losses, 23 | e.g. 0.01 means that 1% of the current state of charge is lost per hour). 24 | "terminal_min" denotes the minimum state of charge at interval end, 25 | e.g. 0.5 means that the storage must end the interval with at least 50%. 26 | "terminal_max" denotes the maximum state of charge at interval end, 27 | e.g. 0.9 means that the storage must end the interval with no more than 90%. 28 | :raises TypeError: if ``info`` is not a list. 29 | :raises ValueError: if any of the new storages to be added have bad values. 
30 | """ 31 | if not isinstance(info, list): 32 | raise TypeError("Argument enclosing new storage(s) must be a list") 33 | 34 | info = copy.deepcopy(info) 35 | new_storages = [] 36 | required = {"bus_id", "capacity"} 37 | optional = { 38 | "duration", 39 | "min_stor", 40 | "max_stor", 41 | "energy_value", 42 | "InEff", 43 | "OutEff", 44 | "LossFactor", 45 | "terminal_min", 46 | "terminal_max", 47 | } 48 | anticipated_bus = obj._get_transformed_df("bus") 49 | for i, storage in enumerate(info): 50 | obj._check_entry_keys(storage, i, "storage", required, None, optional) 51 | if storage["bus_id"] not in anticipated_bus.index: 52 | raise ValueError( 53 | f"No bus id {storage['bus_id']} available for {ordinal(i)} storage" 54 | ) 55 | for o in optional: 56 | if o not in storage: 57 | storage[o] = obj.grid.storage[o] 58 | for k, v in storage.items(): 59 | if not isinstance(v, (int, float)): 60 | err_msg = f"values must be numeric, bad type for {ordinal(i)} {k}" 61 | raise ValueError(err_msg) 62 | if v < 0: 63 | raise ValueError( 64 | f"values must be non-negative, bad value for {ordinal(i)} {k}" 65 | ) 66 | for k in {"min_stor", "max_stor", "InEff", "OutEff", "LossFactor"}: 67 | if storage[k] > 1: 68 | raise ValueError( 69 | f"value for {k} must be <=1, bad value for {ordinal(i)} storage" 70 | ) 71 | new_storages.append(storage) 72 | if "storage" not in obj.ct: 73 | obj.ct["storage"] = [] 74 | obj.ct["storage"] += new_storages 75 | -------------------------------------------------------------------------------- /powersimdata/network/usa_tamu/data/zone.csv: -------------------------------------------------------------------------------- 1 | zone_id,zone_name,state,interconnect,time_zone 2 | 1,Maine,Maine,Eastern,ETC/GMT+5 3 | 2,New Hampshire,New Hampshire,Eastern,ETC/GMT+5 4 | 3,Vermont,Vermont,Eastern,ETC/GMT+5 5 | 4,Massachusetts,Massachusetts,Eastern,ETC/GMT+5 6 | 5,Rhode Island,Rhode Island,Eastern,ETC/GMT+5 7 | 6,Connecticut,Connecticut,Eastern,ETC/GMT+5 8 | 7,New York 
City,New York,Eastern,ETC/GMT+5 9 | 8,Upstate New York,New York,Eastern,ETC/GMT+5 10 | 9,New Jersey,New Jersey,Eastern,ETC/GMT+5 11 | 10,Pennsylvania Eastern,Pennsylvania,Eastern,ETC/GMT+5 12 | 11,Pennsylvania Western,Pennsylvania,Eastern,ETC/GMT+5 13 | 12,Delaware,Delaware,Eastern,ETC/GMT+5 14 | 13,Maryland,Maryland,Eastern,ETC/GMT+5 15 | 14,Virginia Mountains,Virginia,Eastern,ETC/GMT+5 16 | 15,Virginia Tidewater,Virginia,Eastern,ETC/GMT+5 17 | 16,North Carolina,North Carolina,Eastern,ETC/GMT+5 18 | 17,Western North Carolina,North Carolina,Eastern,ETC/GMT+5 19 | 18,South Carolina,South Carolina,Eastern,ETC/GMT+5 20 | 19,Georgia North,Georgia,Eastern,ETC/GMT+5 21 | 20,Georgia South,Georgia,Eastern,ETC/GMT+5 22 | 21,Florida Panhandle,Florida,Eastern,ETC/GMT+6 23 | 22,Florida North,Florida,Eastern,ETC/GMT+5 24 | 23,Florida South,Florida,Eastern,ETC/GMT+5 25 | 24,Alabama,Alabama,Eastern,ETC/GMT+6 26 | 25,Mississippi,Mississippi,Eastern,ETC/GMT+6 27 | 26,Tennessee,Tennessee,Eastern,ETC/GMT+6 28 | 27,Kentucky,Kentucky,Eastern,ETC/GMT+5 29 | 28,West Virginia,West Virginia,Eastern,ETC/GMT+5 30 | 29,Ohio River,Ohio,Eastern,ETC/GMT+5 31 | 30,Ohio Lake Erie,Ohio,Eastern,ETC/GMT+5 32 | 31,Michigan Northern,Michigan,Eastern,ETC/GMT+5 33 | 32,Michigan Southern,Michigan,Eastern,ETC/GMT+5 34 | 33,Indiana,Indiana,Eastern,ETC/GMT+5 35 | 34,Chicago North Illinois,Illinois,Eastern,ETC/GMT+6 36 | 35,Illinois Downstate,Illinois,Eastern,ETC/GMT+6 37 | 36,Wisconsin,Wisconsin,Eastern,ETC/GMT+6 38 | 37,Minnesota Northern,Minnesota,Eastern,ETC/GMT+6 39 | 38,Minnesota Southern,Minnesota,Eastern,ETC/GMT+6 40 | 39,Iowa,Iowa,Eastern,ETC/GMT+6 41 | 40,Missouri East,Missouri,Eastern,ETC/GMT+6 42 | 41,Missouri West,Missouri,Eastern,ETC/GMT+6 43 | 42,Arkansas,Arkansas,Eastern,ETC/GMT+6 44 | 43,Louisiana,Louisiana,Eastern,ETC/GMT+6 45 | 44,East Texas,Texas,Eastern,ETC/GMT+6 46 | 45,Texas Panhandle,Texas,Eastern,ETC/GMT+6 47 | 46,New Mexico Eastern,New Mexico,Eastern,ETC/GMT+7 48 | 
47,Oklahoma,Oklahoma,Eastern,ETC/GMT+6 49 | 48,Kansas,Kansas,Eastern,ETC/GMT+6 50 | 49,Nebraska,Nebraska,Eastern,ETC/GMT+6 51 | 50,South Dakota,South Dakota,Eastern,ETC/GMT+6 52 | 51,North Dakota,North Dakota,Eastern,ETC/GMT+6 53 | 52,Montana Eastern,Montana,Eastern,ETC/GMT+7 54 | 201,Washington,Washington,Western,ETC/GMT+8 55 | 202,Oregon,Oregon,Western,ETC/GMT+8 56 | 203,Northern California,California,Western,ETC/GMT+8 57 | 204,Bay Area,California,Western,ETC/GMT+8 58 | 205,Central California,California,Western,ETC/GMT+8 59 | 206,Southwest California,California,Western,ETC/GMT+8 60 | 207,Southeast California,California,Western,ETC/GMT+8 61 | 208,Nevada,Nevada,Western,ETC/GMT+8 62 | 209,Arizona,Arizona,Western,ETC/GMT+7 63 | 210,Utah,Utah,Western,ETC/GMT+7 64 | 211,New Mexico Western,New Mexico,Western,ETC/GMT+7 65 | 212,Colorado,Colorado,Western,ETC/GMT+7 66 | 213,Wyoming,Wyoming,Western,ETC/GMT+7 67 | 214,Idaho,Idaho,Western,ETC/GMT+7 68 | 215,Montana Western,Montana,Western,ETC/GMT+7 69 | 216,El Paso,Texas,Western,ETC/GMT+7 70 | 301,Far West,Texas,Texas,ETC/GMT+6 71 | 302,North,Texas,Texas,ETC/GMT+6 72 | 303,West,Texas,Texas,ETC/GMT+6 73 | 304,South,Texas,Texas,ETC/GMT+6 74 | 305,North Central,Texas,Texas,ETC/GMT+6 75 | 306,South Central,Texas,Texas,ETC/GMT+6 76 | 307,Coast,Texas,Texas,ETC/GMT+6 77 | 308,East,Texas,Texas,ETC/GMT+6 78 | -------------------------------------------------------------------------------- /powersimdata/data_access/tests/test_scenario_csv.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | from collections import OrderedDict 4 | 5 | import pandas as pd 6 | import pytest 7 | from numpy.testing import assert_array_equal 8 | 9 | from powersimdata.data_access.data_access import LocalDataAccess, SSHDataAccess 10 | from powersimdata.data_access.scenario_list import ScenarioListManager 11 | from powersimdata.utility import server_setup, templates 12 | 13 | 14 | 
@pytest.fixture
def data_access():
    """Provide an SSH-backed data access object (requires a reachable server)."""
    return SSHDataAccess()


@pytest.fixture
def scenario_table(data_access):
    """Fetch the scenario list via a ScenarioListManager backed by SSH."""
    scenario_list_manager = ScenarioListManager(data_access)
    return scenario_list_manager.get_scenario_table()


@pytest.mark.integration
@pytest.mark.ssh
def test_get_scenario_file_from_server_type(data_access, scenario_table):
    # The scenario list must come back as a data frame.
    assert isinstance(scenario_table, pd.DataFrame)


@pytest.mark.integration
@pytest.mark.ssh
def test_get_scenario_file_from_server_header(data_access, scenario_table):
    # Expected column layout of the scenario list; "id" is the index, not a column.
    header = [
        "plan",
        "name",
        "state",
        "grid_model",
        "grid_model_version",
        "interconnect",
        "base_demand",
        "base_hydro",
        "base_solar",
        "base_wind",
        "change_table",
        "start_date",
        "end_date",
        "interval",
        "engine",
        "runtime",
        "infeasibilities",
    ]
    assert_array_equal(scenario_table.columns, header)
    assert "id" == scenario_table.index.name


def clone_template():
    """Copy the packaged ScenarioList.csv template to a throwaway test file.

    :return: (*str*) -- path of the copied test csv.
    """
    orig = os.path.join(templates.__path__[0], "ScenarioList.csv")
    dest = os.path.join(server_setup.LOCAL_DIR, "ScenarioList.csv.test")
    shutil.copy(orig, dest)
    return dest


@pytest.fixture
def manager():
    """Yield a ScenarioListManager pointed at a disposable copy of the template.

    Overriding _FILE_NAME ensures tests never touch the real ScenarioList.csv;
    the copy is removed on teardown.
    """
    test_csv = clone_template()
    data_access = LocalDataAccess()
    manager = ScenarioListManager(data_access)
    manager._FILE_NAME = "ScenarioList.csv.test"
    yield manager
    os.remove(test_csv)


def mock_row():
    """Build a minimal scenario entry; note it carries no "id" key —
    ScenarioListManager.add_entry generates and injects the id.
    """
    return OrderedDict(
        [
            ("plan", "test"),
            ("name", "dummy"),
            ("state", "create"),
            ("grid_model", ""),
            ("grid_model_version", ""),
            ("interconnect", "Western"),
            ("base_demand", ""),
            ("base_hydro", ""),
            ("base_solar", ""),
            ("base_wind", ""),
            ("change_table", ""),
            ("start_date", ""),
            ("end_date", ""),
            ("interval", ""),
            ("engine", ""),
        ]
    )


def test_blank_csv_append(manager):
    entry = mock_row()
    table = manager.add_entry(entry)
    # add_entry mutates the passed dict in place: the generated id is written back.
    assert entry["id"] == "1"
    assert table.shape == (1, 17)


def test_get_scenario(manager):
    manager.add_entry(mock_row())
    manager.add_entry(mock_row())
    manager.add_entry(mock_row())
    # Lookup works with both int and str descriptors.
    entry = manager.get_scenario(2)
    assert entry["id"] == "2"
    entry = manager.get_scenario("2")
    assert entry["id"] == "2"


def test_delete_entry(manager):
    manager.add_entry(mock_row())
    manager.add_entry(mock_row())
    manager.add_entry(mock_row())
    table = manager.delete_entry(2)
    assert table.shape == (2, 17)
-------------------------------------------------------------------------------- /powersimdata/input/converter/tests/test_pypsa_to_grid.py: --------------------------------------------------------------------------------
import pypsa
from pandas.testing import assert_series_equal

from powersimdata.input.change_table import ChangeTable
from powersimdata.input.converter.pypsa_to_grid import FromPyPSA
from powersimdata.input.exporter.export_to_pypsa import export_to_pypsa
from powersimdata.input.grid import Grid
from powersimdata.input.transform_grid import TransformGrid


def test_import_arbitrary_network_from_pypsa_to_grid():
    # Import a stock PyPSA example network that was NOT created by powersimdata.
    n = pypsa.examples.ac_dc_meshed()
    grid = FromPyPSA(n).build()

    assert not grid.bus.empty
    assert len(n.buses) == len(grid.bus)


def test_import_network_including_storages_from_pypsa_to_grid():
    n = pypsa.examples.storage_hvdc()
    grid = FromPyPSA(n).build()

    inflow = n.get_switchable_as_dense("StorageUnit", "inflow")
    has_inflow = inflow.any()

    assert not grid.bus.empty
    # Storage units with inflow are converted into extra buses/plants.
    assert len(n.buses) + has_inflow.sum() == len(grid.bus)
    assert len(n.generators) + has_inflow.sum() == len(grid.plant)
    assert all(
        [
            "inflow" in i
            for i
            in grid.plant.iloc[len(grid.plant) - has_inflow.sum() :].index
        ]
    )
    assert not grid.storage["gen"].empty
    assert not grid.storage["gencost"].empty
    assert not grid.storage["StorageData"].empty


def test_import_exported_network():
    """Round-trip a Western grid (with added storage) through PyPSA export and
    re-import, then check the re-imported grid equals the reference grid."""
    grid = Grid("Western")
    ct = ChangeTable(grid)
    storage = [
        {"bus_id": 2021005, "capacity": 116.0},
        {"bus_id": 2028827, "capacity": 82.5},
        {"bus_id": 2028060, "capacity": 82.5},
    ]
    ct.add_storage_capacity(storage)
    ref = TransformGrid(grid, ct.ct).get_grid()

    kwargs = dict(add_substations=True, add_load_shedding=False, add_all_columns=True)
    n = export_to_pypsa(ref, **kwargs)
    test = Grid(
        "Western",
        source="pypsa",
        grid_model="usa_tamu",
        network=n,
        add_pypsa_cols=False,
    )

    # Only a scaled version of linear cost term is exported to pypsa
    # Test whether the exported marginal cost is in the same order of magnitude
    ref_total_c1 = ref.gencost["before"]["c1"].sum()
    test_total_c1 = test.gencost["before"]["c1"].sum()
    assert ref_total_c1 / test_total_c1 > 0.95 and ref_total_c1 / test_total_c1 < 1.05

    # Now overwrite costs
    for c in ["c0", "c1", "c2"]:
        test.gencost["before"][c] = ref.gencost["before"][c]
        test.gencost["after"][c] = ref.gencost["after"][c]

    # Due to rounding errors we have to compare some columns in advance
    rtol = 1e-15
    assert_series_equal(ref.branch.x, test.branch.x, rtol=rtol)
    assert_series_equal(ref.branch.r, test.branch.r, rtol=rtol)
    assert_series_equal(ref.bus.Va, test.bus.Va, rtol=rtol)

    test.branch.x = ref.branch.x
    test.branch.r = ref.branch.r
    test.bus.Va = ref.bus.Va

    storage_data = test.storage["StorageData"]
    # storage specification is needed on import but has to be removed for testing
    test.storage["gencost"].drop(columns="pypsa_component", inplace=True)
    test.storage["gen"].drop(columns="pypsa_component", inplace=True)
    storage_data.drop(columns="pypsa_component", inplace=True)

    # column is overwritten in conversion to satisfy constraints set by engine
    storage_data.InitialStorage = ref.storage["StorageData"].InitialStorage

    assert ref == test
-------------------------------------------------------------------------------- /powersimdata/data_access/scenario_list.py: --------------------------------------------------------------------------------
from collections import OrderedDict

import pandas as pd

from powersimdata.data_access.csv_store import CsvStore, verify_hash


class ScenarioListManager(CsvStore):
    """Storage abstraction for scenario list using a csv file."""

    # Name of the backing csv file; tests override this with a throwaway copy.
    _FILE_NAME = "ScenarioList.csv"

    def get_scenario_table(self):
        """Returns scenario table from server if possible, otherwise read local
        copy. Updates the local copy upon successful server connection.

        :return: (*pandas.DataFrame*) -- scenario list as a data frame.
        """
        return self.get_table()

    def _generate_scenario_id(self, table):
        """Generates scenario id.

        :param pandas.DataFrame table: the current scenario list
        :return: (*str*) -- new scenario id.
26 | """ 27 | max_value = table.index.max() 28 | result = 1 if pd.isna(max_value) else max_value + 1 29 | return str(result) 30 | 31 | def get_scenario(self, descriptor): 32 | """Get information for a scenario based on id or name 33 | 34 | :param int/str descriptor: the id or name of the scenario 35 | :return: (*collections.OrderedDict*) -- matching entry as a dict, or 36 | None if either zero or multiple matches found 37 | """ 38 | 39 | def err_message(text): 40 | print("------------------") 41 | print(text) 42 | print("------------------") 43 | 44 | table = self.get_scenario_table() 45 | try: 46 | matches = table.index.isin([int(descriptor)]) 47 | except ValueError: 48 | matches = table[table.name == descriptor].index 49 | 50 | scenario = table.loc[matches, :] 51 | if scenario.shape[0] == 0: 52 | err_message("SCENARIO NOT FOUND") 53 | elif scenario.shape[0] > 1: 54 | err_message("MULTIPLE SCENARIO FOUND") 55 | dupes = ",".join(str(i) for i in scenario.index) 56 | print(f"Duplicate ids: {dupes}") 57 | print("Use id to access scenario") 58 | else: 59 | return ( 60 | scenario.reset_index() 61 | .astype({"id": "str"}) 62 | .to_dict("records", into=OrderedDict)[0] 63 | ) 64 | 65 | @verify_hash 66 | def add_entry(self, scenario_info): 67 | """Adds scenario to the scenario list file. 68 | 69 | :param collections.OrderedDict scenario_info: entry to add to scenario list. 
70 | :return: (*pandas.DataFrame*) -- the updated data frame 71 | """ 72 | table = self.get_scenario_table() 73 | scenario_id = self._generate_scenario_id(table) 74 | scenario_info["id"] = scenario_id 75 | scenario_info.move_to_end("id", last=False) 76 | table.reset_index(inplace=True) 77 | entry = pd.DataFrame({k: [v] for k, v in scenario_info.items()}) 78 | table = pd.concat([table, entry]) 79 | table.set_index("id", inplace=True) 80 | 81 | print("--> Adding entry in %s" % self._FILE_NAME) 82 | return table 83 | 84 | @verify_hash 85 | def delete_entry(self, scenario_id): 86 | """Deletes entry in scenario list. 87 | 88 | :param int/str scenario_id: the id of the scenario 89 | :return: (*pandas.DataFrame*) -- the updated data frame 90 | """ 91 | table = self.get_scenario_table() 92 | table.drop(int(scenario_id), inplace=True) 93 | 94 | print("--> Deleting entry in %s" % self._FILE_NAME) 95 | return table 96 | -------------------------------------------------------------------------------- /powersimdata/design/transmission/tests/test_statelines.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from powersimdata.design.transmission.statelines import _classify_interstate_intrastate 4 | from powersimdata.tests.mock_grid import MockGrid 5 | 6 | # branch_id is the index 7 | mock_branch = { 8 | "branch_id": [1, 2, 3, 4, 5, 6, 7, 8], 9 | "from_zone_id": [10, 10, 10, 52, 216, 216, 216, 7], 10 | "to_zone_id": [10, 11, 12, 215, 301, 216, 301, 204], 11 | } 12 | 13 | expected_keys = {"interstate", "intrastate"} 14 | 15 | 16 | class TestClassifyInterstateIntrastate(unittest.TestCase): 17 | def setUp(self): 18 | def check_expected(upgrades, expected_interstate, expected_intrastate): 19 | err_msg = "classify_interstate_intrastate should return a dict" 20 | self.assertIsInstance(upgrades, dict, err_msg) 21 | err_msg = "dict keys should be 'interstate' and 'intrastate'" 22 | self.assertEqual(upgrades.keys(), 
                            expected_keys, err_msg)
            for v in upgrades.values():
                self.assertIsInstance(v, list, "dict values should be lists")
                for b in v:
                    self.assertIsInstance(b, int, "branch_ids should be ints")
            err_msg = "interstate values not as expected"
            self.assertEqual(set(upgrades["interstate"]), expected_interstate, err_msg)
            err_msg = "intrastate values not as expected"
            self.assertEqual(set(upgrades["intrastate"]), expected_intrastate, err_msg)

        self.check_expected = check_expected
        self.mock_grid = MockGrid(grid_attrs={"branch": mock_branch})

    def test_classify_interstate_intrastate_empty_ct(self):
        """An empty change table yields empty interstate/intrastate sets."""
        mock_ct = {}
        expected_interstate = set()
        expected_intrastate = set()

        upgrades = _classify_interstate_intrastate(mock_ct, self.mock_grid)
        self.check_expected(upgrades, expected_interstate, expected_intrastate)

    def test_classify_interstate_intrastate_bad_ct(self):
        """A branch id absent from the mock grid raises ValueError."""
        mock_ct = {"branch": {"branch_id": {9: 1.5}}}

        with self.assertRaises(ValueError):
            _classify_interstate_intrastate(mock_ct, self.mock_grid)

    def test_classify_interstate_intrastate_none(self):
        mock_ct = {"branch": {"branch_id": {}}}
        expected_interstate = set()
        expected_intrastate = set()

        upgrades = _classify_interstate_intrastate(mock_ct, self.mock_grid)
        self.check_expected(upgrades, expected_interstate, expected_intrastate)

    def test_classify_interstate_intrastate_two(self):
        mock_ct = {"branch": {"branch_id": {1: 2, 8: 10}}}
        expected_interstate = {8}
        expected_intrastate = {1}

        upgrades = _classify_interstate_intrastate(mock_ct, self.mock_grid)
        self.check_expected(upgrades, expected_interstate, expected_intrastate)

    def test_classify_interstate_intrastate_several(self):
        mock_ct = {
            "branch": {
                "branch_id": {
                    1: 2,
                    2: 3,
                    3: 1.5,
                    4: 4,
                    5: 1.1,
                    8: 10,
                }
            }
        }
        expected_interstate = {3, 8}
        expected_intrastate = {1, 2, 4, 5}

        upgrades = _classify_interstate_intrastate(mock_ct, self.mock_grid)
        self.check_expected(upgrades, expected_interstate, expected_intrastate)


if __name__ == "__main__":
    unittest.main()
-------------------------------------------------------------------------------- /powersimdata/input/converter/helpers.py: --------------------------------------------------------------------------------
import pandas as pd


def add_coord_to_grid_data_frames(grid):
    """Adds longitude and latitude information to bus, plant and branch data
    frames of grid instance.

    :param powersimdata.input.grid.Grid grid: grid instance.
    """
    # Map each bus to its substation's coordinates via the bus2sub table.
    bus2coord = (
        pd.merge(grid.bus2sub[["sub_id"]], grid.sub[["lat", "lon"]], on="sub_id")
        .set_index(grid.bus2sub.index)
        .drop(columns="sub_id")
        .to_dict()
    )

    def get_lat(idx):
        # Latitude for each bus id in idx, in order.
        return [bus2coord["lat"][i] for i in idx]

    def get_lon(idx):
        # Longitude for each bus id in idx, in order.
        return [bus2coord["lon"][i] for i in idx]

    extra_col_bus = {"lat": get_lat(grid.bus.index), "lon": get_lon(grid.bus.index)}
    grid.bus = grid.bus.assign(**extra_col_bus)

    extra_col_plant = {
        "lat": get_lat(grid.plant.bus_id),
        "lon": get_lon(grid.plant.bus_id),
    }
    grid.plant = grid.plant.assign(**extra_col_plant)

    # Branches get coordinates for both endpoints.
    extra_col_branch = {
        "from_lat": get_lat(grid.branch.from_bus_id),
        "from_lon": get_lon(grid.branch.from_bus_id),
        "to_lat": get_lat(grid.branch.to_bus_id),
        "to_lon": get_lon(grid.branch.to_bus_id),
    }
    grid.branch = grid.branch.assign(**extra_col_branch)


def add_zone_to_grid_data_frames(grid):
    """Adds zone name/id to plant and branch data frames of grid instance.

    :param powersimdata.input.grid.Grid grid: grid instance.
45 | """ 46 | bus2zone = grid.bus.zone_id.to_dict() 47 | 48 | def get_zone_id(idx): 49 | return [bus2zone[i] for i in idx] 50 | 51 | def get_zone_name(idx): 52 | return [grid.id2zone[bus2zone[i]] for i in idx] 53 | 54 | extra_col_plant = { 55 | "zone_id": get_zone_id(grid.plant.bus_id), 56 | "zone_name": get_zone_name(grid.plant.bus_id), 57 | } 58 | grid.plant = grid.plant.assign(**extra_col_plant) 59 | 60 | extra_col_branch = { 61 | "from_zone_id": get_zone_id(grid.branch.from_bus_id), 62 | "to_zone_id": get_zone_id(grid.branch.to_bus_id), 63 | "from_zone_name": get_zone_name(grid.branch.from_bus_id), 64 | "to_zone_name": get_zone_name(grid.branch.to_bus_id), 65 | } 66 | grid.branch = grid.branch.assign(**extra_col_branch) 67 | 68 | 69 | def add_interconnect_to_grid_data_frames(grid): 70 | """Adds interconnect name to bus, branch, plant and dcline data frames of 71 | grid instance. 72 | 73 | :param powersimdata.input.grid.Grid grid: grid instance. 74 | """ 75 | bus2interconnect = grid.bus2sub.interconnect.to_dict() 76 | 77 | def get_interconnect(idx): 78 | return [bus2interconnect[i] for i in idx] 79 | 80 | extra_col_bus = {"interconnect": get_interconnect(grid.bus.index)} 81 | grid.bus = grid.bus.assign(**extra_col_bus) 82 | 83 | extra_col_branch = {"interconnect": get_interconnect(grid.branch.from_bus_id)} 84 | grid.branch = grid.branch.assign(**extra_col_branch) 85 | 86 | extra_col_plant = {"interconnect": get_interconnect(grid.plant.bus_id)} 87 | grid.plant = grid.plant.assign(**extra_col_plant) 88 | 89 | extra_col_gencost = {"interconnect": get_interconnect(grid.plant.bus_id)} 90 | grid.gencost["before"] = grid.gencost["before"].assign(**extra_col_gencost) 91 | grid.gencost["after"] = grid.gencost["after"].assign(**extra_col_gencost) 92 | 93 | extra_col_dcline = { 94 | "from_interconnect": get_interconnect(grid.dcline.from_bus_id), 95 | "to_interconnect": get_interconnect(grid.dcline.to_bus_id), 96 | } 97 | grid.dcline = grid.dcline.assign(**extra_col_dcline) 
98 | -------------------------------------------------------------------------------- /powersimdata/input/converter/pypsa_to_profiles.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pypsa 3 | from pypsa.descriptors import get_switchable_as_dense 4 | 5 | 6 | def get_pypsa_gen_profile(network, profile2carrier): 7 | """Return hydro, solar or wind profile enclosed in a PyPSA network. 8 | 9 | :param pypsa.Network network: the Network object. 10 | :param dict profile2carrier: a dictionary mapping profile type to carrier type. 11 | *'hydro'*, *'solar'* and *'wind'* are valid keys. Values is a corresponding 12 | set of carriers as found in the Network object. 13 | :return: (*dict*) -- keys are the same ones than in ``profile2carrier``. Values 14 | are profiles as data frame. 15 | :raises TypeError: 16 | if ``network`` is not a pypsa.components.Network object. 17 | if ``profile2carrier`` is not a dict. 18 | if values of ``profile2carrier`` are not an iterable. 19 | :raises ValueError: 20 | if keys of ``profile2carrier`` are invalid. 
21 | """ 22 | if not isinstance(network, pypsa.components.Network): 23 | raise TypeError("network must be a Network object") 24 | if not isinstance(profile2carrier, dict): 25 | raise TypeError("profile2carrier must be a dict") 26 | if not all(isinstance(v, (list, set, tuple)) for v in profile2carrier.values()): 27 | raise TypeError("values of profile2carrier must be an iterable") 28 | if not set(profile2carrier).issubset({"hydro", "solar", "wind"}): 29 | raise ValueError( 30 | "keys of profile2carrier must be a subset of ['hydro', 'solar', 'wind']" 31 | ) 32 | 33 | component2timeseries = { 34 | "Generator": "p_max_pu", 35 | "StorageUnit": "inflow", 36 | } 37 | profile = {} 38 | for p, c in profile2carrier.items(): 39 | c = [c] if isinstance(c, str) else list(c) 40 | profile[p] = pd.DataFrame() 41 | for component, ts in component2timeseries.items(): 42 | id_carrier = network.df(component).query("carrier==@c").index 43 | ts_carrier = get_switchable_as_dense(network, component, ts)[id_carrier] 44 | if not ts_carrier.empty: 45 | if ts == "inflow": 46 | has_inflow = ts_carrier.any().index[ts_carrier.any()] 47 | ts_carrier = ts_carrier[has_inflow].add_suffix(" inflow") 48 | norm = ts_carrier.max().replace(0, 1) 49 | else: 50 | norm = 1 51 | profile[p] = pd.concat([profile[p], ts_carrier / norm], axis=1) 52 | 53 | profile[p].rename_axis(index="UTC", columns=None, inplace=True) 54 | 55 | return profile 56 | 57 | 58 | def get_pypsa_demand_profile(network): 59 | """Return demand profile enclosed in a PyPSA network. 60 | 61 | :param pypsa.Network network: the Network object. 62 | :return: (*pandas.DataFrame*) -- demand profile. 63 | :raises TypeError: 64 | if ``network`` is not a pypsa.components.Network object. 
65 | """ 66 | if not isinstance(network, pypsa.components.Network): 67 | raise TypeError("network must be a Network object") 68 | 69 | if not network.loads_t.p.empty: 70 | demand = network.loads_t.p.copy() 71 | else: 72 | demand = network.get_switchable_as_dense("Load", "p_set") 73 | if "zone_id" in network.buses: 74 | # Assume this is a PyPSA network originally created from powersimdata 75 | demand = demand.groupby( 76 | network.buses.zone_id.dropna().astype(int), axis=1 77 | ).sum() 78 | demand.columns = pd.to_numeric(demand.columns, errors="ignore") 79 | demand.rename_axis(index="UTC", columns=None, inplace=True) 80 | 81 | return demand 82 | -------------------------------------------------------------------------------- /powersimdata/design/generation/tests/test_curtailment.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pytest 3 | 4 | from powersimdata.design.generation.curtailment import temporal_curtailment 5 | from powersimdata.tests.mock_scenario import MockScenario 6 | 7 | mock_plant = { 8 | "plant_id": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11], 9 | "type": ["coal", "wind", "solar", "hydro", "ng", "nuclear"] * 2, 10 | "Pmax": [50, 50, 50, 50, 20, 60, 100, 50, 50, 50, 40, 120], 11 | "Pmin": [20, 0, 0, 0, 0, 30, 30, 0, 0, 0, 10, 75], 12 | } 13 | 14 | mock_demand = pd.DataFrame( 15 | {1: [350, 350, 325, 400, 500]}, 16 | ) 17 | 18 | mock_hydro = pd.DataFrame( 19 | { 20 | 3: [25, 40, 50, 30, 0], 21 | 9: [25, 40, 50, 30, 0], 22 | } 23 | ) 24 | 25 | mock_solar = pd.DataFrame( 26 | { 27 | 2: [20, 20, 20, 20, 20], 28 | 8: [10, 20, 30, 40, 50], 29 | } 30 | ) 31 | 32 | mock_wind = pd.DataFrame( 33 | { 34 | 1: [0, 50, 0, 50, 0], 35 | 7: [20, 30, 20, 30, 20], 36 | } 37 | ) 38 | 39 | 40 | @pytest.fixture 41 | def mock_scenario(): 42 | return MockScenario( 43 | grid_attrs={"plant": mock_plant}, 44 | demand=mock_demand, 45 | hydro=mock_hydro, 46 | solar=mock_solar, 47 | wind=mock_wind, 48 | ) 49 | 50 | 
def test_temporal_curtailment(mock_scenario):
    assert temporal_curtailment(mock_scenario) == pytest.approx(0.3361702)

    # Testing that "None" overrides the default simulation Pmin with the data Pmin
    curtailment = temporal_curtailment(
        mock_scenario, pmin_by_type={"hydro": None, "ng": None}
    )
    assert curtailment == pytest.approx(0.4)
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={10: None})
    assert curtailment == pytest.approx(0.4)

    # Testing that we can replace the Pmin with relative-to-Pmax values
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={5: 1})
    assert curtailment == pytest.approx(0.35531915)
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={11: 1})
    assert curtailment == pytest.approx(0.37446809)
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={5: 1, 11: 1})
    assert curtailment == pytest.approx(0.39361702)
    curtailment = temporal_curtailment(
        mock_scenario, pmin_by_type={"hydro": None, "nuclear": 1}
    )
    assert curtailment == pytest.approx(0.39361702)
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={5: 1, 11: 0.99})
    assert curtailment == pytest.approx(0.38595744)
    curtailment = temporal_curtailment(
        mock_scenario, pmin_by_type={"hydro": None, "nuclear": 0.98}
    )
    assert curtailment == pytest.approx(0.37063830)
    # Testing that we can override by-type with by-id
    curtailment = temporal_curtailment(
        mock_scenario, pmin_by_type={"hydro": None, "nuclear": 1}, pmin_by_id={11: 0.99}
    )
    assert curtailment == pytest.approx(0.38595744)

    # Testing that we can relax the Pmin of a profile resource by type
    assert temporal_curtailment(mock_scenario, pmin_by_type={"hydro": 0}) == 0
    # Testing that we can relax the Pmin of a profile resource by id
    assert temporal_curtailment(mock_scenario, pmin_by_id={3: 0}) == pytest.approx(0.1)
    curtailment = temporal_curtailment(mock_scenario, pmin_by_id={3: 0, 9: 0})
    assert curtailment == 0
    # Testing that we can override by-type with by-id
    curtailment = temporal_curtailment(
        mock_scenario, pmin_by_type={"hydro": 0}, pmin_by_id={9: None}
    )
    assert curtailment == pytest.approx(0.1)
-------------------------------------------------------------------------------- /powersimdata/input/tests/test_grid.py: --------------------------------------------------------------------------------
import copy

import pytest

from powersimdata.input.grid import Grid

INCORRECT_SOURCE = "invalid_source"


def test_grid_incorrect_source():
    with pytest.raises(ValueError):
        Grid(["USA"], source=INCORRECT_SOURCE)


def test_grid_type():
    g = Grid(["USA"])
    assert isinstance(g, Grid)


# Session-scoped grids: built once and shared across all equality tests below.
@pytest.fixture(scope="session")
def base_texas():
    return Grid(["Texas"])


@pytest.fixture(scope="session")
def base_western():
    return Grid(["Western"])


def test_deepcopy_works(base_texas):
    assert isinstance(copy.deepcopy(base_texas), Grid)


def test_grid_eq_success_simple(base_texas):
    assert base_texas == Grid(["Texas"])


def test_grid_eq_failure_simple(base_texas, base_western):
    assert base_texas != base_western


def test_grid_eq_failure_bus(base_texas):
    test_grid = copy.deepcopy(base_texas)
    test_grid.bus.loc[test_grid.bus.head(1).index, "baseKV"] *= 2
    assert base_texas != test_grid


def test_grid_eq_success_bus_type(base_texas):
    # The bus "type" column is excluded from grid equality.
    test_grid = copy.deepcopy(base_texas)
    test_grid.bus.type = 1
    assert base_texas == test_grid


def test_grid_eq_failure_branch(base_texas):
    test_grid = copy.deepcopy(base_texas)
    test_grid.branch.loc[test_grid.branch.head(1).index, "rateA"] *= 2
    assert base_texas != test_grid
def test_grid_eq_failure_dcline(base_western):
    test_grid = copy.deepcopy(base_western)
    test_grid.dcline.loc[test_grid.dcline.head(1).index, "Pmax"] *= 2
    assert base_western != test_grid


def test_grid_eq_failure_gencost_before(base_texas):
    test_grid = copy.deepcopy(base_texas)
    before = test_grid.gencost["before"]
    before.loc[before.head(1).index, "n"] += 1
    assert base_texas != test_grid


def test_grid_eq_success_gencost_after(base_texas):
    # The "after" cost frame is excluded from grid equality.
    test_grid = copy.deepcopy(base_texas)
    test_grid.gencost["after"] = test_grid.gencost["after"].drop(
        test_grid.gencost["after"].tail(1).index
    )
    assert base_texas == test_grid


def test_grid_eq_failure_plant(base_texas):
    test_grid = copy.deepcopy(base_texas)
    test_grid.plant.loc[test_grid.plant.head(1).index, "Pmax"] *= 2
    assert base_texas != test_grid


def test_grid_eq_success_plant_ramp30(base_texas):
    # The plant "ramp_30" column is excluded from grid equality.
    test_grid = copy.deepcopy(base_texas)
    test_grid.plant.loc[test_grid.plant.head(1).index, "ramp_30"] *= 2
    assert base_texas == test_grid


def test_grid_eq_failure_sub(base_texas):
    test_grid = copy.deepcopy(base_texas)
    first_name = str(test_grid.sub.loc[test_grid.sub.head(1).index, "name"])
    test_grid.sub.loc[test_grid.sub.head(1).index, "name"] = first_name[::-1]
    assert base_texas != test_grid


def test_grid_eq_failure_storage(base_texas):
    test_grid = copy.deepcopy(base_texas)
    gencost = {g: 0 for g in test_grid.storage["gencost"].columns}
    gen = {g: 0 for g in test_grid.storage["gen"].columns}
    test_grid.storage["gencost"].loc[0, :] = gencost
    test_grid.storage["gen"].loc[0, :] = gen
    assert base_texas != test_grid


def test_that_fields_are_not_modified_when_loading_another_grid():
    # Loading a second grid must not mutate frames of a previously-built one.
    western_grid = Grid(["Western"])
    western_plant_original_shape = western_grid.plant.shape
    Grid(["Eastern"])
    assert western_plant_original_shape == western_grid.plant.shape