├── .dependabot └── config.yml ├── .dockerignore ├── .env.template ├── .gitattributes ├── .gitignore ├── .test.env.template ├── .vscode └── settings.json ├── CHANGELOG.md ├── CODEOWNERS ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── dev_docker.sh.template ├── docker-compose.yml.template ├── docs ├── Makefile ├── make.bat └── source │ ├── BuildingModels.rst │ ├── DataClients.rst │ ├── HVACSource.rst │ ├── WeatherSource.rst │ ├── conf.py │ ├── index.rst │ └── notes │ └── getting_started_.rst ├── idf └── v9-4-0 │ ├── heatedbsmt_1story_2000sqft_gasfurnace_AC.idf │ ├── heatedbsmt_2story_2300sqft_gasfurnace_AC.idf │ └── slab_1story_2000sqft_gasfurnace_AC.idf ├── notebooks ├── demo_GCSDYDSource.ipynb ├── demo_GCSFlatFilesSource.ipynb ├── demo_LocalSource.ipynb ├── demo_LocalSource_scripting.ipynb └── test_GCSDYD_chicago.ipynb ├── pytest.ini ├── requirements.txt ├── requirements_unfixed.txt ├── scripts ├── epvm.sh └── setup │ ├── .bashrc │ ├── .pdbrc │ ├── download_IECC_idfs.sh │ ├── install_acados.sh │ ├── install_ep.sh │ ├── install_solvers.sh │ ├── jupyter_lab.sh │ ├── jupyter_lab_bkgrnd.sh │ └── test_env_setup.sh ├── setup.py ├── src └── python │ └── BuildingControlsSimulator │ ├── BuildingModels │ ├── BuildingModel.py │ ├── EnergyPlusBuildingModel.py │ ├── IDFPreprocessor.py │ ├── __init__.py │ ├── test_EnergyPlusBuildingModel.py │ └── test_IDFPreprocessor.py │ ├── ControllerModels │ ├── ControllerModel.py │ ├── ControllerStatus.py │ ├── Deadband.py │ ├── FMIController.py │ └── __init__.py │ ├── Conversions │ ├── Conversions.py │ └── __init__.py │ ├── DataClients │ ├── DataChannel.py │ ├── DataClient.py │ ├── DataDestination.py │ ├── DataSource.py │ ├── DataSpec.py │ ├── DataStates.py │ ├── DateTimeChannel.py │ ├── EquipmentChannel.py │ ├── GBQDataSource.py │ ├── GCSDYDSource.py │ ├── GCSDataSource.py │ ├── GCSDestination.py │ ├── GCSFlatFilesSource.py │ ├── LocalDestination.py │ ├── LocalSource.py │ ├── SensorsChannel.py │ ├── 
ThermostatChannel.py │ ├── WeatherChannel.py │ ├── __init__.py │ ├── test_DataClient.py │ ├── test_GBQFlatFilesSource.py │ ├── test_GCSDYDSource.py │ ├── test_GCSDestination.py │ ├── test_GCSFlatFilesSource.py │ ├── test_LocalDestination.py │ ├── test_LocalSource.py │ ├── test_ThermostatChannel.py │ └── test_WeatherChannel.py │ ├── OutputAnalysis │ ├── OutputAnalysis.py │ └── __init__.py │ ├── Simulator │ ├── Config.py │ ├── Simulation.py │ ├── Simulator.py │ ├── __init__.py │ ├── params_test_Simulator.py │ └── test_Simulator.py │ ├── StateEstimatorModels │ ├── LowPassFilter.py │ ├── StateEstimatorModel.py │ └── test_LowPassFilter.py │ └── __init__.py └── test ├── data └── input │ └── local │ └── DYD_dummy_data.csv.zip ├── fmu └── fmu-models │ └── deadband │ └── deadband.fmu └── idf ├── v8-9-0 └── AZ_Phoenix_gasfurnace_crawlspace_IECC_2018_cycles.idf └── v9-4-0 ├── heatedbsmt_1story_2000sqft_gasfurnace_AC.idf ├── heatedbsmt_2story_2300sqft_gasfurnace_AC.idf └── slab_1story_2000sqft_gasfurnace_AC.idf /.dependabot/config.yml: -------------------------------------------------------------------------------- 1 | version: 1 2 | update_configs: 3 | - package_manager: "python" 4 | directory: "/" 5 | update_schedule: "live" 6 | target_branch: "dev" 7 | allowed_updates: 8 | - match: 9 | update_type: "security" 10 | default_labels: 11 | - "dependabot" 12 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !Dockerfile 3 | !src/ 4 | !scripts/ 5 | !setup.py 6 | !requirements.txt 7 | !requirements_unfixed.txt 8 | !pytest.ini 9 | !.vscode/ 10 | -------------------------------------------------------------------------------- /.env.template: -------------------------------------------------------------------------------- 1 | 2 | # this is the only thing that *must* be added for quick-start 3 | # add your local host computer's path to the repo 4 | 
LOCAL_PACKAGE_DIR= 5 | 6 | # docker and package info: 7 | # do not change from default, may break integration 8 | PACKAGE_NAME=building-controls-simulator 9 | VERSION_TAG=0.6.0-alpha 10 | DOCKERHUB_REPOSITORY=tstesco 11 | MAINTAINER=tom.stesco@gmail.com 12 | USER_NAME=bcs 13 | DOCKER_IMAGE=${PACKAGE_NAME} 14 | 15 | # container default directory structure: 16 | # do not change from default, may break integration 17 | DOCKER_HOME_DIR=/home/${USER_NAME} 18 | DOCKER_LIB_DIR=${DOCKER_HOME_DIR}/lib 19 | DOCKER_PACKAGE_DIR=${DOCKER_LIB_DIR}/${PACKAGE_NAME} 20 | DOCKER_EXT_DIR=${DOCKER_LIB_DIR}/external 21 | LIB_DIR=${DOCKER_HOME_DIR}/lib 22 | EXT_DIR=${LIB_DIR}/external 23 | FMIL_HOME=${EXT_DIR}/FMIL/build-fmil 24 | PACKAGE_DIR=/home/bcs/lib/${PACKAGE_NAME} 25 | PYTHONPATH=${DOCKER_PACKAGE_DIR}/src/python 26 | ENERGYPLUS_INSTALL_DIR=${EXT_DIR}/EnergyPlus 27 | ENERGYPLUSTOFMUSCRIPT=${EXT_DIR}/EnergyPlusToFMU-3.1.0/Scripts/EnergyPlusToFMU.py 28 | WEATHER_DIR=${DOCKER_PACKAGE_DIR}/weather 29 | ARCHIVE_TMY3_DIR=${WEATHER_DIR}/archive_tmy3 30 | ARCHIVE_TMY3_META=${ARCHIVE_TMY3_DIR}/TMY3_StationsMeta.csv 31 | ARCHIVE_TMY3_DATA_DIR=${ARCHIVE_TMY3_DIR}/tmy3_data 32 | EP_TMY3_CACHE_DIR=${WEATHER_DIR}/ep_tmy3_cache 33 | NSRDB_CACHE_DIR=${WEATHER_DIR}/nsrdb 34 | SIMULATION_EPW_DIR=${WEATHER_DIR}/simulation_epw 35 | LOCAL_CACHE_DIR=${DOCKER_PACKAGE_DIR}/data 36 | TEST_DIR= 37 | BLASFEO_MAIN_FOLDER="${EXT_DIR}/blasfeo" 38 | HPIPM_MAIN_FOLDER="${EXT_DIR}/hpipm" 39 | ACADOS_DIR=${EXT_DIR}/acados 40 | ACADOS_SOURCE_DIR=${ACADOS_DIR} 41 | LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${HPIPM_MAIN_FOLDER}/lib:${BLASFEO_MAIN_FOLDER}/lib:${ACADOS_DIR}/lib 42 | 43 | # test directory structure 44 | # do not change from default, may break integration 45 | TEST_DIR= 46 | 47 | # Cloud service credentials: 48 | # These must be configured to your specific project if you intend to use 49 | # supported cloud service features 50 | # 
GOOGLE_APPLICATION_CREDENTIALS=${DOCKER_HOME_DIR}/.config/gcloud/application_default_credentials.json 51 | # DYD_GOOGLE_CLOUD_PROJECT= 52 | # DYD_GCS_URI_BASE= 53 | # DYD_METADATA_URI= 54 | # BCS_GOOGLE_CLOUD_PROJECT= 55 | # BCS_OUTPUT_GCS_URI_BASE= 56 | # NREL_DEV_API_KEY= 57 | # NREL_DEV_EMAIL= 58 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ipynb linguist-documentation -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.DS_store 2 | *.txt 3 | *.png 4 | *.csv 5 | *.fmu 6 | *.idf 7 | *sqlite.err 8 | *eplusssz.csv 9 | *epluszsz.csv 10 | *.exe 11 | *.mat 12 | *.egg-info 13 | idf-to-fmu-export-prep-darwin 14 | idf-to-fmu-export-prep-linux 15 | Output_EPExport_Slave/ 16 | Output_EPExport_Test FMI 1.0 CS/ 17 | Output_EPExport_Model/ 18 | output/ 19 | data/ 20 | weather/ 21 | idf/ 22 | fmu/ 23 | notebooks/* 24 | !notebooks/demo_LocalSource.ipynb 25 | !notebooks/demo_GCSDYDSource.ipynb 26 | !notebooks/test_GCSDYD_chicago.ipynb 27 | notes/ 28 | !test/idf/v8-9-0/AZ_Phoenix_gasfurnace_crawlspace_IECC_2018_cycles.idf 29 | !test/idf/v9-4-0/heatedbsmt_2story_2300sqft_gasfurnace_AC.idf 30 | !test/idf/v9-4-0/heatedbsmt_1story_2000sqft_gasfurnace_AC.idf 31 | !test/idf/v9-4-0/slab_1story_2000sqft_gasfurnace_AC.idf 32 | !test/fmu/fmu-models/deadband/deadband.fmu 33 | !requirements.txt 34 | !requirements_unfixed.txt 35 | __pycache__/ 36 | .pytest_cache/ 37 | .ipynb_checkpoints/ 38 | application_default_credentials.json 39 | docs/build/ 40 | docs/source/generated/ 41 | .env 42 | .test.env 43 | docker-compose.yml 44 | build/ 45 | dev_docker.sh 46 | -------------------------------------------------------------------------------- /.test.env.template: 
-------------------------------------------------------------------------------- 1 | # container default directory structure: 2 | # do not change from default, may break integration 3 | TEST_DIR=${PACKAGE_DIR}/test 4 | IDF_DIR=${TEST_DIR}/idf/v${ENERGYPLUS_INSTALL_VERSION} 5 | FMU_DIR=${TEST_DIR}/fmu/v${ENERGYPLUS_INSTALL_VERSION} 6 | WEATHER_DIR=${TEST_DIR}/weather 7 | OUTPUT_DIR=${TEST_DIR}/output 8 | ARCHIVE_TMY3_DIR=${WEATHER_DIR}/archive_tmy3 9 | ARCHIVE_TMY3_META=${ARCHIVE_TMY3_DIR}/TMY3_StationsMeta.csv 10 | ARCHIVE_TMY3_DATA_DIR=${ARCHIVE_TMY3_DIR}/tmy3_data 11 | EP_TMY3_CACHE_DIR=${WEATHER_DIR}/ep_tmy3_cache 12 | NSRDB_CACHE_DIR=${WEATHER_DIR}/nsrdb 13 | SIMULATION_EPW_DIR=${WEATHER_DIR}/simulation_epw 14 | LOCAL_CACHE_DIR=${TEST_DIR}/data 15 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.DS_Store": true, 4 | "**/.ipynb": true, 5 | "**/__pycache__": true, 6 | "**/.pytest_cache": true, 7 | "**/build": true, 8 | "**/BuildingControlsSimulator-*": true, 9 | "**/src/python/BuildingControlsSimulator.egg-info": true, 10 | "**/notebooks": true, 11 | "**/.ipynb_checkpoints": true, 12 | "**/util-get-address-size.exe": true, 13 | }, 14 | "search.exclude": { 15 | "**/.git": true, 16 | "**/*.csv": true, 17 | "**/*.epw": true, 18 | "**/*.fmu": true, 19 | "**/*.exe": true, 20 | "**/*.txt": true, 21 | "src/python/.pytype": true, 22 | "**/notebooks": true, 23 | "**/notes": true, 24 | "**/build": true, 25 | "**/data": true, 26 | "**/output": true, 27 | "**/Output_EPExport_Slave": true, 28 | "**/Output_EPExport_Test FMI 1.0 CS": true, 29 | "**/test/data": true, 30 | "external": true, 31 | "bcs_venv": true, 32 | }, 33 | "python.linting.pylintEnabled": false, 34 | "python.linting.flake8Enabled": true, 35 | "python.linting.enabled": true, 36 | "python.testing.unittestEnabled": false, 37 | 
"python.testing.nosetestsEnabled": false, 38 | "python.testing.pytestEnabled": true, 39 | "python.envFile": "${workspaceFolder}/.test.env", 40 | "python.testing.pytestArgs": [ 41 | "-c", 42 | "${workspaceFolder}/pytest.ini" 43 | ], 44 | "python.formatting.provider": "black", 45 | "python.formatting.blackArgs": [ 46 | "--line-length", 47 | "79" 48 | ], 49 | "python.jediEnabled": false, 50 | "editor.rulers": [ 51 | 79, 52 | 90 53 | ], 54 | "python.languageServer": "Pylance", 55 | "spellright.language": [ 56 | "English" 57 | ], 58 | } -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # 0.6.0-alpha (2022-03-15) 2 | 3 | ## Features and Improvements 4 | - update dependencies to latest versions (see diff of requirements.txt) 5 | - DataSpec and conversions support nullable and non-nullable data types in numpy and pandas 6 | 7 | ## Breaking changes 8 | - Previously undefined behaviour of nullable data types must now be defined in conversions 9 | 10 | ## Bug fixes 11 | - fixed make_epw_file and adding test_make_epw_file 12 | - fixed DataClient issue with data type conversion after filling nulls 13 | - fixed DataClient.py removal of columns that get truncated to all NA 14 | 15 | # 0.5.0-alpha (2021-06-13) 16 | 17 | ## Features and Improvements 18 | - simplify `.env` setup and usage 19 | - adding archetypal .idf building model geometries 20 | - remove pytest logging of dependency libraries and only show logs from failed tests 21 | - adding `DataClient.generate_dummy_data()` to simplify demo and testing data 22 | - improve `demo_LocalSource.ipynb` 23 | - DataSpecs can now be partially filled to allow for multiple data sources for same specification 24 | 25 | ## Breaking changes 26 | - ControllerModel now has `options` attribute that defines deadband size 27 | - no longer include outdoor temperature in null check columns to allow for automatic 
filling of missing weather data 28 | 29 | ## Bug fixes 30 | - fixed `make_data_directories` usage when no local_cache is given 31 | - fixed `get_local_cache_file` usage when no local_cache is given 32 | - fixed `DataSpec.py` null_check_columns and units 33 | - conditional skipping of `DataClient` tests that use external data sources if those sources are not configured 34 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # This is a comment. 2 | # Each line is a file pattern followed by one or more owners. 3 | 4 | # These owners will be the default owners for everything in 5 | # the repo. Unless a later match takes precedence, these owners 6 | # will be requested for review when someone opens a pull request. 7 | * @tomstesco 8 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 
11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. 
Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at . All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq 77 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Table of contents 2 | 3 | - [Contributing to Building Controls Simulator](#contributing-to-Building-Controls-Simulator) 4 | - [Development Setup](#Development-Setup) 5 | - [Codebase structure](#codebase-structure) 6 | - [Unit testing](#unit-testing) 7 | - [Writing documentation](#writing-documentation) 8 | - [Building documentation](#building-documentation) 9 | - [Previewing documentation changes](#previewing-changes) 10 | - [Submitting documentation changes for review](#submitting-changes-for-review) 11 | - [Adding documentation tests](#adding-documentation-tests) 12 | 13 | ## Contributing to Building 
Controls Simulator 14 | 15 | If you are interested in contributing to the Building Controls Simulator (BCS for short) project, your contributions will fall 16 | into two categories: 17 | 18 | 1. You want to propose a new feature and implement it. 19 | - Post about your intended feature, and we shall discuss the design and 20 | implementation. Once we agree that the plan looks good, go ahead and implement it. 21 | 2. You want to implement a feature or bug-fix for an outstanding issue. 22 | - Search for your issue here: https://github.com/ecobee/building-controls-simulator/issues 23 | - Pick an issue and comment on the task that you want to work on this feature. 24 | - If you need more context on a particular issue, please ask and we shall provide. 25 | 26 | Once you finish implementing a feature or bug-fix, please send a Pull Request to 27 | https://github.com/ecobee/building-controls-simulator 28 | 29 | ## Development setup 30 | 31 | To develop Building Controls Simulator on your machine, here are some tips: 32 | 33 | 1. Clone a copy of the Building Controls Simulator repo from source: 34 | 35 | ```bash 36 | git clone https://github.com/ecobee/building-controls-simulator 37 | cd building-controls-simulator 38 | ``` 39 | 40 | 2. Build docker image: 41 | 42 | The docker container provides EnergyPlus version management and isolated cross-platform development environment 43 | 44 | ```bash 45 | make build-docker 46 | ``` 47 | 48 | 3. run docker container with editable library files mounted: 49 | 50 | ```bash 51 | make run 52 | ``` 53 | 54 | ### Setting up VS Code IDE 55 | 56 | If you are unfamiliar with VS Code you can use your IDE/text editor of choice. If you have not yet decided on an IDE I recommend VS Code for development. 57 | The `Remote - Containers` extension allows you to attach to the running BCS container which can be used for writing, testing, and debugging. 
58 | 59 | For further detail on how to use VS Code see this tutorial: https://pycon.switowski.com/01-vscode/ 60 | 61 | ## Codebase structure 62 | 63 | * [src](src) - Core library files 64 | * [src/python](src/python) - Python source code and tests 65 | * [notebooks](notebooks) - Jupyter notebooks used for interactive development, testing, and debugging 66 | 67 | ## Unit testing 68 | 69 | Tests are located under `test/` and unit tests are co-located with modules in `src/`. Run the entire test suite with: 70 | 71 | ```bash 72 | python -m pytest 73 | ``` 74 | 75 | or run individual test suites, test files, or individual tests. For example: 76 | 77 | ```bash 78 | python -m pytest tests/python/IDFPreprocessor/test_IDFPreprocessor.py::TestIDFPreprocessor::test_preprocess 79 | ``` 80 | 81 | Ideally all new code will be accompanied by unit tests written by someone who has 82 | full context of those changes. Usually this would be the person implementing the 83 | changes. However, we appreciate WIP branches and PRs to illustrate ideas without 84 | working unit tests. 85 | 86 | ## Writing documentation 87 | For documentation BCS uses [Sphinx](https://www.sphinx-doc.org/en/master/) with 88 | [Google style](http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html) 89 | for formatting docstrings. Length of line inside docstrings block must be limited to 80 characters to 90 | fit into Jupyter documentation popups. 91 | 92 | ### Building documentation 93 | 94 | Generate the documentation HTML files. The generated files will be in `docs/build/html`. 95 | 96 | ```bash 97 | cd docs 98 | make clean 99 | make html 100 | ``` 101 | 102 | #### Previewing documentation changes 103 | 104 | To view HTML files locally, you can open the files in your web browser. For example, 105 | navigate to `$PACKAGE_DIR/docs/build/html/index.html` in a web browser. 
106 | 107 | If you are developing on a remote machine, you can set up an SSH tunnel so that 108 | you can access the HTTP server on the remote machine from your local machine. To map 109 | remote port 8000 to local port 8000, use either of the following commands. 110 | 111 | ```bash 112 | # For SSH 113 | ssh my_machine -L 8000:my_machine:8000 114 | 115 | # For Eternal Terminal 116 | et my_machine -t="8000:8000" 117 | ``` 118 | 119 | Then navigate to `localhost:8000` in your web browser. 120 | 121 | #### Submitting documentation changes for review 122 | 123 | It is helpful when submitting a PR that changes the docs to provide a rendered 124 | version of the result. If your change is small, you can add a screenshot of the 125 | changed docs to your PR. 126 | 127 | If your change to the docs is large and affects multiple pages, you can host 128 | the docs yourself with the following steps, then add a link to the output in your 129 | PR. These instructions use GitHub pages to host the docs 130 | you have built. To do so, follow [these steps](https://guides.github.com/features/pages/) 131 | to make a repo to host your changed documentation. 132 | 133 | GitHub pages expects to be hosting a Jekyll generated website which does not work 134 | well with the static resource paths used in the Sphinx documentation. To get around 135 | this, you must add an empty file called `.nojekyll` to your repo. 136 | 137 | ```bash 138 | cd your_github_pages_repo 139 | touch .nojekyll 140 | git add . 141 | git commit 142 | git push 143 | ``` 144 | 145 | Then, copy built documentation and push the changes: 146 | 147 | ```bash 148 | cd your_github_pages_repo 149 | cp -r $PACKAGE_DIR/docs/build/html/* . 150 | git add . 151 | git commit 152 | git push 153 | ``` 154 | 155 | Then you should be able to see the changes at your_github_username.github.com/your_github_pages_repo. 
156 | 157 | 158 | #### Adding documentation tests 159 | 160 | It is easy for code snippets in docstrings and `.rst` files to get out of date. The docs 161 | build includes the [Sphinx Doctest Extension](https://www.sphinx-doc.org/en/master/usage/extensions/doctest.html), 162 | which can run code in documentation as a unit test. To use the extension, use 163 | the `.. testcode::` directive in your `.rst` and docstrings. 164 | 165 | To manually run these tests, follow steps 1 and 2 above, then run: 166 | 167 | ```bash 168 | cd docs 169 | make doctest 170 | ``` 171 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | 3 | MAINTAINER Tom Stesco 4 | 5 | # env vars 6 | # Use C.UTF-8 locale to avoid issues with ASCII encoding 7 | ENV LANG="C.UTF-8" 8 | ENV LC_ALL="C.UTF-8" 9 | ENV USER_NAME="bcs" 10 | ENV IS_DOCKER_ENV="true" 11 | ENV PACKAGE_NAME="building-controls-simulator" 12 | ENV PYENV_SHELL="bash" 13 | 14 | # dependent env vars 15 | ENV HOME="/home/${USER_NAME}" 16 | ENV LIB_DIR="${HOME}/lib" 17 | ENV EXT_DIR="${LIB_DIR}/external" 18 | ENV ENERGYPLUS_INSTALL_DIR="${EXT_DIR}/EnergyPlus" 19 | ENV FMIL_HOME="${EXT_DIR}/FMIL/build-fmil" 20 | ENV PACKAGE_DIR="${LIB_DIR}/${PACKAGE_NAME}" 21 | ENV PYENV_ROOT="${HOME}/.pyenv" 22 | ENV PATH="${HOME}/.local/bin:${PYENV_ROOT}/shims:${PYENV_ROOT}/bin:${PATH}" 23 | ENV VENV_NAME="${USER_NAME}_venv" 24 | 25 | # set build noninteractive 26 | ARG DEBIAN_FRONTEND=noninteractive 27 | 28 | # create application user and give user ownership of $HOME 29 | RUN apt-get update && apt-get install -y --no-install-recommends sudo \ 30 | && adduser "${USER_NAME}" --shell /bin/bash --disabled-password --gecos "" \ 31 | && adduser "${USER_NAME}" sudo \ 32 | && echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers \ 33 | && echo 'Defaults env_keep += "DEBIAN_FRONTEND"' >> "/etc/sudoers.d/env_keep" \ 34 
| && chown -R "${USER_NAME}" "${HOME}" 35 | 36 | USER "${USER_NAME}" 37 | 38 | # install core system libraries 39 | RUN sudo apt-get update && sudo apt-get upgrade -y \ 40 | && sudo apt-get install -y --no-install-recommends \ 41 | build-essential \ 42 | ca-certificates \ 43 | curl \ 44 | git \ 45 | libbz2-dev \ 46 | libffi-dev \ 47 | libncurses5-dev \ 48 | libncursesw5-dev \ 49 | libreadline-dev \ 50 | libsqlite3-dev \ 51 | liblzma-dev \ 52 | libssl-dev \ 53 | llvm \ 54 | make \ 55 | cmake \ 56 | netbase \ 57 | pkg-config \ 58 | tk-dev \ 59 | wget \ 60 | xz-utils \ 61 | zlib1g-dev \ 62 | unzip \ 63 | python3-dev \ 64 | python3-distutils \ 65 | subversion \ 66 | p7zip-full \ 67 | bc \ 68 | gfortran \ 69 | && sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 70 | 71 | # install pyenv https://github.com/pyenv/pyenv-installer 72 | # note: pyenv.run is not accessible to all networks, use github url 73 | # install FMI library 74 | # install FMUComplianceChecker 75 | # install EnergyPlusToFMU 76 | # download and extract PyFMI release 77 | # because we dont use builtin PyFMI ODE simulation capabilities 78 | RUN mkdir "${LIB_DIR}" && mkdir "${EXT_DIR}" \ 79 | && cd "${EXT_DIR}" \ 80 | && curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash \ 81 | && pyenv update && pyenv install 3.8.12 \ 82 | && wget "https://github.com/modelon-community/fmi-library/archive/refs/tags/2.3.zip" \ 83 | && unzip "2.3.zip" && mv "fmi-library-2.3" "FMIL" \ 84 | && rm -rf "2.3.zip" \ 85 | && cd "FMIL" \ 86 | && mkdir build-fmil; cd build-fmil \ 87 | && cmake -DFMILIB_INSTALL_PREFIX=./ ../ \ 88 | && make install test \ 89 | && cd "${EXT_DIR}" \ 90 | && wget "https://github.com/modelica-tools/FMUComplianceChecker/releases/download/2.0.4/FMUChecker-2.0.4-linux64.zip" \ 91 | && unzip "FMUChecker-2.0.4-linux64.zip" \ 92 | && rm "FMUChecker-2.0.4-linux64.zip" \ 93 | && mv "FMUChecker-2.0.4-linux64" "FMUComplianceChecker" \ 94 | && mkdir "fmu" \ 95 | && cd "${EXT_DIR}" 
\ 96 | && wget "https://github.com/lbl-srg/EnergyPlusToFMU/archive/refs/tags/v3.1.0.zip" \ 97 | && unzip "v3.1.0.zip" && rm "v3.1.0.zip" \ 98 | # install sundials 4.1.0 is latest supported (dep of assimulo) 99 | && cd "${EXT_DIR}" \ 100 | && wget "https://github.com/LLNL/sundials/releases/download/v4.1.0/sundials-4.1.0.tar.gz" \ 101 | && tar -xzf "sundials-4.1.0.tar.gz" && rm "sundials-4.1.0.tar.gz" \ 102 | && cd "sundials-4.1.0" \ 103 | && mkdir "build" \ 104 | && cd "build" \ 105 | && cmake -DCMAKE_INSTALL_PREFIX="${EXT_DIR}/sundials" .. \ 106 | && make install \ 107 | # install lapack and blas (dep of assimulo) 108 | && cd "${EXT_DIR}" \ 109 | && wget "https://github.com/Reference-LAPACK/lapack/archive/refs/tags/v3.9.1.tar.gz" \ 110 | && tar -xzf "v3.9.1.tar.gz" && rm "v3.9.1.tar.gz" \ 111 | && cd "lapack-3.9.1" \ 112 | && mkdir build \ 113 | && cd "build" \ 114 | && cmake -DCMAKE_INSTALL_PREFIX="${EXT_DIR}/lapack" .. \ 115 | && cmake --build . -j --target install \ 116 | # get Assimulo source (dep of PyFMI 2.8+) 117 | && cd "${EXT_DIR}" \ 118 | && wget "https://github.com/modelon-community/Assimulo/archive/refs/tags/Assimulo-3.2.9.tar.gz" \ 119 | && tar -xzf "Assimulo-3.2.9.tar.gz" && rm "Assimulo-3.2.9.tar.gz" \ 120 | && mv "${EXT_DIR}/Assimulo-Assimulo-3.2.9" "${EXT_DIR}/Assimulo-3.2.9" \ 121 | # get PyFMI source 122 | && cd "${EXT_DIR}" \ 123 | && wget "https://github.com/modelon-community/PyFMI/archive/refs/tags/PyFMI-2.9.5.tar.gz" \ 124 | && tar -xzf "PyFMI-2.9.5.tar.gz" && rm "PyFMI-2.9.5.tar.gz"\ 125 | && mv "${EXT_DIR}/PyFMI-PyFMI-2.9.5" "${EXT_DIR}/PyFMI" \ 126 | && cd "${LIB_DIR}" \ 127 | # make PACKAGE_DIR and cleanup 128 | && mkdir "${PACKAGE_DIR}" \ 129 | && sudo rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 130 | 131 | # copying will cause rebuild at minimum to start from here 132 | # use .dockerignore to add files to docker image 133 | COPY ./ "${PACKAGE_DIR}" 134 | 135 | # copied directory will not have user ownership by default 136 | # install
energyplus versions desired in `scripts/setup/install_ep.sh` 137 | # install python dev environment 138 | RUN sudo chown -R "${USER_NAME}" "${PACKAGE_DIR}" \ 139 | && cd "${PACKAGE_DIR}" \ 140 | && mv "${PACKAGE_DIR}/.vscode" "${LIB_DIR}/.vscode" \ 141 | && sudo chmod +x "./scripts/setup/install_ep.sh" \ 142 | && sudo ./scripts/setup/install_ep.sh "${ENERGYPLUS_INSTALL_DIR}" \ 143 | && cd "${PACKAGE_DIR}" \ 144 | && ${PYENV_ROOT}/versions/3.8.12/bin/python3.8 -m venv "${LIB_DIR}/${VENV_NAME}" \ 145 | && . "${LIB_DIR}/${VENV_NAME}/bin/activate" \ 146 | && pip install --no-cache-dir --upgrade setuptools pip \ 147 | && pip install --no-cache-dir -r "requirements.txt" \ 148 | # && pip install --no-cache-dir -r "requirements_unfixed.txt" \ 149 | # install bcs 150 | && pip install --editable . \ 151 | # install Assimulo (dep of PyFMI 2.8+) 152 | && cd "${EXT_DIR}/Assimulo-3.2.9" \ 153 | && python setup.py install --sundials-home="${EXT_DIR}/sundials" --blas-home="${EXT_DIR}/lapack/lib" --lapack-home="${EXT_DIR}/lapack" \ 154 | # install PyFMI 155 | && cd "${EXT_DIR}/PyFMI" \ 156 | && python "setup.py" install --fmil-home="${FMIL_HOME}" \ 157 | && cd "${PACKAGE_DIR}" \ 158 | && . "scripts/setup/install_solvers.sh" 159 | 160 | # copy .rc files to user home for use on startup. This can be further configured by user. 161 | RUN cd "${PACKAGE_DIR}" \ 162 | && cp "${PACKAGE_DIR}/scripts/setup/.bashrc" "$HOME/.bashrc" \ 163 | && cp "${PACKAGE_DIR}/scripts/setup/.pdbrc" "$HOME/.pdbrc" \ 164 | && chmod +x "${PACKAGE_DIR}/scripts/setup/jupyter_lab_bkgrnd.sh" 165 | 166 | WORKDIR "${LIB_DIR}" 167 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2020 Ecobee Inc. 2 | 3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 4 | 5 | 1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 6 | 7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 8 | 9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 10 | 11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -------------------------------------------------------------------------------- /dev_docker.sh.template: -------------------------------------------------------------------------------- 1 | #! 
/bin/bash 2 | 3 | # --rm removes container on exit 4 | # --service-ports causes defined ports to be mapped 5 | # --volume maps volumes individually 6 | source .env 7 | docker-compose run \ 8 | --rm \ 9 | --service-ports \ 10 | --volume=${LOCAL_PACKAGE_DIR}:${DOCKER_PACKAGE_DIR}:consistent\ 11 | building-controls-simulator bash 12 | -------------------------------------------------------------------------------- /docker-compose.yml.template: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | building-controls-simulator: 4 | build: 5 | context: ./ 6 | dockerfile: ./Dockerfile 7 | # change this if want to build your own image 8 | image: ${DOCKERHUB_REPOSITORY}/${DOCKER_IMAGE}:${VERSION_TAG} 9 | container_name: ${DOCKER_IMAGE}-v${VERSION_TAG} 10 | ports: 11 | - "8888-8890:8888-8890" # map 8888 to 8890 port for jupyter lab 12 | user: "bcs" 13 | volumes: 14 | - ${LOCAL_PACKAGE_DIR}:${DOCKER_PACKAGE_DIR}:consistent 15 | # for running with docker-compose up use jupyter lab setup script 16 | # docker-compose run will override this 17 | command: bash ${PACKAGE_DIR}/scripts/setup/jupyter_lab.sh 18 | stdin_open: true # docker run -i 19 | tty: true # docker run -t 20 | env_file: .env 21 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 
12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/BuildingModels.rst: -------------------------------------------------------------------------------- 1 | BuildingModels 2 | ======================= 3 | Thermodynamic models of buildings. 4 | 5 | .. currentmodule:: BuildingControlsSimulator.BuildingModels.BuildingModel 6 | 7 | .. autosummary:: 8 | :toctree: generated 9 | :nosignatures: 10 | .. autoclass:: BuildingModel 11 | :members: 12 | :undoc-members: 13 | 14 | .. 
currentmodule:: BuildingControlsSimulator.BuildingModels.EnergyPlusBuildingModel 15 | 16 | .. autosummary:: 17 | :toctree: generated 18 | :nosignatures: 19 | .. autoclass:: EnergyPlusBuildingModel 20 | :members: 21 | :undoc-members: 22 | 23 | .. currentmodule:: BuildingControlsSimulator.BuildingModels.IDFPreprocessor 24 | 25 | .. autosummary:: 26 | :toctree: generated 27 | :nosignatures: 28 | .. autoclass:: IDFPreprocessor 29 | :members: 30 | :undoc-members: 31 | 32 | 33 | -------------------------------------------------------------------------------- /docs/source/DataClients.rst: -------------------------------------------------------------------------------- 1 | DataClients 2 | ======================= 3 | Clients to extract, transform, and load data for simulation from multiple data sources. 4 | 5 | .. currentmodule:: BuildingControlsSimulator.DataClients.DataClient 6 | 7 | .. autosummary:: 8 | :toctree: generated 9 | :nosignatures: 10 | .. autoclass:: DataClient 11 | :members: 12 | :undoc-members: 13 | 14 | .. currentmodule:: BuildingControlsSimulator.DataClients.DYDClient 15 | 16 | .. autosummary:: 17 | :toctree: generated 18 | :nosignatures: 19 | .. autoclass:: DYDClient 20 | :members: 21 | :undoc-members: 22 | 23 | 24 | -------------------------------------------------------------------------------- /docs/source/HVACSource.rst: -------------------------------------------------------------------------------- 1 | HVACSources 2 | ======================= 3 | Clients to extract, transform, and load data for simulation from multiple data sources. 4 | 5 | .. currentmodule:: BuildingControlsSimulator.DataClients.HVACSource 6 | 7 | .. autosummary:: 8 | :toctree: generated 9 | :nosignatures: 10 | .. autoclass:: HVACSource 11 | :members: 12 | :undoc-members: 13 | 14 | .. currentmodule:: BuildingControlsSimulator.DataClients.DYDHVACSource 15 | 16 | .. autosummary:: 17 | :toctree: generated 18 | :nosignatures: 19 | .. 
autoclass:: DYDHVACSource 20 | :members: 21 | :undoc-members: 22 | 23 | -------------------------------------------------------------------------------- /docs/source/WeatherSource.rst: -------------------------------------------------------------------------------- 1 | WeatherSources 2 | ======================= 3 | Clients to extract, transform, and load data for simulation from multiple data sources. 4 | 5 | .. currentmodule:: BuildingControlsSimulator.DataClients.WeatherSource 6 | .. autoclass:: WeatherSource 7 | :members: 8 | :undoc-members: 9 | 10 | .. currentmodule:: BuildingControlsSimulator.DataClients.DYDWeatherSource 11 | .. autoclass:: DYDWeatherSource 12 | :members: 13 | :undoc-members: 14 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | import os 14 | 15 | # import sys 16 | # sys.path.insert(0, os.path.abspath('.')) 17 | 18 | import BuildingControlsSimulator 19 | 20 | import sphinx_rtd_theme 21 | 22 | # -- Project information ----------------------------------------------------- 23 | 24 | project = "building-controls-simulator" 25 | copyright = "2020, Ecobee Inc." 
26 | author = "Tom Stesco" 27 | 28 | # The full version, including alpha/beta/rc tags 29 | release = "0.1.1" 30 | 31 | 32 | # -- General configuration --------------------------------------------------- 33 | 34 | # Add any Sphinx extension module names here, as strings. They can be 35 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 36 | # ones. 37 | extensions = [ 38 | "sphinx_rtd_theme", 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.autosummary", 41 | "sphinx.ext.doctest", 42 | "sphinx.ext.intersphinx", 43 | "sphinx.ext.todo", 44 | "sphinx.ext.coverage", 45 | "sphinx.ext.napoleon", 46 | "sphinx.ext.viewcode", 47 | "sphinx.ext.autosectionlabel", 48 | ] 49 | 50 | # build the templated autosummary files 51 | autosummary_generate = True 52 | 53 | # Add any paths that contain templates here, relative to this directory. 54 | templates_path = ["_templates"] 55 | 56 | # List of patterns, relative to source directory, that match files and 57 | # directories to ignore when looking for source files. 58 | # This pattern also affects html_static_path and html_extra_path. 59 | exclude_patterns = [] 60 | 61 | 62 | # -- Options for HTML output ------------------------------------------------- 63 | 64 | # The theme to use for HTML and HTML Help pages. See the documentation for 65 | # a list of builtin themes. 66 | # 67 | html_theme = "sphinx_rtd_theme" 68 | 69 | # Add any paths that contain custom static files (such as style sheets) here, 70 | # relative to this directory. They are copied after the builtin static files, 71 | # so a file named "default.css" will overwrite the builtin "default.css". 72 | html_static_path = ["_static"] 73 | 74 | master_doc = "index" 75 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. 
building-controls-simulator documentation master file 2 | 3 | :github_url: https://github.com/ecobee/building-controls-simulator 4 | 5 | building-controls-simulator documentation 6 | ======================================================= 7 | 8 | building-controls-simulator is a cross-platform environment for simulating building HVAC controls. 9 | 10 | Github: https://github.com/ecobee/building-controls-simulator 11 | 12 | .. toctree:: 13 | :glob: 14 | :maxdepth: 2 15 | :caption: Notes 16 | 17 | notes/* 18 | 19 | .. toctree:: 20 | :glob: 21 | :maxdepth: 2 22 | :caption: Building Models 23 | 24 | BuildingModels 25 | 26 | .. toctree:: 27 | :glob: 28 | :maxdepth: 2 29 | :caption: Data Clients 30 | 31 | DataClients 32 | HVACSource 33 | WeatherSource 34 | 35 | Indices and tables 36 | ================== 37 | 38 | * :ref:`genindex` 39 | -------------------------------------------------------------------------------- /docs/source/notes/getting_started_.rst: -------------------------------------------------------------------------------- 1 | :github_url: https://github.com/ecobee/building-controls-simulator 2 | 3 | building-controls-simulator getting started 4 | ======================================================= 5 | 6 | building-controls-simulator is a cross-platform environment for simulating building HVAC controls. 7 | 8 | See README.md for set up instructions. 
9 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | python_files = test_*.py 3 | addopts = -p no:warnings 4 | log_level=INFO 5 | testpaths = 6 | src/python/BuildingControlsSimulator -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | adal==1.2.7 2 | aiohttp==3.8.1 3 | aiosignal==1.2.0 4 | alabaster==0.7.12 5 | anyio==3.5.0 6 | argon2-cffi==21.3.0 7 | argon2-cffi-bindings==21.2.0 8 | asttokens==2.0.5 9 | async-timeout==4.0.2 10 | attrs==21.4.0 11 | Babel==2.9.1 12 | backcall==0.2.0 13 | beautifulsoup4==4.8.0 14 | black==22.1.0 15 | bleach==4.1.0 16 | Bottleneck==1.3.4 17 | cachetools==5.0.0 18 | casadi==3.5.5 19 | certifi==2021.10.8 20 | cffi==1.15.0 21 | charset-normalizer==2.0.12 22 | click==8.0.4 23 | coverage==6.3.2 24 | cryptography==36.0.1 25 | cycler==0.11.0 26 | Cython==0.29.28 27 | db-dtypes==0.3.1 28 | debugpy==1.5.1 29 | decorator==5.1.1 30 | defusedxml==0.7.1 31 | docutils==0.17.1 32 | entrypoints==0.4 33 | eppy==0.5.57 34 | executing==0.8.3 35 | flake8==4.0.1 36 | fonttools==4.30.0 37 | frozenlist==1.3.0 38 | fsspec==2022.2.0 39 | future==0.18.2 40 | gcsfs==2022.2.0 41 | google-api-core==2.7.1 42 | google-api-python-client==2.40.0 43 | google-auth==2.6.0 44 | google-auth-httplib2==0.1.0 45 | google-auth-oauthlib==0.5.0 46 | google-cloud-bigquery==2.34.2 47 | google-cloud-bigquery-storage==2.13.0 48 | google-cloud-core==2.2.3 49 | google-cloud-storage==2.2.0 50 | google-crc32c==1.3.0 51 | google-resumable-media==2.3.2 52 | googleapis-common-protos==1.55.0 53 | gprof2dot==2021.2.21 54 | grpcio==1.44.0 55 | grpcio-status==1.44.0 56 | h5pyd==0.8.0 57 | httplib2==0.20.4 58 | idna==3.3 59 | imagesize==1.3.0 60 | importlib-metadata==4.11.3 61 | importlib-resources==5.4.0 62 | 
iniconfig==1.1.1 63 | ipykernel==6.9.2 64 | ipython==8.1.1 65 | ipython-genutils==0.2.0 66 | ipywidgets==7.6.5 67 | isodate==0.6.1 68 | jedi==0.18.1 69 | Jinja2==3.0.3 70 | joblib==1.1.0 71 | json5==0.9.6 72 | jsonschema==4.4.0 73 | jupyter-client==7.1.2 74 | jupyter-core==4.9.2 75 | jupyter-server==1.15.4 76 | jupyterlab==3.3.2 77 | jupyterlab-pygments==0.1.2 78 | jupyterlab-server==2.10.3 79 | jupyterlab-widgets==1.0.2 80 | kiwisolver==1.4.0 81 | llvmlite==0.38.0 82 | lxml==4.8.0 83 | MarkupSafe==2.1.0 84 | matplotlib==3.5.1 85 | matplotlib-inline==0.1.3 86 | mccabe==0.6.1 87 | mistune==0.8.4 88 | msrest==0.6.21 89 | msrestazure==0.6.4 90 | multidict==6.0.2 91 | munch==2.5.0 92 | mypy-extensions==0.4.3 93 | nbclassic==0.3.6 94 | nbclient==0.5.13 95 | nbconvert==6.4.4 96 | nbformat==5.2.0 97 | nest-asyncio==1.5.4 98 | notebook==6.4.9 99 | notebook-shim==0.1.0 100 | numba==0.55.1 101 | numexpr==2.8.1 102 | numpy==1.21.5 103 | oauthlib==3.2.0 104 | packaging==21.3 105 | pandas==1.4.1 106 | pandas-gbq==0.17.4 107 | pandocfilters==1.5.0 108 | parso==0.8.3 109 | pathspec==0.9.0 110 | patsy==0.5.2 111 | pexpect==4.8.0 112 | pickleshare==0.7.5 113 | Pillow==9.0.1 114 | platformdirs==2.5.1 115 | plotly==5.6.0 116 | pluggy==1.0.0 117 | prometheus-client==0.13.1 118 | prompt-toolkit==3.0.28 119 | proto-plus==1.20.3 120 | protobuf==3.19.4 121 | psutil==5.9.0 122 | ptyprocess==0.7.0 123 | pure-eval==0.2.2 124 | py==1.11.0 125 | pyarrow==6.0.1 126 | pyasn1==0.4.8 127 | pyasn1-modules==0.2.8 128 | pycodestyle==2.8.0 129 | pycparser==2.21 130 | pydata-google-auth==1.4.0 131 | pydot3k==1.0.17 132 | pyflakes==2.4.0 133 | Pygments==2.11.2 134 | PyJWT==2.3.0 135 | pyparsing==3.0.7 136 | pyrsistent==0.18.1 137 | pytest==7.1.0 138 | pytest-ordering==0.6 139 | pytest-profiling==1.7.0 140 | python-dateutil==2.8.2 141 | pytz==2021.3 142 | pyzmq==22.3.0 143 | requests==2.27.1 144 | requests-oauthlib==1.3.1 145 | rsa==4.8 146 | scikit-learn==1.0.2 147 | scipy==1.8.0 148 | Send2Trash==1.8.0 
149 | six==1.16.0 150 | sniffio==1.2.0 151 | snowballstemmer==2.2.0 152 | soupsieve==2.3.1 153 | Sphinx==4.4.0 154 | sphinx-rtd-theme==1.0.0 155 | sphinxcontrib-applehelp==1.0.2 156 | sphinxcontrib-devhelp==1.0.2 157 | sphinxcontrib-htmlhelp==2.0.0 158 | sphinxcontrib-jsmath==1.0.1 159 | sphinxcontrib-qthelp==1.0.3 160 | sphinxcontrib-serializinghtml==1.1.5 161 | stack-data==0.2.0 162 | statsmodels==0.13.2 163 | tenacity==8.0.1 164 | terminado==0.13.3 165 | testpath==0.6.0 166 | threadpoolctl==3.1.0 167 | timezonefinder==5.2.0 168 | tinynumpy==1.2.1 169 | tomli==2.0.1 170 | tornado==6.1 171 | tqdm==4.63.0 172 | traitlets==5.1.1 173 | typing_extensions==4.1.1 174 | uritemplate==4.1.1 175 | urllib3==1.26.8 176 | wcwidth==0.2.5 177 | webencodings==0.5.1 178 | websocket-client==1.3.1 179 | widgetsnbextension==3.5.2 180 | yarl==1.7.2 181 | zipp==3.7.0 182 | -------------------------------------------------------------------------------- /requirements_unfixed.txt: -------------------------------------------------------------------------------- 1 | google-cloud-bigquery 2 | jupyterlab>=3 3 | pytest 4 | pytest-ordering 5 | pytest-profiling 6 | black 7 | coverage 8 | sphinx 9 | sphinx-rtd-theme 10 | flake8 11 | setuptools 12 | pandas 13 | matplotlib 14 | numpy 15 | cython 16 | lxml 17 | scipy 18 | eppy 19 | plotly 20 | ipywidgets>=7.6 21 | psutil 22 | attrs 23 | google-cloud-storage 24 | pandas-gbq 25 | tqdm 26 | numba 27 | gcsfs 28 | requests 29 | scikit-learn 30 | statsmodels 31 | timezonefinder 32 | numexpr 33 | bottleneck 34 | casadi 35 | h5pyd==0.8.0 36 | -------------------------------------------------------------------------------- /scripts/epvm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function printUsage(){ 4 | cat << EndOfMessage 5 | ================================================================================ 6 | EnergyPlus Version Manager: 7 | A bash CLI script to toggle between EnergyPlus 
versions. 8 | 9 | usage: . epvm.sh 10 | 11 | Supported versions: {8-9-0, 9-0-1, 9-1-0, 9-2-0, 9-3-0, 9-4-0} 12 | 13 | for example: . epvm.sh 8-9-0 14 | ================================================================================ 15 | EndOfMessage 16 | } 17 | 18 | TO_SET_VERSION="${1}" 19 | 20 | if [[ -z "${TO_SET_VERSION}" ]]; then 21 | printUsage 22 | elif [[ "${TO_SET_VERSION}" == "-h" ]]; then 23 | printUsage 24 | elif [[ "${TO_SET_VERSION}" =~ ^("8-9-0"|"9-0-1"|"9-1-0"|"9-2-0"|"9-3-0"|"9-4-0"|"9-5-0"|"9-6-0")$ ]]; then 25 | 26 | # set -u 27 | _NEW_EPLUS_NAME="EnergyPlus-${TO_SET_VERSION}" 28 | _EPLUS_DIR="${ENERGYPLUS_INSTALL_DIR}/${_NEW_EPLUS_NAME}" 29 | # set +u 30 | 31 | if [[ -z "${EPLUS_DIR+x}" && -z "${ENERGYPLUS_INSTALL_VERSION+x}" ]]; then 32 | # initialize 33 | export EPLUS_DIR="${_EPLUS_DIR}" 34 | export PATH="${PATH}:${EPLUS_DIR}" 35 | else 36 | # swap versions 37 | _CUR_EPLUS_NAME="EnergyPlus-${ENERGYPLUS_INSTALL_VERSION}" 38 | export EPLUS_DIR="${ENERGYPLUS_INSTALL_DIR}/${_NEW_EPLUS_NAME}" 39 | export PATH="${PATH//${_CUR_EPLUS_NAME}/${_NEW_EPLUS_NAME}}" 40 | fi 41 | 42 | # set -u 43 | if [[ -d "${_EPLUS_DIR}" ]]; then 44 | export ENERGYPLUS_INSTALL_VERSION="${TO_SET_VERSION}" 45 | 46 | # setup eplus version specific env vars 47 | if [[ -z "${TEST_DIR}" ]]; then 48 | export IDF_DIR="${PACKAGE_DIR}/idf/v${ENERGYPLUS_INSTALL_VERSION}" 49 | export IDF_PREPROCESSED_DIR="${PACKAGE_DIR}/idf/v${ENERGYPLUS_INSTALL_VERSION}/preprocessed" 50 | export FMU_DIR="${PACKAGE_DIR}/fmu/v${ENERGYPLUS_INSTALL_VERSION}" 51 | else 52 | export IDF_DIR="${TEST_DIR}/idf/v${ENERGYPLUS_INSTALL_VERSION}" 53 | export IDF_PREPROCESSED_DIR="${TEST_DIR}/idf/v${ENERGYPLUS_INSTALL_VERSION}/preprocessed" 54 | export FMU_DIR="${TEST_DIR}/fmu/v${ENERGYPLUS_INSTALL_VERSION}" 55 | fi 56 | mkdir -p "${IDF_DIR}" 57 | mkdir -p "${IDF_PREPROCESSED_DIR}" 58 | mkdir -p "${FMU_DIR}" 59 | 60 | # handle packaging for 9-0-1 being slightly different 61 | if [[ "${TO_SET_VERSION}" == "9-0-1" 
]]; then 62 | export EPLUS_IDD="${EPLUS_DIR}/PreProcess/IDFVersionUpdater/V9-0-0-Energy+.idd" 63 | else 64 | export EPLUS_IDD="${EPLUS_DIR}/PreProcess/IDFVersionUpdater/V${ENERGYPLUS_INSTALL_VERSION}-Energy+.idd" 65 | fi 66 | 67 | # EnergyPlus uses symbolic links to define all runtime executables 68 | # we simply redefine these to hot-swap what version is currently used 69 | _LINK_DIR="/home/${USER_NAME}/.local/bin" 70 | if [ ! -d "${_LINK_DIR}" ]; then mkdir -p "${_LINK_DIR}"; fi 71 | 72 | ln -sf "${EPLUS_DIR}/runenergyplus" "${_LINK_DIR}/runenergyplus" 73 | ln -sf "${EPLUS_DIR}/runepmacro" "${_LINK_DIR}/runepmacro" 74 | ln -sf "${EPLUS_DIR}/runreadvars" "${_LINK_DIR}/runreadvars" 75 | ln -sf "${EPLUS_DIR}/energyplus" "${_LINK_DIR}/energyplus" 76 | ln -sf "${EPLUS_DIR}/PreProcess/FMUParser/parser" "${_LINK_DIR}/parser" 77 | ln -sf "${EPLUS_DIR}/PreProcess/GrndTempCalc/Basement" "${_LINK_DIR}/Basement" 78 | ln -sf "${EPLUS_DIR}/PreProcess/GrndTempCalc/BasementGHT.idd" "${_LINK_DIR}/BasementGHT.idd" 79 | ln -sf "${EPLUS_DIR}/PostProcess/EP-Compare/EP-Compare" "${_LINK_DIR}/EP-Compare" 80 | ln -sf "${EPLUS_DIR}/EPMacro" "${_LINK_DIR}/EPMacro" 81 | ln -sf "${EPLUS_DIR}/Energy+.idd" "${_LINK_DIR}/Energy+.idd" 82 | ln -sf "${EPLUS_DIR}/Energy+.schema.epJSON" "${_LINK_DIR}/Energy+.schema.epJSON" 83 | ln -sf "${EPLUS_DIR}/ExpandObjects" "${_LINK_DIR}/ExpandObjects" 84 | ln -sf "${EPLUS_DIR}/PostProcess/HVAC-Diagram" "${_LINK_DIR}/HVAC-Diagram" 85 | ln -sf "${EPLUS_DIR}/PreProcess/IDFVersionUpdater/IDFVersionUpdater" "${_LINK_DIR}/IDFVersionUpdater" 86 | ln -sf "${EPLUS_DIR}/PostProcess/ReadVarsESO" "${_LINK_DIR}/ReadVarsESO" 87 | ln -sf "${EPLUS_DIR}/PreProcess/GrndTempCalc/Slab" "${_LINK_DIR}/Slab" 88 | ln -sf "${EPLUS_DIR}/PreProcess/GrndTempCalc/SlabGHT.idd" "${_LINK_DIR}/SlabGHT.idd" 89 | ln -sf "${EPLUS_DIR}/PreProcess/IDFVersionUpdater/Transition-V8-2-0-to-V8-3-0" "${_LINK_DIR}/Transition-V8-2-0-to-V8-3-0" 90 | ln -sf 
"${EPLUS_DIR}/PreProcess/IDFVersionUpdater/V8-2-0-Energy+.idd" "${_LINK_DIR}/V8-2-0-Energy+.idd" 91 | ln -sf "${EPLUS_DIR}/PreProcess/IDFVersionUpdater/V8-3-0-Energy+.idd" "${_LINK_DIR}/V8-3-0-Energy+.idd" 92 | ln -sf "${EPLUS_DIR}/PostProcess/convertESOMTRpgm/convertESOMTR" "${_LINK_DIR}/convertESOMTR" 93 | find -L . -type l -delete 94 | 95 | echo "EnergyPlus set to version: ${TO_SET_VERSION}" 96 | else 97 | echo "EnergyPlus *not* set to version: ${TO_SET_VERSION}" 98 | echo "Directory does *not* exist: ${_EPLUS_DIR}" 99 | fi 100 | else 101 | echo "EnergyPlus version *not* supported: ${TO_SET_VERSION}" 102 | echo "EnergyPlus still version: ${ENERGYPLUS_INSTALL_VERSION}" 103 | fi 104 | 105 | # reset shell options so that sourcing script in current shell doesn't leave options on 106 | -------------------------------------------------------------------------------- /scripts/setup/.bashrc: -------------------------------------------------------------------------------- 1 | # ~/.bashrc: executed by bash(1) for non-login shells. 2 | # see /usr/share/doc/bash/examples/startup-files (in the package bash-doc) 3 | # for examples 4 | 5 | # If not running interactively, don't do anything 6 | case $- in 7 | *i*) ;; 8 | *) return;; 9 | esac 10 | 11 | # don't put duplicate lines or lines starting with space in the history. 12 | # See bash(1) for more options 13 | HISTCONTROL=ignoreboth 14 | 15 | # append to the history file, don't overwrite it 16 | shopt -s histappend 17 | 18 | # for setting history length see HISTSIZE and HISTFILESIZE in bash(1) 19 | HISTSIZE=1000 20 | HISTFILESIZE=2000 21 | 22 | # check the window size after each command and, if necessary, 23 | # update the values of LINES and COLUMNS. 24 | shopt -s checkwinsize 25 | 26 | # If set, the pattern "**" used in a pathname expansion context will 27 | # match all files and zero or more directories and subdirectories. 
28 | #shopt -s globstar 29 | 30 | # make less more friendly for non-text input files, see lesspipe(1) 31 | [ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)" 32 | 33 | # set variable identifying the chroot you work in (used in the prompt below) 34 | if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then 35 | debian_chroot=$(cat /etc/debian_chroot) 36 | fi 37 | 38 | # set a fancy prompt (non-color, unless we know we "want" color) 39 | case "$TERM" in 40 | xterm-color|*-256color) color_prompt=yes;; 41 | esac 42 | 43 | # uncomment for a colored prompt, if the terminal has the capability; turned 44 | # off by default to not distract the user: the focus in a terminal window 45 | # should be on the output of commands, not on the prompt 46 | #force_color_prompt=yes 47 | 48 | if [ -n "$force_color_prompt" ]; then 49 | if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then 50 | # We have color support; assume it's compliant with Ecma-48 51 | # (ISO/IEC-6429). (Lack of such support is extremely rare, and such 52 | # a case would tend to support setf rather than setaf.) 
53 | color_prompt=yes 54 | else 55 | color_prompt= 56 | fi 57 | fi 58 | 59 | if [ "$color_prompt" = yes ]; then 60 | PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ ' 61 | else 62 | PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ ' 63 | fi 64 | unset color_prompt force_color_prompt 65 | 66 | # If this is an xterm set the title to user@host:dir 67 | case "$TERM" in 68 | xterm*|rxvt*) 69 | PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1" 70 | ;; 71 | *) 72 | ;; 73 | esac 74 | 75 | # enable color support of ls and also add handy aliases 76 | if [ -x /usr/bin/dircolors ]; then 77 | test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)" 78 | alias ls='ls --color=auto' 79 | #alias dir='dir --color=auto' 80 | #alias vdir='vdir --color=auto' 81 | 82 | alias grep='grep --color=auto' 83 | alias fgrep='fgrep --color=auto' 84 | alias egrep='egrep --color=auto' 85 | fi 86 | 87 | # colored GCC warnings and errors 88 | #export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01' 89 | 90 | # some more ls aliases 91 | alias ll='ls -alF' 92 | alias la='ls -A' 93 | alias l='ls -CF' 94 | 95 | # Add an "alert" alias for long running commands. Use like so: 96 | # sleep 10; alert 97 | alias alert='notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$(history|tail -n1|sed -e '\''s/^\s*[0-9]\+\s*//;s/[;&|]\s*alert$//'\'')"' 98 | 99 | # Alias definitions. 100 | # You may want to put all your additions into a separate file like 101 | # ~/.bash_aliases, instead of adding them here directly. 102 | # See /usr/share/doc/bash-doc/examples in the bash-doc package. 103 | 104 | if [ -f ~/.bash_aliases ]; then 105 | . ~/.bash_aliases 106 | fi 107 | 108 | # enable programmable completion features (you don't need to enable 109 | # this, if it's already enabled in /etc/bash.bashrc and /etc/profile 110 | # sources /etc/bash.bashrc). 111 | if ! 
shopt -oq posix; then 112 | if [ -f /usr/share/bash-completion/bash_completion ]; then 113 | . /usr/share/bash-completion/bash_completion 114 | elif [ -f /etc/bash_completion ]; then 115 | . /etc/bash_completion 116 | fi 117 | fi 118 | 119 | # manage shell for interactive login shell 120 | if [[ "$VIRTUAL_ENV" == "" ]]; then 121 | # if venv not activated 122 | cat << EndOfMessage 123 | ================================================================================ 124 | ██████╗ ██████╗ ███████╗ 125 | ██╔══██╗ ██╔════╝ ██╔════╝ 126 | ██████╔╝ ██║ ███████╗ 127 | ██╔══██╗ ██║ ╚════██║ 128 | ██████╔╝ ╚██████╗ ███████║ 129 | ╚═════╝ ╚═════╝ ╚══════╝ 130 | 131 | ---------Welcome to the Building Controls Simulator interactive shell!---------- 132 | ================================================================================ 133 | running configurable setting up commands in ${HOME}/.bashrc ... 134 | EndOfMessage 135 | if [ -d "${PACKAGE_DIR:?}" ]; then 136 | cd "${PACKAGE_DIR:?}" 137 | echo "Set this to desired versions using ${PACKAGE_DIR:?}/scripts/epvm.sh" 138 | echo "Setting EnergyPlus version to default: 9-4-0" 139 | echo "Configure this default in ${PACKAGE_DIR:?}/scripts/.bashrc" 140 | . "${PACKAGE_DIR:?}/scripts/epvm.sh" "9-4-0" 141 | else 142 | echo "PACKAGE_DIR=${PACKAGE_DIR} does not exist. PACKAGE_DIR env var may be incorrect." 143 | echo "Default value for PACKAGE_DIR is /home/bcs/lib/building-controls-simulator" 144 | echo "If unsure how to fix this, restart container or rebuild image from Dockerfile." 145 | fi 146 | 147 | if [ ! -d "${HOME}/.jupyter" ]; then mkdir "${HOME}/.jupyter"; fi 148 | if [ ! 
-f "${HOME}/.jupyter/jupyter_notebook_config.py" ]; then 149 | # this will create jupyter_notebook_config and add a dummy token so it can be accessed without password 150 | touch "${HOME}/.jupyter/jupyter_notebook_config.py" 151 | echo "c.NotebookApp.token = u''" >> "${HOME}/.jupyter/jupyter_notebook_config.py" 152 | else 153 | echo "${HOME}/.jupyter/jupyter_notebook_config.py already exists. Not overriden." 154 | echo "If jupyter server is inaccessible without password delete this file and re-run." 155 | fi 156 | . "${LIB_DIR}/${VENV_NAME}/bin/activate" 157 | fi 158 | -------------------------------------------------------------------------------- /scripts/setup/.pdbrc: -------------------------------------------------------------------------------- 1 | import IPython 2 | # Interactive shell 3 | alias interacti IPython.embed() 4 | -------------------------------------------------------------------------------- /scripts/setup/download_IECC_idfs.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cat << EndOfMessage 4 | ================================================================================ 5 | EnergyPlus Version Manager: 6 | Downloads IECC 2018 building models for all climate zones into directories from: 7 | https://www.energycodes.gov/development/residential/iecc_models 8 | 9 | If the links are broken update them in this script. 
10 | ================================================================================ 11 | EndOfMessage 12 | 13 | set -eu -o pipefail 14 | # -e exit on first error 15 | # -u exit when an undefined variable such as $FOO is accessed 16 | # -o pipefail exit when | any |cmd | in | a | pipe has exitcode != 0 17 | # -x print all commands (debug only) 18 | 19 | IECC_DIR="IECC_2018" 20 | 21 | if [ -d "${IDF_DIR}" ]; then 22 | mkdir -p "${IDF_DIR}/${IECC_DIR}" # make dir if doesn't exist 23 | cd "${IDF_DIR}/${IECC_DIR}" 24 | for cz in "1A" "2A" "2B" "3A" "3B" "3C" "4A" "4B" "4C" "5A" "5B" "6A" "7" "8"; do 25 | mkdir "cz_${cz}" 26 | wget "https://www.energycodes.gov/sites/default/files/documents/EnergyPlus_${cz}_2018_IECC.zip" 27 | unzip -d "cz_${cz}" "EnergyPlus_${cz}_2018_IECC.zip" 28 | rm cz_${cz}/*.htm 29 | rm "EnergyPlus_${cz}_2018_IECC.zip" 30 | done 31 | cd - 32 | else 33 | echo "IDF_DIR=${IDF_DIR} does not exists. Run epvm.sh to set EnergyPlus environment." 34 | fi 35 | 36 | # reset shell options so that sourcing script in current shell doesn't leave options on 37 | set +eu +o pipefail 38 | -------------------------------------------------------------------------------- /scripts/setup/install_acados.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # this script installs: 3 | # acados 4 | 5 | [[ ! -z "${ACADOS_DIR}" ]] && echo "ACADOS_DIR=${ACADOS_DIR}" || echo "ACADOS_DIR is empty." 6 | 7 | # install acados 8 | # see https://github.com/acados/acados/blob/master/README.md#installation 9 | git clone https://github.com/acados/acados.git "${ACADOS_DIR}" && cd "${ACADOS_DIR}" || exit 10 | git submodule update --recursive --init 11 | 12 | # Set the BLASFEO_TARGET in /CMakeLists.txt. 
# supported targets: https://github.com/giaf/blasfeo/blob/master/README.md 14 | # the default is X64_AUTOMATIC, if you want something else set it in the CMakeLists.txt before building
ENERGYPLUS_DOWNLOAD_URL="${ENERGYPLUS_DOWNLOAD_BASE_URL}/${ENERGYPLUS_DOWNLOAD_FILENAME}" 26 | curl -SLO "${ENERGYPLUS_DOWNLOAD_URL}" 27 | chmod +x "${ENERGYPLUS_DOWNLOAD_FILENAME}" 28 | mv "${DEFAULT_ENERGYPLUS_DIR}" "${DEFAULT_ENERGYPLUS_DIR}/../bkup" 29 | mkdir "${DEFAULT_ENERGYPLUS_DIR:?}" 30 | mkdir "${DEFAULT_ENERGYPLUS_DIR:?}/bin" 31 | echo -e "y\r" | "./${ENERGYPLUS_DOWNLOAD_FILENAME}" 32 | rm -rf "${DEFAULT_ENERGYPLUS_DIR:?}/bin" 33 | mv "${DEFAULT_ENERGYPLUS_DIR}" "$( dirname ${DEFAULT_ENERGYPLUS_DIR})/bkup/EnergyPlus-${_ENERGYPLUS_INSTALL_VERSION}" 34 | mv "$( dirname ${DEFAULT_ENERGYPLUS_DIR})/bkup" "${DEFAULT_ENERGYPLUS_DIR}" 35 | elif [[ "${_ENERGYPLUS_INSTALL_VERSION}" > "9-3-0" ]]; then 36 | local ENERGYPLUS_DOWNLOAD_FILENAME="EnergyPlus-${ENERGYPLUS_VERSION}-${ENERGYPLUS_SHA}-Linux-Ubuntu20.04-x86_64.sh" 37 | local ENERGYPLUS_DOWNLOAD_URL="${ENERGYPLUS_DOWNLOAD_BASE_URL}/${ENERGYPLUS_DOWNLOAD_FILENAME}" 38 | curl -SLO "${ENERGYPLUS_DOWNLOAD_URL}" 39 | chmod +x "${ENERGYPLUS_DOWNLOAD_FILENAME}" 40 | echo -e "y\r" | "./${ENERGYPLUS_DOWNLOAD_FILENAME}" 41 | else 42 | local ENERGYPLUS_DOWNLOAD_FILENAME="EnergyPlus-${ENERGYPLUS_VERSION}-${ENERGYPLUS_SHA}-Linux-x86_64.sh" 43 | local ENERGYPLUS_DOWNLOAD_URL="${ENERGYPLUS_DOWNLOAD_BASE_URL}/${ENERGYPLUS_DOWNLOAD_FILENAME}" 44 | curl -SLO "${ENERGYPLUS_DOWNLOAD_URL}" 45 | chmod +x "${ENERGYPLUS_DOWNLOAD_FILENAME}" 46 | echo -e "y\r" | "./${ENERGYPLUS_DOWNLOAD_FILENAME}" 47 | fi 48 | # remove all default symlinks 49 | find -L "${DEFAULT_ENERGYPLUS_DIR:?}/bin" -type l -delete 50 | # move to desired install directory 51 | mkdir -p "${_ENERGYPLUS_INSTALL_DIR}" 52 | mv "${DEFAULT_ENERGYPLUS_DIR}/EnergyPlus-${_ENERGYPLUS_INSTALL_VERSION}" "${_ENERGYPLUS_INSTALL_DIR}/EnergyPlus-${_ENERGYPLUS_INSTALL_VERSION}" 53 | rm "${ENERGYPLUS_DOWNLOAD_FILENAME:?}" 54 | } 55 | 56 | _ENERGYPLUS_INSTALL_DIR="${1}" 57 | 58 | # versions and SHA numbers can be found at: https://github.com/NREL/EnergyPlus/releases 59 | # example: 
https://github.com/NREL/docker-energyplus/blob/develop/Dockerfile 60 | # comment/uncomment each version as desired, they all work entirely independently 61 | # install_ep "8-6-0" "198c6a3cff" "${_ENERGYPLUS_INSTALL_DIR}" 62 | # install_ep "8-7-0" "78a111df4a" "${_ENERGYPLUS_INSTALL_DIR}" 63 | # install_ep "8-8-0" "7c3bbe4830" "${_ENERGYPLUS_INSTALL_DIR}" 64 | # install_ep "8-9-0" "40101eaafd" "${_ENERGYPLUS_INSTALL_DIR}" 65 | # install_ep "9-0-1" "bb7ca4f0da" "${_ENERGYPLUS_INSTALL_DIR}" 66 | # install_ep "9-1-0" "08d2e308bb" "${_ENERGYPLUS_INSTALL_DIR}" 67 | # install_ep "9-2-0" "921312fa1d" "${_ENERGYPLUS_INSTALL_DIR}" 68 | # install_ep "9-3-0" "baff08990c" "${_ENERGYPLUS_INSTALL_DIR}" 69 | install_ep "9-4-0" "998c4b761e" "${_ENERGYPLUS_INSTALL_DIR}" 70 | # install_ep "9-5-0" "de239b2e5f" "${_ENERGYPLUS_INSTALL_DIR}" 71 | # install_ep "9-6-0" "f420c06a69" "${_ENERGYPLUS_INSTALL_DIR}" 72 | 73 | exit 0 74 | -------------------------------------------------------------------------------- /scripts/setup/install_solvers.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # this script installs: 3 | # https://github.com/giaf/hpipm 4 | # https://github.com/giaf/blasfeo 5 | 6 | if [ -z "${BLASFEO_MAIN_FOLDER}" ]; then 7 | BLASFEO_MAIN_FOLDER="${EXT_DIR}/blasfeo" 8 | fi 9 | 10 | if [ -z "${HPIPM_MAIN_FOLDER}" ]; then 11 | HPIPM_MAIN_FOLDER="${EXT_DIR}/hpipm" 12 | fi 13 | 14 | cd "${EXT_DIR}" 15 | git clone https://github.com/giaf/blasfeo.git 16 | cd "${BLASFEO_MAIN_FOLDER}" 17 | # see https://blasfeo.syscop.de/docs/install/ 18 | # Makefile.rule has flags 19 | make shared_library -j4 && sudo make install_shared 20 | 21 | cd "${EXT_DIR}" 22 | git clone https://github.com/giaf/hpipm.git 23 | cd "${HPIPM_MAIN_FOLDER}" 24 | make shared_library -j4 && sudo make install_shared 25 | cd "${HPIPM_MAIN_FOLDER}/interfaces/python/hpipm_python" 26 | pip install . 
27 | 28 | # if hpipm folder not specified assume parent of this folder 29 | export HPIPM_MAIN_FOLDER 30 | echo "HPIPM_MAIN_FOLDER=$HPIPM_MAIN_FOLDER" 31 | 32 | # if blasfeo folder not specified assume alongside the parent of this folder 33 | export BLASFEO_MAIN_FOLDER 34 | echo "BLASFEO_MAIN_FOLDER=$BLASFEO_MAIN_FOLDER" 35 | 36 | # export LD_LIBRARY_PATH 37 | export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${HPIPM_MAIN_FOLDER}/lib:${BLASFEO_MAIN_FOLDER}/lib" 38 | echo "LD_LIBRARY_PATH=$LD_LIBRARY_PATH" 39 | 40 | cd "${PACKAGE_DIR}" 41 | -------------------------------------------------------------------------------- /scripts/setup/jupyter_lab.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu -o pipefail 4 | 5 | if [ ! -d "${DOCKER_HOME_DIR}/.jupyter" ]; then mkdir "${DOCKER_HOME_DIR}/.jupyter"; fi 6 | if [ ! -f "${DOCKER_HOME_DIR}/.jupyter/jupyter_notebook_config.py" ]; then 7 | # this will create jupyter_notebook_config and add a dummy token so it can be accessed without password 8 | touch "${DOCKER_HOME_DIR}/.jupyter/jupyter_notebook_config.py" 9 | echo "c.NotebookApp.token = u''" >> "${DOCKER_HOME_DIR}/.jupyter/jupyter_notebook_config.py" 10 | else 11 | echo "${DOCKER_HOME_DIR}/.jupyter/jupyter_notebook_config.py already exists. Not overridden." 12 | echo "If jupyter server is inaccessible without password delete this file and re-run." 13 | fi 14 | 15 | # set energyplus env variables, this is not built into the container and can 16 | # be modified more readily than .bashrc 17 | 18 | . "${PACKAGE_DIR:?}/scripts/epvm.sh" "9-4-0" 19 | echo "jupyter-lab accessible at: http://localhost:8888/lab" 20 | 21 | cd "${LIB_DIR}" 22 | . 
"${LIB_DIR}/${VENV_NAME}/bin/activate" 23 | 24 | jupyter-lab --ip="0.0.0.0" --no-browser 25 | 26 | set +eu +o pipefail 27 | -------------------------------------------------------------------------------- /scripts/setup/jupyter_lab_bkgrnd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu -o pipefail 4 | 5 | TIMESTAMP=$(date +"%Y_%m_%dT%H_%M_%S") 6 | FNAME="jupyter_lab_logs_${TIMESTAMP}" 7 | 8 | if [ ! -d "${JUPYTER_LOG_DIR}" ]; then mkdir "${JUPYTER_LOG_DIR}"; fi 9 | nohup jupyter-lab --ip="0.0.0.0" --no-browser > "${JUPYTER_LOG_DIR}/${FNAME}" & 10 | echo "$!" > "${JUPYTER_LOG_DIR}/JUPYTER_SERVER_PID.txt" 11 | cat << EndOfMessage 12 | ================================================================================ 13 | jupyter-lab server running in background at PID=$(cat ${JUPYTER_LOG_DIR}/JUPYTER_SERVER_PID.txt) 14 | accessable at: http://localhost:8888/lab 15 | jupyter-lab logs are being stored in: ${JUPYTER_LOG_DIR}/${FNAME} 16 | ================================================================================ 17 | EndOfMessage 18 | 19 | set +eu +o pipefail 20 | -------------------------------------------------------------------------------- /scripts/setup/test_env_setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # this script should be sources into current shell 4 | echo "setting ${PACKAGE_DIR}/.test.env variables in shell" 5 | set -a && . "${PACKAGE_DIR}/.test.env" && set +a 6 | . 
"${PACKAGE_DIR}/scripts/epvm.sh" "9-4-0" 7 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_namespace_packages 2 | 3 | REQUIRES = [] 4 | 5 | # Semantic Versioning (https://semver.org/) 6 | _MAJOR_VERSION = "0" 7 | _MINOR_VERSION = "6" 8 | _PATCH_VERSION = "0" 9 | 10 | _VERSION_SUFFIX = "alpha" 11 | 12 | __version__ = ".".join( 13 | [ 14 | _MAJOR_VERSION, 15 | _MINOR_VERSION, 16 | _PATCH_VERSION, 17 | ] 18 | ) 19 | if _VERSION_SUFFIX: 20 | __version__ = "{}-{}".format(__version__, _VERSION_SUFFIX) 21 | 22 | setup( 23 | name="BuildingControlsSimulator", 24 | keywords="building simulator simulation controls EnergyPlus research HVAC thermal heating air conditioning", 25 | version=__version__, 26 | author="Tom Stesco", 27 | author_email="tom.s@ecobee.com", 28 | description="building co-simulations for controls research", 29 | package_dir={"": "src/python"}, 30 | packages=find_namespace_packages(where="src/python"), 31 | install_requires=REQUIRES, 32 | python_requires=">=3.8", 33 | ) 34 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/BuildingModels/BuildingModel.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import os 4 | import logging 5 | from abc import ABC, abstractmethod 6 | 7 | 8 | import pandas as pd 9 | import attr 10 | import numpy as np 11 | from eppy import modeleditor 12 | 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | @attr.s 18 | class BuildingModel(ABC): 19 | """Abstract Base Class for building models""" 20 | 21 | input_states = attr.ib() 22 | output_states = attr.ib() 23 | step_size_seconds = attr.ib() 24 | 25 | status = attr.ib(default=0) 26 | log_level = attr.ib(default=0) 27 | 28 | @abstractmethod 29 | def 
initialize(self, start_utc, t_start, t_end, t_step, data_spec, categories_dict): 30 | pass 31 | 32 | @abstractmethod 33 | def do_step(self): 34 | """ 35 | Defines sequence of step internals. 36 | """ 37 | pass 38 | 39 | @abstractmethod 40 | def get_model_name(self): 41 | """Defines human readable uniquely identifying name""" 42 | pass 43 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/BuildingModels/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ecobee/building-controls-simulator/de58c4dbedb6c3dfa478ee69f121964e74108bbd/src/python/BuildingControlsSimulator/BuildingModels/__init__.py -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/BuildingModels/test_EnergyPlusBuildingModel.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # created by Tom Stesco tom.s@ecobee.com 3 | 4 | import subprocess 5 | import os 6 | import shutil 7 | import logging 8 | import pytz 9 | 10 | import pytest 11 | import pyfmi 12 | import pandas as pd 13 | import numpy as np 14 | 15 | from BuildingControlsSimulator.BuildingModels.IDFPreprocessor import IDFPreprocessor 16 | from BuildingControlsSimulator.BuildingModels.EnergyPlusBuildingModel import ( 17 | EnergyPlusBuildingModel, 18 | ) 19 | from BuildingControlsSimulator.DataClients.DataStates import STATES 20 | from BuildingControlsSimulator.DataClients.DataSpec import Internal 21 | from BuildingControlsSimulator.Simulator.Config import Config 22 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 23 | from BuildingControlsSimulator.DataClients.LocalSource import LocalSource 24 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 25 | from BuildingControlsSimulator.DataClients.DataSpec import 
DonateYourDataSpec 26 | 27 | logger = logging.getLogger(__name__) 28 | 29 | 30 | class TestEnergyPlusBuildingModel: 31 | @classmethod 32 | def setup_class(cls): 33 | cls.eplus_version = os.environ["ENERGYPLUS_INSTALL_VERSION"] 34 | 35 | # basic IDF file found in all EnergyPlus installations 36 | # make test/ dirs 37 | EnergyPlusBuildingModel.make_directories() 38 | 39 | cls.step_size = 300 40 | 41 | # pytest requires the obj containing the params to be called "request" 42 | @pytest.fixture( 43 | params=[ 44 | ( 45 | "Furnace.idf", 46 | "USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw", 47 | ), 48 | ] 49 | ) 50 | def building_model(self, request): 51 | # if dummy files don't exist copy them from E+ installations 52 | dummy_epw_name = request.param[1] 53 | dummy_epw_path = os.path.join(os.environ.get("WEATHER_DIR"), dummy_epw_name) 54 | if not os.path.isfile(dummy_epw_path): 55 | _fpath = os.path.join( 56 | os.environ.get("EPLUS_DIR"), 57 | "WeatherData", 58 | dummy_epw_name, 59 | ) 60 | shutil.copyfile(_fpath, dummy_epw_path) 61 | 62 | # if dummy files don't exist copy them from E+ installations 63 | dummy_idf_name = request.param[0] 64 | dummy_idf_path = os.path.join(os.environ.get("IDF_DIR"), dummy_idf_name) 65 | if not os.path.isfile(dummy_idf_path): 66 | _fpath = os.path.join( 67 | os.environ.get("EPLUS_DIR"), "ExampleFiles", dummy_idf_name 68 | ) 69 | shutil.copyfile(_fpath, dummy_idf_path) 70 | return EnergyPlusBuildingModel( 71 | idf=IDFPreprocessor( 72 | idf_file=dummy_idf_path, 73 | init_temperature=20.0, 74 | ), 75 | epw_path=dummy_epw_path, 76 | step_size_seconds=300, 77 | ) 78 | 79 | @pytest.fixture 80 | def test_sim_config(self): 81 | return ( 82 | Config.make_sim_config( 83 | identifier=[ 84 | "DYD_dummy_data", 85 | ], # has full data periods 86 | latitude=41.8781, 87 | longitude=-87.6298, 88 | start_utc="2018-01-01", 89 | end_utc="2018-01-04", 90 | min_sim_period="1D", 91 | sim_step_size_seconds=300, 92 | output_step_size_seconds=300, 93 | ) 94 | 
.iloc[0] 95 | .to_dict() 96 | ) 97 | 98 | @classmethod 99 | def teardown_class(cls): 100 | """teardown any state that was previously setup with a call to 101 | setup_class. 102 | """ 103 | pass 104 | 105 | def test_energyplus_accessible(self): 106 | """test that energyplus version is test version and is accessible""" 107 | cmd = "energyplus -v" 108 | out = subprocess.run(cmd, shell=True, capture_output=True, text=True) 109 | if self.eplus_version == "8-9-0": 110 | assert out.stdout == "EnergyPlus, Version 8.9.0-40101eaafd\n" 111 | elif self.eplus_version == "9-4-0": 112 | assert out.stdout == "EnergyPlus, Version 9.4.0-998c4b761e\n" 113 | else: 114 | raise ValueError(f"Untested version of energyplus: {self.eplus_version}") 115 | 116 | @pytest.mark.skip(reason="Redundant with test_simulator.py") 117 | @pytest.mark.usefixtures("building_model") 118 | def test_preprocess(self, test_sim_config, building_model): 119 | """test that preprocessing produces output file""" 120 | # datetime_channel= 121 | 122 | prep_idf = building_model.idf.preprocess( 123 | sim_config=test_sim_config, 124 | preprocess_check=False, 125 | datetime_channel=datetime_channel, 126 | ) 127 | assert os.path.exists(prep_idf) 128 | 129 | # test that preprocessing produces valid IDF output file 130 | assert building_model.idf.check_valid_idf(prep_idf) is True 131 | 132 | @pytest.mark.skip(reason="Redundant with test_simulator.py") 133 | @pytest.mark.usefixtures("building_model") 134 | def test_make_fmu(self, test_sim_config, building_model): 135 | """test that make_fmu produces fmu file""" 136 | dc = DataClient( 137 | source=LocalSource( 138 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 139 | data_spec=DonateYourDataSpec(), 140 | ), 141 | destination=LocalDestination( 142 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 143 | data_spec=DonateYourDataSpec(), 144 | ), 145 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 146 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 147 | 
archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 148 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 149 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 150 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 151 | ) 152 | dc.sim_config = test_sim_config 153 | dc.get_data() 154 | 155 | fmu = building_model.create_model_fmu( 156 | sim_config=test_sim_config, 157 | weather_channel=dc.weather, 158 | datetime_channel=dc.datetime, 159 | ) 160 | assert os.path.exists(fmu) 161 | 162 | # @pytest.mark.usefixtures("building_model") 163 | # def test_fmu_compliance(self, building_model): 164 | """test that fmu file is compliant with FMI.""" 165 | output_path = os.path.join( 166 | os.environ.get("OUTPUT_DIR"), "compliance_check_output.csv" 167 | ) 168 | # use `bash expect` to run non-interactive 169 | # Note: if this test fails check ./Output_EPExport_Slave/Furnace_prep.err 170 | cmd = ( 171 | "expect 'Press enter to continue.' {{ send '\r' }} |" 172 | f' {os.environ.get("EXT_DIR")}/FMUComplianceChecker/fmuCheck.linux64' 173 | f" -h {self.step_size}" 174 | " -s 172800" 175 | f" -o {output_path} {building_model.fmu_path}" 176 | ) 177 | logger.info("FMU compliance checker command:") 178 | logger.info(cmd) 179 | # shlex causes FMUComplianceChecker to run with options, use cmd string 180 | out = subprocess.run( 181 | cmd, shell=True, capture_output=False, text=True, input="\n" 182 | ) 183 | 184 | assert out.returncode == 0 185 | 186 | @pytest.mark.skip(reason="Redundant with test_simulator.py.") 187 | @pytest.mark.usefixtures("building_model") 188 | def test_pyfmi_load_fmu(self, building_model): 189 | """test that fmu can be loaded with pyfmi""" 190 | fmu = pyfmi.load_fmu(building_model.fmu_path) 191 | assert fmu.get_version() == "1.0" 192 | 193 | @pytest.mark.skip(reason="Redundant with test_simulator.py") 194 | @pytest.mark.usefixtures("building_model") 195 | def test_simulate_fmu(self, building_model): 196 | """test that fmu can be 
simulated with pyfmi 197 | 198 | Note: if this test fails check ./Output_EPExport_Slave/Furnace_prep.err 199 | """ 200 | fmu = pyfmi.load_fmu(building_model.fmu_path) 201 | opts = fmu.simulate_options() 202 | t_start = 0.0 203 | t_end = 86400.0 204 | opts["ncp"] = int(t_end / self.step_size) 205 | 206 | res = fmu.simulate(start_time=t_start, final_time=t_end, options=opts) 207 | 208 | output = res.result_data.get_data_matrix() 209 | 210 | assert output.shape == (30, opts["ncp"] + 1) 211 | 212 | @pytest.mark.skip( 213 | reason="Segfaults when run without PDB breakpoint. Tried fmu.free_instance(), fmu.terminate()" 214 | ) 215 | def test_step_fmu(self): 216 | """test that fmu can be simulated with pyfmi 217 | 218 | Note: if this test fails check ./Output_EPExport_Slave/Furnace_prep.err 219 | """ 220 | fmu = pyfmi.load_fmu(self.building_model.fmu_path) 221 | t_start = 0 222 | t_end = 86400.0 223 | t_step = 300.0 224 | ns = int(t_end / t_step) 225 | 226 | fmu.initialize(t_start, t_end) 227 | status = np.full(ns, False, dtype="int8") 228 | 229 | for i in range(ns): 230 | status[i] = fmu.do_step( 231 | current_t=t_start, 232 | step_size=t_step, 233 | new_step=True, 234 | ) 235 | t_start += t_step 236 | logger.info(f"status={all(status == 0)}") 237 | 238 | # fmu.free_instance() 239 | # status == 0 corresponds to `fmi1_status_ok` 240 | # see: https://github.com/modelon-community/PyFMI/blob/PyFMI-2.7.4/src/pyfmi/fmil_import.pxd 241 | assert all(status == 0) 242 | 243 | @pytest.mark.skip(reason="Redundant with test_simulator.py.") 244 | @pytest.mark.usefixtures("building_model") 245 | def test_step_model(self, test_sim_config, building_model): 246 | """test that fmu can be simulated with pyfmi 247 | 248 | Note: if this test fails check ./Output_EPExport_Slave/Furnace_prep.err 249 | """ 250 | start_utc = pd.Timestamp("2020-01-01", tz="utc") 251 | t_start = 0 252 | t_step = 300 253 | t_end = 86400.0 254 | ns = int(t_end / t_step) 255 | 256 | building_model.create_model_fmu( 257 
| sim_config=test_sim_config, 258 | epw_path=building_model.epw_path, 259 | preprocess_check=False, 260 | ) 261 | # need to recude t_end because of non-inclusion of last time step 262 | building_model.initialize( 263 | start_utc=start_utc, 264 | t_start=t_start, 265 | t_end=t_end - t_step, 266 | t_step=t_step, 267 | data_spec=Internal(), 268 | categories_dict={}, 269 | ) 270 | 271 | step_control_input = { 272 | STATES.AUXHEAT1: t_step, 273 | STATES.AUXHEAT2: 0, 274 | STATES.AUXHEAT3: 0, 275 | STATES.COMPCOOL1: 0, 276 | STATES.COMPCOOL2: 0, 277 | STATES.COMPHEAT1: 0, 278 | STATES.COMPHEAT2: 0, 279 | STATES.FAN_STAGE_ONE: t_step, 280 | STATES.FAN_STAGE_TWO: 0, 281 | STATES.FAN_STAGE_THREE: 0, 282 | } 283 | 284 | step_sensor_input = {STATES.THERMOSTAT_MOTION: False} 285 | 286 | for i in range(ns): 287 | building_model.do_step( 288 | t_start=building_model.output[STATES.SIMULATION_TIME][i], 289 | t_step=t_step, 290 | step_control_input=step_control_input, 291 | step_sensor_input=step_sensor_input, 292 | step_weather_input={}, 293 | ) 294 | assert ( 295 | pytest.approx(33.394825, 0.01) 296 | == building_model.fmu_output["EAST_ZONE_zone_air_temperature"].mean() 297 | ) 298 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/BuildingModels/test_IDFPreprocessor.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # created by Tom Stesco tom.s@ecobee.com 3 | 4 | import subprocess 5 | import os 6 | import shutil 7 | import logging 8 | import pytz 9 | 10 | import pytest 11 | import pyfmi 12 | 13 | from BuildingControlsSimulator.Simulator.Config import Config 14 | from BuildingControlsSimulator.BuildingModels.IDFPreprocessor import IDFPreprocessor 15 | from BuildingControlsSimulator.BuildingModels.EnergyPlusBuildingModel import ( 16 | EnergyPlusBuildingModel, 17 | ) 18 | 19 | 20 | logger = logging.getLogger(__name__) 21 | 22 | 23 | class 
TestIDFPreprocessor: 24 | @classmethod 25 | def setup_class(cls): 26 | # basic IDF file found in all EnergyPlus installations 27 | cls.dummy_idf_name = "Furnace.idf" 28 | cls.dummy_weather_name = "USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.epw" 29 | 30 | # make test/ dirs 31 | EnergyPlusBuildingModel.make_directories() 32 | 33 | cls.dummy_idf_path = os.path.join(os.environ.get("IDF_DIR"), cls.dummy_idf_name) 34 | 35 | cls.dummy_weather_file = os.path.join( 36 | os.environ.get("WEATHER_DIR"), cls.dummy_weather_name 37 | ) 38 | 39 | # if dummy files don't exist copy them from E+ installations 40 | if not os.path.isfile(cls.dummy_idf_path): 41 | _fpath = os.path.join( 42 | os.environ.get("EPLUS_DIR"), "ExampleFiles", cls.dummy_idf_name 43 | ) 44 | shutil.copyfile(_fpath, cls.dummy_idf_path) 45 | 46 | if not os.path.isfile(cls.dummy_weather_file): 47 | _fpath = os.path.join( 48 | os.environ.get("EPLUS_DIR"), 49 | "WeatherData", 50 | cls.dummy_weather_name, 51 | ) 52 | shutil.copyfile(_fpath, cls.dummy_weather_file) 53 | 54 | cls.idf = IDFPreprocessor(idf_file=cls.dummy_idf_path, timesteps_per_hour=12) 55 | cls.step_size = int(3600.0 / cls.idf.timesteps_per_hour) 56 | 57 | cls.test_sim_config = ( 58 | Config.make_sim_config( 59 | identifier=[ 60 | "2df6959cdf502c23f04f3155758d7b678af0c631", # has full data periods 61 | ], 62 | latitude=33.481136, 63 | longitude=-112.078232, 64 | start_utc="2018-05-16", 65 | end_utc="2018-05-26", 66 | min_sim_period="1D", 67 | min_chunk_period="30D", 68 | sim_step_size_seconds=60, 69 | output_step_size_seconds=300, 70 | ) 71 | .iloc[0] 72 | .to_dict() 73 | ) 74 | 75 | @classmethod 76 | def teardown_class(cls): 77 | """teardown any state that was previously setup with a call to 78 | setup_class. 
79 | """ 80 | pass 81 | 82 | @pytest.mark.skip(reason="Redundant with test_simulator.py") 83 | def test_preprocess(self): 84 | """ 85 | test that preprocessing produces output file 86 | """ 87 | 88 | prep_idf = self.idf.preprocess( 89 | sim_config=self.test_sim_config, 90 | datetime_channel=datetime_channel, 91 | preprocess_check=False, 92 | ) 93 | assert os.path.exists(prep_idf) 94 | 95 | # test that preprocessing produces valid IDF output file 96 | assert self.idf.check_valid_idf(prep_idf) is True 97 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/ControllerModels/ControllerModel.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | from abc import ABC, abstractmethod 4 | from enum import IntEnum 5 | import logging 6 | 7 | import attr 8 | import pandas as pd 9 | import numpy as np 10 | 11 | from BuildingControlsSimulator.ControllerModels.ControllerStatus import CONTROLLERSTATUS 12 | 13 | 14 | @attr.s 15 | class ControllerModel(ABC): 16 | """ABC for control models""" 17 | 18 | input_states = attr.ib() 19 | output_states = attr.ib() 20 | step_size_seconds = attr.ib() 21 | discretization_size_seconds = attr.ib() 22 | options = attr.ib(default=None) 23 | 24 | output = attr.ib(factory=dict) 25 | step_output = attr.ib(factory=dict) 26 | settings = attr.ib(factory=dict) 27 | 28 | current_t_idx = attr.ib(default=None) 29 | current_t_start = attr.ib(default=None) 30 | start_utc = attr.ib(default=None) 31 | 32 | init_status = attr.ib(factory=list) 33 | step_status = attr.ib(factory=list) 34 | log_level = attr.ib(default=0) 35 | 36 | @abstractmethod 37 | def initialize(self, start_utc, t_start, t_end, t_step, data_spec, categories_dict): 38 | """Run on first setup and not again.""" 39 | pass 40 | 41 | @abstractmethod 42 | def do_step(self): 43 | """Defines sequence of step internals.""" 44 | pass 45 | 46 | 
@abstractmethod 47 | def change_settings(self, new_settings): 48 | """Change persistent internal settings to model.""" 49 | pass 50 | 51 | @abstractmethod 52 | def get_model_name(self): 53 | """Defines human readable uniquely identifying name""" 54 | pass 55 | 56 | def get_step_time_utc(self): 57 | """For debugging use""" 58 | return self.start_utc + pd.Timedelta( 59 | seconds=self.current_t_idx * self.step_size_seconds 60 | ) 61 | 62 | def update_settings( 63 | self, 64 | change_points_schedule, 65 | change_points_comfort_prefs, 66 | change_points_hvac_mode, 67 | time_utc=None, 68 | init=False, 69 | ): 70 | """Ensure settings are correct for given time step.""" 71 | 72 | _init_time_hvac_mode = min(change_points_hvac_mode.keys()) 73 | 74 | _init_time_schedule = min(change_points_schedule.keys()) 75 | 76 | _init_schedule_names = set( 77 | [sch["name"] for sch in change_points_schedule[_init_time_schedule]] 78 | ) 79 | 80 | _init_time_setpoints = [] 81 | for _name in _init_schedule_names: 82 | _schedule_change_times = [ 83 | k for k, v in change_points_comfort_prefs.items() if _name in v.keys() 84 | ] 85 | if _schedule_change_times: 86 | _init_time_setpoints.append(min(_schedule_change_times)) 87 | else: 88 | # TODO: can make some assumption about set points 89 | raise ValueError( 90 | f"Setpoints could not be detected for: {_name} schedule." 91 | ) 92 | 93 | if init: 94 | if not change_points_comfort_prefs: 95 | logging.error( 96 | "change_points_comfort_prefs is empty. update_settings will not work." 97 | ) 98 | 99 | if not change_points_schedule: 100 | logging.error( 101 | "change_points_schedule is empty. update_settings will not work." 102 | ) 103 | 104 | if not change_points_hvac_mode: 105 | logging.error( 106 | "change_points_hvac_mode is empty. update_settings will not work." 
107 | ) 108 | 109 | self.settings = {} 110 | self.settings["hvac_mode"] = change_points_hvac_mode[_init_time_hvac_mode] 111 | self.settings["schedules"] = change_points_schedule[_init_time_schedule] 112 | 113 | # need to update setpoints per schedule 114 | self.settings["setpoints"] = {} 115 | for _name in _init_schedule_names: 116 | _schedule_change_times = [ 117 | k 118 | for k, v in change_points_comfort_prefs.items() 119 | if _name in v.keys() 120 | ] 121 | self.settings["setpoints"][_name] = change_points_comfort_prefs[ 122 | min(_schedule_change_times) 123 | ][_name] 124 | 125 | elif time_utc: 126 | settings_updated = False 127 | 128 | if ( 129 | time_utc in change_points_hvac_mode.keys() 130 | and time_utc != _init_time_hvac_mode 131 | ): 132 | # check that this is not init time for hvac_mode 133 | self.settings["hvac_mode"] = change_points_hvac_mode[time_utc] 134 | settings_updated = True 135 | 136 | # must observe new schedule at or before setpoint change 137 | if ( 138 | time_utc in change_points_schedule.keys() 139 | and time_utc != _init_time_schedule 140 | ): 141 | # check that this is not init time for setpoint 142 | self.settings["schedules"] = change_points_schedule[time_utc] 143 | settings_updated = True 144 | 145 | if ( 146 | time_utc in change_points_comfort_prefs.keys() 147 | and time_utc not in _init_time_setpoints 148 | ): 149 | # do not need to reset all setpoints, store previous setpoints 150 | # even after schedule is removed because it may be readded 151 | # check that this is not init time for setpoint 152 | for k, v in change_points_comfort_prefs[time_utc].items(): 153 | # overwrite existing or make new setpoint comfort prefs 154 | self.settings["setpoints"][k] = v 155 | 156 | settings_updated = True 157 | 158 | if settings_updated: 159 | self.change_settings(self.settings) 160 | else: 161 | raise ValueError( 162 | "Invalid arguments supplied to update_settings()" 163 | + "Neither time_utc or init flag given." 
164 | ) 165 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/ControllerModels/ControllerStatus.py: -------------------------------------------------------------------------------- 1 | import enum 2 | 3 | 4 | @enum.unique 5 | class CONTROLLERSTATUS(enum.IntEnum): 6 | """Definition of ControllerModel status codes.""" 7 | 8 | DEFAULT = enum.auto() 9 | INITIALIZED = enum.auto() 10 | STEP_BEGAN = enum.auto() 11 | MISSING_INFERENCE_DATA = enum.auto() 12 | MISSING_TRAINING_DATA = enum.auto() 13 | MODEL_VALID = enum.auto() 14 | MODEL_VALIDATION_FAILED = enum.auto() 15 | OPTIMIZATION_SUCCESSFUL = enum.auto() 16 | INFEASIBLE_OPTIMIZATION = enum.auto() 17 | POSTPROCESSING_SUCCESSFUL = enum.auto() 18 | STEP_SUCCESSFUL = enum.auto() 19 | STEP_SKIPPED = enum.auto() 20 | STEP_FAILED = enum.auto() 21 | FMU_CRASHED = enum.auto() 22 | 23 | # add other status codes to aid in debugging 24 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/ControllerModels/Deadband.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | # NOTE: this controller is an over simplified example and does not represent 3 | # anything related to the HVAC control work used or developed at ecobee. 
import attr
import pandas as pd
import numpy as np

from BuildingControlsSimulator.ControllerModels.ControllerModel import ControllerModel
from BuildingControlsSimulator.DataClients.DataStates import STATES
from BuildingControlsSimulator.ControllerModels.ControllerStatus import CONTROLLERSTATUS
from BuildingControlsSimulator.Conversions.Conversions import Conversions


@attr.s
class Deadband(ControllerModel):
    """Simple hysteresis (deadband) thermostat controller.

    Equipment is switched on when the control temperature leaves the band
    around the (overlap-corrected) set points and switched off again once it
    crosses back over the opposite edge of the band.
    """

    deadband = attr.ib(default=1.0)
    step_output = attr.ib(factory=dict)
    step_size_seconds = attr.ib(default=None)
    current_t_idx = attr.ib(default=None)

    output = attr.ib(factory=dict)

    # for reference on how attr defaults work for mutable types (e.g. list) see:
    # https://www.attrs.org/en/stable/init.html#defaults
    input_states = attr.ib()
    output_states = attr.ib()

    @input_states.default
    def get_input_states(self):
        # states this controller consumes each step
        return [
            STATES.THERMOSTAT_TEMPERATURE_ESTIMATE,
            STATES.TEMPERATURE_STP_COOL,
            STATES.TEMPERATURE_STP_HEAT,
        ]

    @output_states.default
    def get_output_states(self):
        # states this controller produces each step
        return [
            STATES.TEMPERATURE_CTRL,
            STATES.TEMPERATURE_STP_COOL,
            STATES.TEMPERATURE_STP_HEAT,
            STATES.AUXHEAT1,
            STATES.AUXHEAT2,
            STATES.AUXHEAT3,
            STATES.COMPCOOL1,
            STATES.COMPCOOL2,
            STATES.COMPHEAT1,
            STATES.COMPHEAT2,
            STATES.FAN_STAGE_ONE,
            STATES.FAN_STAGE_TWO,
            STATES.FAN_STAGE_THREE,
        ]

    def get_model_name(self):
        """Return a file-system safe model name, e.g. ``Deadband_1_0``."""
        return "Deadband_{}".format(self.deadband).replace(".", "_")

    def initialize(
        self,
        start_utc,
        t_start,
        t_end,
        t_step,
        data_spec,
        categories_dict,
    ):
        """Reset controller state and preallocate output for a new simulation."""
        self.deadband = self.options["deadband"]
        self.current_t_idx = 0
        self.step_size_seconds = t_step
        self.allocate_output_memory(
            t_start=t_start,
            t_end=t_end,
            t_step=t_step,
            data_spec=data_spec,
            categories_dict=categories_dict,
        )
        self.init_step_output()

    def allocate_output_memory(
        self, t_start, t_end, t_step, data_spec, categories_dict
    ):
        """preallocate output memory to speed up simulation"""
        sim_time = np.arange(t_start, t_end + t_step, t_step, dtype="int64")
        self.output = {STATES.SIMULATION_TIME: sim_time}
        n_s = len(sim_time)

        # add state variables, typed per the data spec
        for state in self.output_states:
            _dtype = data_spec.full.spec[state]["dtype"]
            if _dtype == "category":
                self.output[state] = pd.Series(
                    pd.Categorical(
                        pd.Series(index=np.arange(n_s)),
                        categories=categories_dict[state],
                    )
                )
            else:
                # down-cast to a compact numpy dtype with a coded default value
                (
                    _default,
                    _np_dtype,
                ) = Conversions.numpy_down_cast_default_value_dtype(_dtype)
                self.output[state] = np.full(n_s, _default, dtype=_np_dtype)

        # per-step status flag, 0 until the step is simulated
        self.output[STATES.STEP_STATUS] = np.full(n_s, 0, dtype="int8")

    def tear_down(self):
        """tear down FMU"""
        # pure-python controller: nothing to release
        pass

    def init_step_output(self):
        # initialize all off
        self.step_output = dict.fromkeys(self.output_states, 0)

    def calc_t_control(self, step_sensor_input):
        """Control temperature is the estimated thermostat temperature."""
        return step_sensor_input[STATES.THERMOSTAT_TEMPERATURE_ESTIMATE]

    def do_step(
        self,
        t_start,
        t_step,
        step_thermostat_input,
        step_sensor_input,
        step_weather_input,
        step_weather_forecast_input,
    ):
        """Simulate controller time step."""
        self.step_status = [CONTROLLERSTATUS.STEP_BEGAN]

        t_ctrl = self.calc_t_control(step_sensor_input)
        self.step_output[STATES.TEMPERATURE_CTRL] = t_ctrl

        # stop overlap of heating and cooling set points
        _stp_cool_in = step_thermostat_input[STATES.TEMPERATURE_STP_COOL]
        _stp_heat_in = step_thermostat_input[STATES.TEMPERATURE_STP_HEAT]
        stp_cool = max(_stp_cool_in, _stp_heat_in + self.deadband)
        stp_heat = min(_stp_cool_in - self.deadband, _stp_heat_in)
        self.step_output[STATES.TEMPERATURE_STP_COOL] = stp_cool
        self.step_output[STATES.TEMPERATURE_STP_HEAT] = stp_heat

        # heating hysteresis: outputs persist between the on/off thresholds
        if t_ctrl < (stp_heat - self.deadband):
            # turn on heat and fan, ensure cooling is off
            self.step_output[STATES.AUXHEAT1] = self.step_size_seconds
            self.step_output[STATES.FAN_STAGE_ONE] = self.step_size_seconds
            self.step_output[STATES.COMPCOOL1] = 0
        elif t_ctrl > (stp_heat + self.deadband):
            # turn off heat
            self.step_output[STATES.FAN_STAGE_ONE] = 0
            self.step_output[STATES.AUXHEAT1] = 0

        # cooling hysteresis; evaluated after heating so it may override the fan
        if t_ctrl > (stp_cool + self.deadband):
            # turn on cool and fan, ensure heating is off
            self.step_output[STATES.COMPCOOL1] = self.step_size_seconds
            self.step_output[STATES.FAN_STAGE_ONE] = self.step_size_seconds
            self.step_output[STATES.AUXHEAT1] = 0
        elif t_ctrl < (stp_cool - self.deadband):
            # turn off cool
            self.step_output[STATES.FAN_STAGE_ONE] = 0
            self.step_output[STATES.COMPCOOL1] = 0

        self.add_step_to_output(self.step_output)
        self.current_t_idx += 1
        self.step_status.append(CONTROLLERSTATUS.STEP_SUCCESSFUL)
        return self.step_output

    def add_step_to_output(self, step_output):
        """Write the current step's values into the preallocated output arrays."""
        for _state, _value in step_output.items():
            self.output[_state][self.current_t_idx] = _value

    def change_settings(self, new_settings):
        # this model has no settings
        pass
# created by Tom Stesco tom.s@ecobee.com

from abc import ABC, abstractmethod
import logging
import os

import attr
import pyfmi

from BuildingControlsSimulator.ControllerModels.ControllerModel import ControllerModel
from BuildingControlsSimulator.ControllerModels.ControllerStatus import CONTROLLERSTATUS


@attr.s(kw_only=True)
class FMIController(ControllerModel):
    """Base class for controller models backed by an FMU binary.

    Example:
    ```python
    from BuildingControlsSimulator.ControllerModels.Deadband import Deadband
    ```

    """

    # path to the compiled FMU file backing this controller
    fmu_path = attr.ib()

    def get_model_name(self):
        """Derive a file-system safe model name from the FMU file name."""
        fmu_file_name = os.path.basename(self.fmu_path)
        return fmu_file_name.replace(".", "_")
class Conversions:
    """Stateless unit and dtype conversion helpers used across the simulator."""

    @staticmethod
    def F2C(temp_F):
        """Convert degrees Fahrenheit to degrees Celsius."""
        return (temp_F - 32) * 5 / 9

    @staticmethod
    def C2F(temp_C):
        """Convert degrees Celsius to degrees Fahrenheit."""
        return (temp_C * 9 / 5) + 32

    @staticmethod
    def C2Fx10(temp_C):
        """Convert degrees Celsius to tenths of degrees Fahrenheit."""
        return Conversions.C2F(temp_C) * 10

    @staticmethod
    def saturation_vapor_pressure(temperature):
        """Saturation vapor pressure (hPa).

        The formula used is that from [Bolton1980] for T in degrees Celsius:
        """
        return 6.112 * np.exp(17.67 * temperature / (temperature + 243.5))

    @staticmethod
    def relative_humidity_from_dewpoint(temperature, dewpoint):
        """Return RH in % [0-100]"""
        return (
            Conversions.saturation_vapor_pressure(dewpoint)
            / Conversions.saturation_vapor_pressure(temperature)
        ) * 100

    @staticmethod
    def relative_humidity_to_dewpoint(temp_air, relative_humidity):
        """
        Magnus formula with Arden Buck constants to calculate dew point.

        see:
        https://en.wikipedia.org/wiki/Dew_point
        https://doi.org/10.1175/1520-0450(1981)020%3C1527:NEFCVP%3E2.0.CO;2

        Buck, Arden L.
        "New equations for computing vapor pressure and enhancement factor."
        Journal of applied meteorology 20.12 (1981): 1527-1532.

        :param temp_air: Temperature in Celsius.
        :type temp_air: float or np.array of floats
        :param relative_humidity: Relative humidity in % [0-100]
        :type relative_humidity: float or np.array of floats

        :return dew_point: The dew point temperature in Celcius.
        """
        b = 18.678
        c = 257.14
        d = 234.5
        exp_arg = (b - (temp_air / d)) * (temp_air / (c + temp_air))
        gamma = np.log((relative_humidity / 100) * np.exp(exp_arg))
        return (c * gamma) / (b - gamma)

    @staticmethod
    def numpy_down_cast_default_value_dtype(dtype):
        """Default values for numpy/pandas dtypes. These are used when setting
        initial values for input and output data. This mostly chooses sane defaults
        that allows for not using nullable dtypes that consume much more memory.
        Checks for these coded default values should be done before using simulation
        output data.

        :param dtype: pandas/numpy dtype name (nullable or plain spelling).
        :return: tuple of (default value, numpy dtype string).
        :raises ValueError: for dtypes with no defined down-cast mapping
            (previously an unknown dtype silently returned ``None``).
        """
        if dtype in ["bool", "boolean"]:
            return (False, "bool")
        elif dtype in ["float32", "Float32"]:
            return (-9999.0, "float32")
        elif dtype in ["int64", "Int64"]:
            return (-99999, "int64")
        elif dtype in ["int32", "Int32"]:
            return (-9999, "int32")
        elif dtype in ["int16", "Int16"]:
            return (-999, "int16")
        elif dtype in ["int8", "Int8"]:
            return (-99, "int8")
        elif dtype in ["category", "Category"]:
            # 32 byte unicode str (8 chars * 4 bytes); the exact width was
            # garbled in the source dump — TODO confirm against upstream
            return ("", "<U8")
        else:
            raise ValueError(f"No down-cast default defined for dtype: {dtype}")
# created by Tom Stesco tom.s@ecobee.com

import logging

import attr
import pandas as pd
import numpy as np
import gcsfs

from BuildingControlsSimulator.DataClients.GCSDataSource import GCSDataSource
from BuildingControlsSimulator.DataClients.DataSpec import DonateYourDataSpec


logger = logging.getLogger(__name__)
# gcsfs DEBUG logging prints raw data bytes and is too verbose
gcsfs_logger = logging.getLogger("gcsfs")
gcsfs_logger.setLevel(logging.WARN)


@attr.s(kw_only=True)
class GCSDYDSource(GCSDataSource):
    """Donate Your Data source: reads per-thermostat, per-year files from GCS."""

    data_spec = attr.ib(factory=DonateYourDataSpec)
    file_extension = attr.ib(default="csv.zip")
    source_name = attr.ib(default="GCSDYD")
    meta_gcs_uri = attr.ib(default=None)

    def __attrs_post_init__(self):
        # ensure local cache directories exist before any data access
        self.make_data_directories()

    def get_metadata(self):
        """Read the DYD metadata CSV from GCS, de-duplicated on Identifier.

        :raises ValueError: if ``meta_gcs_uri`` was not configured.
        """
        if not self.meta_gcs_uri:
            raise ValueError("Must supply `meta_gcs_uri` to dataclient.")

        _fs = gcsfs.GCSFileSystem(
            project=self.gcp_project,
            token=self.gcs_token,
            access="read_only",
        )
        with _fs.open(self.meta_gcs_uri) as _file:
            _df = pd.read_csv(_file).drop_duplicates(subset=["Identifier"])

        return _df

    def get_gcs_uri(self, sim_config):
        """Build the GCS URI for the identifier and year in ``sim_config``.

        DYD stores one file per thermostat per calendar year (UTC), so the
        simulation period must fall within a single supported year.

        :raises ValueError: for cross-year periods or unsupported years.
        """
        # first cast to utc timestamp
        # DYD uses UTC
        start_utc = pd.to_datetime(
            sim_config["start_utc"], utc=True, infer_datetime_format=True
        )
        end_utc = pd.to_datetime(
            sim_config["end_utc"], utc=True, infer_datetime_format=True
        )

        # scalar Timestamp and Series inputs are both supported
        if isinstance(start_utc, pd.Timestamp):
            start_year = start_utc.year
        else:
            start_year = start_utc.dt.year

        if isinstance(end_utc, pd.Timestamp):
            end_year = end_utc.year
        else:
            end_year = end_utc.dt.year

        # supporting cross year simulations would require loading both years
        if np.any(end_year != start_year):
            raise ValueError("start_utc must be in same year as end_utc.")

        years_supported = [2016, 2017, 2018, 2019]
        # np.isin/np.all handles both the scalar and Series-valued year cases;
        # the previous `start_year not in years_supported` membership test
        # raised "truth value of a Series is ambiguous" on the Series path.
        if not np.all(np.isin(start_year, years_supported)):
            raise ValueError(f"start_utc must be in supported years: {years_supported}")

        return (
            self.gcs_uri_base
            + "/"
            + str(start_year)
            + "/"
            + sim_config["identifier"]
            + "."
            + self.file_extension
        )
_data = self.get_gcs_cache(sim_config, local_cache_file) 41 | _data = self.drop_unused_columns(_data=_data) 42 | _data = convert_spec( 43 | df=_data, 44 | src_spec=self.data_spec, 45 | dest_spec=Internal(), 46 | src_nullable=True, 47 | dest_nullable=True, 48 | ) 49 | return _data 50 | 51 | @abstractmethod 52 | def get_gcs_uri(self, sim_config): 53 | """This is implemented in the specialized source class""" 54 | pass 55 | 56 | def get_gcs_cache(self, sim_config, local_cache_file): 57 | if not self.gcs_uri_base: 58 | raise ValueError( 59 | f"gcs_uri_base={self.gcs_uri_base} is unset. " 60 | + "Set env variable for specific source, e.g. DYD_GCS_URI_BASE" 61 | ) 62 | 63 | if not sim_config["identifier"]: 64 | raise ValueError( 65 | f"Invalid sim_config: sim_config[identifier]={sim_config['identifier']}" 66 | ) 67 | 68 | gcs_uri = self.get_gcs_uri(sim_config) 69 | if local_cache_file: 70 | if os.path.isdir(os.path.dirname(local_cache_file)): 71 | client = storage.Client(project=self.gcp_project) 72 | with open(local_cache_file, "wb") as _file: 73 | try: 74 | client.download_blob_to_file(gcs_uri, _file) 75 | except exceptions.NotFound: 76 | # file not found in DYD 77 | logger.error( 78 | ( 79 | f"File: {gcs_uri}", 80 | " not found in gcs cache dataset.", 81 | ) 82 | ) 83 | return self.get_empty_df() 84 | _df = self.read_data_by_extension(local_cache_file) 85 | else: 86 | raise ValueError( 87 | "GCSDataSource received invalid directory: " 88 | + f"local_cache={self.local_cache}" 89 | ) 90 | else: 91 | logger.info( 92 | "GCSDataSource received no local_cache. Proceeding without caching." 
93 | ) 94 | _fs = gcsfs.GCSFileSystem( 95 | project=self.gcp_project, 96 | token=self.gcs_token, 97 | access="read_only", 98 | ) 99 | try: 100 | with _fs.open(gcs_uri) as _file: 101 | _df = self.read_data_by_extension(_file) 102 | 103 | except FileNotFoundError: 104 | # file not found in DYD 105 | logger.error( 106 | ( 107 | f"File: {gcs_uri}", 108 | " not found in gcs cache dataset.", 109 | ) 110 | ) 111 | _df = self.get_empty_df() 112 | 113 | return _df 114 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/GCSDestination.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import os 4 | import logging 5 | from abc import ABC, abstractmethod 6 | 7 | import attr 8 | import pandas as pd 9 | import numpy as np 10 | import gcsfs 11 | 12 | from BuildingControlsSimulator.DataClients.DataStates import CHANNELS 13 | from BuildingControlsSimulator.DataClients.DataDestination import DataDestination 14 | from BuildingControlsSimulator.DataClients.DataSpec import convert_spec 15 | 16 | 17 | logger = logging.getLogger(__name__) 18 | # gcsfs DEBUG logging prints raw data bytes and is too verbose 19 | gcsfs_logger = logging.getLogger("gcsfs") 20 | gcsfs_logger.setLevel(logging.WARN) 21 | 22 | 23 | @attr.s(kw_only=True) 24 | class GCSDestination(DataDestination): 25 | 26 | # TODO: add validators 27 | gcp_project = attr.ib(default=None) 28 | gcs_uri_base = attr.ib(default=None) 29 | gcs_token = attr.ib(default=os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")) 30 | 31 | def put_data(self, df, sim_name, src_spec): 32 | _df = convert_spec( 33 | df=df, src_spec=src_spec, dest_spec=self.data_spec, copy=True 34 | ) 35 | local_cache_file = self.get_local_cache_file(sim_name) 36 | self.put_local_cache(_df, local_cache_file) 37 | gcs_uri = self.get_gcs_uri(sim_name=sim_name) 38 | self.put_gcs(_df, gcs_uri) 39 | 40 | def 
get_gcs_uri(self, sim_name): 41 | return os.path.join(self.gcs_uri_base, self.get_file_name(sim_name=sim_name)) 42 | 43 | def put_gcs(self, df, gcs_uri): 44 | if gcs_uri: 45 | _fs = gcsfs.GCSFileSystem( 46 | project=self.gcp_project, 47 | token=self.gcs_token, 48 | access="read_write", 49 | ) 50 | with _fs.open(gcs_uri, "wb") as _file: 51 | self.write_data_by_extension(df, _file, gcs_uri=gcs_uri) 52 | else: 53 | logger.error("put_gcs: gcs_uri is None.") 54 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/GCSFlatFilesSource.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import logging 4 | 5 | import attr 6 | import pandas as pd 7 | import numpy as np 8 | 9 | from BuildingControlsSimulator.DataClients.GCSDataSource import GCSDataSource 10 | 11 | from BuildingControlsSimulator.DataClients.DataSpec import FlatFilesSpec 12 | 13 | 14 | @attr.s(kw_only=True) 15 | class GCSFlatFilesSource(GCSDataSource): 16 | 17 | data_spec = attr.ib(factory=FlatFilesSpec) 18 | file_extension = attr.ib(default="csv.gz") 19 | source_name = attr.ib(default="GCSFlatFiles") 20 | 21 | def get_gcs_uri(self, sim_config): 22 | gcs_uri = ( 23 | self.gcs_uri_base 24 | + "/" 25 | + sim_config["identifier"] 26 | + "." 
# created by Tom Stesco tom.s@ecobee.com

import os
import logging
from abc import ABC, abstractmethod

import attr
import pandas as pd
import numpy as np

from BuildingControlsSimulator.DataClients.DataStates import CHANNELS
from BuildingControlsSimulator.DataClients.DataDestination import DataDestination
from BuildingControlsSimulator.DataClients.DataSpec import convert_spec


logger = logging.getLogger(__name__)


@attr.s(kw_only=True)
class LocalDestination(DataDestination):
    """DataDestination that writes simulation output to the local cache dir."""

    local_cache = attr.ib(default=os.environ.get("LOCAL_CACHE_DIR"))
    source_name = attr.ib(default="local")

    def __attrs_post_init__(self):
        # ensure cache directories exist before any put_data call
        self.make_data_directories()

    def put_data(self, df, sim_name, src_spec):
        """Convert ``df`` from ``src_spec`` to this destination's spec and
        store it in the local cache."""
        converted = convert_spec(
            df=df, src_spec=src_spec, dest_spec=self.data_spec, copy=True
        )
        self.put_local_cache(converted, self.get_local_cache_file(sim_name))
@attr.s(kw_only=True)
class LocalSource(DataSource):
    """DataSource that reads previously cached simulation input from disk."""

    source_name = attr.ib(default="local")
    local_cache = attr.ib(default=os.environ.get("LOCAL_CACHE_DIR"))
    data_spec = attr.ib()
    file_extension = attr.ib(default=None)

    def __attrs_post_init__(self):
        """Infer the file_extension from local_cache supplied"""
        self.make_data_directories()
        if not os.path.isdir(self.local_cache_source):
            raise ValueError(
                f"{self.local_cache_source} is not a directory or does not exist."
            )

    def get_data(self, sim_config):
        """Get local cache"""
        cache_file = self.get_local_cache_file(identifier=sim_config["identifier"])
        raw = self.get_local_cache(cache_file)
        raw = self.drop_unused_columns(_data=raw)
        # convert from this source's spec to the simulator-internal spec
        return convert_spec(
            df=raw,
            src_spec=self.data_spec,
            dest_spec=Internal(),
            copy=False,
            src_nullable=True,
            dest_nullable=True,
        )
22 | ) 23 | 24 | self.drop_unused_room_sensors() 25 | 26 | def drop_unused_room_sensors(self): 27 | """null room sensors temperature and motion data can safely be dropped""" 28 | drop_columns = [] 29 | for _col in self.data.columns: 30 | # check for room sensor states 31 | if str(_col).startswith("STATES.RS"): 32 | if self.data[_col].isnull().all(): 33 | drop_columns.append(_col) 34 | 35 | self.data = self.data.drop(axis=1, columns=drop_columns) 36 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ecobee/building-controls-simulator/de58c4dbedb6c3dfa478ee69f121964e74108bbd/src/python/BuildingControlsSimulator/DataClients/__init__.py -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_DataClient.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | import logging 3 | import os 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import numpy as np 9 | 10 | from BuildingControlsSimulator.Simulator.Config import Config 11 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 12 | from BuildingControlsSimulator.DataClients.LocalSource import LocalSource 13 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 14 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 15 | from BuildingControlsSimulator.DataClients.DataStates import STATES 16 | from BuildingControlsSimulator.DataClients.DataSpec import ( 17 | Internal, 18 | DonateYourDataSpec, 19 | convert_spec, 20 | ) 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | class TestDataClient: 26 | @classmethod 27 | def setup_class(cls): 28 | # initialize 
        # initialize with data to avoid pulling multiple times

        # one full year of 5-minute data for the dummy DYD test file
        cls.sim_config = Config.make_sim_config(
            identifier=[
                "DYD_dummy_data",
            ],  # test file
            latitude=33.481136,
            longitude=-112.078232,
            start_utc=[
                "2018-01-01 00:00:00",
            ],
            end_utc=[
                "2018-12-31 23:55:00",
            ],
            min_sim_period="3D",
            min_chunk_period="30D",
            sim_step_size_seconds=300,
            output_step_size_seconds=300,
        )

        # shared client reading and writing the local cache with the DYD spec
        cls.data_client = DataClient(
            source=LocalSource(
                local_cache=os.environ.get("LOCAL_CACHE_DIR"),
                data_spec=DonateYourDataSpec(),
            ),
            destination=LocalDestination(
                local_cache=os.environ.get("LOCAL_CACHE_DIR"),
                data_spec=DonateYourDataSpec(),
            ),
        )
        cls.data_client.sim_config = cls.sim_config.iloc[0]

        cls.data_client.get_data()

    @classmethod
    def teardown_class(cls):
        """teardown any state that was previously setup with a call to
        setup_class.
        """
        pass

    def test_generate_dummy_data(self):
        # generator should produce exactly one year of 5-minute records
        _sim_config = Config.make_sim_config(
            identifier=[
                "generated_dummy_data",
            ],  # test file
            latitude=33.481136,
            longitude=-112.078232,
            start_utc=[
                "2018-01-01 00:00:00",
            ],
            end_utc=[
                "2018-12-31 23:55:00",
            ],
            min_sim_period="3D",
            min_chunk_period="30D",
            sim_step_size_seconds=300,
            output_step_size_seconds=300,
        )
        _df = DataClient.generate_dummy_data(
            sim_config=_sim_config, spec=DonateYourDataSpec()
        )
        # 365 days * 24 h * 12 five-minute records per hour = 105120 rows
        assert len(_df) == 105120
        assert all(_df["Schedule"].value_counts().values == [74460, 30660])

    def test_upresample_to_step_size(self):
        # seed the full input with a known equipment runtime sequence
        # (seconds of runtime per 300 s step) for AUXHEAT1
        df = self.data_client.get_full_input()
        _col = STATES.AUXHEAT1
        _sequence = np.array(
            [
                0,
                150,
                150,
                150,
                150,
                30,
                270,
                30,
                270,
                0,
                300,
                300,
                240,
                0,
                150,
                300,
                150,
            ]
        )
df.loc[0 : len(_sequence) - 1, _col] = _sequence 118 | 119 | res_df = DataClient.upsample_to_step_size( 120 | df, step_size_seconds=60, data_spec=self.data_client.internal_spec 121 | ) 122 | # check sum 123 | res_rt = np.sum(_sequence) 124 | res_sum_rt = np.sum(res_df.loc[0 : (len(_sequence) - 1) * 5, _col].values) 125 | assert res_rt == res_sum_rt 126 | 127 | # check exact sequence 128 | assert all( 129 | res_df.loc[0 : (len(_sequence) - 1) * 5, _col].values 130 | == np.array( 131 | [ 132 | 0, 133 | 0, 134 | 0, 135 | 30, 136 | 60, 137 | 60, 138 | 60, 139 | 60, 140 | 30, 141 | 0, 142 | 0, 143 | 0, 144 | 0, 145 | 30, 146 | 60, 147 | 60, 148 | 60, 149 | 60, 150 | 30, 151 | 0, 152 | 0, 153 | 0, 154 | 0, 155 | 0, 156 | 0, 157 | 30, 158 | 60, 159 | 60, 160 | 60, 161 | 60, 162 | 30, 163 | 0, 164 | 0, 165 | 0, 166 | 0, 167 | 30, 168 | 60, 169 | 60, 170 | 60, 171 | 60, 172 | 30, 173 | 0, 174 | 0, 175 | 0, 176 | 0, 177 | 0, 178 | 60, 179 | 60, 180 | 60, 181 | 60, 182 | 60, 183 | 60, 184 | 60, 185 | 60, 186 | 60, 187 | 60, 188 | 60, 189 | 60, 190 | 60, 191 | 60, 192 | 0, 193 | 0, 194 | 0, 195 | 0, 196 | 0, 197 | 0, 198 | 0, 199 | 0, 200 | 30, 201 | 60, 202 | 60, 203 | 60, 204 | 60, 205 | 60, 206 | 60, 207 | 60, 208 | 0, 209 | 0, 210 | 30, 211 | 60, 212 | 60, 213 | ] 214 | ) 215 | ) 216 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_GBQFlatFilesSource.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import logging 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | import os 10 | 11 | from BuildingControlsSimulator.Simulator.Config import Config 12 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 13 | from BuildingControlsSimulator.DataClients.GBQDataSource import GBQDataSource 14 | from BuildingControlsSimulator.DataClients.DataSpec import 
FlatFilesSpec 15 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 16 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 17 | from BuildingControlsSimulator.DataClients.DataStates import STATES 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | 22 | @pytest.mark.skipif( 23 | (not os.environ.get("TEST_FLATFILES_GBQ_IDENTIFIER")) 24 | or (not os.environ.get("FLATFILE_GOOGLE_CLOUD_PROJECT")) 25 | or (not os.environ.get("FLATFILES_GBQ_TABLE")), 26 | reason="GBQ FlatFiles not configured.", 27 | ) 28 | class TestGBQFlatFilesSource: 29 | @classmethod 30 | def setup_class(cls): 31 | # initialize with data to avoid pulling multiple times 32 | cls.sim_config = Config.make_sim_config( 33 | identifier=[ 34 | os.environ.get("TEST_FLATFILES_GBQ_IDENTIFIER"), # has all holds 35 | "9999999", # file not found 36 | ], 37 | latitude=33.481136, 38 | longitude=-112.078232, 39 | start_utc="2019-01-02 00:00:00", 40 | end_utc="2019-02-01 00:00:00", 41 | min_sim_period="3D", 42 | sim_step_size_seconds=300, 43 | output_step_size_seconds=300, 44 | ) 45 | 46 | cls.data_clients = [] 47 | 48 | # set local_cache=None to test connection with GCS 49 | cls.data_client = DataClient( 50 | source=GBQDataSource( 51 | gcp_project=os.environ.get("FLATFILE_GOOGLE_CLOUD_PROJECT"), 52 | gbq_table=os.environ.get("FLATFILES_GBQ_TABLE"), 53 | data_spec=FlatFilesSpec(), 54 | local_cache=None, 55 | ), 56 | destination=LocalDestination( 57 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 58 | data_spec=FlatFilesSpec(), 59 | ), 60 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 61 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 62 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 63 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 64 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 65 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 66 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 67 | 
) 68 | for _idx, _sim_config in cls.sim_config.iterrows(): 69 | dc = copy.deepcopy(cls.data_client) 70 | dc.sim_config = _sim_config 71 | if _sim_config["identifier"] == "9999999": 72 | with pytest.raises(ValueError): 73 | dc.get_data() 74 | else: 75 | dc.get_data() 76 | 77 | cls.data_clients.append(dc) 78 | 79 | @classmethod 80 | def teardown_class(cls): 81 | """teardown any state that was previously setup with a call to 82 | setup_class. 83 | """ 84 | pass 85 | 86 | def test_get_data(self): 87 | # test HVAC data returns dict of non-empty pd.DataFrame 88 | for dc in self.data_clients: 89 | if dc.datetime: 90 | assert isinstance(dc.datetime.data, pd.DataFrame) 91 | assert isinstance(dc.thermostat.data, pd.DataFrame) 92 | assert isinstance(dc.equipment.data, pd.DataFrame) 93 | assert isinstance(dc.sensors.data, pd.DataFrame) 94 | assert isinstance(dc.weather.data, pd.DataFrame) 95 | 96 | def test_read_epw(self): 97 | # read back cached filled epw files 98 | for dc in self.data_clients: 99 | if dc.weather and not dc.weather.data.empty: 100 | # generate the epw file before checking it 101 | _epw_path = dc.weather.make_epw_file( 102 | sim_config=dc.sim_config, 103 | datetime_channel=dc.datetime, 104 | epw_step_size_seconds=dc.sim_config["sim_step_size_seconds"], 105 | ) 106 | data, meta, meta_lines = dc.weather.read_epw(_epw_path) 107 | assert not data.empty 108 | assert all(data.columns == dc.weather.epw_columns) 109 | 110 | def test_data_utc(self): 111 | for dc in self.data_clients: 112 | if dc.datetime and not dc.datetime.data.empty: 113 | assert ( 114 | dc.datetime.data[dc.datetime.spec.datetime_column].dtype.tz 115 | == pytz.utc 116 | ) 117 | 118 | def test_fill_missing_data(self): 119 | """Check that filled data exists and doesnt over fill""" 120 | for dc in self.data_clients: 121 | if dc.sim_config["identifier"] == os.environ.get( 122 | "TEST_FLATFILES_GBQ_IDENTIFIER" 123 | ): 124 | # verify that data bfill works with full_data_periods 125 | assert ( 126 | 
pytest.approx(24.69999885559082) 127 | == dc.thermostat.data.iloc[ 128 | dc.datetime.data[ 129 | ( 130 | dc.datetime.data[STATES.DATE_TIME] 131 | >= pd.Timestamp("2019-01-07 11:20", tz="utc") 132 | ) 133 | & ( 134 | dc.datetime.data[STATES.DATE_TIME] 135 | <= pd.Timestamp("2019-01-07 11:40:00", tz="utc") 136 | ) 137 | ].index, 138 | ][STATES.TEMPERATURE_CTRL].mean() 139 | ) 140 | assert dc.full_data_periods[0] == [ 141 | pd.to_datetime("2019-01-02 00:00:00", utc=True), 142 | pd.to_datetime("2019-01-07 11:20:00", utc=True), 143 | ] 144 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_GCSDYDSource.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | import logging 3 | import os 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | 10 | from BuildingControlsSimulator.Simulator.Config import Config 11 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 12 | from BuildingControlsSimulator.DataClients.GCSDYDSource import GCSDYDSource 13 | from BuildingControlsSimulator.DataClients.DataSpec import DonateYourDataSpec 14 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 15 | from BuildingControlsSimulator.DataClients.DataStates import STATES 16 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | @pytest.mark.skipif( 22 | (not os.environ.get("DYD_GOOGLE_CLOUD_PROJECT")) 23 | or (not os.environ.get("DYD_GCS_URI_BASE")), 24 | reason="GCS output not configured.", 25 | ) 26 | class TestGCSDYDSource: 27 | @classmethod 28 | def setup_class(cls): 29 | # initialize with data to avoid pulling multiple times 30 | cls.sim_config = Config.make_sim_config( 31 | identifier=[ 32 | "d310f1c1f600c374d8975c753f7c0fb8de9c96b1", 33 | 
"8c00c9bb17bfcca53809cb1b2d033a448bc017df", # has full data periods 34 | "6773291da5427ae87d34bb75022ee54ee3b1fc17", # file not found 35 | ], 36 | latitude=33.481136, 37 | longitude=-112.078232, 38 | start_utc=[ 39 | "2018-01-01 00:00:00", 40 | "2018-01-01 00:00:00", 41 | "2018-01-01 00:00:00", 42 | ], 43 | end_utc=[ 44 | "2018-12-31 23:55:00", 45 | "2018-12-31 23:55:00", 46 | "2018-12-31 23:55:00", 47 | ], 48 | min_sim_period="7D", 49 | min_chunk_period="30D", 50 | sim_step_size_seconds=300, 51 | output_step_size_seconds=300, 52 | ) 53 | 54 | cls.data_clients = [] 55 | 56 | # set local_cache=None to avoid caching locally and always testing connection with GCS 57 | cls.data_client = DataClient( 58 | source=GCSDYDSource( 59 | gcp_project=os.environ.get("DYD_GOOGLE_CLOUD_PROJECT"), 60 | gcs_uri_base=os.environ.get("DYD_GCS_URI_BASE"), 61 | local_cache=None, 62 | ), 63 | destination=LocalDestination( 64 | data_spec=DonateYourDataSpec(), 65 | ), 66 | ) 67 | 68 | for _idx, _sim_config in cls.sim_config.iterrows(): 69 | dc = copy.deepcopy(cls.data_client) 70 | dc.sim_config = _sim_config 71 | 72 | if _sim_config["identifier"] in [ 73 | "6773291da5427ae87d34bb75022ee54ee3b1fc17", 74 | ]: 75 | with pytest.raises(ValueError): 76 | dc.get_data() 77 | else: 78 | dc.get_data() 79 | 80 | cls.data_clients.append(dc) 81 | 82 | @classmethod 83 | def teardown_class(cls): 84 | """teardown any state that was previously setup with a call to 85 | setup_class. 
86 | """ 87 | pass 88 | 89 | def test_get_data(self): 90 | # test HVAC data returns dict of non-empty pd.DataFrame 91 | for dc in self.data_clients: 92 | if dc.datetime: 93 | assert isinstance(dc.datetime.data, pd.DataFrame) 94 | assert isinstance(dc.thermostat.data, pd.DataFrame) 95 | assert isinstance(dc.equipment.data, pd.DataFrame) 96 | assert isinstance(dc.sensors.data, pd.DataFrame) 97 | assert isinstance(dc.weather.data, pd.DataFrame) 98 | 99 | def test_read_epw(self): 100 | # read back cached filled epw files 101 | for dc in self.data_clients: 102 | if dc.weather and not dc.weather.data.empty: 103 | # generate the epw file before checking it 104 | _epw_path = dc.weather.make_epw_file( 105 | sim_config=dc.sim_config, 106 | datetime_channel=dc.datetime, 107 | epw_step_size_seconds=dc.sim_config["sim_step_size_seconds"], 108 | ) 109 | data, meta, meta_lines = dc.weather.read_epw(_epw_path) 110 | assert not data.empty 111 | assert all(data.columns == dc.weather.epw_columns) 112 | 113 | def test_data_utc(self): 114 | for dc in self.data_clients: 115 | if dc.datetime and not dc.datetime.data.empty: 116 | assert ( 117 | dc.datetime.data[dc.datetime.spec.datetime_column].dtype.tz 118 | == pytz.utc 119 | ) 120 | 121 | def test_fill_missing_data(self): 122 | """Check that filled data exists and doesnt over fill""" 123 | for dc in self.data_clients: 124 | if ( 125 | dc.sim_config["identifier"] 126 | == "8c00c9bb17bfcca53809cb1b2d033a448bc017df" 127 | ): 128 | # verify that data bfill works with full_data_periods 129 | assert ( 130 | pytest.approx(21.037790298461914) 131 | == dc.thermostat.data.iloc[ 132 | dc.datetime.data[ 133 | ( 134 | dc.datetime.data[STATES.DATE_TIME] 135 | >= pd.Timestamp("2018-02-21 16:25:00+0000", tz="UTC") 136 | ) 137 | & ( 138 | dc.datetime.data[STATES.DATE_TIME] 139 | <= pd.Timestamp("2018-02-26 17:00:00+0000", tz="UTC") 140 | ) 141 | ].index, 142 | ][STATES.TEMPERATURE_CTRL].mean() 143 | ) 144 | assert dc.full_data_periods[0] == [ 145 | 
pd.Timestamp("2018-01-02 22:05:00+0000", tz="UTC"), 146 | pd.Timestamp("2018-02-01 15:30:00+0000", tz="UTC"), 147 | ] 148 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_GCSDestination.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | import logging 3 | import os 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | 10 | from BuildingControlsSimulator.Simulator.Config import Config 11 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 12 | from BuildingControlsSimulator.DataClients.LocalSource import LocalSource 13 | from BuildingControlsSimulator.DataClients.GCSDestination import GCSDestination 14 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 15 | from BuildingControlsSimulator.DataClients.DataStates import STATES 16 | from BuildingControlsSimulator.DataClients.DataSpec import ( 17 | Internal, 18 | DonateYourDataSpec, 19 | convert_spec, 20 | ) 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | @pytest.mark.skipif( 26 | (not os.environ.get("BCS_GOOGLE_CLOUD_PROJECT")) 27 | or (not os.environ.get("BCS_OUTPUT_GCS_URI_BASE")), 28 | reason="GCS output not configured.", 29 | ) 30 | class TestGCSDestination: 31 | @classmethod 32 | def setup_class(cls): 33 | # initialize with data to avoid pulling multiple times 34 | cls.sim_config = Config.make_sim_config( 35 | identifier=[ 36 | "DYD_dummy_data", 37 | ], # test file 38 | latitude=33.481136, 39 | longitude=-112.078232, 40 | start_utc=[ 41 | "2018-01-01 00:00:00", 42 | ], 43 | end_utc=[ 44 | "2018-12-31 23:55:00", 45 | ], 46 | min_sim_period="3D", 47 | min_chunk_period="30D", 48 | sim_step_size_seconds=300, 49 | output_step_size_seconds=300, 50 | ) 51 | 52 | cls.data_client = DataClient( 53 | source=LocalSource( 54 | 
local_cache=os.environ.get("LOCAL_CACHE_DIR"), 55 | data_spec=DonateYourDataSpec(), 56 | ), 57 | destination=GCSDestination( 58 | gcp_project=os.environ.get("BCS_GOOGLE_CLOUD_PROJECT"), 59 | gcs_uri_base=os.environ.get("BCS_OUTPUT_GCS_URI_BASE"), 60 | data_spec=DonateYourDataSpec(), 61 | local_cache=None, 62 | ), 63 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 64 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 65 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 66 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 67 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 68 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 69 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 70 | ) 71 | cls.data_client.sim_config = cls.sim_config.iloc[0] 72 | 73 | cls.data_client.get_data() 74 | 75 | @classmethod 76 | def teardown_class(cls): 77 | """teardown any state that was previously setup with a call to 78 | setup_class. 79 | """ 80 | pass 81 | 82 | def get_sim_name(self): 83 | _prefix = "sim" 84 | _sim_run_identifier = "test_run" 85 | _data_source = self.data_client.source.source_name 86 | _identifier = self.data_client.sim_config["identifier"] 87 | _building_model_name = "dummy_building" 88 | _controller_model_name = "dummy_controller" 89 | 90 | return "_".join( 91 | [ 92 | _prefix, 93 | _sim_run_identifier, 94 | _data_source, 95 | _identifier, 96 | _building_model_name, 97 | _controller_model_name, 98 | ] 99 | ) 100 | 101 | def test_put_data(self): 102 | sim_name = self.get_sim_name() 103 | _df = self.data_client.get_full_input() 104 | self.data_client.destination.put_data(_df, sim_name, src_spec=Internal()) 105 | _gcs_uri = self.data_client.destination.get_gcs_uri(sim_name) 106 | 107 | r_df = pd.read_parquet(_gcs_uri) 108 | cr_df = convert_spec( 109 | r_df, 110 | src_spec=self.data_client.destination.data_spec, 111 | dest_spec=Internal(), 112 | src_nullable=True, 113 | dest_nullable=False, 114 | ) 115 | 116 | # remove 
states not in dest spec 117 | for _col in _df.columns: 118 | _state = [ 119 | v["internal_state"] 120 | for k, v in self.data_client.destination.data_spec.full.spec.items() 121 | if v["internal_state"] == _col 122 | ] 123 | if not _state: 124 | _df = _df.drop(columns=[_col]) 125 | 126 | pd.testing.assert_frame_equal(_df, cr_df) 127 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_GCSFlatFilesSource.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import logging 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | import os 10 | 11 | from BuildingControlsSimulator.Simulator.Config import Config 12 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 13 | from BuildingControlsSimulator.DataClients.GCSFlatFilesSource import GCSFlatFilesSource 14 | from BuildingControlsSimulator.DataClients.DataSpec import FlatFilesSpec 15 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 16 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 17 | from BuildingControlsSimulator.DataClients.DataStates import STATES 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | 22 | @pytest.mark.skipif( 23 | (not os.environ.get("FLATFILE_GOOGLE_CLOUD_PROJECT")) 24 | or (not os.environ.get("FLATFILES_GCS_URI_BASE")), 25 | reason="GCS output not configured.", 26 | ) 27 | class TestGCSFlatFilesSource: 28 | @classmethod 29 | def setup_class(cls): 30 | # initialize with data to avoid pulling multiple times 31 | cls.sim_config = Config.make_sim_config( 32 | identifier=[ 33 | os.environ.get("TEST_FLATFILES_IDENTIFIER_MISSING"), # missing 34 | os.environ.get("TEST_FLATFILES_IDENTIFIER_FULL"), # full 35 | "9999999", # file not found 36 | ], 37 | latitude=33.481136, 38 | longitude=-112.078232, 39 | 
start_utc="2018-01-01 00:00:00", 40 | end_utc="2018-12-31 00:00:00", 41 | min_sim_period="7D", 42 | min_chunk_period="30D", 43 | sim_step_size_seconds=300, 44 | output_step_size_seconds=300, 45 | ) 46 | 47 | cls.data_clients = [] 48 | 49 | # set local_cache=None to test connection with GCS 50 | cls.data_client = DataClient( 51 | source=GCSFlatFilesSource( 52 | gcp_project=os.environ.get("FLATFILE_GOOGLE_CLOUD_PROJECT"), 53 | gcs_uri_base=os.environ.get("FLATFILES_GCS_URI_BASE"), 54 | local_cache=None, 55 | ), 56 | destination=LocalDestination( 57 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 58 | data_spec=FlatFilesSpec(), 59 | ), 60 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 61 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 62 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 63 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 64 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 65 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 66 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 67 | ) 68 | for _idx, _sim_config in cls.sim_config.iterrows(): 69 | dc = copy.deepcopy(cls.data_client) 70 | dc.sim_config = _sim_config 71 | 72 | if _sim_config["identifier"] in [ 73 | "9999999", 74 | ]: 75 | with pytest.raises(ValueError): 76 | dc.get_data() 77 | else: 78 | dc.get_data() 79 | 80 | cls.data_clients.append(dc) 81 | 82 | @classmethod 83 | def teardown_class(cls): 84 | """teardown any state that was previously setup with a call to 85 | setup_class. 
86 | """ 87 | pass 88 | 89 | def test_get_data(self): 90 | # test HVAC data returns dict of non-empty pd.DataFrame 91 | for dc in self.data_clients: 92 | if dc.datetime: 93 | assert isinstance(dc.datetime.data, pd.DataFrame) 94 | assert isinstance(dc.thermostat.data, pd.DataFrame) 95 | assert isinstance(dc.equipment.data, pd.DataFrame) 96 | assert isinstance(dc.sensors.data, pd.DataFrame) 97 | assert isinstance(dc.weather.data, pd.DataFrame) 98 | 99 | def test_read_epw(self): 100 | # read back cached filled epw files 101 | for dc in self.data_clients: 102 | if dc.weather and not dc.weather.data.empty: 103 | # generate the epw file before checking it 104 | _epw_path = dc.weather.make_epw_file( 105 | sim_config=dc.sim_config, 106 | datetime_channel=dc.datetime, 107 | epw_step_size_seconds=dc.sim_config["sim_step_size_seconds"], 108 | ) 109 | data, meta, meta_lines = dc.weather.read_epw(_epw_path) 110 | assert not data.empty 111 | assert all(data.columns == dc.weather.epw_columns) 112 | 113 | def test_data_utc(self): 114 | for dc in self.data_clients: 115 | if dc.datetime and not dc.datetime.data.empty: 116 | assert ( 117 | dc.datetime.data[dc.datetime.spec.datetime_column].dtype.tz 118 | == pytz.utc 119 | ) 120 | 121 | def test_fill_missing_data(self): 122 | """Check that filled data exists and doesnt over fill""" 123 | for dc in self.data_clients: 124 | if dc.sim_config["identifier"] == os.environ.get( 125 | "TEST_FLATFILES_IDENTIFIER_FULL" 126 | ): 127 | # verify that data bfill works with full_data_periods 128 | assert ( 129 | pytest.approx(26.864197) 130 | == dc.thermostat.data.iloc[ 131 | dc.datetime.data[ 132 | ( 133 | dc.datetime.data[STATES.DATE_TIME] 134 | >= pd.Timestamp("2018-06-18 22:10:00", tz="utc") 135 | ) 136 | & ( 137 | dc.datetime.data[STATES.DATE_TIME] 138 | <= pd.Timestamp("2018-06-18 22:50:00", tz="utc") 139 | ) 140 | ].index, 141 | ][STATES.TEMPERATURE_CTRL].mean() 142 | ) 143 | assert dc.full_data_periods[0] == [ 144 | 
pd.to_datetime("2018-06-10 04:00:00", utc=True), 145 | pd.to_datetime("2018-06-18 22:10:00", utc=True), 146 | ] 147 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_LocalDestination.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | import logging 3 | import os 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | 10 | from BuildingControlsSimulator.Simulator.Config import Config 11 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 12 | from BuildingControlsSimulator.DataClients.LocalSource import LocalSource 13 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 14 | from BuildingControlsSimulator.DataClients.DataStates import STATES 15 | from BuildingControlsSimulator.DataClients.DataSpec import ( 16 | Internal, 17 | DonateYourDataSpec, 18 | EnergyPlusWeather, 19 | convert_spec, 20 | ) 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | class TestLocalDestination: 26 | @classmethod 27 | def setup_class(cls): 28 | # initialize with data to avoid pulling multiple times 29 | cls.sim_config = Config.make_sim_config( 30 | identifier=[ 31 | "DYD_dummy_data", 32 | ], # test file 33 | latitude=33.481136, 34 | longitude=-112.078232, 35 | start_utc=[ 36 | "2018-01-01 00:00:00", 37 | ], 38 | end_utc=[ 39 | "2018-12-31 23:55:00", 40 | ], 41 | min_sim_period="3D", 42 | min_chunk_period="30D", 43 | sim_step_size_seconds=300, 44 | output_step_size_seconds=300, 45 | ) 46 | 47 | cls.data_client = DataClient( 48 | source=LocalSource( 49 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 50 | data_spec=DonateYourDataSpec(), 51 | ), 52 | destination=LocalDestination( 53 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 54 | data_spec=DonateYourDataSpec(), 55 | ), 56 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 57 
| nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 58 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 59 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 60 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 61 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 62 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 63 | ) 64 | cls.data_client.sim_config = cls.sim_config.iloc[0] 65 | 66 | cls.data_client.get_data() 67 | 68 | @classmethod 69 | def teardown_class(cls): 70 | """teardown any state that was previously setup with a call to 71 | setup_class. 72 | """ 73 | pass 74 | 75 | def get_sim_name(self): 76 | _prefix = "sim" 77 | _sim_run_identifier = "test_run" 78 | _data_source = self.data_client.source.source_name 79 | _identifier = self.data_client.sim_config["identifier"] 80 | _building_model_name = "dummy_building" 81 | _controller_model_name = "dummy_controller" 82 | 83 | return "_".join( 84 | [ 85 | _prefix, 86 | _sim_run_identifier, 87 | _data_source, 88 | _identifier, 89 | _building_model_name, 90 | _controller_model_name, 91 | ] 92 | ) 93 | 94 | def test_put_data(self): 95 | sim_name = self.get_sim_name() 96 | _df = self.data_client.get_full_input() 97 | self.data_client.destination.put_data(_df, sim_name, src_spec=Internal()) 98 | _fpath = os.path.join( 99 | self.data_client.destination.local_cache, 100 | self.data_client.destination.operator_name, 101 | sim_name + "." 
+ self.data_client.destination.file_extension, 102 | ) 103 | r_df = pd.read_parquet(_fpath) 104 | cr_df = convert_spec( 105 | r_df, 106 | src_spec=self.data_client.destination.data_spec, 107 | dest_spec=Internal(), 108 | src_nullable=True, 109 | dest_nullable=False, 110 | ) 111 | 112 | # remove states not in dest spec 113 | for _col in _df.columns: 114 | _state = [ 115 | v["internal_state"] 116 | for k, v in self.data_client.destination.data_spec.full.spec.items() 117 | if v["internal_state"] == _col 118 | ] 119 | if not _state: 120 | _df = _df.drop(columns=[_col]) 121 | 122 | pd.testing.assert_frame_equal(_df, cr_df) 123 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_LocalSource.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | import logging 3 | import os 4 | import copy 5 | 6 | import pytest 7 | import pandas as pd 8 | import pytz 9 | 10 | from BuildingControlsSimulator.Simulator.Config import Config 11 | from BuildingControlsSimulator.DataClients.DataSpec import DonateYourDataSpec 12 | from BuildingControlsSimulator.DataClients.DataClient import DataClient 13 | from BuildingControlsSimulator.DataClients.LocalSource import LocalSource 14 | from BuildingControlsSimulator.DataClients.LocalDestination import LocalDestination 15 | from BuildingControlsSimulator.DataClients.DataSpec import EnergyPlusWeather 16 | from BuildingControlsSimulator.DataClients.DataStates import STATES 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | 21 | class TestLocalSource: 22 | @classmethod 23 | def setup_class(cls): 24 | # initialize with data to avoid pulling multiple times 25 | cls.sim_config = Config.make_sim_config( 26 | identifier=[ 27 | "DYD_dummy_data", 28 | ], # test file 29 | latitude=33.481136, 30 | longitude=-112.078232, 31 | start_utc=[ 32 | "2018-01-01 00:00:00", 33 | ], 34 | end_utc=[ 35 | 
"2018-12-31 23:55:00", 36 | ], 37 | min_sim_period="3D", 38 | min_chunk_period="30D", 39 | sim_step_size_seconds=300, 40 | output_step_size_seconds=300, 41 | ) 42 | 43 | cls.data_clients = [] 44 | cls.data_client = DataClient( 45 | source=LocalSource( 46 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 47 | data_spec=DonateYourDataSpec(), 48 | ), 49 | destination=LocalDestination( 50 | local_cache=os.environ.get("LOCAL_CACHE_DIR"), 51 | data_spec=DonateYourDataSpec(), 52 | ), 53 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 54 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 55 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 56 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 57 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 58 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 59 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 60 | ) 61 | 62 | for _idx, _sim_config in cls.sim_config.iterrows(): 63 | dc = copy.deepcopy(cls.data_client) 64 | dc.sim_config = _sim_config 65 | 66 | dc.get_data() 67 | 68 | cls.data_clients.append(dc) 69 | 70 | @classmethod 71 | def teardown_class(cls): 72 | """teardown any state that was previously setup with a call to 73 | setup_class. 
74 | """ 75 | pass 76 | 77 | def test_get_data(self): 78 | # test HVAC data returns dict of non-empty pd.DataFrame 79 | for dc in self.data_clients: 80 | assert isinstance(dc.datetime.data, pd.DataFrame) 81 | assert isinstance(dc.thermostat.data, pd.DataFrame) 82 | assert isinstance(dc.equipment.data, pd.DataFrame) 83 | assert isinstance(dc.sensors.data, pd.DataFrame) 84 | assert isinstance(dc.weather.data, pd.DataFrame) 85 | 86 | def test_read_epw(self): 87 | # read back cached filled epw files 88 | for dc in self.data_clients: 89 | if not dc.weather.data.empty: 90 | # generate the epw file before checking it 91 | _epw_path = dc.weather.make_epw_file( 92 | sim_config=dc.sim_config, 93 | datetime_channel=dc.datetime, 94 | epw_step_size_seconds=dc.sim_config["sim_step_size_seconds"], 95 | ) 96 | data, meta, meta_lines = dc.weather.read_epw(_epw_path) 97 | assert not data.empty 98 | assert all(data.columns == dc.weather.epw_columns) 99 | 100 | def test_data_utc(self): 101 | for dc in self.data_clients: 102 | if not dc.datetime.data.empty: 103 | assert ( 104 | dc.datetime.data[dc.datetime.spec.datetime_column].dtype.tz 105 | == pytz.utc 106 | ) 107 | 108 | def test_fill_missing_data(self): 109 | """Check that filled data exists and doesnt over fill""" 110 | for dc in self.data_clients: 111 | if dc.sim_config["identifier"] == "DYD_dummy_data": 112 | # verify that data bfill works with full_data_periods 113 | 114 | assert ( 115 | pytest.approx(21.6666316986084) 116 | == dc.thermostat.data.iloc[ 117 | dc.datetime.data[ 118 | ( 119 | dc.datetime.data[STATES.DATE_TIME] 120 | >= pd.Timestamp("2018-06-21 16:55", tz="utc") 121 | ) 122 | & ( 123 | dc.datetime.data[STATES.DATE_TIME] 124 | <= pd.Timestamp("2018-06-22 17:00:00", tz="utc") 125 | ) 126 | ].index, 127 | ][STATES.TEMPERATURE_CTRL].mean() 128 | ) 129 | assert dc.full_data_periods[0] == [ 130 | pd.Timestamp("2018-03-25 17:00:00", tz="utc"), 131 | pd.Timestamp("2018-06-21 16:55:00", tz="utc"), 132 | ] 133 | 
-------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/DataClients/test_WeatherChannel.py: -------------------------------------------------------------------------------- 1 | # created by Tom Stesco tom.s@ecobee.com 2 | 3 | import logging 4 | 5 | import pytest 6 | import os 7 | 8 | from BuildingControlsSimulator.DataClients.WeatherChannel import WeatherChannel 9 | from BuildingControlsSimulator.DataClients.DateTimeChannel import DateTimeChannel 10 | from BuildingControlsSimulator.DataClients.DataSpec import Internal 11 | from BuildingControlsSimulator.DataClients.DataStates import STATES 12 | from BuildingControlsSimulator.Simulator.Config import Config 13 | 14 | 15 | from pandas import Timestamp 16 | from pandas import Timedelta 17 | import pandas as pd 18 | 19 | logger = logging.getLogger(__name__) 20 | 21 | 22 | class TestWeatherChannel: 23 | @classmethod 24 | def setup_class(cls): 25 | # initialize with data to avoid pulling multiple times 26 | 27 | cls.weather = WeatherChannel( 28 | data=[], 29 | spec=[], 30 | nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 31 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 32 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 33 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 34 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 35 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 36 | nsrdb_cache_dir=os.environ.get("NSRDB_CACHE_DIR"), 37 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 38 | ) 39 | 40 | @classmethod 41 | def teardown_class(cls): 42 | """teardown any state that was previously setup with a call to 43 | setup_class. 
44 | """ 45 | pass 46 | 47 | def test_get_epw_tmy3(self): 48 | """ 49 | test that preprocessing produces output file 50 | """ 51 | lat = 33.481136 52 | lon = -112.078232 53 | test_fname = "USA_AZ_Phoenix-Sky.Harbor.Intl.AP.722780_TMY.epw" 54 | test_fpath = os.path.join(self.weather.ep_tmy3_cache_dir, test_fname) 55 | 56 | # remove test file if previously existing 57 | if os.path.exists(test_fpath): 58 | os.remove(test_fpath) 59 | fpath, fname = self.weather.get_tmy_epw(lat, lon) 60 | assert os.path.exists(fpath) 61 | 62 | # epw file can be read and has correct columns 63 | data, meta, meta_epw_lines = self.weather.read_epw(fpath) 64 | cols = data.columns.to_list() 65 | 66 | assert cols == self.weather.epw_columns 67 | 68 | def test_make_epw_file(self): 69 | _start_utc = "2019-01-17" 70 | _end_utc = "2019-01-19" 71 | _step_size_seconds = 300 72 | _sim_step_size_seconds = 60 73 | 74 | _data = pd.DataFrame( 75 | { 76 | STATES.DATE_TIME: pd.date_range( 77 | start=_start_utc, 78 | end=_end_utc, 79 | freq=f"{_sim_step_size_seconds}S", 80 | tz="utc", 81 | ) 82 | } 83 | ) 84 | 85 | sim_config = Config.make_sim_config( 86 | identifier="511863952006", 87 | latitude=43.798577, 88 | longitude=-79.239087, 89 | start_utc=_start_utc, 90 | end_utc=_end_utc, 91 | min_sim_period="1D", 92 | sim_step_size_seconds=_sim_step_size_seconds, 93 | output_step_size_seconds=_step_size_seconds, 94 | ).iloc[0] 95 | 96 | _internal_timezone = DateTimeChannel.get_timezone( 97 | sim_config["latitude"], sim_config["longitude"] 98 | ) 99 | internal_spec = Internal() 100 | 101 | datetime_channel = DateTimeChannel( 102 | data=_data[ 103 | internal_spec.intersect_columns( 104 | _data.columns, internal_spec.datetime.spec 105 | ) 106 | ], 107 | spec=internal_spec.datetime, 108 | latitude=sim_config["latitude"], 109 | longitude=sim_config["longitude"], 110 | internal_timezone=_internal_timezone, 111 | ) 112 | 113 | weather_channel = WeatherChannel( 114 | data=pd.DataFrame(), 115 | spec=internal_spec, 116 | 
nrel_dev_api_key=os.environ.get("NREL_DEV_API_KEY"), 117 | nrel_dev_email=os.environ.get("NREL_DEV_EMAIL"), 118 | archive_tmy3_dir=os.environ.get("ARCHIVE_TMY3_DIR"), 119 | archive_tmy3_meta=os.environ.get("ARCHIVE_TMY3_META"), 120 | archive_tmy3_data_dir=os.environ.get("ARCHIVE_TMY3_DATA_DIR"), 121 | ep_tmy3_cache_dir=os.environ.get("EP_TMY3_CACHE_DIR"), 122 | nsrdb_cache_dir=os.environ.get("NSRDB_CACHE_DIR"), 123 | simulation_epw_dir=os.environ.get("SIMULATION_EPW_DIR"), 124 | ) 125 | 126 | weather_channel.get_epw_data(sim_config, datetime_channel) 127 | 128 | epw_path = weather_channel.make_epw_file( 129 | sim_config=sim_config, 130 | datetime_channel=datetime_channel, 131 | epw_step_size_seconds=_step_size_seconds, 132 | ) 133 | 134 | assert weather_channel.data.empty == False 135 | assert ( 136 | pytest.approx(weather_channel.data[STATES.OUTDOOR_TEMPERATURE].mean()) 137 | == 1.78746962860115 138 | ) 139 | 140 | # TODO: Need to rework this test now that get_nsrdb has been absorbed into fill_nsrdb 141 | @pytest.mark.skip(reason="Need to rework this test") 142 | def test_get_nsrdb(self): 143 | """ 144 | test that we can pull nsrdb data 145 | """ 146 | sim_config = { 147 | "identifier": "511858737641", 148 | "latitude": 47.650447, 149 | "longitude": -117.464061, 150 | "start_utc": Timestamp("2019-04-16 00:00:00+0000", tz="UTC"), 151 | "end_utc": Timestamp("2019-04-24 00:00:00+0000", tz="UTC"), 152 | "min_sim_period": Timedelta("1 days 00:00:00"), 153 | "min_chunk_period": Timedelta("30 days 00:00:00"), 154 | "sim_step_size_seconds": 300, 155 | "output_step_size_seconds": 300, 156 | } 157 | 158 | df_solar = self.weather.get_nsrdb(sim_config) 159 | assert df_solar.shape == (17520, 5) 160 | assert df_solar.at[17515, "dni"] == 18.0 161 | assert df_solar.at[17519, "ghi"] == 4.0 162 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/OutputAnalysis/__init__.py: 
# created by Tom Stesco tom.s@ecobee.com
from collections.abc import Iterable


import pandas as pd
import numpy as np


class Config:
    """Factory for simulation configuration tables consumed by Simulator."""

    @staticmethod
    def make_sim_config(
        identifier,
        latitude,
        longitude,
        start_utc,
        end_utc,
        min_sim_period,
        sim_step_size_seconds,
        output_step_size_seconds,
        min_chunk_period="30D",
    ):
        """Return a pd.DataFrame with one row of simulation settings per identifier.

        Scalar arguments are broadcast to the length of ``identifier``.
        Timestamps may be given as strings (parsed as UTC) and periods as
        pandas offset strings (e.g. "30D").

        :raises ValueError: if latitude/longitude are not floats, or if
            timestamps/periods cannot be represented as pd.Timestamp /
            pd.Timedelta.
        """
        # a bare str identifier is a single identifier, not a sequence
        if not isinstance(identifier, Iterable) or isinstance(identifier, str):
            identifier = [identifier]
        n_ids = len(identifier)

        def _listify(value):
            # broadcast scalars (and strings, which are iterable) to one
            # entry per identifier; real sequences pass through unchanged
            if not isinstance(value, Iterable) or isinstance(value, str):
                return [value] * n_ids
            return value

        latitude = _listify(latitude)
        longitude = _listify(longitude)
        start_utc = _listify(start_utc)
        end_utc = _listify(end_utc)
        min_sim_period = _listify(min_sim_period)
        sim_step_size_seconds = _listify(sim_step_size_seconds)
        output_step_size_seconds = _listify(output_step_size_seconds)
        min_chunk_period = _listify(min_chunk_period)

        # parse and validate input per identifier
        for i in range(n_ids):
            if not isinstance(latitude[i], float):
                raise ValueError(f"latitude[{i}]: {latitude[i]} is not a float.")
            if not isinstance(longitude[i], float):
                raise ValueError(f"longitude[{i}]: {longitude[i]} is not a float.")

            # convert str to datetime utc
            if isinstance(start_utc[i], str):
                start_utc[i] = pd.Timestamp(start_utc[i], tz="utc")
            if isinstance(end_utc[i], str):
                end_utc[i] = pd.Timestamp(end_utc[i], tz="utc")

            if not isinstance(start_utc[i], pd.Timestamp):
                raise ValueError(
                    f"start_utc[{i}]: {start_utc[i]} is not convertable to pd.Timestamp."
                )
            if not isinstance(end_utc[i], pd.Timestamp):
                raise ValueError(
                    f"end_utc[{i}]: {end_utc[i]} is not convertable to pd.Timestamp."
                )

            # convert str to timedelta
            if isinstance(min_sim_period[i], str):
                min_sim_period[i] = pd.Timedelta(min_sim_period[i])
            if isinstance(min_chunk_period[i], str):
                min_chunk_period[i] = pd.Timedelta(min_chunk_period[i])

            if not isinstance(min_sim_period[i], pd.Timedelta):
                raise ValueError(
                    f"min_sim_period[{i}]: {min_sim_period[i]} is not convertable to pd.Timedelta."
                )
            if not isinstance(min_chunk_period[i], pd.Timedelta):
                raise ValueError(
                    f"min_chunk_period[{i}]: {min_chunk_period[i]} is not convertable to pd.Timedelta."
                )

        return pd.DataFrame.from_dict(
            {
                "identifier": identifier,
                "latitude": latitude,
                "longitude": longitude,
                "start_utc": start_utc,
                "end_utc": end_utc,
                "min_sim_period": min_sim_period,
                "min_chunk_period": min_chunk_period,
                "sim_step_size_seconds": sim_step_size_seconds,
                "output_step_size_seconds": output_step_size_seconds,
            }
        )
@attr.s(kw_only=True)
class Simulator:
    """Creates list of lazy init simulations with same building model and controller model"""

    sim_config = attr.ib()
    data_client = attr.ib(validator=attr.validators.instance_of(DataClient))
    building_models = attr.ib(
        validator=attr.validators.deep_iterable(
            member_validator=attr.validators.instance_of(BuildingModel),
            iterable_validator=attr.validators.instance_of(list),
        )
    )
    controller_models = attr.ib(
        validator=attr.validators.deep_iterable(
            member_validator=attr.validators.instance_of(ControllerModel),
            iterable_validator=attr.validators.instance_of(list),
        )
    )
    state_estimator_models = attr.ib(
        validator=attr.validators.deep_iterable(
            member_validator=attr.validators.instance_of(StateEstimatorModel),
            iterable_validator=attr.validators.instance_of(list),
        )
    )

    simulations = attr.ib(factory=list)

    sim_run_identifier = attr.ib()

    @sim_run_identifier.default
    def get_sim_run_identifier(self):
        # human readable UTC timestamp, plus the last 6 hex chars of a uuid
        # to disambiguate the unlikely case of multiple runs starting in
        # the same second
        timestamp = datetime.utcnow().strftime("%Y_%m_%d_%H_%M_%S")
        return timestamp + uuid.uuid4().hex[-6:]

    def __attrs_post_init__(self):
        """Lazy init of all simulations"""
        # one simulation per permutation of: data, building, controller,
        # and state estimator models
        logger.info(f"Initializing simulation run: {self.sim_run_identifier}")
        for _, cfg_row in self.sim_config.iterrows():

            # the data client is copied once per sim_config row so that
            # permutations of the models can reuse pulled data where possible
            data_client = copy.deepcopy(self.data_client)
            data_client.sim_config = cfg_row.to_dict()

            permutations = (
                (building, controller, estimator)
                for building in self.building_models
                for controller in self.controller_models
                for estimator in self.state_estimator_models
            )
            for building, controller, estimator in permutations:
                # deep copies are used so that model objects held in user
                # code can be fully initialized lazily per simulation
                self.simulations.append(
                    Simulation(
                        config=cfg_row,
                        data_client=data_client,
                        building_model=copy.deepcopy(building),
                        controller_model=copy.deepcopy(controller),
                        state_estimator_model=copy.deepcopy(estimator),
                        sim_run_identifier=self.sim_run_identifier,
                    )
                )

    def simulate(self, local=True, preprocess_check=False):
        """Run all simulations locally or in cloud.
        :param local: run simulations locally
        """
        if not local:
            return
        for sim in self.simulations:
            # weather data is required during model creation
            sim.data_client.get_data()
            sim.create_models(preprocess_check=preprocess_check)
            sim.run(local=True)
class TestSimulator:
    @classmethod
    def setup_class(cls):
        # initialize with data to avoid pulling multiple times
        EnergyPlusBuildingModel.make_directories()

    @classmethod
    def teardown_class(cls):
        """teardown any state that was previously setup with a call to
        setup_class.
        """
        pass

    def get_epw_path(self, epw_name):
        """Resolve a weather file name to a usable local path.

        The weather file here does not need to be correct for the IDF file
        as we will be testing permutations and erroneous cases.
        """
        # if we took the time to supply a full path, might as well try it out
        if os.path.isfile(epw_name):
            return epw_name

        # check WEATHER_DIR, falling back to the default weather files
        # bundled with every EnergyPlus install
        resolved_path = os.path.join(os.environ.get("WEATHER_DIR"), epw_name)
        if not os.path.isfile(resolved_path):
            bundled_path = os.path.join(
                os.environ.get("EPLUS_DIR"),
                "WeatherData",
                epw_name,
            )
            if not os.path.isfile(bundled_path):
                raise ValueError(f"Could not find supplied weather file: {epw_name}")
            shutil.copyfile(bundled_path, resolved_path)

        return resolved_path

    def get_idf_path(self, idf_name):
        """Resolve an IDF file name to a usable local path."""
        # if we took the time to supply a full path, might as well try it out
        if os.path.isfile(idf_name):
            return idf_name

        # check IDF_DIR, falling back to the example files bundled with
        # every EnergyPlus install
        resolved_path = os.path.join(os.environ.get("IDF_DIR"), idf_name)
        if not os.path.isfile(resolved_path):
            bundled_path = os.path.join(
                os.environ.get("EPLUS_DIR"),
                "ExampleFiles",
                idf_name,
            )
            if not os.path.isfile(bundled_path):
                raise ValueError(f"Could not find supplied idf file: {idf_name}")
            shutil.copyfile(bundled_path, resolved_path)

        return resolved_path

    def get_fmu(self, fmu_name):
        return f"{os.environ.get('FMU_DIR')}/../fmu-models/deadband/deadband.fmu"

    def get_data_source(self, data_client_params):
        """Build the configured data source client, or None."""
        if data_client_params["is_local_source"]:
            return LocalSource(
                data_spec=data_client_params["source_data_spec"],
                local_cache=data_client_params["source_local_cache"],
            )
        if data_client_params["is_gcs_source"]:
            # GCS sources are distinguished by the spec type they serve
            if isinstance(data_client_params["source_data_spec"], DonateYourDataSpec):
                return GCSDYDSource(
                    gcp_project=data_client_params["gcp_project"],
                    gcs_uri_base=data_client_params["gcs_uri_base"],
                    local_cache=data_client_params["source_local_cache"],
                )
            if isinstance(data_client_params["source_data_spec"], FlatFilesSpec):
                return GCSFlatFilesSource(
                    gcp_project=data_client_params["gcp_project"],
                    gcs_uri_base=data_client_params["gcs_uri_base"],
                    local_cache=data_client_params["source_local_cache"],
                )
            return None
        if data_client_params["is_gbq_source"]:
            return GBQDataSource(
                data_spec=data_client_params["source_data_spec"],
                gcp_project=data_client_params["gcp_project"],
                gbq_table=data_client_params["gbq_table"],
                local_cache=data_client_params["source_local_cache"],
            )
        return None

    def get_data_destination(self, data_client_params):
        """Build the configured data destination client, or None."""
        if data_client_params["is_local_destination"]:
            return LocalDestination(
                local_cache=data_client_params["destination_local_cache"],
                data_spec=data_client_params["destination_data_spec"],
            )
        return None

    def get_building_model(self, building_model_params):
        """Build the configured building model, or None."""
        if building_model_params["is_energyplus_building"]:
            return EnergyPlusBuildingModel(
                idf=IDFPreprocessor(
                    idf_file=self.get_idf_path(building_model_params["idf_name"]),
                    building_config=building_model_params["building_config"],
                    debug=True,
                ),
                step_size_seconds=building_model_params["step_size_seconds"],
            )
        return None

    def get_controller_model(self, controller_model_params):
        """Build the configured controller model, or None."""
        if controller_model_params["is_deadband"]:
            return Deadband(
                options={
                    "deadband": 1.0,
                },
                step_size_seconds=controller_model_params["step_size_seconds"],
                discretization_size_seconds=60,
            )
        return None

    def get_state_estimator_model(self, state_estimator_model_params):
        """Build the configured state estimator model, or None."""
        if state_estimator_model_params["is_low_pass_filter"]:
            return LowPassFilter(
                alpha_temperature=state_estimator_model_params["low_pass_filter_alpha"],
                alpha_humidity=state_estimator_model_params["low_pass_filter_alpha"],
            )
        return None

    @pytest.mark.parametrize("test_params", params.test_params)
    def test_simulator(self, test_params):
        """End-to-end run: simulate, store output, read back, compare means."""
        cfg = test_params["config"]
        test_sim_config = Config.make_sim_config(
            identifier=cfg["identifier"],
            latitude=cfg["latitude"],
            longitude=cfg["longitude"],
            start_utc=cfg["start_utc"],
            end_utc=cfg["end_utc"],
            min_sim_period=cfg["min_sim_period"],
            sim_step_size_seconds=cfg["sim_step_size_seconds"],
            output_step_size_seconds=cfg["output_step_size_seconds"],
        )

        epw_path = None
        if test_params["data_client"].get("epw_name"):
            epw_path = self.get_epw_path(test_params["data_client"].get("epw_name"))

        # do not use NREL data in test cases in case it changes or becomes
        # unavailable
        dc = DataClient(
            source=self.get_data_source(test_params["data_client"]),
            destination=self.get_data_destination(test_params["data_client"]),
            nrel_dev_api_key=None,
            epw_path=epw_path,
        )

        # test HVAC data returns dict of non-empty pd.DataFrame
        master = Simulator(
            data_client=dc,
            sim_config=test_sim_config,
            building_models=[self.get_building_model(test_params["building_model"])],
            controller_models=[
                self.get_controller_model(test_params["controller_model"])
            ],
            state_estimator_models=[
                self.get_state_estimator_model(test_params["state_estimator_model"])
            ],
        )
        logger.info("calling master.simulate ... ")
        master.simulate(local=True, preprocess_check=False)
        logger.info("done master.simulate")

        # read back stored output and check it
        sim = master.simulations[0]
        destination = sim.data_client.destination
        stored_path = os.path.join(
            destination.local_cache,
            destination.operator_name,
            sim.sim_name + "." + destination.file_extension,
        )
        stored_df = pd.read_parquet(stored_path)

        def _spec_column(internal_state):
            # map an internal state enum to its destination spec column name
            return [
                name
                for name, spec in destination.data_spec.full.spec.items()
                if spec["internal_state"] == internal_state
            ][0]

        t_ctrl_name = _spec_column(STATES.TEMPERATURE_CTRL)
        humidity_name = _spec_column(STATES.THERMOSTAT_HUMIDITY)

        mean_thermostat_temperature = (
            sim.output[STATES.THERMOSTAT_TEMPERATURE].mean()
        )
        mean_thermostat_humidity = sim.output[STATES.THERMOSTAT_HUMIDITY].mean()

        output_format_mean_thermostat_temperature = stored_df[t_ctrl_name].mean()
        output_format_mean_thermostat_humidity = stored_df[humidity_name].mean()

        # print out values in case of slight divergence to avoid re-running tests
        logger.info(
            f"\nmean_thermostat_temperature= {mean_thermostat_temperature}\n"
            + f"mean_thermostat_humidity= {mean_thermostat_humidity}\n"
            + f"output_format_mean_thermostat_temperature= {output_format_mean_thermostat_temperature}\n"
            + f"output_format_mean_thermostat_humidity= {output_format_mean_thermostat_humidity}\n"
        )

        expected = test_params["expected_result"]
        assert (
            pytest.approx(expected["mean_thermostat_temperature"])
            == mean_thermostat_temperature
        )
        assert (
            pytest.approx(expected["mean_thermostat_humidity"])
            == mean_thermostat_humidity
        )
        assert (
            pytest.approx(expected["output_format_mean_thermostat_temperature"])
            == output_format_mean_thermostat_temperature
        )
        assert (
            pytest.approx(expected["output_format_mean_thermostat_humidity"])
            == output_format_mean_thermostat_humidity
        )
# created by Tom Stesco tom.s@ecobee.com

import attr
import pandas as pd
import numpy as np

from BuildingControlsSimulator.StateEstimatorModels.StateEstimatorModel import (
    StateEstimatorModel,
)
from BuildingControlsSimulator.DataClients.DataStates import STATES
from BuildingControlsSimulator.Conversions.Conversions import Conversions


@attr.s
class LowPassFilter(StateEstimatorModel):
    """LowPassFilter state estimator model.

    Applies an exponential moving average (first-order low-pass filter)
    independently to the thermostat temperature and humidity channels;
    motion is passed through unfiltered.
    """

    # default is no filtering, use current measurement 100%
    alpha_temperature = attr.ib(default=1.0)
    alpha_humidity = attr.ib(default=1.0)
    step_output = attr.ib(factory=dict)
    step_size_seconds = attr.ib(default=None)
    current_t_idx = attr.ib(default=None)

    output = attr.ib(factory=dict)

    # for reference on how attr defaults work for mutable types (e.g. list) see:
    # https://www.attrs.org/en/stable/init.html#defaults
    input_states = attr.ib()
    output_states = attr.ib()

    @input_states.default
    def get_input_states(self):
        return [
            STATES.THERMOSTAT_TEMPERATURE,
            STATES.THERMOSTAT_HUMIDITY,
            STATES.THERMOSTAT_MOTION,
        ]

    @output_states.default
    def get_output_states(self):
        return [
            STATES.THERMOSTAT_TEMPERATURE_ESTIMATE,
            STATES.THERMOSTAT_HUMIDITY_ESTIMATE,
            STATES.THERMOSTAT_MOTION_ESTIMATE,
        ]

    def get_model_name(self):
        """Return human readable, uniquely identifying model name."""
        _model_name = "LowPass"
        _model_name = _model_name.replace(".", "_")
        return _model_name

    def initialize(
        self,
        start_utc,
        t_start,
        t_end,
        t_step,
        data_spec,
        categories_dict,
    ):
        """Allocate output memory and reset per-step state for a new simulation."""
        self.current_t_idx = 0
        self.step_size_seconds = t_step
        self.allocate_output_memory(
            t_start=t_start,
            t_end=t_end,
            t_step=t_step,
            data_spec=data_spec,
            categories_dict=categories_dict,
        )
        self.init_step_output()

    def allocate_output_memory(
        self, t_start, t_end, t_step, data_spec, categories_dict
    ):
        """preallocate output memory to speed up simulation"""
        # reset output
        self.output = {}

        self.output = {
            STATES.SIMULATION_TIME: np.arange(
                t_start, t_end + t_step, t_step, dtype="int64"
            )
        }
        n_s = len(self.output[STATES.SIMULATION_TIME])

        # add state variables
        for state in self.output_states:
            if data_spec.full.spec[state]["dtype"] == "category":
                self.output[state] = pd.Series(
                    pd.Categorical(
                        pd.Series(index=np.arange(n_s)),
                        categories=categories_dict[state],
                    )
                )
            else:
                (
                    np_default_value,
                    np_dtype,
                ) = Conversions.numpy_down_cast_default_value_dtype(
                    data_spec.full.spec[state]["dtype"]
                )
                self.output[state] = np.full(
                    n_s,
                    np_default_value,
                    dtype=np_dtype,
                )

        self.output[STATES.STEP_STATUS] = np.full(n_s, 0, dtype="int8")

    def tear_down(self):
        """tear down FMU"""
        pass

    def init_step_output(self):
        # initialize all estimates to None (cold start)
        self.step_output = {state: None for state in self.output_states}

    def calc_t_control(self, step_sensor_input):
        t_ctrl = step_sensor_input[STATES.THERMOSTAT_TEMPERATURE]
        return t_ctrl

    @staticmethod
    def filter(state, prev_state_estimate, alpha):
        """Single low-pass update: y[i] = y[i-1] + alpha * (x[i] - y[i-1]).

        A previous estimate of None indicates a cold start, in which case
        the raw measurement seeds the filter. The check uses ``is not None``
        so a legitimate previous estimate of exactly 0.0 is not mistaken
        for a cold start.
        """
        if prev_state_estimate is not None:
            # y[i] := y[i-1] + α * (x[i] - y[i-1])
            state_estimate = prev_state_estimate + alpha * (state - prev_state_estimate)
        else:
            # cold start
            state_estimate = state
        return state_estimate

    def do_step(
        self,
        t_start,
        t_step,
        step_sensor_input,
    ):
        """Simulate estimator time step and append results to output.

        Returns the step status (0 on success).
        """
        self.step_output[STATES.STEP_STATUS] = 1

        self.step_output[STATES.THERMOSTAT_TEMPERATURE_ESTIMATE] = LowPassFilter.filter(
            state=step_sensor_input[STATES.THERMOSTAT_TEMPERATURE],
            prev_state_estimate=self.step_output[
                STATES.THERMOSTAT_TEMPERATURE_ESTIMATE
            ],
            alpha=self.alpha_temperature,
        )

        self.step_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE] = LowPassFilter.filter(
            state=step_sensor_input[STATES.THERMOSTAT_HUMIDITY],
            prev_state_estimate=self.step_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE],
            # BUGFIX: humidity was previously filtered with alpha_temperature
            alpha=self.alpha_humidity,
        )

        # non filtered states
        self.step_output[STATES.THERMOSTAT_MOTION_ESTIMATE] = step_sensor_input[
            STATES.THERMOSTAT_MOTION
        ]

        self.step_output[STATES.STEP_STATUS] = 0
        self.add_step_to_output(self.step_output)
        self.current_t_idx += 1

        return self.step_output[STATES.STEP_STATUS]

    def add_step_to_output(self, step_output):
        # write the step's values into the preallocated output arrays
        for k, v in step_output.items():
            self.output[k][self.current_t_idx] = v

    def change_settings(self, new_settings):
        # this model has no settings
        pass
# created by Tom Stesco tom.s@ecobee.com

from abc import ABC, abstractmethod
from enum import IntEnum
import logging

import attr
import pandas as pd
import numpy as np


@attr.s
class StateEstimatorModel(ABC):
    """Abstract base class for state estimator models.

    Concrete estimators consume raw sensor input states and produce
    filtered estimate states, one simulation step at a time.
    """

    # NOTE: declaration order defines the attrs-generated __init__ signature
    input_states = attr.ib()
    output_states = attr.ib()

    output = attr.ib(factory=dict)
    step_output = attr.ib(factory=dict)
    settings = attr.ib(factory=dict)

    @abstractmethod
    def initialize(self, start_utc, t_start, t_end, t_step, data_spec, categories_dict):
        """Run on first setup and not again."""
        pass

    @abstractmethod
    def do_step(self):
        """Defines sequence of step internals."""
        pass

    @abstractmethod
    def change_settings(self, new_settings):
        """Change persistent internal settings to model."""
        pass

    @abstractmethod
    def get_model_name(self):
        """Defines human readable, uniquely identifying name."""
        pass
class TestLowPassFilter:
    @classmethod
    def setup_class(cls):
        # initialize with data to avoid pulling multiple times
        cls.step_size_seconds = 300

    @classmethod
    def teardown_class(cls):
        """teardown any state that was previously setup with a call to
        setup_class.
        """
        pass

    def test_low_pass_filter(self):
        """Filtered temperature/humidity means must lag the raw input means."""
        state_estimator_model = LowPassFilter(
            alpha_temperature=0.75, alpha_humidity=0.75
        )

        test_temperature = np.arange(-40, 60, 0.05)
        test_humidity = np.linspace(0, 100, len(test_temperature))
        test_motion = np.full(len(test_temperature), False)
        test_sim_time = np.arange(
            0,
            len(test_temperature) * self.step_size_seconds,
            self.step_size_seconds,
            dtype="int64",
        )

        test_sensor_data = pd.DataFrame.from_dict(
            {
                STATES.THERMOSTAT_TEMPERATURE: test_temperature,
                STATES.THERMOSTAT_HUMIDITY: test_humidity,
                # BUGFIX: the motion channel previously fed the humidity
                # fixture; use the motion fixture it was built for
                STATES.THERMOSTAT_MOTION: test_motion,
            }
        )

        state_estimator_model.initialize(
            start_utc=pd.Timestamp("now"),
            t_start=0,
            t_end=len(test_temperature) * self.step_size_seconds,
            t_step=self.step_size_seconds,
            data_spec=Internal(),
            categories_dict={},
        )

        for i in range(0, len(test_sim_time)):
            state_estimator_model.do_step(
                t_start=test_sim_time[i],
                t_step=self.step_size_seconds,
                step_sensor_input=test_sensor_data.iloc[i],
            )

        test_output = pd.DataFrame.from_dict(state_estimator_model.output)
        # drop the trailing preallocated row that was never filled
        test_output = test_output.drop(axis="rows", index=len(test_sim_time))

        assert (
            pytest.approx(9.95837688446045)
            == test_output[STATES.THERMOSTAT_TEMPERATURE_ESTIMATE].mean()
        )
        assert (
            test_sensor_data[STATES.THERMOSTAT_TEMPERATURE].mean()
            > test_output[STATES.THERMOSTAT_TEMPERATURE_ESTIMATE].mean()
        )
        assert (
            pytest.approx(49.98334503173828)
            == test_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE].mean()
        )
        assert (
            test_sensor_data[STATES.THERMOSTAT_HUMIDITY].mean()
            > test_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE].mean()
        )
test_sensor_data[STATES.THERMOSTAT_TEMPERATURE].mean() 78 | > test_output[STATES.THERMOSTAT_TEMPERATURE_ESTIMATE].mean() 79 | ) 80 | assert ( 81 | pytest.approx(49.98334503173828) 82 | == test_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE].mean() 83 | ) 84 | assert ( 85 | test_sensor_data[STATES.THERMOSTAT_HUMIDITY].mean() 86 | > test_output[STATES.THERMOSTAT_HUMIDITY_ESTIMATE].mean() 87 | ) 88 | -------------------------------------------------------------------------------- /src/python/BuildingControlsSimulator/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | # configure logging 5 | if os.getenv("LOG_LEVEL") == "DEBUG": 6 | logging.basicConfig( 7 | level=logging.DEBUG, 8 | format="%(asctime)s %(name)s %(levelname)-8s %(message)s", 9 | datefmt="%Y-%m-%d %H:%M:%S", 10 | ) 11 | elif os.getenv("LOG_LEVEL") == "INFO": 12 | logging.basicConfig( 13 | level=logging.INFO, 14 | format="%(asctime)s %(name)s %(levelname)-8s %(message)s", 15 | datefmt="%Y-%m-%d %H:%M:%S", 16 | ) 17 | else: 18 | logging.basicConfig( 19 | level=logging.WARNING, 20 | format="%(asctime)s %(name)s %(levelname)-8s %(message)s", 21 | datefmt="%Y-%m-%d %H:%M:%S", 22 | ) 23 | -------------------------------------------------------------------------------- /test/data/input/local/DYD_dummy_data.csv.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ecobee/building-controls-simulator/de58c4dbedb6c3dfa478ee69f121964e74108bbd/test/data/input/local/DYD_dummy_data.csv.zip -------------------------------------------------------------------------------- /test/fmu/fmu-models/deadband/deadband.fmu: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ecobee/building-controls-simulator/de58c4dbedb6c3dfa478ee69f121964e74108bbd/test/fmu/fmu-models/deadband/deadband.fmu 
--------------------------------------------------------------------------------