├── nilm_metadata ├── tests │ ├── __init__.py │ ├── test_convert_yaml_to_hdf5.py │ ├── test_nilm_metadata.py │ └── test_object_concatenation.py ├── central_metadata │ ├── __init__.py │ ├── appliance_types │ │ ├── __init__.py │ │ ├── appliance.yaml │ │ ├── commercial.yaml │ │ ├── cold.yaml │ │ ├── components.yaml │ │ ├── misc.yaml │ │ ├── lighting.yaml │ │ ├── ict.yaml │ │ ├── consumer_electronics.yaml │ │ ├── wet.yaml │ │ ├── cooking.yaml │ │ └── heating.yaml │ └── country.yaml ├── __init__.py ├── file_management.py ├── object_concatenation.py └── convert_yaml_to_hdf5.py ├── docs ├── source │ ├── manual.rst │ ├── index.rst │ ├── conf.py │ ├── central_metadata.rst │ ├── tutorial.rst │ ├── dataset_metadata.rst │ └── schema.svg ├── Makefile └── make.bat ├── .gitignore ├── MANIFEST.in ├── pyproject.toml ├── README.md └── LICENSE /nilm_metadata/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/source/manual.rst: -------------------------------------------------------------------------------- 1 | ******************** 2 | NILM Metadata Manual 3 | ******************** 4 | 5 | This has been renamed :doc:`tutorial`. 6 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/appliance.yaml: -------------------------------------------------------------------------------- 1 | appliance: # the super class of all appliances 2 | parent: null 3 | min_on_duration: 20 4 | min_off_duration: 20 -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/country.yaml: -------------------------------------------------------------------------------- 1 | GB: 2 | nominal: 230 3 | upper_limit: 253 # +10% 4 | lower_limit: 216 # -6% 5 | related_documents: 6 | - www.legislation.gov.uk/uksi/2002/2665/regulation/27 7 | 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .ropeproject/ 2 | index.yaml 3 | *.pyc 4 | nilm_metadata.egg-info/ 5 | NILM_metadata.egg-info/ 6 | docs/build 7 | .idea/ 8 | build/ 9 | dist/ 10 | .venv/ 11 | venv/ 12 | __pycache__/ 13 | .pytest_cache/ 14 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | graft docs 2 | graft nilm_metadata 3 | # `-> adds all files inside a directory 4 | 5 | prune **/__pycache__ 6 | prune docs/build 7 | # `-> excludes all files inside a directory 8 | 9 | global-include *.yaml 10 | # `-> matches file names (regardless of directory) 11 | -------------------------------------------------------------------------------- /nilm_metadata/__init__.py: -------------------------------------------------------------------------------- 1 | from nilm_metadata.object_concatenation import get_appliance_types, recursively_update_dict 2 | 
from nilm_metadata.convert_yaml_to_hdf5 import convert_yaml_to_hdf5, save_yaml_to_datastore 3 | 4 | import os 5 | 6 | _ROOT = os.path.abspath(os.path.dirname(__file__)) 7 | def get_data(path): 8 | return os.path.join(_ROOT, 'central_metadata', path) 9 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. NILM Metadata documentation master file, created by 2 | sphinx-quickstart on Mon May 19 10:47:20 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to NILM Metadata's documentation! 7 | ========================================= 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 2 13 | 14 | tutorial 15 | dataset_metadata 16 | central_metadata 17 | 18 | 19 | Indices and tables 20 | ================== 21 | 22 | * :ref:`genindex` 23 | * :ref:`search` 24 | 25 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/commercial.yaml: -------------------------------------------------------------------------------- 1 | Commercial appliance: 2 | parent: appliance 3 | 4 | 5 | #------------- HVAC ------------------------ 6 | 7 | AHU: 8 | parent: Commercial appliance 9 | 10 | elevator: 11 | parent: Commercial appliance 12 | 13 | #------------- Industrial ------------------------ 14 | 15 | plug-in electric vehicle station: 16 | parent: Commercial appliance 17 | 18 | photovoltaic system total power: 19 | parent: Commercial appliance 20 | 21 | cooling systems total power: 22 | parent: Commercial appliance 23 | 24 | combined heat and power total: 25 | parent: Commercial appliance -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=65.5.0", "setuptools_scm[toml]", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "nilm_metadata" 7 | dynamic = ["version"] 8 | requires-python = ">=3.11" 9 | dependencies = ["pandas~=2.2.0", "pyyaml"] 10 | authors = [{ name = "Jack Kelly", email = "jack.kelly@imperial.ac.uk" }] 11 | description = "Concatenate NILM metadata" 12 | readme = "README.md" 13 | license = { file = "LICENSE" } 14 | keywords = [ 15 | "smartmeters", 16 | "power", 17 | "electricity", 18 | "energy", 19 | "analytics", 20 | "redd", 21 | "disaggregation", 22 | "nilm", 23 | "nialm", 24 | ] 25 | classifiers = [ 26 | "Development Status :: 3 - Alpha", 27 | "Intended Audience :: Developers", 28 | "License :: OSI Approved :: Apache 2.0", 29 | "Programming Language :: Python :: 3", 30 | "Programming Language :: Python :: 3.11", 31 | "Programming Language :: Python :: 3.12", 32 | "Topic :: Scientific/Engineering :: Mathematics", 33 | ] 34 | 35 | [project.optional-dependencies] 36 | dev = ["pytest", "sphinx"] 37 | 38 | [project.urls] 39 | Repository = "https://github.com/nilmtk/nilm_metadata" 40 | 41 | [tool.setuptools] 42 | packages = ["nilm_metadata"] 43 | 44 | [tool.setuptools_scm] 45 | version_scheme = "guess-next-dev" 46 | local_scheme = "node-and-date" 47 | fallback_version = "0.0.1.dev0+unknown" 48 | 49 | [tool.pytest.ini_options] 50 | minversion = "6.0" 51 | addopts = "-ra -q" 52 | testpaths = ["nilm_metadata/tests"] 53 | -------------------------------------------------------------------------------- 
/nilm_metadata/central_metadata/appliance_types/cold.yaml: -------------------------------------------------------------------------------- 1 | cold appliance: 2 | parent: appliance 3 | categories: 4 | traditional: cold 5 | size: large 6 | distributions: 7 | rooms: 8 | - distribution_of_data: 9 | categories: [kitchen, utility, basement, garage] 10 | values: [0.5, 0.1, 0.1, 0.1] 11 | source: subjective 12 | 13 | fridge: 14 | parent: cold appliance 15 | components: 16 | - type: light 17 | control: [manual] 18 | do_not_merge_categories: true 19 | - type: compressor 20 | control: [thermostat] 21 | subtypes: 22 | - chest 23 | - upright 24 | additional_properties: 25 | fridge_volume: 26 | type: number 27 | description: litres 28 | synonyms: 29 | - fridge freezer 30 | - freezer 31 | 32 | freezer: 33 | parent: fridge 34 | do_not_inherit: [additional_properties] 35 | additional_properties: &freezerVolume 36 | freezer_volume: 37 | type: number 38 | description: litres 39 | synonyms: 40 | - fridge 41 | - fridge freezer 42 | 43 | fridge freezer: 44 | parent: fridge 45 | subtypes: 46 | - fridge on top 47 | - fridge on bottom 48 | - side-by-side 49 | additional_properties: *freezerVolume 50 | synonyms: 51 | - fridge 52 | - freezer 53 | 54 | air conditioner: 55 | parent: appliance 56 | synonyms: [air conditioning, AC] 57 | control: [manual, timer, thermostat] 58 | categories: 59 | google_shopping: 60 | - climate control 61 | additional_properties: 62 | heat_dump: 63 | enum: [ground, air, water] 64 | components: 65 | - type: compressor 66 | 67 | air handling unit: 68 | parent: fan 69 | -------------------------------------------------------------------------------- /nilm_metadata/tests/test_convert_yaml_to_hdf5.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import unittest 4 | from nilm_metadata.convert_yaml_to_hdf5 import ( 5 | _sanity_check_appliances, 6 | NilmMetadataError 7 | ) 8 | 9 | 10 | class TestConvertYamlToHdf5(unittest.TestCase): 11 | 12 | def test_sanity_check_appliances(self): 13 | def building(appliances): 14 | return { 15 | 'appliances': appliances, 16 | 'instance': 1, 17 | 'elec_meters': {1: {}} 18 | } 19 | 20 | # this should succeed with no errors 21 | _sanity_check_appliances(building([ 22 | {'instance': 1, 'type': 'fridge freezer', 'meters': [1]}])) 23 | 24 | BAD_APPLIANCES = [ 25 | {'type': 'fridge freezer', 'meters': [1]}, # no instance 26 | {'meters': [1]}, # no instance or type 27 | {}, # empty 28 | {'instance': 1, 'meters': [1]}, # no type 29 | {'instance': 1, 'type': 'fridge freezer'}, # no meters 30 | {'instance': 1, 'type': 'fridge freezer', 31 | 'meters': [2]}, # bad meter 32 | {'instance': 1, 'type': 'blah blah', 'meters': [1]}, # bad type 33 | {'instance': 2, 'type': 'fridge freezer', 34 | 'meters': [1]}, # bad instance 35 | ['blah'], # not a dict 36 | 'blah', # not a dict 37 | None # not a dict 38 | ] 39 | 40 | for bad_appliance in BAD_APPLIANCES: 41 | with self.assertRaises(NilmMetadataError): 42 | _sanity_check_appliances(building([bad_appliance])) 43 | 44 | 45 | if __name__ == '__main__': 46 | unittest.main() 47 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/components.yaml: -------------------------------------------------------------------------------- 1 | component: 2 | parent: null 3 | 4 | # ----------------- COMPONENTS ------------------------ # 5 | 6 | motor: 7 | parent: 
component 8 | categories: 9 | electrical: 10 | - single-phase induction motor 11 | 12 | #-------- PUMP --------- 13 | 14 | water pump: 15 | parent: motor 16 | 17 | #-------- COMPRESSOR --------- 18 | 19 | compressor: 20 | parent: motor 21 | 22 | #---------- HEATING ELEMENTS ----------- 23 | 24 | electric heating element: 25 | parent: component 26 | categories: 27 | electrical: 28 | - resistive 29 | 30 | electric air heater: 31 | parent: electric heating element 32 | 33 | electric water heater: 34 | parent: electric heating element 35 | additional_properties: 36 | volume: 37 | type: number 38 | description: litres 39 | 40 | #--------------- SCREEN ----------------- 41 | 42 | screen: 43 | parent: component 44 | description: > 45 | if the display technology is known then please specify 46 | 'flat screen' or 'CRT screen' 47 | additional_properties: 48 | diagonal_size: 49 | type: number 50 | description: diagonal size in inches 51 | minimum: 0 52 | exclusiveMinimum: true 53 | max_resolution: 54 | type: object 55 | properties: 56 | horizontal: { type: integer } 57 | vertical: { type: integer } 58 | display_format: 59 | type: string 60 | description: e.g. <1080p,PAL,NTSC,1080i> etc. 61 | max_refresh_rate: 62 | type: number 63 | description: Hz 64 | 65 | CRT screen: 66 | parent: screen 67 | 68 | flat screen: 69 | parent: screen 70 | additional_properties: 71 | display_technology: 72 | enum: [CRT, LCD, plasma, OLED, projector] 73 | backlight_technology: 74 | enum: [LED, cold cathode] 75 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/misc.yaml: -------------------------------------------------------------------------------- 1 | manual misc appliance: 2 | parent: appliance 3 | control: [manual] 4 | 5 | #--------- DOMESTIC TOOLS ------------- 6 | 7 | vacuum cleaner: 8 | parent: manual misc appliance 9 | synonyms: [vaccum cleaner] 10 | control: [manual] 11 | components: 12 | - type: motor 13 | 14 | sewing machine: 15 | parent: manual misc appliance 16 | components: 17 | - type: motor 18 | 19 | #-------------- CLOTHES, HEALTH AND BEAUTY ----------------- 20 | 21 | hair dryer: 22 | categories: 23 | electrical: 24 | - resistive 25 | parent: manual misc appliance 26 | min_on_duration: 15 27 | min_off_duration: 300 28 | 29 | hair straighteners: 30 | categories: 31 | electrical: 32 | - resistive 33 | parent: manual misc appliance 34 | 35 | sun bed: 36 | parent: manual misc appliance 37 | 38 | trouser press: 39 | parent: manual misc appliance 40 | 41 | clothes iron: 42 | categories: 43 | electrical: 44 | - resistive 45 | parent: manual misc appliance 46 | subtypes: 47 | - dry 48 | - steam 49 | 50 | 51 | motorised chair: 52 | parent: manual misc appliance 53 | 54 | massage bed: 55 | parent: manual misc appliance 56 | 57 | running machine: 58 | parent: manual misc appliance 59 | 60 | #------------ TOOLS ----------------- 61 | 62 | soldering iron: 63 | parent: manual misc appliance 64 | categories: 65 | electrical: 66 | - resistive 67 | 68 | drill: 69 | parent: manual misc appliance 70 | components: 71 | - type: motor 72 | 73 | saw: 74 | parent: manual misc appliance 75 | components: 76 | - type: motor 77 | 78 | 79 | #-------------- CLIMATE CONTROL -------------- 80 | 81 | dehumidifier: 82 | parent: appliance 83 | 84 | #------------------ MISC 85 | 86 | electric vehicle: 87 | parent: appliance 88 | 89 | unknown: 90 | parent: appliance 91 | 92 | sockets: 93 | parent: appliance 94 | 95 | subpanel: 96 | parent: appliance 97 | 98 | 
bouncy castle pump: 99 | parent: motor 100 | -------------------------------------------------------------------------------- /nilm_metadata/file_management.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, division 2 | from inspect import currentframe, getfile, getsourcefile 3 | from os.path import dirname, join, isdir, abspath 4 | import os 5 | from os import getcwd 6 | from sys import getfilesystemencoding 7 | import yaml 8 | 9 | 10 | 11 | def get_appliance_types_from_disk(): 12 | obj_filenames = _find_all_appliance_type_files() 13 | obj_cache = {} 14 | for filename in obj_filenames: 15 | with open(filename, 'rb') as fh: 16 | objs = yaml.safe_load(fh) 17 | obj_cache.update(objs) 18 | 19 | return obj_cache 20 | 21 | 22 | def _find_all_appliance_type_files(): 23 | filenames = _find_all_files_with_suffix('.yaml', 24 | _get_appliance_types_directory()) 25 | return filenames 26 | 27 | 28 | def _get_appliance_types_directory(): 29 | return _path_to_directory('central_metadata', 'appliance_types') 30 | 31 | 32 | def _find_all_files_with_suffix(suffix, directory): 33 | """Find all files with suffix, recursively.""" 34 | accumulator = [] 35 | 36 | for root, dirs, fnames in os.walk(directory): 37 | new_files = [os.path.join(root, fname) for fname in fnames 38 | if fname.endswith(suffix)] 39 | accumulator.extend(new_files) 40 | 41 | return accumulator 42 | 43 | 44 | def _path_to_directory(*args): 45 | path_to_directory = join(_get_module_directory(), *args) 46 | assert isdir(path_to_directory) 47 | return path_to_directory 48 | 49 | 50 | def _get_module_directory(): 51 | # Taken from http://stackoverflow.com/a/6098238/732596 52 | path_to_this_file = dirname(getfile(currentframe())) 53 | if not isdir(path_to_this_file): 54 | encoding = getfilesystemencoding() 55 | path_to_this_file = dirname(unicode(__file__, encoding)) 56 | if not isdir(path_to_this_file): 57 | abspath(getsourcefile(lambda _: None)) 58 | if not isdir(path_to_this_file): 59 | path_to_this_file = getcwd() 60 | assert isdir(path_to_this_file), path_to_this_file + ' is not a directory' 61 | return path_to_this_file 62 | -------------------------------------------------------------------------------- /nilm_metadata/tests/test_nilm_metadata.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import unittest 4 | from copy import deepcopy 5 | from six import iteritems 6 | from nilm_metadata.object_concatenation import get_appliance_types 7 | 8 | 9 | class TestNilmMetadata(unittest.TestCase): 10 | 11 | def test_appliance_types(self): 12 | types = get_appliance_types() 13 | 14 | # COLD APPLIANCE 15 | cold = types['cold appliance'] 16 | cold_answers = {'n_ancestors': 1, 17 | 'categories': {'traditional': 'cold', 'size': 'large'}} 18 | for k, v in iteritems(cold_answers): 19 | self.assertEqual(cold[k], v) 20 | 21 | # FRIDGE 22 | fridge = types['fridge'] 23 | fridge_answers = {'n_ancestors': 2, 24 | 'categories': { 25 | 'traditional': 'cold', 26 | 'size': 'large', 27 | 'electrical': ['single-phase induction motor']}, 28 | 'subtypes': ['chest', 'upright']} 29 | for k, v in iteritems(fridge_answers): 30 | self.assertEqual(fridge[k], v) 31 | 32 | fridge_dists = fridge['distributions'] 33 | self.assertEqual(list(fridge_dists), ['rooms']) 34 | rooms = fridge_dists['rooms'] 35 | self.assertEqual(len(rooms), 1) 36 | self.assertEqual(rooms[0]['distance'], 1) 37 | 
self.assertEqual(rooms[0]['from_appliance_type'], 'cold appliance') 38 | 39 | # FREEZER 40 | freezer = types['freezer'] 41 | freezer_dists = freezer['distributions'] 42 | self.assertEqual(list(freezer_dists), ['rooms']) 43 | rooms = freezer_dists['rooms'] 44 | self.assertEqual(len(rooms), 1) 45 | self.assertEqual(rooms[0]['distance'], 2) 46 | self.assertEqual(rooms[0]['from_appliance_type'], 'cold appliance') 47 | freezer_answers = deepcopy(fridge_answers) 48 | freezer_answers.update({'n_ancestors': 3}) 49 | for k, v in iteritems(freezer_answers): 50 | self.assertEqual(freezer[k], v) 51 | 52 | if __name__ == '__main__': 53 | unittest.main() 54 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/lighting.yaml: -------------------------------------------------------------------------------- 1 | light: 2 | description: | 3 | We distinguish between the 'luminaire' (light fitting) and the 4 | electric lamp(s) within the light fitting. 5 | 6 | See: 7 | - https://en.wikipedia.org/wiki/Luminaire 8 | - https://en.wikipedia.org/wiki/Lamp_%28electrical_component%29 9 | 10 | The light appliance takes its category from the lamp type fitted. 11 | 12 | parent: appliance 13 | 14 | categories: 15 | electrical: 16 | - lighting 17 | 18 | synonyms: [luminaire, light fitting] 19 | 20 | usual_components: [dimmer, lamp] 21 | 22 | additional_properties: 23 | main_room_light: 24 | type: boolean 25 | 26 | subtypes: 27 | - ceiling pendant 28 | - ceiling downlight 29 | - desk 30 | - wall 31 | - table 32 | - floor standing 33 | - floor level 34 | - decorative 35 | - night light 36 | - signage 37 | - outdoor 38 | - mood 39 | 40 | control: [manual, timer] 41 | 42 | # ----------------- LIGHTING COMPONENTS ------------------------ # 43 | 44 | lighting component: 45 | parent: component 46 | categories: 47 | electrical: 48 | - lighting 49 | 50 | # --------- LAMPS -------------------- # 51 | 52 | lamp: 53 | parent: lighting component 54 | 55 | # --------- INCANDESENT --------- # 56 | incandescent lamp: 57 | parent: lamp 58 | categories: 59 | electrical: 60 | - incandescent 61 | subtypes: 62 | - halogen 63 | 64 | # --------- INCANDESENT --------- # 65 | LED lamp: 66 | parent: lamp 67 | categories: 68 | electrical: 69 | - LED 70 | 71 | # --------- FLUORESCENT --------- # 72 | fluorescent lamp: 73 | parent: lamp 74 | categories: 75 | electrical: 76 | - fluorescent 77 | 78 | linear fluorescent lamp: 79 | parent: fluorescent lamp 80 | categories: 81 | electrical: 82 | - linear 83 | synonyms: [ strip light ] 84 | 85 | compact fluorescent lamp: 86 | parent: fluorescent lamp 87 | categories: 88 | electrical: 89 | - compact 90 | synonyms: [ CFL ] 91 | 92 | # --------- ARC --------- # 93 | arc lamp: 94 | parent: lamp 95 | 96 | # --------- GAS DISCHARGE --------- # 97 | gas discharge lamp: 98 | parent: lamp 99 | subtypes: 100 | - mercury-vapor 101 | - metal-halide 102 | - sodium-vapor 103 | - neon 104 | 105 | # --------- DIMMER --------- # 106 | dimmer: 107 | parent: lighting component 108 | additional_properties: 109 | number_of_dimmer_levels: 110 | type: integer 111 | description: not including OFF 112 | 113 | subtypes: 114 | - TRIAC 115 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/ict.yaml: -------------------------------------------------------------------------------- 1 | ICT appliance: 2 | parent: appliance 3 | categories: 4 | traditional: ICT 5 | size: small 6 | 
electrical: 7 | - SMPS 8 | google_shopping: 9 | - electronics 10 | 11 | #------------- COMPUTERS ------------------------ 12 | 13 | computer: 14 | parent: ICT appliance 15 | 16 | categories: 17 | google_shopping: 18 | - computers 19 | 20 | additional_properties: 21 | cpu: { type: string } 22 | 23 | control: [manual, timer] 24 | 25 | server computer: 26 | parent: computer 27 | 28 | laptop computer: 29 | parent: computer 30 | categories: 31 | google_shopping: 32 | - laptops 33 | 34 | desktop computer: 35 | parent: computer 36 | 37 | HTPC: 38 | parent: computer 39 | synonyms: [Home Theatre PC] 40 | 41 | computer monitor: 42 | parent: ICT appliance 43 | usual_components: [CRT screen, flat screen] 44 | 45 | 46 | #-------------- PRINTERS, SCANNER, FAX, PHOTOCOPIER, MFD ---------------- 47 | 48 | printer: 49 | parent: ICT appliance 50 | categories: 51 | google_shopping: 52 | - "print, copy, scan and fax" 53 | - printers and copiers 54 | - printers 55 | description: > 56 | if known then please specify a 'small printer' or 'large printer' 57 | as the two have quite different power consumption profiles. 58 | subtypes: 59 | - laser 60 | - LED 61 | - inkjet 62 | - dot matrix 63 | 64 | small printer: 65 | parent: printer 66 | 67 | large printer: 68 | parent: printer 69 | 70 | scanner: 71 | parent: ICT appliance 72 | 73 | photocopier: 74 | parent: ICT appliance 75 | categories: 76 | google_shopping: 77 | - "print, copy, scan and fax" 78 | - printers and copiers 79 | - copiers 80 | 81 | fax: 82 | parent: ICT appliance 83 | categories: 84 | google_shopping: 85 | - "print, copy, scan and fax" 86 | - fax machines 87 | 88 | multi-function device: 89 | parent: ICT appliance 90 | usual_components: [fax, printer, photocopier, scanner] 91 | 92 | #-------------- CHARGERS -------------------- 93 | 94 | charger: 95 | parent: ICT appliance 96 | 97 | mobile phone charger: 98 | parent: charger 99 | 100 | wireless phone charger: 101 | parent: charger 102 | 103 | tablet computer charger: 104 | parent: charger 105 | 106 | #------------- ICT MISC ------------- 107 | 108 | external hard disk: 109 | parent: ICT appliance 110 | 111 | network attached storage: 112 | parent: ICT appliance 113 | synonyms: [NAS] 114 | 115 | broadband router: 116 | parent: ICT appliance 117 | 118 | ethernet switch: 119 | parent: ICT appliance 120 | subtypes: 121 | - 1gigabit 122 | - 100megabit 123 | 124 | ethernet hub: 125 | parent: ethernet switch 126 | 127 | modem: 128 | parent: ethernet switch 129 | 130 | USB hub: 131 | parent: ICT appliance 132 | 133 | UPS: 134 | parent: ICT appliance 135 | synonyms: [uninterruptible power supply] 136 | 137 | paper shredder: 138 | parent: ICT appliance 139 | 140 | -------------------------------------------------------------------------------- /nilm_metadata/tests/test_object_concatenation.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import print_function 3 | import unittest 4 | from nilm_metadata.object_concatenation import ( 5 | recursively_update_dict, 6 | _concatenate_complete_object 7 | ) 8 | 9 | 10 | class TestObjectConcatenation(unittest.TestCase): 11 | 12 | def testrecursively_update_dict(self): 13 | d1 = {} 14 | d2 = {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20} } 15 | recursively_update_dict(d1,d2) 16 | self.assertEqual(d1, d2) 17 | 18 | d1 = {'a':-1, 'b':-3, 'c': {}} 19 | d2 = {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20} } 20 | recursively_update_dict(d1,d2) 21 | self.assertEqual(d1, d2) 22 | 23 | d1 = {'a':-1, 'b':-3, 'c': 
{}, 'list': [1,2,3]} 24 | d2 = {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20}, 'list': [4,5,6] } 25 | recursively_update_dict(d1,d2) 26 | self.assertEqual(d1, {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20}, 'list': [1,2,3,4,5,6] }) 27 | 28 | d1 = {'a':-1, 'b':-3} 29 | d2 = {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20} } 30 | recursively_update_dict(d1,d2) 31 | self.assertEqual(d1, d2) 32 | 33 | d1 = {'a':-1, 'b':-3, 'c': {'ca':-10, 'cc': 30} } 34 | d2 = {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20} } 35 | recursively_update_dict(d1,d2) 36 | self.assertEqual(d1, {'a':1, 'b':2, 'c': {'ca':10, 'cb': 20, 'cc': 30} }) 37 | 38 | def test_distance(self): 39 | objects = { 40 | "a": { 41 | "distributions": { 42 | "on_power": [ 43 | {"description": "a"} 44 | ], 45 | "on_duration": [ 46 | {"description": "a"} 47 | ] 48 | } 49 | }, 50 | "b": { 51 | "parent": "a", 52 | "distributions": { 53 | "on_power": [ 54 | {"description": "b"} 55 | ] 56 | } 57 | }, 58 | "c": { 59 | "parent": "b", 60 | "distributions": { 61 | "on_power": [ 62 | {"description": "c"} 63 | ] 64 | } 65 | } 66 | } 67 | obj = _concatenate_complete_object('c', objects) 68 | on_power = obj['distributions']['on_power'] 69 | self.assertEqual(on_power[0], {'distance':2, 'description': 'a', 70 | 'from_appliance_type': 'a'}) 71 | self.assertEqual(on_power[1], {'distance':1, 'description': 'b', 72 | 'from_appliance_type': 'b'}) 73 | self.assertEqual(on_power[2], {'distance':0, 'description': 'c', 74 | 'from_appliance_type': 'c'}) 75 | on_duration = obj['distributions']['on_duration'] 76 | self.assertEqual(on_duration[0], {'distance':2, 'description': 'a', 77 | 'from_appliance_type': 'a'}) 78 | 79 | if __name__ == '__main__': 80 | unittest.main() 81 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/consumer_electronics.yaml: -------------------------------------------------------------------------------- 1 | CE appliance: 2 | parent: appliance 3 | categories: &CECategories 4 | traditional: consumer electronics 5 | size: small 6 | google_shopping: 7 | - electronics 8 | distributions: 9 | rooms: 10 | - distribution_of_data: 11 | categories: [lounge, bedroom, kitchen] 12 | values: [0.6, 0.2, 0,1] 13 | source: subjective 14 | 15 | #------------- DISPLAYS ------------------------ 16 | 17 | television: 18 | parent: CE appliance 19 | control: [manual] 20 | usual_components: [flat screen, CRT screen] 21 | subtypes: 22 | - smart 23 | additional_properties: 24 | integrated_av_sources: &avSources 25 | type: array 26 | description: select all that are relevant 27 | items: 28 | enum: [ "digital TV tuner", "analogue TV tuner", 29 | "videodisc", "CD", "DVR", "DVD", "blu-ray", 30 | "tape", 31 | "analogue radio", "digital radio", 32 | "satellite", "cable", "broadband", "fibre" ] 33 | 34 | 35 | projector: 36 | parent: television 37 | do_not_inherit: [usual_components, subtypes] 38 | 39 | #--------------- AV SOURCES / AUDIO PLAYERS ------------------- 40 | 41 | games console: 42 | parent: computer 43 | categories: 44 | traditional: consumer electronics 45 | 46 | set top box: 47 | parent: CE appliance 48 | 49 | AV recorder: 50 | parent: CE appliance 51 | subtypes: 52 | - digital video recorder 53 | - DVD recorder 54 | - blu-ray recorder 55 | 56 | audio system: 57 | parent: CE appliance 58 | usual_components: [audio amplifier, CD player, radio] 59 | 60 | audio amplifier: 61 | parent: CE appliance 62 | usual_components: [DAC, radio] 63 | subtypes: 64 | - home theatre 65 | 66 | active speaker: 67 | parent: audio 
amplifier 68 | 69 | active subwoofer: 70 | parent: active speaker 71 | 72 | DAC: 73 | synonyms: [Digital to Analogue Converter] 74 | parent: CE appliance 75 | 76 | DVD player: 77 | parent: CE appliance 78 | 79 | CD player: 80 | parent: CE appliance 81 | 82 | record deck: 83 | parent: CE appliance 84 | 85 | cassette deck: 86 | parent: CE appliance 87 | 88 | blu-ray player: 89 | parent: CE appliance 90 | 91 | radio: 92 | parent: CE appliance 93 | subtypes: 94 | - analogue 95 | - DAB 96 | 97 | #------------ CE MISC --------------- 98 | 99 | RF booster: 100 | parent: CE appliance 101 | 102 | video sender: 103 | parent: CE appliance 104 | 105 | digital picture frame: 106 | parent: CE appliance 107 | 108 | musical instrument: 109 | parent: CE appliance 110 | 111 | baby monitor: 112 | parent: CE appliance 113 | subtypes: 114 | - baby unit 115 | - parent unit 116 | 117 | door bell: 118 | parent: CE appliance 119 | 120 | security alarm: 121 | parent: CE appliance 122 | 123 | smoke alarm: 124 | parent: CE appliance 125 | 126 | fan: 127 | parent: motor 128 | categories: *CECategories 129 | subtypes: 130 | - desk 131 | - ceiling 132 | - extractor 133 | - single-room MVHR # mechanical ventilation with heat recovery 134 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/wet.yaml: -------------------------------------------------------------------------------- 1 | wet appliance: 2 | parent: appliance 3 | categories: 4 | traditional: wet 5 | size: large 6 | google_shopping: 7 | - laundry appliances 8 | control: [manual] 9 | distributions: 10 | rooms: 11 | - distribution_of_data: 12 | categories: [kitchen, utility, basement, garage] 13 | values: [0.3, 0.3, 0.2, 0.1] 14 | source: subjective 15 | 16 | #------------- CLOTHES WASHING AND DRYING ------------------------ 17 | 18 | spin dryer: 19 | parent: wet appliance 20 | subtypes: [top-loader, front-loader] 21 | control: [manual, timer] 22 | components: 23 | - type: motor 24 | description: drum motor 25 | - type: water pump 26 | 27 | tumble dryer: 28 | parent: spin dryer 29 | components: &dryerElement 30 | - type: electric air heater 31 | description: clothes drying heating element 32 | 33 | washing machine: 34 | parent: spin dryer 35 | components: 36 | - type: electric water heater 37 | min_on_duration: 600 38 | min_off_duration: 300 39 | synonyms: 40 | - washer dryer 41 | 42 | washer dryer: 43 | parent: washing machine 44 | components: *dryerElement 45 | synonyms: 46 | - washing machine 47 | 48 | #------------ DISH WASHING -------------------------- 49 | 50 | dish washer: 51 | parent: wet appliance 52 | min_on_duration: 600 53 | min_off_duration: 1800 54 | components: 55 | - type: electric air heater 56 | - type: electric water heater 57 | distributions: 58 | on_power: 59 | - model: 60 | distribution_name: summary stats 61 | min: 1800 62 | max: 2500 63 | source: empirical from publication 64 | related_documents: &Stamminger2008 65 | - > 66 | P. D. R. Stamminger, Synergy potential of smart appliances, 67 | in smart domestic appliances in sustainable energy systems (Smart-A), 68 | University of Bonn, Tech. Rep., 2008. 
69 | specific_to: { country: GB } 70 | 71 | - type: motor 72 | distributions: 73 | on_duration: 74 | - model: 75 | distribution_name: summary stats 76 | min: 900 # 15 mins 77 | mean: 4500 # 75 mins 78 | max: 10800 # 3 hours 79 | source: empirical from publication 80 | related_documents: *Stamminger2008 81 | 82 | dishwasher: 83 | parent: dish washer 84 | 85 | waste disposal unit: 86 | parent: wet appliance 87 | 88 | #--------------- WATER PUMPS ---------------- 89 | 90 | shower pump: 91 | parent: water pump 92 | categories: 93 | google_shopping: 94 | - plumbing 95 | - plumbing fixtures 96 | - shower 97 | synonyms: [power shower] 98 | description: > 99 | note that this does not for an electrically heated shower. For 100 | that, please use 'electric shower heater' 101 | 102 | acquarium pump: 103 | parent: water pump 104 | 105 | swimming pool pump: 106 | parent: water pump 107 | 108 | pond pump: 109 | parent: water pump 110 | 111 | hot tub pump: 112 | parent: water pump 113 | 114 | jacuzzi pump: 115 | parent: water pump 116 | 117 | garden sprinkler: 118 | parent: appliance 119 | 120 | water filter: 121 | parent: appliance 122 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/cooking.yaml: -------------------------------------------------------------------------------- 1 | #----------- SMALL COOKING APPLIANCES --------------# 2 | 3 | small cooking appliance: 4 | parent: appliance 5 | categories: 6 | traditional: cooking 7 | size: small 8 | google_shopping: 9 | - kitchen and dining 10 | - kitchen appliances 11 | control: [manual] 12 | distributions: 13 | rooms: 14 | - distribution_of_data: 15 | categories: [kitchen, utility] 16 | values: [0.7, 0.1] 17 | source: subjective 18 | 19 | #------------- small resistive cooking appliances ---- 20 | 21 | small resistive cooking appliance: 22 | parent: small cooking appliance 23 | categories: 24 | electrical: 25 | - resistive 26 | 27 | kettle: 28 | parent: small resistive cooking appliance 29 | 30 | toaster: 31 | parent: small resistive cooking appliance 32 | 33 | toasted sandwich maker: 34 | parent: small resistive cooking appliance 35 | 36 | fryer: 37 | parent: small resistive cooking appliance 38 | 39 | coffee maker: 40 | parent: small resistive cooking appliance 41 | min_on_duration: 60 42 | min_off_duration: 600 43 | 44 | rice cooker: 45 | parent: small resistive cooking appliance 46 | 47 | food steamer: 48 | parent: small resistive cooking appliance 49 | 50 | bottle warmer: 51 | parent: small resistive cooking appliance 52 | 53 | steriliser: 54 | parent: small resistive cooking appliance 55 | 56 | plate warmer: 57 | parent: small resistive cooking appliance 58 | 59 | slow cooker: 60 | parent: small resistive cooking appliance 61 | 62 | #--------------- small non-resistive cooking appliances 63 | 64 | microwave: 65 | parent: small cooking appliance 66 | do_not_inherit: [categories] 67 | categories: 68 | traditional: cooking 69 | size: small 70 | electrical: 71 | - power electronics 72 | - passive PFC 73 | 74 | food processor: 75 | parent: small cooking appliance 76 | do_not_inherit: [categories] 77 | synonyms: [food mixer] 78 | categories: 79 | traditional: cooking 80 | size: small 81 | electrical: 82 | - single-phase induction motor 83 | - capacitor start-run 84 | - constant torque 85 | 86 | kitchen aid: 87 | parent: food processor 88 | 89 | yoghurt maker: 90 | parent: small cooking appliance 91 | control: [timer] 92 | 93 | breadmaker: 94 | categories: 95 | 
google_shopping: 96 | - breadmakers 97 | parent: small cooking appliance 98 | control: [timer] 99 | components: 100 | - type: electric heating element 101 | - type: motor 102 | 103 | #------------------ STOVE, COOKER, OVEN, GRILL ---------------# 104 | 105 | cooker: 106 | parent: small cooking appliance 107 | do_not_inherit: [categories] 108 | categories: 109 | traditional: cooking 110 | size: large 111 | control: [timer] # append 'timer' to 'manual' from small cooking appliance 112 | usual_components: [stove, oven, grill] 113 | additional_properties: 114 | fuel: 115 | enum: [natural gas, coal, wood, oil, LPG] 116 | description: > 117 | if all components use the same fuel then specify the fuel once 118 | for the cooker. Otherwise, if components use different fuels, 119 | then do not specify the fuel for the cooker but do specify fuels 120 | for each component. 121 | 122 | stove: 123 | parent: cooker 124 | do_not_inherit: [usual_components] 125 | description: a non-electric stove or a stove with unknown heating source 126 | synonyms: [hob] 127 | 128 | electric stove: 129 | parent: stove 130 | description: a stove where the cooking heat is provided from electricity 131 | do_not_inherit: [additional_properties] 132 | categories: 133 | electrical: 134 | - resistive 135 | 136 | oven: 137 | parent: stove 138 | description: a non-electric oven or a oven with unknown heating source 139 | 140 | electric oven: 141 | parent: electric stove 142 | description: an oven where the cooking heat is provided from electricity 143 | 144 | grill: 145 | parent: stove 146 | description: a non-electric grill or a grill with unknown heating source 147 | 148 | electric grill: 149 | parent: electric stove 150 | description: a grill where the cooking heat is provided from electricity 151 | -------------------------------------------------------------------------------- /nilm_metadata/central_metadata/appliance_types/heating.yaml: -------------------------------------------------------------------------------- 1 | heating appliance: 2 | parent: appliance 3 | categories: 4 | traditional: heating 5 | size: large 6 | 7 | #------------- BOILERS ------------------------ 8 | 9 | boiler: 10 | 11 | description: all boilers except for electric boilers 12 | 13 | parent: heating appliance 14 | 15 | on_power_threshold: 40 16 | 17 | synonyms: [furnace] 18 | 19 | # Categories of the child object are appended 20 | # to existing categories in the parent. 21 | categories: 22 | google_shopping: 23 | - climate control 24 | - furnaces and boilers 25 | 26 | # Here we specify that boilers have a component 27 | # which is itself an object whose parent 28 | # is `water pump` 29 | components: 30 | - type: water pump 31 | 32 | # Boilers have a property which most other appliances 33 | # do not have: a fuel source. We specify additional 34 | # properties using the JSON Schema syntax. 35 | additional_properties: 36 | fuel: 37 | enum: [natural gas, coal, wood, oil, LPG] 38 | 39 | subtypes: 40 | - combi 41 | - system 42 | 43 | # We can specify the different mechanisms that 44 | # control the boiler. This is useful, for example, 45 | # if we want to find all appliances which 46 | # must be manually controlled (e.g. toasters) 47 | control: [manual, timer, thermostat] 48 | 49 | # We can also declare prior knowledge about boilers. 
50 | # For example, we know that boilers tend to be in 51 | # bathrooms, utility rooms or kitchens 52 | distributions: 53 | room: 54 | - distribution_of_data: 55 | categories: [bathroom, utility, kitchen] 56 | values: [0.3, 0.2, 0.2] 57 | # If the values do not add to 1 then the assumption 58 | # is that the remaining probability mass is distributed equally to 59 | # all other rooms. 60 | source: subjective # These values are basically guesses! 61 | 62 | #------------ ELECTRIC SPACE HEATERS 63 | 64 | electric furnace: 65 | parent: boiler 66 | description: a boiler where the heat is provided from electricity. 67 | components: 68 | - type: electric water heater 69 | synonyms: [electric boiler] 70 | 71 | electric space heater: 72 | parent: electric air heater 73 | control: [manual, timer, thermostat] 74 | categories: 75 | traditional: heating 76 | 77 | #------- WATER HEATERS ---------- 78 | 79 | electric water heating appliance: 80 | parent: electric water heater 81 | control: [manual, timer, thermostat] 82 | categories: 83 | traditional: heating 84 | 85 | immersion heater: 86 | parent: electric water heating appliance 87 | 88 | electric swimming pool heater: 89 | parent: electric water heating appliance 90 | 91 | electric shower heater: 92 | parent: electric water heating appliance 93 | distributions: 94 | on_power: 95 | - model: 96 | distribution_name: summary stats 97 | min: 4000 98 | max: 9000 99 | source: empirical from publication 100 | related_documents: &Stamminger2008 101 | - > 102 | P. D. R. Stamminger, Synergy potential of smart appliances, 103 | in smart domestic appliances in sustainable energy systems (Smart-A), 104 | University of Bonn, Tech. Rep., 2008. 105 | specific_to: { country: GB } 106 | 107 | 108 | electric hot tub heater: 109 | parent: electric water heating appliance 110 | 111 | #------- SOLAR THERMAL --------------------------- 112 | 113 | solar thermal pumping station: 114 | parent: heating appliance 115 | categories: 116 | google_shopping: 117 | - renewable energy 118 | - solar energy 119 | - solar panels 120 | components: 121 | - type: water pump 122 | control: [manual, sunlight, timer] 123 | on_power_threshold: 30 124 | 125 | #------- HEAT PUMP --------------------------- 126 | 127 | heat pump: 128 | parent: heating appliance 129 | control: [manual, timer, thermostat] 130 | categories: 131 | google_shopping: 132 | - climate control 133 | additional_properties: 134 | heat_source: 135 | enum: [ground, air, water] 136 | 137 | #-------------- MISC ---------------- 138 | 139 | electric blanket: 140 | parent: appliance 141 | control: [manual, timer] 142 | distributions: 143 | room: 144 | - distribution_of_data: 145 | categories: [bedroom] 146 | values: [0.8] 147 | source: subjective 148 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = build 9 | 10 | # User-friendly check for sphinx-build 11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) 12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) 13 | endif 14 | 15 | # Internal variables. 16 | PAPEROPT_a4 = -D latex_paper_size=a4 17 | PAPEROPT_letter = -D latex_paper_size=letter 18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 19 | # the i18n builder cannot share the environment and doctrees with the others 20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source 21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 
82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NILMMetadata.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NILMMetadata.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/NILMMetadata" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/NILMMetadata" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. 
The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /nilm_metadata/object_concatenation.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, division 2 | from copy import deepcopy 3 | from six import iteritems 4 | from nilm_metadata.file_management import get_appliance_types_from_disk 5 | 6 | 7 | def get_appliance_types(): 8 | """ 9 | Returns 10 | ------- 11 | dict of all appliance types. Fully concatenated and with components 12 | recursively resolved. 13 | """ 14 | appliance_types_from_disk = get_appliance_types_from_disk() 15 | appliance_types = _concatenate_all_appliance_types( 16 | appliance_types_from_disk) 17 | return appliance_types 18 | 19 | 20 | class ObjectConcatenationError(Exception): 21 | pass 22 | 23 | 24 | def _concatenate_all_appliance_types(appliance_types_from_disk): 25 | concatenated = {} 26 | for appliance_type_name in appliance_types_from_disk: 27 | concatenated_appliance_type = _concatenate_complete_appliance_type( 28 | appliance_type_name, appliance_types_from_disk) 29 | concatenated[appliance_type_name] = concatenated_appliance_type 30 | 31 | return concatenated 32 | 33 | 34 | def _concatenate_complete_appliance_type( 35 | appliance_type_name, appliance_types_from_disk): 36 | 37 | concatenated_app_type = _concatenate_complete_object( 38 | appliance_type_name, appliance_types_from_disk) 39 | categories = concatenated_app_type.setdefault('categories', {}) 40 | 41 | # Instantiate components recursively 42 | components = concatenated_app_type.get('components', []) 43 | for i, component_appliance_obj in enumerate(components): 44 | component_type_name = component_appliance_obj['type'] 45 | component_type_obj = _concatenate_complete_appliance_type( 46 | component_type_name, appliance_types_from_disk) 47 | recursively_update_dict(component_appliance_obj, component_type_obj) 48 | components[i] = component_appliance_obj 49 | 50 | # Now merge component categories into owner appliance type object 51 | if not component_appliance_obj.get('do_not_merge_categories'): 52 | recursively_update_dict( 53 | categories, component_appliance_obj.get('categories', {})) 54 | 55 | return concatenated_app_type 56 | 57 | 58 | def _init_distributions(appliance_type): 59 | for list_of_dists in appliance_type.get('distributions', {}).values(): 60 | for dist in list_of_dists: 61 | dist.update({'from_appliance_type': appliance_type['type'], 62 | 'distance': 0}) 63 | 64 | 65 | def _concatenate_complete_object(object_name, object_cache): 66 | """ 67 | Returns 68 | ------- 69 | merged_object: dict. 70 | If `child_object` is None then merged_object will be the object 71 | identified by `object_name` merged with its ancestor tree. 72 | If `child_object` is not None then it will be merged as the 73 | most-derived object (i.e. a child of object_name). This is 74 | useful for appliances. 75 | """ 76 | ancestors = _get_ancestors(object_name, object_cache) 77 | 78 | # Now descend from super-object downwards, 79 | # collecting and updating properties as we go. 
80 | merged_object = deepcopy(ancestors[0]) 81 | _init_distributions(merged_object) 82 | merged_object['n_ancestors'] = len(ancestors) - 1 83 | 84 | for i, next_child in enumerate(ancestors[1:]): 85 | # Remove properties that the child does not want to inherit 86 | do_not_inherit = next_child.get('do_not_inherit', []) 87 | do_not_inherit.extend(['synonyms', 'description', 'do_not_inherit']) 88 | for property_to_not_inherit in do_not_inherit: 89 | merged_object.pop(property_to_not_inherit, None) 90 | 91 | # Now, for each probability distribution, we tag it with a 92 | # 'distance' property, showing how far away it is from 93 | # the most derived object. 94 | distributions = merged_object.get('distributions', {}) 95 | for list_of_dists in distributions.values(): 96 | for dist in list_of_dists: 97 | dist['distance'] += 1 98 | 99 | _init_distributions(next_child) 100 | 101 | recursively_update_dict(merged_object, next_child) 102 | 103 | return merged_object 104 | 105 | 106 | def _get_ancestors(appliance_type_name, appliance_types_from_disk): 107 | """ 108 | Arguments 109 | --------- 110 | appliance_type_name: string 111 | 112 | Returns 113 | ------- 114 | A list of dicts where each dict is an object. The first 115 | dict is the highest on the inheritance hierarchy; the last dict 116 | is the object with type == `appliance_type_name`. 117 | 118 | Raises 119 | ------ 120 | ObjectConcatenationError 121 | """ 122 | if appliance_type_name is None: 123 | return [] 124 | 125 | # walk the inheritance tree from 126 | # bottom upwards (which is the wrong direction 127 | # for actually doing inheritance) 128 | try: 129 | current_appliance_type_dict = appliance_types_from_disk[ 130 | appliance_type_name] 131 | except KeyError as e: 132 | msg = "'{}' not found!".format(appliance_type_name) 133 | raise ObjectConcatenationError(msg) 134 | 135 | current_appliance_type_dict['type'] = appliance_type_name 136 | ancestors = [current_appliance_type_dict] 137 | 138 | while current_appliance_type_dict.get('parent'): 139 | parent_type = current_appliance_type_dict['parent'] 140 | try: 141 | current_appliance_type_dict = appliance_types_from_disk[ 142 | parent_type] 143 | except KeyError as e: 144 | msg = ("Object '{}' claims its parent is '{}' but that" 145 | " object is not recognised!" 146 | .format(current_appliance_type_dict['type'], e)) 147 | raise ObjectConcatenationError(msg) 148 | 149 | current_appliance_type_dict['type'] = parent_type 150 | ancestors.append(current_appliance_type_dict) 151 | 152 | ancestors.reverse() 153 | return ancestors 154 | 155 | 156 | def recursively_update_dict(dict_to_update, source_dict): 157 | """ Recursively extends lists in dict_to_update with lists in source_dict, 158 | and updates dicts. 159 | 160 | This function is required because Python's `dict.update()` function 161 | does not descend into dicts within dicts. 162 | 163 | Parameters 164 | ---------- 165 | dict_to_update, source_dict : dict 166 | Updates `dict_to_update` in place. 
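    Examples
    --------
    Nested dicts are merged key-by-key and lists are extended, e.g.::

        d1 = {'a': 1, 'c': {'x': 1}}
        d2 = {'b': 2, 'c': {'y': 2}}
        recursively_update_dict(d1, d2)
        # d1 now contains {'a': 1, 'b': 2, 'c': {'x': 1, 'y': 2}}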
167 | """ 168 | source_dict = deepcopy(source_dict) 169 | for key_from_source, value_from_source in iteritems(source_dict): 170 | try: 171 | value_to_update = dict_to_update[key_from_source] 172 | except KeyError: 173 | dict_to_update[key_from_source] = value_from_source 174 | else: 175 | if isinstance(value_from_source, dict): 176 | assert isinstance(value_to_update, dict) 177 | recursively_update_dict(value_to_update, value_from_source) 178 | elif isinstance(value_from_source, list): 179 | assert isinstance(value_to_update, list) 180 | value_to_update.extend(value_from_source) 181 | if not any([isinstance(v, dict) for v in value_to_update]): 182 | dict_to_update[key_from_source] = list( 183 | set(value_to_update)) 184 | else: 185 | dict_to_update[key_from_source] = value_from_source 186 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | NILM METADATA 2 | ============= 3 | 4 | NILM Metadata (where 'NILM' stands for 'non-instrusive load 5 | monitoring') is a metadata framework for describing appliances, meters, 6 | measurements, buildings and datasets. 7 | 8 | Please jump in and add to or modify the schema and documentation! 9 | 10 | ### Documentation 11 | 12 | The 13 | [documentation is available online](http://nilm-metadata.readthedocs.org). 14 | 15 | If you're new to NILM Metadata then please read this README and then 16 | dive into the [tutorial](http://nilm-metadata.readthedocs.org/en/latest/tutorial.html) 17 | to find out how 18 | to see a worked example. 19 | 20 | Or, if you are already familiar with NILM Metadata then perhaps you 21 | want direct access to the full description of the 22 | "[Dataset metadata](http://nilm-metadata.readthedocs.org/en/latest/dataset_metadata.html)". 23 | 24 | ## There are two sides to NILM Metadata: 25 | 26 | ### 1) A schema describing energy datasets 27 | 28 | Modelled objects include: 29 | 30 | * electricity meters (whole-home and individual appliance meters) 31 | * wiring hierarchy of meters 32 | * a controlled vocabulary for measurement names 33 | * description of pre-processing applied 34 | * storage of pre-processed statistics 35 | * domestic appliances 36 | * a controlled vocabulary for appliance names 37 | * each appliance can contain any number of components (e.g. a 38 | light fitting can contain multiple lamps and a dimmer) 39 | * a list of time periods when each appliance was active 40 | * manufacturer, model, nominal power consumption etc. 41 | * a mapping of which appliances are connected to which meters 42 | * buildings 43 | * datasets 44 | 45 | The metadata itself can be either 46 | [YAML](http://en.wikipedia.org/wiki/YAML) or JSON. 47 | 48 | ### 2) Central metadata 49 | 50 | Common info about appliances is stored in NILM Metadata. This includes: 51 | 52 | * Categories for each appliance type 53 | * prior knowledge about the distribution of variables such as: 54 | * on power 55 | * on duration 56 | * usage in terms of hour per day 57 | * appliance correlations (e.g. that the TV is usually on if the 58 | games console is on) 59 | * valid additional properties for each appliance 60 | * mapping from country codes to nominal mains voltage ranges 61 | 62 | The common info about appliances uses a simple but powerful 63 | inheritance mechanism to allow appliances to inherit from a other 64 | appliances. For example, `laptop computer` is a specialisation of 65 | `computer` and the two share several properties (e.g. 
72 | ### Python utilities
73 | 
74 | NILM Metadata comes with a Python module which collects all
75 | ApplianceTypes in `central_metadata/appliance_types/*.yaml`,
76 | performs inheritance, instantiates components and
77 | returns a dictionary where each key is an ApplianceType name and each
78 | value is an ApplianceType dict.  Here's how to use it:
79 | 
80 | ```python
81 | from nilm_metadata import get_appliance_types
82 | appliance_types = get_appliance_types()
83 | ```
84 | 
85 | NILM Metadata also comes with a `convert_yaml_to_hdf5()` function
86 | which will convert a YAML instance of NILM Metadata to the HDF5 file
87 | format.
88 | 
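For example (the paths below are illustrative; point them at your own
dataset):

```python
from nilm_metadata import convert_yaml_to_hdf5

# Reads dataset.yaml, meter_devices.yaml and the building*.yaml files
# from the metadata directory and writes the metadata into the HDF5 file.
convert_yaml_to_hdf5('/data/my_dataset/metadata', '/data/my_dataset.h5')
```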
89 | ## Research paper describing NILM metadata
90 | 
91 | The following paper describes NILM metadata in detail:
92 | 
93 | * Jack Kelly and William Knottenbelt (2014). **Metadata for Energy
94 |   Disaggregation**. In The 2nd IEEE International Workshop on Consumer
95 |   Devices and Systems (CDS 2014) in Västerås, Sweden.
96 |   arXiv:[1403.5946](http://arxiv.org/abs/1403.5946)
97 |   DOI:[10.1109/COMPSACW.2014.97](http://dx.doi.org/10.1109/COMPSACW.2014.97)
98 | 
99 | Bibtex:
100 | 
101 | ```
102 | @inproceedings{NILM_Metadata,
103 |   title = {{Metadata for Energy Disaggregation}},
104 |   author = {Kelly, Jack and Knottenbelt, William},
105 |   year = {2014},
106 |   month = jul,
107 |   address = {V{\" a}ster{\aa}s, Sweden},
108 |   booktitle = {The 2nd IEEE International Workshop on Consumer Devices and Systems (CDS 2014)},
109 |   archivePrefix = {arXiv},
110 |   arxivId = {1403.5946},
111 |   eprint = {1403.5946},
112 |   doi = {10.1109/COMPSACW.2014.97}
113 | }
114 | ```
115 | 
116 | Please cite this paper if you use NILM metadata in academic research.
117 | But please also be aware that the online documentation is more
118 | up-to-date than the paper.
119 | 
120 | 
121 | ## JSON Schema has been deprecated
122 | 
123 | In
124 | [version 0.1 of the schema](https://github.com/nilmtk/nilm_metadata/tree/v0.1.0),
125 | we wrote a very comprehensive (and complex) schema using
126 | [JSON Schema](http://json-schema.org/) in order to automate the
127 | validation of metadata instances.  JSON Schema is a lovely language
128 | and can capture everything we need but, because our metadata is quite
129 | comprehensive, we found that using JSON Schema was a significant time
130 | drain and made it hard to move quickly and add new ideas to the
131 | metadata.  As such, when we moved from v0.1 to v0.2, the JSON Schema
132 | was dropped.  Please use the
133 | [human-readable documentation](http://nilm-metadata.readthedocs.org)
134 | instead.  If there is a real desire for automated validation then we
135 | could resurrect the JSON Schema, but it is a fair amount of work to
136 | maintain.
137 | 
138 | However, there are YAML validators freely available to make sure you are
139 | using the correct YAML format.  For example: [YAMLlint](http://www.yamllint.com)
140 | 
141 | 
142 | Installation
143 | ============
144 | 
145 | This Python package uses `uv` for installation. `uv` is a fast and modern Python package manager that replaces tools like `pip` and `virtualenv`, with support for `pyproject.toml` and ultra-fast dependency resolution. To install `uv`, please follow the instructions given in the [Astral Docs](https://docs.astral.sh/uv/#installation).
146 | 
147 | If you want to use the Python package in order to concatenate the
148 | common appliance metadata, then please run:
149 | 
150 | ```
151 | uv pip install git+https://github.com/nilmtk/nilm_metadata.git
152 | ```
153 | 
154 | To execute the tests, please run from the project root directory:
155 | 
156 | ```
157 | pytest
158 | ```
159 | 
160 | To generate the documentation, please run from the project root directory:
161 | 
162 | ```
163 | make -C docs singlehtml
164 | ```
165 | 
166 | Related projects
167 | ================
168 | 
169 | * [Project Haystack](http://project-haystack.org/), to quote their
170 |   website, "*is an open source initiative to develop tagging
171 |   conventions and taxonomies for building equipment and operational
172 |   data. We define standardized data models for sites, equipment, and
173 |   points related to energy, HVAC, lighting, and other environmental
174 |   systems.*"  Haystack is an awesome project but it does not specify a
175 |   controlled vocabulary for appliances, which is the meat of the
176 |   `nilm_metadata` project.  Where appropriate, `nilm_metadata` does
177 |   use similar properties to Haystack (e.g. the "site_meter" property
178 |   is borrowed directly from Haystack).
179 | * [WikiEnergy](http://wiki-energy.org/) "*A Universe of Energy Data,
180 |   Available Around the World*".
181 | * [sMAP metadata tags](http://www.eecs.berkeley.edu/~stevedh/smap2/tags.html)
182 |   - sMAP is Berkeley's "Simple Measurement and Actuation Profile".
183 | 
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 | 
3 | REM Command file for Sphinx documentation
4 | 
5 | if "%SPHINXBUILD%" == "" (
6 | 	set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
10 | set I18NSPHINXOPTS=%SPHINXOPTS% source
11 | if NOT "%PAPER%" == "" (
12 | 	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | 	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 | 
16 | if "%1" == "" goto help
17 | 
18 | if "%1" == "help" (
19 | 	:help
20 | 	echo.Please use `make ^<target^>` where ^<target^> is one of
21 | 	echo.  html       to make standalone HTML files
22 | 	echo.  dirhtml    to make HTML files named index.html in directories
23 | 	echo.  singlehtml to make a single large HTML file
24 | 	echo.  pickle     to make pickle files
25 | 	echo.  json       to make JSON files
26 | 	echo.  htmlhelp   to make HTML files and a HTML help project
27 | 	echo.  qthelp     to make HTML files and a qthelp project
28 | 	echo.  devhelp    to make HTML files and a Devhelp project
29 | 	echo.  epub       to make an epub
30 | 	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | 	echo.  text       to make text files
32 | 	echo.  man        to make manual pages
33 | 	echo.  texinfo    to make Texinfo files
34 | 	echo.  gettext    to make PO message catalogs
35 | 	echo.  changes    to make an overview over all changed/added/deprecated items
36 | 	echo.  xml        to make Docutils-native XML files
37 | 	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
38 | 	echo.  linkcheck  to check all external links for integrity
39 | 	echo.
doctest to run all doctests embedded in the documentation if enabled 40 | goto end 41 | ) 42 | 43 | if "%1" == "clean" ( 44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i 45 | del /q /s %BUILDDIR%\* 46 | goto end 47 | ) 48 | 49 | 50 | %SPHINXBUILD% 2> nul 51 | if errorlevel 9009 ( 52 | echo. 53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 54 | echo.installed, then set the SPHINXBUILD environment variable to point 55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 56 | echo.may add the Sphinx directory to PATH. 57 | echo. 58 | echo.If you don't have Sphinx installed, grab it from 59 | echo.http://sphinx-doc.org/ 60 | exit /b 1 61 | ) 62 | 63 | if "%1" == "html" ( 64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html 65 | if errorlevel 1 exit /b 1 66 | echo. 67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html. 68 | goto end 69 | ) 70 | 71 | if "%1" == "dirhtml" ( 72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml 73 | if errorlevel 1 exit /b 1 74 | echo. 75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. 76 | goto end 77 | ) 78 | 79 | if "%1" == "singlehtml" ( 80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml 81 | if errorlevel 1 exit /b 1 82 | echo. 83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. 84 | goto end 85 | ) 86 | 87 | if "%1" == "pickle" ( 88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle 89 | if errorlevel 1 exit /b 1 90 | echo. 91 | echo.Build finished; now you can process the pickle files. 92 | goto end 93 | ) 94 | 95 | if "%1" == "json" ( 96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json 97 | if errorlevel 1 exit /b 1 98 | echo. 99 | echo.Build finished; now you can process the JSON files. 100 | goto end 101 | ) 102 | 103 | if "%1" == "htmlhelp" ( 104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp 105 | if errorlevel 1 exit /b 1 106 | echo. 107 | echo.Build finished; now you can run HTML Help Workshop with the ^ 108 | .hhp project file in %BUILDDIR%/htmlhelp. 109 | goto end 110 | ) 111 | 112 | if "%1" == "qthelp" ( 113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp 114 | if errorlevel 1 exit /b 1 115 | echo. 116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^ 117 | .qhcp project file in %BUILDDIR%/qthelp, like this: 118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\NILMMetadata.qhcp 119 | echo.To view the help file: 120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\NILMMetadata.ghc 121 | goto end 122 | ) 123 | 124 | if "%1" == "devhelp" ( 125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp 126 | if errorlevel 1 exit /b 1 127 | echo. 128 | echo.Build finished. 129 | goto end 130 | ) 131 | 132 | if "%1" == "epub" ( 133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub 134 | if errorlevel 1 exit /b 1 135 | echo. 136 | echo.Build finished. The epub file is in %BUILDDIR%/epub. 137 | goto end 138 | ) 139 | 140 | if "%1" == "latex" ( 141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 142 | if errorlevel 1 exit /b 1 143 | echo. 144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. 145 | goto end 146 | ) 147 | 148 | if "%1" == "latexpdf" ( 149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 150 | cd %BUILDDIR%/latex 151 | make all-pdf 152 | cd %BUILDDIR%/.. 153 | echo. 154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
155 | goto end 156 | ) 157 | 158 | if "%1" == "latexpdfja" ( 159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex 160 | cd %BUILDDIR%/latex 161 | make all-pdf-ja 162 | cd %BUILDDIR%/.. 163 | echo. 164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex. 165 | goto end 166 | ) 167 | 168 | if "%1" == "text" ( 169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text 170 | if errorlevel 1 exit /b 1 171 | echo. 172 | echo.Build finished. The text files are in %BUILDDIR%/text. 173 | goto end 174 | ) 175 | 176 | if "%1" == "man" ( 177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man 178 | if errorlevel 1 exit /b 1 179 | echo. 180 | echo.Build finished. The manual pages are in %BUILDDIR%/man. 181 | goto end 182 | ) 183 | 184 | if "%1" == "texinfo" ( 185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo 186 | if errorlevel 1 exit /b 1 187 | echo. 188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. 189 | goto end 190 | ) 191 | 192 | if "%1" == "gettext" ( 193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale 194 | if errorlevel 1 exit /b 1 195 | echo. 196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 197 | goto end 198 | ) 199 | 200 | if "%1" == "changes" ( 201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes 202 | if errorlevel 1 exit /b 1 203 | echo. 204 | echo.The overview file is in %BUILDDIR%/changes. 205 | goto end 206 | ) 207 | 208 | if "%1" == "linkcheck" ( 209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck 210 | if errorlevel 1 exit /b 1 211 | echo. 212 | echo.Link check complete; look for any errors in the above output ^ 213 | or in %BUILDDIR%/linkcheck/output.txt. 214 | goto end 215 | ) 216 | 217 | if "%1" == "doctest" ( 218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest 219 | if errorlevel 1 exit /b 1 220 | echo. 221 | echo.Testing of doctests in the sources finished, look at the ^ 222 | results in %BUILDDIR%/doctest/output.txt. 223 | goto end 224 | ) 225 | 226 | if "%1" == "xml" ( 227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml 228 | if errorlevel 1 exit /b 1 229 | echo. 230 | echo.Build finished. The XML files are in %BUILDDIR%/xml. 231 | goto end 232 | ) 233 | 234 | if "%1" == "pseudoxml" ( 235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml 236 | if errorlevel 1 exit /b 1 237 | echo. 238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 239 | goto end 240 | ) 241 | 242 | :end 243 | -------------------------------------------------------------------------------- /nilm_metadata/convert_yaml_to_hdf5.py: -------------------------------------------------------------------------------- 1 | from __future__ import print_function, division 2 | import yaml 3 | import pandas as pd 4 | from os.path import isdir, isfile, join, splitext 5 | from os import listdir 6 | from sys import stderr 7 | from copy import deepcopy 8 | from six import iteritems 9 | from nilm_metadata.object_concatenation import get_appliance_types 10 | 11 | 12 | class NilmMetadataError(Exception): 13 | pass 14 | 15 | 16 | def convert_yaml_to_hdf5(yaml_dir, hdf_filename): 17 | """Converts a NILM Metadata YAML instance to HDF5. 18 | 19 | Also does a set of sanity checks on the metadata. 20 | 21 | Parameters 22 | ---------- 23 | yaml_dir : str 24 | Directory path of all *.YAML files describing this dataset. 25 | hdf_filename : str 26 | Filename and path of output HDF5 file. If file exists then will 27 | attempt to append metadata to file. 
If file does not exist then 28 | will create it. 29 | """ 30 | 31 | assert isdir(yaml_dir) 32 | store = pd.HDFStore(hdf_filename, 'a') 33 | 34 | # Load Dataset and MeterDevice metadata 35 | metadata = _load_file(yaml_dir, 'dataset.yaml') 36 | meter_devices = _load_file(yaml_dir, 'meter_devices.yaml') 37 | metadata['meter_devices'] = meter_devices 38 | store.root._v_attrs.metadata = metadata 39 | 40 | # Load buildings 41 | building_filenames = [fname for fname in listdir(yaml_dir) 42 | if fname.startswith('building') 43 | and fname.endswith('.yaml')] 44 | 45 | for fname in building_filenames: 46 | building = splitext(fname)[0] # e.g. 'building1' 47 | try: 48 | group = store._handle.create_group('/', building) 49 | except: 50 | group = store._handle.get_node('/' + building) 51 | building_metadata = _load_file(yaml_dir, fname) 52 | elec_meters = building_metadata['elec_meters'] 53 | _deep_copy_meters(elec_meters) 54 | _set_data_location(elec_meters, building) 55 | _sanity_check_meters(elec_meters, meter_devices) 56 | _sanity_check_appliances(building_metadata) 57 | group._f_setattr('metadata', building_metadata) 58 | 59 | store.close() 60 | print("Done converting YAML metadata to HDF5!") 61 | 62 | 63 | def save_yaml_to_datastore(yaml_dir, store): 64 | """Saves a NILM Metadata YAML instance to a NILMTK datastore. 65 | 66 | Parameters 67 | ---------- 68 | yaml_dir : str 69 | Directory path of all *.YAML files describing this dataset. 70 | store : DataStore 71 | DataStore object 72 | """ 73 | 74 | assert isdir(yaml_dir) 75 | 76 | # Load Dataset and MeterDevice metadata 77 | metadata = _load_file(yaml_dir, 'dataset.yaml') 78 | print("Loaded metadata") 79 | meter_devices = _load_file(yaml_dir, 'meter_devices.yaml') 80 | metadata['meter_devices'] = meter_devices 81 | store.save_metadata('/', metadata) 82 | 83 | # Load buildings 84 | building_filenames = [fname for fname in listdir(yaml_dir) 85 | if fname.startswith('building') 86 | and fname.endswith('.yaml')] 87 | 88 | for fname in building_filenames: 89 | building = splitext(fname)[0] # e.g. 'building1' 90 | building_metadata = _load_file(yaml_dir, fname) 91 | elec_meters = building_metadata['elec_meters'] 92 | _deep_copy_meters(elec_meters) 93 | _set_data_location(elec_meters, building) 94 | _sanity_check_meters(elec_meters, meter_devices) 95 | _sanity_check_appliances(building_metadata) 96 | store.save_metadata('/'+building, building_metadata) 97 | 98 | store.close() 99 | print("Done converting YAML metadata to HDF5!") 100 | 101 | 102 | def _load_file(yaml_dir, yaml_filename): 103 | yaml_full_filename = join(yaml_dir, yaml_filename) 104 | if isfile(yaml_full_filename): 105 | with open(yaml_full_filename, 'rb') as fh: 106 | return yaml.safe_load(fh) 107 | else: 108 | print(yaml_full_filename, "not found.", file=stderr) 109 | 110 | 111 | def _deep_copy_meters(elec_meters): 112 | for meter_instance, meter in iteritems(elec_meters): 113 | elec_meters[meter_instance] = deepcopy(meter) 114 | 115 | 116 | def _set_data_location(elec_meters, building): 117 | """Goes through each ElecMeter in elec_meters and sets `data_location`. 118 | Modifies `elec_meters` in place. 119 | 120 | Parameters 121 | ---------- 122 | elec_meters : dict of dicts 123 | building : string e.g. 
'building1' 124 | """ 125 | for meter_instance in elec_meters: 126 | data_location = '/{:s}/elec/meter{:d}'.format(building, meter_instance) 127 | elec_meters[meter_instance]['data_location'] = data_location 128 | 129 | 130 | def _sanity_check_meters(meters, meter_devices): 131 | """ 132 | Checks: 133 | * Make sure all meter devices map to meter_device keys 134 | * Makes sure all IDs are unique 135 | """ 136 | if len(meters) != len(set(meters)): 137 | raise NilmMetadataError("elec_meters not unique") 138 | 139 | for meter_instance, meter in iteritems(meters): 140 | assert meter['device_model'] in meter_devices 141 | 142 | 143 | def _sanity_check_appliances(building_metadata): 144 | """ 145 | Checks: 146 | * Make sure we use proper NILM Metadata names. 147 | * Make sure there aren't multiple appliance types with same instance 148 | """ 149 | appliances = building_metadata['appliances'] 150 | appliance_types = get_appliance_types() 151 | building_instance = building_metadata['instance'] 152 | REQUIRED_KEYS = ['type', 'instance', 'meters'] 153 | 154 | for appliance in appliances: 155 | if not isinstance(appliance, dict): 156 | raise NilmMetadataError( 157 | "Appliance '{}' is {} when it should be a dict." 158 | .format(appliance, type(appliance))) 159 | 160 | # Generate string for specifying which is the problematic 161 | # appliance for error messages: 162 | appl_string = ("ApplianceType '{}', instance '{}', in building {:d}" 163 | .format(appliance.get('type'), 164 | appliance.get('instance'), 165 | building_instance)) 166 | 167 | # Check required keys are all present 168 | for key in REQUIRED_KEYS: 169 | if key not in appliance: 170 | raise NilmMetadataError("key '{}' missing for {}" 171 | .format(key, appl_string)) 172 | 173 | appl_type = appliance['type'] 174 | 175 | # check all appliance names are valid 176 | if appl_type not in appliance_types: 177 | raise NilmMetadataError( 178 | appl_string + " not in appliance_types." 179 | " In other words, '{}' is not a recognised appliance type." 180 | .format(appl_type)) 181 | 182 | # Check appliance references valid meters 183 | meters = appliance['meters'] 184 | if len(meters) != len(set(meters)): 185 | msg = "In {}, meters '{}' not unique.".format(appl_string, meters) 186 | raise NilmMetadataError(msg) 187 | 188 | for meter in meters: 189 | if meter != 0 and meter not in building_metadata['elec_meters']: 190 | msg = ("In ({}), meter '{:d}' is not in" 191 | " this building's 'elec_meters'" 192 | .format(appl_string, meter)) 193 | raise NilmMetadataError(msg) 194 | 195 | # Check list of instances for each appliance is valid. 196 | appliance_instances = {} 197 | for appliance in appliances: 198 | appl_type = appliance['type'] 199 | instances = appliance_instances.setdefault(appl_type, []) 200 | instances.append(appliance['instance']) 201 | 202 | for appliance_type, instances in iteritems(appliance_instances): 203 | instances.sort() 204 | correct_instances = list(range(1, len(instances)+1)) 205 | if instances != correct_instances: 206 | msg = ("In building {:d}, appliance '{}' appears {:d} time(s)." 207 | " Yet the list of instances is '{}'. The list of instances" 208 | " should be '{}'." 
209 | .format(building_metadata['instance'], appliance_type, 210 | len(instances), instances, correct_instances)) 211 | raise NilmMetadataError(msg) 212 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # NILM Metadata documentation build configuration file, created by 4 | # sphinx-quickstart on Mon May 19 10:47:20 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | # If extensions (or modules to document with autodoc) are in another directory, 19 | # add these directories to sys.path here. If the directory is relative to the 20 | # documentation root, use os.path.abspath to make it absolute, like shown here. 21 | #sys.path.insert(0, os.path.abspath('.')) 22 | 23 | # -- General configuration ------------------------------------------------ 24 | 25 | # If your documentation needs a minimal Sphinx version, state it here. 26 | #needs_sphinx = '1.0' 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = [ 32 | 'sphinx.ext.autodoc', 33 | ] 34 | 35 | # Add any paths that contain templates here, relative to this directory. 36 | templates_path = ['_templates'] 37 | 38 | # The suffix of source filenames. 39 | source_suffix = '.rst' 40 | 41 | # The encoding of source files. 42 | #source_encoding = 'utf-8-sig' 43 | 44 | # The master toctree document. 45 | master_doc = 'index' 46 | 47 | # General information about the project. 48 | project = u'NILM Metadata' 49 | copyright = u'2014, Jack Kelly' 50 | 51 | # The version info for the project you're documenting, acts as replacement for 52 | # |version| and |release|, also used in various other places throughout the 53 | # built documents. 54 | # 55 | # The short X.Y version. 56 | version = '0.2.0' 57 | # The full version, including alpha/beta/rc tags. 58 | release = '0.2.0' 59 | 60 | # The language for content autogenerated by Sphinx. Refer to documentation 61 | # for a list of supported languages. 62 | #language = None 63 | 64 | # There are two options for replacing |today|: either, you set today to some 65 | # non-false value, then it is used: 66 | #today = '' 67 | # Else, today_fmt is used as the format for a strftime call. 68 | #today_fmt = '%B %d, %Y' 69 | 70 | # List of patterns, relative to source directory, that match files and 71 | # directories to ignore when looking for source files. 72 | exclude_patterns = [] 73 | 74 | # The reST default role (used for this markup: `text`) to use for all 75 | # documents. 76 | #default_role = None 77 | 78 | # If true, '()' will be appended to :func: etc. cross-reference text. 79 | #add_function_parentheses = True 80 | 81 | # If true, the current module name will be prepended to all description 82 | # unit titles (such as .. function::). 83 | #add_module_names = True 84 | 85 | # If true, sectionauthor and moduleauthor directives will be shown in the 86 | # output. They are ignored by default. 87 | #show_authors = False 88 | 89 | # The name of the Pygments (syntax highlighting) style to use. 
90 | pygments_style = 'sphinx' 91 | 92 | # A list of ignored prefixes for module index sorting. 93 | #modindex_common_prefix = [] 94 | 95 | # If true, keep warnings as "system message" paragraphs in the built documents. 96 | #keep_warnings = False 97 | 98 | 99 | # -- Options for HTML output ---------------------------------------------- 100 | 101 | # The theme to use for HTML and HTML Help pages. See the documentation for 102 | # a list of builtin themes. 103 | html_theme = 'default' 104 | 105 | # Theme options are theme-specific and customize the look and feel of a theme 106 | # further. For a list of options available for each theme, see the 107 | # documentation. 108 | #html_theme_options = {} 109 | 110 | # Add any paths that contain custom themes here, relative to this directory. 111 | #html_theme_path = [] 112 | 113 | # The name for this set of Sphinx documents. If None, it defaults to 114 | # " v documentation". 115 | #html_title = None 116 | 117 | # A shorter title for the navigation bar. Default is the same as html_title. 118 | #html_short_title = None 119 | 120 | # The name of an image file (relative to this directory) to place at the top 121 | # of the sidebar. 122 | #html_logo = None 123 | 124 | # The name of an image file (within the static path) to use as favicon of the 125 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 126 | # pixels large. 127 | #html_favicon = None 128 | 129 | # Add any paths that contain custom static files (such as style sheets) here, 130 | # relative to this directory. They are copied after the builtin static files, 131 | # so a file named "default.css" will overwrite the builtin "default.css". 132 | #html_static_path = ['_static'] 133 | html_static_path = [] 134 | 135 | # Add any extra paths that contain custom files (such as robots.txt or 136 | # .htaccess) here, relative to this directory. These files are copied 137 | # directly to the root of the documentation. 138 | #html_extra_path = [] 139 | 140 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 141 | # using the given strftime format. 142 | #html_last_updated_fmt = '%b %d, %Y' 143 | 144 | # If true, SmartyPants will be used to convert quotes and dashes to 145 | # typographically correct entities. 146 | #html_use_smartypants = True 147 | 148 | # Custom sidebar templates, maps document names to template names. 149 | #html_sidebars = {} 150 | 151 | # Additional templates that should be rendered to pages, maps page names to 152 | # template names. 153 | #html_additional_pages = {} 154 | 155 | # If false, no module index is generated. 156 | #html_domain_indices = True 157 | 158 | # If false, no index is generated. 159 | #html_use_index = True 160 | 161 | # If true, the index is split into individual pages for each letter. 162 | #html_split_index = False 163 | 164 | # If true, links to the reST sources are added to the pages. 165 | #html_show_sourcelink = True 166 | 167 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 168 | #html_show_sphinx = True 169 | 170 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 171 | #html_show_copyright = True 172 | 173 | # If true, an OpenSearch description file will be output, and all pages will 174 | # contain a tag referring to it. The value of this option must be the 175 | # base URL from which the finished HTML is served. 176 | #html_use_opensearch = '' 177 | 178 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 
179 | #html_file_suffix = None 180 | 181 | # Output file base name for HTML help builder. 182 | htmlhelp_basename = 'NILMMetadatadoc' 183 | 184 | 185 | # -- Options for LaTeX output --------------------------------------------- 186 | 187 | latex_elements = { 188 | # The paper size ('letterpaper' or 'a4paper'). 189 | #'papersize': 'letterpaper', 190 | 191 | # The font size ('10pt', '11pt' or '12pt'). 192 | #'pointsize': '10pt', 193 | 194 | # Additional stuff for the LaTeX preamble. 195 | #'preamble': '', 196 | } 197 | 198 | # Grouping the document tree into LaTeX files. List of tuples 199 | # (source start file, target name, title, 200 | # author, documentclass [howto, manual, or own class]). 201 | latex_documents = [ 202 | ('index', 'NILMMetadata.tex', u'NILM Metadata Documentation', 203 | u'Jack Kelly', 'manual'), 204 | ] 205 | 206 | # The name of an image file (relative to this directory) to place at the top of 207 | # the title page. 208 | #latex_logo = None 209 | 210 | # For "manual" documents, if this is true, then toplevel headings are parts, 211 | # not chapters. 212 | #latex_use_parts = False 213 | 214 | # If true, show page references after internal links. 215 | #latex_show_pagerefs = False 216 | 217 | # If true, show URL addresses after external links. 218 | #latex_show_urls = False 219 | 220 | # Documents to append as an appendix to all manuals. 221 | #latex_appendices = [] 222 | 223 | # If false, no module index is generated. 224 | #latex_domain_indices = True 225 | 226 | 227 | # -- Options for manual page output --------------------------------------- 228 | 229 | # One entry per manual page. List of tuples 230 | # (source start file, name, description, authors, manual section). 231 | man_pages = [ 232 | ('index', 'nilmmetadata', u'NILM Metadata Documentation', 233 | [u'Jack Kelly'], 1) 234 | ] 235 | 236 | # If true, show URL addresses after external links. 237 | #man_show_urls = False 238 | 239 | 240 | # -- Options for Texinfo output ------------------------------------------- 241 | 242 | # Grouping the document tree into Texinfo files. List of tuples 243 | # (source start file, target name, title, author, 244 | # dir menu entry, description, category) 245 | texinfo_documents = [ 246 | ('index', 'NILMMetadata', u'NILM Metadata Documentation', 247 | u'Jack Kelly', 'NILMMetadata', 'One line description of project.', 248 | 'Miscellaneous'), 249 | ] 250 | 251 | # Documents to append as an appendix to all manuals. 252 | #texinfo_appendices = [] 253 | 254 | # If false, no module index is generated. 255 | #texinfo_domain_indices = True 256 | 257 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 258 | #texinfo_show_urls = 'footnote' 259 | 260 | # If true, do not generate a @detailmenu in the "Top" node's menu. 261 | #texinfo_no_detailmenu = False 262 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 
134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 
193 |    You may obtain a copy of the License at
194 | 
195 |        http://www.apache.org/licenses/LICENSE-2.0
196 | 
197 |    Unless required by applicable law or agreed to in writing, software
198 |    distributed under the License is distributed on an "AS IS" BASIS,
199 |    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 |    See the License for the specific language governing permissions and
201 |    limitations under the License.
202 | 
203 | 
--------------------------------------------------------------------------------
/docs/source/central_metadata.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: yaml
2 | 
3 | **********************************
4 | Central appliance metadata
5 | **********************************
6 | 
7 | Manual
8 | ======
9 | 
10 | Please see the `NILM Metadata README
11 | <https://github.com/nilmtk/nilm_metadata>`_
12 | section on 'Central metadata' for a quick introduction.
13 | 
14 | 
15 | Inheritance
16 | -----------
17 | 
18 | * prototypal inheritance, like JavaScript
19 | * dicts are updated; lists are extended; other properties are overwritten
20 | * arbitrary inheritance depth
21 | 
22 | Components
23 | ----------
24 | 
25 | * recursive
26 | * categories of the container appliance are updated with categories from
27 |   each component (unless ``do_not_merge_categories: true`` is set in
28 |   the component)
29 | 
30 | Subtypes versus a new child object
31 | ----------------------------------
32 | 
33 | Appliance specification objects can take a 'subtype' property.  Why
34 | not use inheritance for all subtypes?  The rule of thumb is that if a
35 | subtype is functionally different to its parent then it should be
36 | specified as a separate child object (for example, a gas hob and an electric
37 | hob clearly have radically different electricity usage profiles) but
38 | if the differences are minor (e.g. a digital radio versus an analogue
39 | radio) then the appliances should be specified as subtypes of the same object.
40 | 
41 | 
42 | Naming conventions
43 | ------------------
44 | 
45 | * properties are lowercase with underscores, e.g. `subtype`
46 | * object names (not specific makes and models) are lowercase with
47 |   spaces, unless they are acronyms in which case they are uppercase
48 |   (e.g. 'LED')
49 | * category names are lowercase with spaces
50 | 
51 | 
52 | Example
53 | -------
54 | 
55 | To demonstrate the inheritance system, let's look at specifying a
56 | boiler.
57 | 
58 | First, NILM Metadata specifies a 'heating appliance' object, which
59 | can be considered the 'base class'::
60 | 
61 |   heating appliance:
62 |     parent: appliance
63 |     categories:
64 |       traditional: heating
65 |       size: large
66 | 
67 | Next, we specify a 'boiler' object, which inherits from 'heating appliance'::
68 | 
69 | 
70 |   #------------- BOILERS ------------------------
71 | 
72 |   boiler: # all boilers except for electric boilers
73 | 
74 |     parent: heating appliance
75 | 
76 |     synonyms: [furnace]
77 | 
78 |     # Categories of the child object are appended
79 |     # to existing categories in the parent.
80 |     categories:
81 |       google_shopping:
82 |         - climate control
83 |         - furnaces and boilers
84 | 
85 |     # Here we specify that boilers have a component
86 |     # which is itself an object whose parent
87 |     # is `water pump`.
88 |     components:
89 |       - type: water pump
90 | 
91 |     # Boilers have a property which most other appliances
92 |     # do not have: a fuel source.  We specify additional
93 |     # properties using the JSON Schema syntax.
94 | additional_properties: 95 | fuel: 96 | enum: [natural gas, coal, wood, oil, LPG] 97 | 98 | subtypes: 99 | - combi 100 | - regular 101 | 102 | # We can specify the different mechanisms that 103 | # control the boiler. This is useful, for example, 104 | # if we want to find all appliances which 105 | # must be manually controlled (e.g. toasters) 106 | control: [manual, timer, thermostat] 107 | 108 | # We can also declare prior knowledge about boilers. 109 | # For example, we know that boilers tend to be in 110 | # bathrooms, utility rooms or kitchens 111 | distributions: 112 | room: 113 | distribution_of_data: 114 | categories: [bathroom, utility, kitchen] 115 | values: [0.3, 0.2, 0.2] 116 | # If the values do not add to 1 then the assumption 117 | # is that the remaining probability mass is distributed equally to 118 | # all other rooms. 119 | source: subjective # These values are basically guesses! 120 | 121 | 122 | Finally, in the metadata for the dataset itself, we can do:: 123 | 124 | type: boiler 125 | manufacturer: Worcester 126 | model: Greenstar 30CDi Conventional natural gas 127 | room: bathroom 128 | year_of_purchase: 2011 129 | fuel: natural gas 130 | subtype: regular 131 | part_number: 41-311-71 132 | efficiency_rating: 133 | certification_name: SEDBUK 134 | rating: A 135 | nominal_consumption: 136 | on_power: 70 137 | 138 | 139 | Schema details 140 | ============== 141 | 142 | Below is a UML Class Diagram 143 | showing all the classes and the relationships between classes: 144 | 145 | .. image:: schema.svg 146 | 147 | (Please see the :doc:`tutorial` for more background about the NILM 148 | Metadata schema) 149 | 150 | Below we describe all the classes and their attributes and possible values. 151 | 152 | ApplianceType 153 | ------------- 154 | 155 | Has many of the attributes that :ref:`appliance-schema` has, with the addition 156 | of: 157 | 158 | * on_power_threshold 159 | * min_off_duration 160 | * min_on_duration 161 | * control 162 | * components 163 | 164 | :parent: (string) Name of the parent ApplianceType object from which 165 | this object inherits. 166 | :categories: (dict) 167 | 168 | :traditional: (enum) one of {wet, cold, consumer electronics, ICT, cooking, heating} 169 | :size: (enum) one of {small, large} 170 | :electrical: (list of strings) Any combination of: 171 | 172 | - lighting, incandescent, fluorescent, compact, linear, LED 173 | - resistive 174 | - power electronics 175 | - SMPS, no PFC, passive PFC, active PFC 176 | - single-phase induction motor, capacitor start-run, constant torque 177 | :misc: (enum) one of {misc, sockets} 178 | :google_shopping: (list of strings) anything from the Google 179 | Shopping schema. e.g.: climate control', 180 | 'furnaces and boilers', 'renewable energy', 'solar 181 | energy', 'solar panels', 'computers', 182 | 'electronics', 'laptops', 'printers and copiers', 183 | 'print, copy, scan and fax', 'printers', 'laundry 184 | appliances', 'kitchen and dining', 'kitchen 185 | appliances', 'breadmakers' 186 | 187 | :subtypes: (list of strings) A list of all the valid subtypes. 188 | :additional_properties: (dict) Used for specifying additional 189 | properties which can be specified for 190 | Appliances of this ApplianceType. Each key is 191 | a property. Each value is a JSON Schema 192 | definition of the property. 193 | :do_not_inherit: (list of strings) properties which should not be 194 | inherited from the parent. 
195 | :synonyms: (list of strings)
196 | :usual_components: (list of strings) Just a list of hints for human
197 |                    readers.
198 | :n_ancestors: (int) Filled in by ``_concatenate_complete_object``.
199 | 
200 | .. _distributions-schema:
201 | 
202 | :distributions: (dict) Distribution of random variables.
203 | 
204 |   :on_power: (list of :ref:`prior-schema` objects) bin_edges in units of watts
205 |   :on_duration: (list of :ref:`prior-schema` objects) bin_edges in units of seconds
206 |   :off_duration: (list of :ref:`prior-schema` objects) bin_edges in units of seconds
207 |   :usage_hour_per_day: (list of :ref:`prior-schema` objects) bin_edges = [0,1,2,...,24]
208 |   :usage_day_per_week: (list of :ref:`prior-schema` objects) categories =
209 |                        ['mon', 'tue', ..., 'sun']
210 |   :usage_month_per_year: (list of :ref:`prior-schema` objects) bin_edges are
211 |                          in units of days (we need bin edges because
212 |                          months are not equal lengths).  The first
213 |                          bin represents January.
214 |   :rooms: (list of :ref:`prior-schema` objects) Categorical distribution over
215 |           the rooms where this appliance is likely to be
216 |           used. e.g. for a fridge this might be 'kitchen:0.9,
217 |           garage:0.1'.  Please use the standard room names defined in
218 |           room.json (category names in distributions are not
219 |           automatically validated).
220 |   :subtypes: (list of :ref:`prior-schema` objects) Categorical distribution
221 |              over the subtypes.
222 |   :appliance_correlations: (list of :ref:`prior-schema` objects) list of other
223 |                            appliances.  Probability of this appliance
224 |                            being on given that the other appliance is
225 |                            on.  e.g. 'tv:0.1, amp:0.4, ...' means that
226 |                            there is a 10% probability of this
227 |                            appliance being on if the TV is on.  Each
228 |                            category name can either be just an
229 |                            appliance name (e.g. 'fridge') or an
230 |                            ``<appliance name>,<instance>`` pair,
231 |                            e.g. 'fridge,1'
232 |   :ownership_per_country: (list of :ref:`prior-schema` objects) Probability of
233 |                           this appliance being owned by a household
234 |                           in each country (i.e. a categorical
235 |                           distribution where categories are standard
236 |                           two-letter country codes defined by ISO
237 |                           3166-1 alpha-2, e.g. 'GB' or 'US';
238 |                           http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2).  If
239 |                           the probability refers to the entire globe
240 |                           then use 'GLOBAL' as the country code.
241 |   :ownership_per_continent: (list of :ref:`prior-schema` objects) Probability
242 |                             of this appliance being owned by a
243 |                             household in each continent (i.e. a
244 |                             categorical distribution where categories
245 |                             are standard two-letter continent codes
246 |                             defined at
247 |                             http://en.wikipedia.org/wiki/List_of_sovereign_states_and_dependent_territories_by_continent_%28data_file%29).
248 | 
249 | Country
250 | -------
251 | 
252 | One large dict specifying country-specific information.  Specified in
253 | :file:`nilm_metadata/central_metadata/country.yaml`
254 | 
255 | Each key is a 'country' (string).  Please use a
256 | standard two-letter country code defined by `ISO 3166-1 alpha-2
257 | <http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_, e.g. 'GB' or
258 | 'US'.
259 | 
260 | Each value is a dict with the following attributes:
261 | 
262 | :mains_voltage: (dict):
263 | 
264 |   :nominal: (number) (required) volts
265 |   :upper_limit: (number) volts
266 |   :lower_limit: (number) volts
267 | :related_documents: (list of strings)
268 | 
269 | 
270 | .. _prior-schema:
271 | 
272 | Prior
273 | -----
274 | 
275 | Represent prior knowledge.  For continuous variables, specify either
276 | the distribution of data (i.e. the data represented in a histogram),
277 | or a density estimate (a model fitted to the data), or both.  For
278 | categorical variables, specify the categorical distribution.
279 | 
280 | :distribution_of_data: (dict) Distribution of the data expressed as
281 |                        normalised frequencies per discrete bin (for
282 |                        continuous variables) or per category (for
283 |                        categorical variables). 'categories' can be
284 |                        used instead of 'bin_edges' for continuous
285 |                        variables where it makes sense; e.g. where each
286 |                        bin represents a day of the week
287 | 
288 |   :bin_edges: (list of numbers or list of strings) (required) \|bin_edges\| ==
289 |               \|values\| + 1
290 |   :categories: (list of strings) (required) \|categories\| == \|values\|
291 |   :values: (list of numbers) (required) The normalised frequencies.
292 |            For continuous variables, the integral over the range must
293 |            be 1.  For categorical variables, the sum of frequencies can
294 |            be <= 1.  If < 1 then the system will assume that the
295 |            remaining mass is distributed equally across all other
296 |            categories.  For example, for the probability of a fridge
297 |            being in a specific room, it is sufficient to just state
298 |            that the probability is 0.9 for a fridge to be in the
299 |            kitchen.
300 | 
301 | :model: (dict) A fitted model to describe the probability density
302 |         function (for continuous variables) or the probability mass
303 |         function (for discrete variables).  Use additional properties
304 |         for the relevant parameters, written as Greek letters spelt
305 |         out in lowercase English e.g. 'mu' and 'lambda' except for
306 |         summary stats where we use some combination of 'min', 'max',
307 |         'mean', 'mode'.
308 | 
309 |   :distribution_name: (enum) one of {'normal', 'inverse gaussian',
310 |                       'summary stats'}
311 |   :sum_of_squared_error: (number)
312 | 
313 | :n_datapoints: (int)
314 | :date_prepared: (string) ISO 8601 date format
315 | :source: (enum) one of {'subjective', 'empirical from data',
316 |          'empirical from publication'}.  What is the source of this
317 |          prior?  If from publication then use ``related_documents`` to
318 |          provide references.  If from data then provide details using
319 |          the ``software`` and ``training_data`` properties.
320 | :related_documents: (list of strings) If 'source==empirical from
321 |                     publication' then enter the reference(s) here.
322 | :software: (string) the software used to generate the prior from data.
323 | :specific_to: (dict):
324 | 
325 |   :country: (string) standard two-letter country code defined by
326 |             `ISO 3166-1 alpha-2
327 |             <http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_
328 |             e.g. 'GB' or 'US'.
329 |   :continent: (string) standard
330 |               `two-letter continent code defined on Wikipedia
331 |               <http://en.wikipedia.org/wiki/List_of_sovereign_states_and_dependent_territories_by_continent_%28data_file%29>`_
332 | :distance: (int) this is filled in by the
333 |            ``concatenate_complete_object`` function and reports the
334 |            distance (in numbers of generations) between this prior and
335 |            the most-derived object.  In other words, the larger this
336 |            number, the less specific to the object this prior is.  If
337 |            this is not set then the prior applies to the current
338 |            object.
339 | :from_appliance_type: (string) this is filled in by the
340 |                       ``concatenate_complete_object`` function and
341 |                       reports the appliance type name from the
342 |                       ancestor hierarchy from which this distribution
343 |                       came.
344 | :description: (string)
345 | :training_data: (array of dicts).  Each element is a dict with these properties:
346 | 
347 |   :dataset: (string) Short name of dataset
348 |   :buildings: (list of dicts):
349 | 
350 |     :building_id: (int)
351 |     :dates: (list of :ref:`interval-schema` objects)
352 |   :country: (string) standard two-letter country code defined by
353 |             `ISO 3166-1 alpha-2
354 |             <http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_
355 |             e.g. 'GB' or 'US'.
356 | 
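For illustration, here is a small, made-up ``on_power`` prior, written as it
would appear inside an ApplianceType's ``distributions`` dict (the numbers
below are invented, not taken from any dataset)::

  distributions:
    on_power:
      - distribution_of_data:
          bin_edges: [0, 50, 100, 150]   # watts
          values: [0.1, 0.6, 0.3]        # |bin_edges| == |values| + 1
        source: subjective
        description: a guessed on-power distribution
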
357 | DisaggregationModel
358 | -------------------
359 | 
360 | This is not especially well defined yet.  Just an initial sketch.  The
361 | basic idea is that we would be able to specify models for each
362 | appliance type.
363 | 
364 | :appliance_type: (string) Reference to the specific `ApplianceType`_
365 |                  that we are modelling.
366 | :model_type: (enum) one of {'HMM', 'FHMM', 'combinatorial
367 |              optimisation'}
368 | :parameters: (dict) Parameters specific to each model type.
369 | 
370 | DisaggregationModel re-uses several properties from :ref:`prior-schema`:
371 | 
372 | * training_data
373 | * specific_to
374 | * software
375 | * related_documents
376 | * date_prepared
377 | * description
378 | 
--------------------------------------------------------------------------------
/docs/source/tutorial.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: yaml
2 | 
3 | **********************
4 | NILM Metadata Tutorial
5 | **********************
6 | 
7 | Before reading this tutorial, please make sure you have read the NILM
8 | Metadata `README <https://github.com/nilmtk/nilm_metadata>`_
9 | which introduces the project.  Also, if you are not
10 | familiar with YAML, please see the
11 | `Wikipedia page on YAML <http://en.wikipedia.org/wiki/YAML>`_
12 | for a quick introduction.
13 | 
14 | NILM Metadata allows us to describe many of the objects we typically
15 | find in a disaggregated energy dataset.  Below is a UML Class Diagram
16 | showing all the classes and the relationships between classes:
17 | 
18 | .. image:: schema.svg
19 | 
20 | A filled black diamond indicates a 'composition' relationship whilst a
21 | hollow diamond indicates an 'aggregation'.  For example, the
22 | relationship between ``Dataset`` and ``Building`` is read as '*each
23 | Dataset contains any number of Buildings and each Building belongs to
24 | exactly one Dataset*'.  We use hollow diamonds to mean that objects of
25 | one class *refer* to objects in another class.  For example, each
26 | ``Appliance`` object refers to exactly one
27 | ``ApplianceType``.  Instances of the classes in the shaded area on the
28 | left are intended to be shipped with each dataset whilst objects of
29 | the classes on the right are common to all datasets and are stored
30 | within the NILM Metadata project as the 'central metadata'.  Some
31 | ``ApplianceTypes`` contain ``Appliances``, hence the box representing
32 | the ``Appliance`` class slightly protrudes into the 'central metadata'
33 | area on the right.
34 | 
35 | Below we will use examples to illustrate how to build a metadata
36 | schema for a dataset.
37 | 
38 | Examples
39 | ========
40 | 
41 | Simple example
42 | --------------
43 | 
44 | The illustration below shows a cartoon mains wiring diagram for
45 | a domestic building.  Black lines indicate mains wires.  This home has a
46 | split-phase mains supply (common in North America, for example).  The
47 | washing machine draws power across both splits.  All other appliances
48 | draw power from a single split.
49 | 
50 | .. image:: circuit_no_metadata.svg
51 | 
52 | The text below shows a minimalistic description (using the NILM
53 | Metadata schema) of the wiring diagram above.  The YAML below
The YAML below 54 | would go into the file :file:`building1.yaml`:: 55 | 56 | instance: 1 # this is the first building in the dataset 57 | elec_meters: # a dictionary where each key is a meter instance 58 | 1: 59 | site_meter: true # meter 1 measures the whole-building aggregate 60 | 2: 61 | site_meter: true 62 | 3: 63 | submeter_of: 1 # meter 3 is directly downstream of meter 1 64 | 4: 65 | submeter_of: 1 66 | 5: 67 | submeter_of: 2 68 | 6: 69 | submeter_of: 2 70 | 7: 71 | submeter_of: 6 72 | appliances: 73 | - {type: kettle, instance: 1, room: kitchen, meters: [3]} 74 | - {type: washing machine, instance: 1, meters: [4,5]} 75 | - {type: light, instance: 1, room: kitchen, meters: [7]} 76 | - {type: light, instance: 2, multiple: true, meters: [6]} 77 | 78 | ``elec_meters`` holds a dictionary of dictionaries. Each key is a 79 | meter instance (a unique integer identifier within the building). We 80 | start numbering from 1 because that is common in existing datasets. 81 | Each value of the ``elec_meters`` dict is a dictionary recording 82 | information about that specific meter (see the documentation on the 83 | :ref:`elec-meter-schema` schema for full information). ``site_meter`` 84 | is set to ``true`` if this meter measures the whole-building aggregate 85 | power demand. ``submeter_of`` records the meter instance of the 86 | upstream meter. In this way, we can specify wiring hierarchies of 87 | arbitrary complexity. 88 | 89 | ``appliances`` is a list of dictionaries. Each dictionary describes a 90 | single appliance. The appliance ``type`` (e.g. 'kettle' or 'washing 91 | machine') is taken from a controlled vocabulary defined in NILM 92 | Metadata. See the :ref:`appliance-schema` schema for more information. 93 | 94 | For each appliance, we must also specify an ``instance`` 95 | (an integer which, within each building, allows us to distinguish 96 | between multiple instances of a particular appliance ``type``). We 97 | must also specify a list of ``meters``. Each element in this list is 98 | an integer which corresponds to a meter ``instance``. In this way, we 99 | can specify which meter is directly upstream of this appliance. The 100 | vast majority of domestic appliances will only specify a single meter. 101 | We use two meters for north-American appliances which draw power from 102 | both mains legs. We use three meters for three-phase appliances. 103 | 104 | See the documentation of the :doc:`dataset_metadata` for a full 105 | listing of all elements which can be described, or continue below for 106 | a more detailed example. 107 | 108 | 109 | Representing REDD using NILM Metadata 110 | ------------------------------------- 111 | 112 | The `Reference Energy Disaggregation Data set (REDD) 113 | `_ (`Kolter & Johnson 2011 114 | `_) was the first 115 | public dataset to be released for the energy disaggregation community. 116 | It consists of six homes. Each home has its whole-home aggregate 117 | power demand measured and also has its circuits measured. REDD 118 | provides both low frequency (3 second sample period) and high 119 | frequency data. We will only specify the low frequency data in this 120 | example. 121 | 122 | NILM Metadata can be specified as either YAML or as metadata within an 123 | HDF5 binary file. YAML is probably best for distribution with a 124 | dataset. HDF5 is used by `NILMTK `_ to store 125 | both the data itself and the metadata. The data structures are very 126 | similar no matter if the metadata is represented on disk as YAML or 127 | HDF5. 
The main difference is where the metadata is stored. In this 128 | example, we will only consider YAML. The YAML files are stored in a 129 | :file:`metadata` directory included with the dataset. For details of 130 | where this information is stored within HDF5, please see the relevant 131 | sections of the :doc:`dataset_metadata` page. 132 | 133 | First we will specify the details of the dataset, then details about 134 | each building. 135 | 136 | Dataset 137 | ^^^^^^^ 138 | 139 | We will use the :ref:`Dataset schema ` to describe the name of 140 | the dataset, authors, geographical location etc. If you want to 141 | create a minimal metadata description of a dataset then you don't need 142 | to specify anything for the ``Dataset``. 143 | 144 | This information would be stored in :file:`dataset.yaml`. 145 | 146 | First, let us specify the name of the dataset and the creators:: 147 | 148 | name: REDD 149 | long_name: The Reference Energy Disaggregation Data set 150 | creators: 151 | - Kolter, Zico 152 | - Johnson, Matthew 153 | publication_date: 2011 154 | institution: Massachusetts Institute of Technology (MIT) 155 | contact: zkolter@cs.cmu.edu # Zico moved from MIT to CMU 156 | description: Several weeks of power data for 6 different homes. 157 | subject: Disaggregated power demand from domestic buildings. 158 | number_of_buildings: 6 159 | timezone: US/Eastern # MIT is on the east coast 160 | geo_location: 161 | locality: Massachusetts # village, town, city or state 162 | country: US # standard two-letter country code defined by ISO 3166-1 alpha-2 163 | latitude: 42.360091 # MIT's coorindates 164 | longitude: -71.09416 165 | related_documents: 166 | - http://redd.csail.mit.edu 167 | - > 168 | J. Zico Kolter and Matthew J. Johnson. 169 | REDD: A public data set for energy disaggregation research. 170 | In proceedings of the SustKDD workshop on 171 | Data Mining Applications in Sustainability, 2011. 172 | http://redd.csail.mit.edu/kolter-kddsust11.pdf 173 | schema: https://github.com/nilmtk/nilm_metadata/tree/v0.2 174 | 175 | The nominal mains voltage can be inferred from the 176 | ``geo_location:country`` value. 177 | 178 | Meter Devices 179 | ^^^^^^^^^^^^^ 180 | 181 | Next, we describe the common characteristics of each type of meter 182 | used to record the data. See the documentation section on 183 | :ref:`meter-device-schema` for full details. You can think of this as 184 | the 'specification sheet' supplied with each model of meter used to 185 | record the dataset. This information would be stored in 186 | :file:`meter_devices.yaml`. 187 | 188 | This data structure is one big dictionary. Each key is a model name. 189 | Each value is a dictionary describing the meter:: 190 | 191 | eMonitor: 192 | model: eMonitor 193 | manufacturer: Powerhouse Dynamics 194 | manufacturer_url: http://powerhousedynamics.com 195 | description: > 196 | Measures circuit-level power demand. Comes with 24 CTs. 197 | This FAQ page suggests the eMonitor measures real (active) 198 | power: http://www.energycircle.com/node/14103 although the REDD 199 | readme.txt says all channels record apparent power. 200 | sample_period: 3 # the interval between samples. In seconds. 201 | max_sample_period: 50 # Max allowable interval between samples. Seconds. 202 | measurements: 203 | - physical_quantity: power # power, voltage, energy, current? 204 | type: active # active (real power), reactive or apparent? 
205 | upper_limit: 5000 206 | lower_limit: 0 207 | wireless: false 208 | 209 | REDD_whole_house: 210 | description: > 211 | REDD's DIY power meter used to measure whole-home AC waveforms 212 | at high frequency. To quote from their paper: "CTs from TED 213 | (http://www.theenergydetective.com) to measure current in the 214 | power mains, a Pico TA041 oscilloscope probe 215 | (http://www.picotechnologies.com) to measure voltage for one of 216 | the two phases in the home, and a National Instruments NI-9239 217 | analog to digital converter to transform both these analog 218 | signals to digital readings. This A/D converter has 24 bit 219 | resolution with noise of approximately 70 µV, which determines 220 | the noise level of our current and voltage readings: the TED CTs 221 | are rated for 200 amp circuits and a maximum of 3 volts, so we 222 | are able to differentiate between currents of approximately 223 | (200)(70 × 10⁻⁶)/(3) = 4.66 mA, corresponding to power changes 224 | of about 0.5 watts. Similarly, since we use a 1:100 voltage 225 | stepdown in the oscilloscope probe, we can detect voltage 226 | differences of about 7mV." 227 | sample_period: 1 228 | max_sample_period: 30 229 | measurements: 230 | - physical_quantity: power 231 | type: apparent 232 | upper_limit: 50000 233 | lower_limit: 0 234 | wireless: false 235 | 236 | 237 | Buildings, electricity meters and appliances 238 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 239 | 240 | Finally, we need to specify metadata for each building in the 241 | dataset. Information about each electricity meter and each appliance 242 | is specified along with the building. Metadata for each building goes 243 | into :file:`building{i}.yaml` where *i* is an integer starting 244 | from 1, e.g. :file:`building1.yaml`. 245 | 246 | We will describe ``house_1`` from REDD. First, we describe the basic 247 | information about ``house_1`` using the :ref:`building-schema` schema:: 248 | 249 | instance: 1 # this is the first building in the dataset 250 | original_name: house_1 # original name from REDD dataset 251 | elec_meters: # see below 252 | appliances: # see below 253 | 254 | We do not know the specific geographical location of ``house_1`` in REDD. As 255 | such, we can assume that ``house_1`` will just 'inherit' 256 | ``geo_location`` and ``timezone`` from the ``dataset`` metadata. If we did 257 | know the geographical location of ``house_1`` then we could specify it 258 | in ``building1.yaml``. 259 | 260 | Next, we specify every electricity meter and the wiring between the 261 | meters using the :ref:`elec-meter-schema` schema. ``elec_meters`` is 262 | a dictionary. Each key is a meter instance. Each value is a 263 | dictionary describing that meter. To keep this short, we won't show 264 | every meter:: 265 | 266 | elec_meters: 267 | 1: 268 | site_meter: true 269 | device_model: REDD_whole_house # keys into meter_devices dictionary 270 | data_location: house_1/channel_1.dat 271 | 2: 272 | site_meter: true 273 | device_model: REDD_whole_house 274 | data_location: house_1/channel_2.dat 275 | 3: 276 | submeter_of: 0 # '0' means 'one of the site_meters'. We don't know 277 | # which site meter feeds which appliance in REDD.
278 | device_model: eMonitor 279 | data_location: house_1/channel_3.dat 280 | 4: 281 | submeter_of: 0 282 | device_model: eMonitor 283 | data_location: house_4/channel_4.dat 284 | 285 | We could also specify attributes such as ``room, floor, 286 | preprocessing_applied, statistics, upstream_meter_in_building`` but 287 | none of these are relevant for REDD. 288 | 289 | Now we can specify which appliances connect to which meters. 290 | 291 | For reference, here is the original :file:`labels.dat` for 292 | :file:`house_1` in REDD:: 293 | 294 | 1 mains 295 | 2 mains 296 | 3 oven 297 | 4 oven 298 | 5 refrigerator 299 | 6 dishwaser 300 | 7 kitchen_outlets 301 | 8 kitchen_outlets 302 | 9 lighting 303 | 10 washer_dryer 304 | 11 microwave 305 | 12 bathroom_gfi 306 | 13 electric_heat 307 | 14 stove 308 | 15 kitchen_outlets 309 | 16 kitchen_outlets 310 | 17 lighting 311 | 18 lighting 312 | 19 washer_dryer 313 | 20 washer_dryer 314 | 315 | We use the :ref:`appliance-schema` schema to specify appliances. In 316 | REDD, all the meters measure *circuits* using CT clamps in the homes' 317 | fuse box. Some circuits deliver power to *individual* appliances. 318 | Other circuits deliver power to *groups* of appliances. 319 | 320 | ``appliances`` is a list of dictionaries. 321 | 322 | Let us start by demonstrating how we describe circuits which deliver 323 | power to an individual appliance:: 324 | 325 | appliances: 326 | 327 | - type: fridge 328 | instance: 1 329 | meters: [5] 330 | original_name: refrigerator 331 | 332 | 333 | Recall from the `Simple example`_ that the value of appliance ``type`` 334 | is taken from the NILM Metadata controlled vocabulary of appliance 335 | types. ``original_name`` is the name used in REDD, prior to 336 | conversion to the NILM Metadata controlled vocabulary. 337 | 338 | Now we specify two 240-volt appliances. North American homes have 339 | split-phase mains supplies. Each split is 120 volts relative to 340 | neutral. The two splits are 240 volts relative to each other. Large 341 | appliances can connect to both splits to draw lots of power. 
REDD 342 | separately meters both splits to these large appliances so we 343 | specify two meters per 240-volt appliance:: 344 | 345 | appliances: 346 | 347 | - type: electric oven 348 | instance: 1 349 | meters: [3, 4] # the oven draws power from both 120 volt legs 350 | original_name: oven 351 | 352 | - original_name: washer_dryer 353 | type: washer dryer 354 | instance: 1 355 | meters: [10, 20] 356 | components: # we can specify which components connect to which leg 357 | - type: motor 358 | meters: [10] 359 | - type: electric heating element 360 | meters: [20] 361 | 362 | Now we specify loads which aren't single appliances but, instead, are 363 | categories of appliances:: 364 | 365 | appliances: 366 | 367 | - original_name: kitchen_outlets 368 | room: kitchen 369 | type: sockets # sockets is treated as an appliance 370 | instance: 1 371 | multiple: true # likely to be more than 1 socket 372 | meters: [7] 373 | 374 | - original_name: kitchen_outlets 375 | room: kitchen 376 | type: sockets 377 | instance: 2 # 2nd instance of 'sockets' in this building 378 | multiple: true # likely to be more than 1 socket 379 | meters: [8] 380 | 381 | - original_name: lighting 382 | type: light 383 | instance: 1 384 | multiple: true # likely to be more than 1 light 385 | meters: [9] 386 | 387 | - original_name: lighting 388 | type: light 389 | instance: 2 # 2nd instance of 'light' in this building 390 | multiple: true 391 | meters: [17] 392 | 393 | - original_name: lighting 394 | type: light 395 | instance: 3 # 3rd instance of 'light' in this building 396 | multiple: true 397 | meters: [18] 398 | 399 | - original_name: bathroom_gfi # ground fault interrupter 400 | room: bathroom 401 | type: unknown 402 | instance: 1 403 | multiple: true 404 | meters: [12] 405 | 406 | Note that if we have multiple distinct instances of the same type of 407 | appliance then we must use separate appliance objects for each 408 | instance and must *not* bunch these together as a single appliance 409 | object with multiple ``meters``. We only specify multiple 410 | ``meters`` per ``appliance`` if there is a single appliance which 411 | draws power from more than one phase or mains leg. 412 | 413 | In REDD, houses 3, 5 and 6 also have an ``electronics`` channel. How would we 414 | handle this in NILM Metadata? This is a meter which doesn't record a 415 | single appliance but records a *category* of appliances. Luckily, 416 | because NILM Metadata uses an inheritance structure for the central 417 | metadata, we already have a ``CE appliance`` (CE = consumer 418 | electronics). The ``CE appliance`` object was first built to 419 | act as an abstract superclass for all consumer electronics 420 | objects, but it comes in handy for REDD:: 421 | 422 | - original_name: electronics 423 | type: CE appliance 424 | instance: 1 425 | multiple: true 426 | meters: [6] 427 | 428 | The `full description of the REDD dataset using NILM Metadata can be 429 | found in the NILMTK project 430 | `_ 431 | along with the `metadata descriptions for many other datasets `_. 432 | 433 | Summary 434 | ------- 435 | 436 | We have seen how to represent the REDD dataset using NILM Metadata. 437 | The example above shows the majority of the *structure* of the NILM 438 | Metadata schema for datasets. There are many more attributes that can 439 | be attached to this basic structure. Please see the 440 | :doc:`dataset_metadata` documentation for full details of all the 441 | attributes and values that can be used. 
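If you plan to load this metadata with NILMTK, the YAML files can also be
converted into HDF5 metadata using the ``convert_yaml_to_hdf5`` function
exported by the ``nilm_metadata`` Python package. The snippet below is only a
minimal sketch; the paths ``redd_metadata/`` and ``redd.h5`` are placeholders
for wherever your :file:`metadata` directory and your NILMTK-style HDF5
datastore actually live.

.. code-block:: python

   from nilm_metadata import convert_yaml_to_hdf5

   # Read dataset.yaml, meter_devices.yaml and the building YAML files
   # from the metadata directory and write the same information into
   # the metadata attributes of the HDF5 datastore.
   convert_yaml_to_hdf5('redd_metadata/', 'redd.h5')

The equivalent information then appears at the HDF5 locations listed in
:doc:`dataset_metadata` (for example, ``store.root._v_attrs.metadata`` for the
dataset-level metadata).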
442 | 443 | Central Metadata 444 | ^^^^^^^^^^^^^^^^ 445 | 446 | A second part to the NILM Metadata project is the 'central metadata'. 447 | This 'central metadata' is stored in the NILM Metadata project itself 448 | and consists of information such as the mapping of appliance type to 449 | appliance category; and the mapping of country code to nominal voltage 450 | values. Please see the documentation page on :doc:`central_metadata` 451 | for more information. 452 | 453 | Improving NILM Metadata 454 | ^^^^^^^^^^^^^^^^^^^^^^^ 455 | 456 | The NILM Metadata schema will, of course, never be complete enough to 457 | cover every conceivable dataset! You are warmly invited to suggest 458 | changes and extensions. You can do this either using the 459 | `github issue queue 460 | `_, 461 | or by `forking the project, modifying it and issuing a pull request 462 | `_. 463 | -------------------------------------------------------------------------------- /docs/source/dataset_metadata.rst: -------------------------------------------------------------------------------- 1 | **************** 2 | Dataset metadata 3 | **************** 4 | 5 | This page describes the metadata schema for describing a dataset. 6 | 7 | There are two file formats for the metadata: YAML and HDF5. 8 | YAML metadata files should be in a ``metadata`` folder. 9 | Each section of this doc starts by describing where the relevant 10 | metadata is stored in both file formats. 11 | 12 | .. _dataset-schema: 13 | 14 | Dataset 15 | ------- 16 | 17 | This object describes aspects about the entire dataset. For example, 18 | the name of the dataset, the authors, the geographical location of the 19 | entire dataset etc. 20 | 21 | * Location in YAML: ``dataset.yaml`` 22 | * Location in HDF5: ``store.root._v_attrs.metadata`` 23 | 24 | Metadata attributes (some of these attributes are adapted from the 25 | Dublin Core Metadata Initiative (DCMI)): 26 | 27 | :name: (string) (required) Short name for the dataset. e.g. 'REDD' or 28 | 'UK-DALE'. Equivalent DCMI element is 'title'. If this 29 | dataset is the output of a disaggregation algorithm then `name` 30 | will be set to a short name for the algorithm; e.g. 'CO' or 'FHMM'. 31 | :long_name: (string) Full name of the dataset, eg. 'Reference Energy 32 | Disaggregation Data Set'. 33 | :creators: (list of strings) in the format ', 34 | '. DCMI element. 35 | :timezone: (string) Please use the standard TZ name from the `IANA 36 | (aka Olson) Time Zone Database 37 | `_ 38 | e.g. 'America/New_York' or 'Europe/London'. 39 | :date: (string) ISO 8601 format. e.g. '2014-06-23' Identical to the 40 | 'date' DCMI element. 41 | :contact: (string) Email address 42 | :institution: (string) 43 | :description: (string) DCMI element. Human-readable, brief 44 | description. e.g. describe sample rate, geo location etc. 45 | :number_of_buildings: (int) 46 | :identifier: (string): A digital object identifier (DOI) or URI for 47 | the dataset. DCMI element. 48 | :subject: (string): For example, is this dataset about domestic or 49 | commercial buildings? Does it include disaggregated 50 | appliance-by-appliance data or just whole-building data? 51 | DCMI element. Human-readable free text. 52 | :geospatial_coverage: (string): Spatial coverage. e.g. 'Southern 53 | England'. Related to the 'coverage' DCMI 54 | element. Human-readable free text. 55 | :timeframe: (`TimeFrame`_, see below) Start and end dates for 56 | the entire dataset. 
57 | :funding: (list of strings) A list of all the sources of funding used 58 | to produce this dataset. 59 | :publisher: (string) The entity responsible for making the resource 60 | available. Examples of a Publisher include a person, an 61 | organization, or a service. DCMI element. 62 | :geo_location: (dict) 63 | 64 | :locality: (string) village, town, city or state 65 | :country: (string) Please use a standard two-letter country code 66 | defined by `ISO 3166-1 alpha-2 67 | `_. e.g. 'GB' or 'US'. 68 | :latitude: (number) 69 | :longitude: (number) 70 | :rights_list: (list of dicts) License(s) under which this dataset is 71 | released. Related to the 'rights' DCMI element. 72 | Each element has these attributes: 73 | 74 | :uri: (string) License URI 75 | :name: (string) License name 76 | :description_of_subjects: (string) A brief description of how subjects 77 | were recruited. Are they all PhD students, 78 | for example? Were they incentivised to 79 | reduce their energy consumption? How were 80 | they chosen? 81 | :related_documents: (list of strings) References about this dataset 82 | (e.g. references to academic papers or web pages). 83 | Also briefly describe the contents of each 84 | reference (e.g. does it contain a description of 85 | the metering setup? Or an analysis of the data?) 86 | Related to the 'relation' DCMI element. 87 | :schema: (string) The URL of the NILM_metadata version (tag) against 88 | which this metadata is 89 | validated. e.g. https://github.com/nilmtk/nilm_metadata/tree/v0.2 90 | 91 | .. _meter-device-schema: 92 | 93 | MeterDevice 94 | ----------- 95 | 96 | Metadata describing every model of meter used in the dataset. (Please 97 | note that `ElecMeter`_ is used for representing individual *instances* 98 | of meters in a building whilst ``MeterDevice`` is used to represent 99 | information common to all instances of a specific make and model of 100 | meter). Think of this section as a catalogue of meter models used in 101 | the dataset. 102 | 103 | * Location in YAML: ``meter_devices.yaml`` 104 | * Location in HDF5: ``store.root._v_attrs.metadata`` in ``meter_devices`` 105 | 106 | One big dict. Keys are device model names (e.g. 'EnviR'). The 107 | purpose is to record information about specific models of meter. 108 | Values are dicts with these keys: 109 | 110 | :model: (string) (required) The model name for this meter device. 111 | :model_url: (string) The URL with more information about this meter model. 112 | :manufacturer: (string) 113 | :manufacturer_url: (string) 114 | :sample_period: (number) (required) The meter's nominal sample period 115 | (i.e. the length of time between consecutive 116 | samples) in seconds. 117 | :max_sample_period: (number) (required) The maximum permissible length 118 | of time between consecutive samples. We assume the 119 | meter is switched off during any gap longer than 120 | ``max_sample_period``. In other words, we define a 121 | 'gap' to be any two samples which are more than 122 | ``max_sample_period`` apart. 123 | :measurements: (list) (required) The order is the order of the columns 124 | in the data table. 125 | 126 | :physical_quantity: (string) (required) One of {'power', 'energy', 127 | 'cumulative energy', 'voltage', 'current', 128 | 'frequency', 'power factor', 'state', 'phase 129 | angle', 'total harmonic distortion', 'temperature'}. 130 | 'state' columns store an integer 131 | state ID where 0 is off and >0 refers to 132 | defined states. (TODO: store mapping of state 133 | ID per appliance to state name). 
Units: phase angle: 134 | degrees; power: watts; energy: kWh; voltage: 135 | volts; current: amps; temperature: degrees Celsius. 136 | :type: (string) (required for 'power' and 'energy') Alternating 137 | Current (AC) Type. One of {'reactive', 'active', 138 | 'apparent'}. 139 | :upper_limit: (number) 140 | :lower_limit: (number) 141 | 142 | :description: (string) 143 | :pre_pay: (boolean) Is this a pre-pay meter? 144 | :wireless: (boolean) 145 | 146 | :wireless_configuration: (dict) All strings are human-readable free text: 147 | 148 | :base: (string) Description of the base station used. Manufacturer, model, 149 | version etc. 150 | :protocol: (string) e.g. 'zigbee', 'WiFi', 'custom'. If 151 | 'custom' then add a link to documentation if 152 | available. 153 | :carrier_frequency: (number) MHz 154 | :data_logger: (string) Description of the data logger used 155 | 156 | .. _building-schema: 157 | 158 | Building 159 | -------- 160 | 161 | * Location in YAML: ``building{i}.yaml`` (where *i* is the building instance, e.g. ``building1.yaml``) 162 | * Location in HDF5: ``store.root.building._v_attrs.metadata`` 163 | 164 | :instance: (int) (required) The building instance in this dataset, starting from 1. 165 | :original_name: (string) Original name of building from old (pre-NILM 166 | Metadata) metadata. 167 | :elec_meters: (dict of dicts) (required) Each key is an integer 168 | (>= 1) representing the meter instance in this building. 169 | Each value is an ``ElecMeter``. See section below on 170 | `ElecMeter`_. 171 | :appliances: (list of dicts) (required) See section below on `Appliance`_. 172 | :water_meters: (dict of dicts) Same structure as ``elec_meters``. 173 | :gas_meters: (dict of dicts) Same structure as ``elec_meters``. 174 | :description: (string) 175 | :rooms: (list of dicts): 176 | 177 | :name: (string) (required) one of {'lounge', 'kitchen', 'bedroom', 'utility', 178 | 'garage', 'basement', 'bathroom', 'study', 179 | 'nursery', 'hall', 'dining room', 180 | 'outdoors'} 181 | :instance: (int) (optional. Starts from 1. If absent then assume to be 1.) 182 | :description: (string) 183 | :floor: (int) Ground floor is floor 0. 184 | :n_occupants: (int) Mode number of occupants. 185 | :description_of_occupants: (string) free-text describing the 186 | occupants. Number of children, teenagers, 187 | adults, pensioners? Demographics? Were 188 | all occupants away from the house during 189 | all weekdays? 190 | :timeframe: (`TimeFrame`_, see below) 191 | :periods_unoccupied: (list of `TimeFrame`_ objects, see below) Periods when this 192 | building was empty for more than a day 193 | (e.g. holidays) 194 | :construction_year: (int) Four-digit calendar year of construction. 195 | :energy_improvements: (list of strings) Any post-construction 196 | modifications?
Some combination of 197 | {'photovoltaics', 'solar thermal', 'cavity wall 198 | insulation', 'loft insulation', 'solid wall 199 | insulation', 'double glazing', 'secondary 200 | glazing', 'triple glazing'} 201 | :heating: (ordered list of strings, with the most dominant fuel first) 202 | Some combination of {'natural gas', 'electricity', 'coal', 203 | 'wood', 'biomass', 'oil', 'LPG'} 204 | :communal_boiler: (boolean) Set to true if heating is provided by a 205 | boiler shared between flats. 206 | :ownership: (string) one of {'rented', 'bought'} 207 | :building_type: (string) one of {'bungalow', 'cottage', 'detached', 208 | 'end of terrace', 'flat', 'semi-detached', 209 | 'mid-terrace', 'student halls', 'factory', 'office', 210 | 'university'} 211 | 212 | Building metadata which is inherited from `Dataset`_ but can be 213 | overridden by ``Building``: 214 | 215 | * geo_location 216 | * timezone 217 | * timeframe 218 | 219 | .. _elec-meter-schema: 220 | 221 | ElecMeter 222 | --------- 223 | 224 | ElecMeters are the values of the ``elec_meters`` dict of each building (see the 225 | section on `Building`_ metadata above). 226 | 227 | :device_model: (string) (required) ``model`` which keys into ``meter_devices`` 228 | :submeter_of: (int) (required) the meter instance of the upstream 229 | meter. Or set to ``0`` to mean "*one of the 230 | site_meters*". In practice, ``0`` will be interpreted to 231 | mean "downstream of a 'MeterGroup' representing all the 232 | site meters summed together". 233 | :submeter_of_is_uncertain: (boolean) Set to true if the value for 234 | `submeter_of` is uncertain. 235 | :upstream_meter_in_building: (int) If the upstream meter is 236 | in a different building then specify that 237 | building instance here. If left blank 238 | then we assume the upstream meter is in 239 | the same building as this meter. 240 | :site_meter: (boolean): required and set to True if this is a site 241 | meter (i.e. the furthest upstream meter), otherwise not 242 | required. If there are multiple mains phases 243 | (e.g. 3-phase mains) or multiple mains 'splits' (e.g. in 244 | North America where there are two 120 volt splits) then 245 | set ``site_meter=true`` in every site meter. All 246 | non-site-meters directly downstream of the site meters 247 | should set ``submeter_of=0``. Optionally also use 248 | ``phase`` to describe which phase this meter measures. 249 | What happens if there are multiple site meters in 250 | *parallel* (i.e. there are redundant meters)? For 251 | example, perhaps there is a site meter installed by the 252 | utility company which provides infrequent readings; and 253 | there is also a fancy digital site meter which measures 254 | at the same point in the wiring tree and so, in a sense, 255 | the utility meter can be considered 'redundant' but is 256 | included in the dataset for comparison. In this 257 | situation, set ``site_meter=true`` in every site meter. 258 | Then set ``disabled=true`` in all but the 'favoured' site 259 | meter (which would usually be the site meter which 260 | provides the 'best' readings). It is important to set 261 | ``disabled=true`` so NILMTK does not sum together 262 | parallel site meters. The disabled site meters should 263 | also set ``submeter_of`` to the ID of the enabled site 264 | meter. All non-site-meters directly downstream of site 265 | meters should set ``submeter_of=0``. 266 | :utility_meter: (boolean) required and set to True if this meter 267 | was installed by the utility company. Otherwise not 268 | required.
269 | :timeframe: (`TimeFrame`_ object) 270 | :name: (string) (optional) e.g. 'first floor total'. 271 | :phase: (int or string) (optional) Used in multiple-phase setups. 272 | 273 | .. _ElecMeter-room: 274 | 275 | :room: (string) ``<room name>[,<instance>]``, e.g. 'kitchen' or 276 | 'bedroom,2'. If no ``instance`` is specified (e.g. 'room: 277 | kitchen') then it is assumed to be 'kitchen,1' 278 | (i.e. kitchen instance 1). If the building metadata specifies a set of 279 | ``rooms`` then the room specified here will key into the 280 | building's ``rooms`` (but not all datasets enumerate every room 281 | for each building). 282 | :floor: (int) Not necessary if ``room`` is specified. Ground floor is 0. 283 | 284 | :data_location: (string) (required) Path relative to the root directory of the 285 | dataset, e.g. ``house1/channel_2.dat``. Reference tables and 286 | columns within a hierarchical (HDF5) file with 287 | e.g. ``data.h5?table=/building1/elec/meter1`` or, if this 288 | metadata is stored in the same HDF file as the sensor data itself, 289 | then just use the key, e.g. ``/building1/elec/meter1``. 290 | 291 | :disabled: (bool): Set to true if NILMTK should ignore this channel. 292 | This is useful if, for example, this channel is a redundant 293 | site_meter. 294 | 295 | :preprocessing_applied: (dict): Each key is optional and is only 296 | present if that preprocessing function has been run. 297 | 298 | :clip: (dict) 299 | 300 | :lower_limit: 301 | :upper_limit: 302 | 303 | :statistics: (list of dicts): Each dict describes statistics for 304 | one set of timeframes. Each dict has: 305 | 306 | :timeframes: (list of `TimeFrame`_ objects) (required) The timeframes 307 | over which these statistics were calculated. If the 308 | stat(s) refer to the entire timeseries then enter the 309 | start and end of the timeseries as the only TimeFrame. 310 | :good_sections: (list of `TimeFrame`_ objects) 311 | :contiguous_sections: (list of `TimeFrame`_ objects) 312 | :total_energy: (dict) kWh 313 | 314 | :active: (number) 315 | :reactive: (number) 316 | :apparent: (number) 317 | 318 | Note that some of these statistics are cached by 319 | `NILMTK `_ at 320 | ``building{i}/elec/cache/meter{i}/``. 321 | For more details, see the docstring of 322 | ``nilmtk.ElecMeter._get_stat_from_cache_or_compute()``. 323 | 324 | WaterMeter and GasMeter 325 | ----------------------- 326 | 327 | Same attributes as `ElecMeter`_. 328 | 329 | .. _appliance-schema: 330 | 331 | Appliance 332 | --------- 333 | 334 | Each appliance dict has: 335 | 336 | :type: (string) (required) appliance type (e.g. 'kettle'). Use the NILM 337 | Metadata controlled vocabulary. See 338 | `nilm_metadata/central_metadata/appliance_types/*.yaml `_. Each ``*.yaml`` file in 339 | ``nilm_metadata/central_metadata/appliance_types`` is a large dictionary. Each key 340 | in these dictionaries is a legal appliance ``type``. 341 | :instance: (int starting from 1) (required) instance of this appliance within 342 | the building. 343 | :meters: (list of ints) (required) meter instance(s) directly 344 | upstream of this appliance. This is a list to handle the case 345 | where some appliances draw power from both 120 volt legs in a 346 | North American house, or where an appliance is 3-phase. 347 | :dominant_appliance: (boolean) (required if multiple appliances are 348 | attached to one meter). Is this appliance 349 | responsible for most of the power demand on this 350 | meter? 351 | :on_power_threshold: (number) watts. Not required. Default is taken 352 | from the appliance `type`.
The threshold (in 353 | watts) used to decide if the appliance is `on` or `off`. 354 | :max_power: (number) watts. Not required. 355 | :min_off_duration: (number) (seconds) Not required. 356 | :min_on_duration: (number) (seconds) Not required. 357 | :room: see `ElecMeter-room`_ 358 | :multiple: (boolean) True if more than one 359 | of these appliances is represented by this single 360 | ``appliance`` object. 361 | If there is exactly one appliance then do not specify 362 | ``multiple``. 363 | :count: (int) If more than one of these appliances is 364 | represented by this ``appliance`` object and the exact 365 | number of appliances is known then specify that number here. 366 | :control: (list of strings) Give a list of all control methods which 367 | apply. For example, a video recorder would be both 'manual' 368 | and 'timer'. The vocabulary is: {'timer', 'manual', 369 | 'motion', 'sunlight', 'thermostat', 'always on'} 370 | :efficiency_rating: (dict): 371 | 372 | :certification_name: (string) e.g. 'SEDBUK' or 'Energy Star 5.0' 373 | :rating: (string) e.g. 'A+' 374 | 375 | :nominal_consumption: (dict): Specifications reported by the manufacturer. 376 | 377 | :on_power: (number) active power in watts when on. 378 | :standby_power: (number) active power in watts when in standby. 379 | :energy_per_year: (number) kWh per year 380 | :energy_per_cycle: (number) kWh per cycle 381 | 382 | :components: (list of dicts): Components within this appliance. Each dict is an Appliance dict. 383 | :model: (string) 384 | :manufacturer: (string) 385 | :brand: (string) 386 | :original_name: (string) 387 | :model_url: (string) URL for this model of appliance 388 | :manufacturer_url: (string) URL for the manufacturer 389 | :dates_active: (list of `TimeFrame`_ objects, see below) Can be used to specify 390 | a change in appliance over time (for example if one 391 | appliance is replaced with another). 392 | :year_of_purchase: (int) Four-digit year. 393 | :year_of_manufacture: (int) Four-digit year. 394 | :subtype: (string) 395 | :part_number: (string) 396 | :gtin: (int) http://en.wikipedia.org/wiki/Global_Trade_Item_Number 397 | :version: (string) 398 | :portable: (boolean) 399 | 400 | Additional properties are specified for some Appliance Types. Please 401 | look up objects in 402 | :file:`nilm_metadata/central_metadata/appliance_types/*.yaml` for details. 403 | 404 | When an Appliance object is used as a component for an ApplianceType, 405 | then the Appliance object may have a ``distributions`` dict (see 406 | ``ApplianceType:distributions`` in :doc:`central_metadata`) 407 | specified and may also use a property ``do_not_merge_categories: 408 | true`` which prevents the system from merging categories from the 409 | component into the container appliance. 410 | 411 | .. _timeframe-schema: 412 | 413 | TimeFrame 414 | --------- 415 | 416 | Represents an arbitrary time frame. If either start or end is absent 417 | then assume it equals the start or the end of the dataset, 418 | respectively. Please use `ISO 8601 format 419 | `_ for dates or date times 420 | (e.g.
2014-03-17 or 2014-03-17T21:00:52+00:00) 421 | 422 | :start: (string) 423 | :end: (string) 424 | -------------------------------------------------------------------------------- /docs/source/schema.svg: -------------------------------------------------------------------------------- [SVG source omitted. The file is the UML class diagram referenced in the tutorial: classes Dataset, Building, ElecMeter, MeterDevice, Measurement, Appliance, ApplianceType, Prior, DisaggregationModel and Country, linked by 'submeter of', 'contains', 'type' and 'parent' relationships, and split into a 'Metadata shipped with dataset' region and a 'Central metadata' region.] --------------------------------------------------------------------------------